Merged

54 commits
4b4c534
🪲 [Enhancement]: Improve test results processing and logging in main.ps1
MariusStorhaug Mar 3, 2025
3defe44
🪲 [Enhancement]: Create TestResults directory for organized file down…
MariusStorhaug Mar 3, 2025
2f56a54
🪲 [Enhancement]: Add detailed test results summary and processing in …
MariusStorhaug Mar 3, 2025
2ea41f1
Fix: Correct property name for TotalNotRun in test results summary
MariusStorhaug Mar 3, 2025
0e56432
Fix: Update test results summary to correctly calculate totals for er…
MariusStorhaug Mar 3, 2025
86da151
Fix: Change output format of test results summary from list to table …
MariusStorhaug Mar 3, 2025
1bbe463
Fix: Update test results summary output format to table and round tot…
MariusStorhaug Mar 3, 2025
4994351
Fix: Correctly format total test results output as a table
MariusStorhaug Mar 3, 2025
8363d4f
Fix: Ensure test results summary correctly rounds and casts totals to…
MariusStorhaug Mar 3, 2025
d6c4dc5
Refactor: Comment out unused code for test results processing to impr…
MariusStorhaug Mar 3, 2025
8029fb5
Refactor: Simplify test results processing by removing unused code an…
MariusStorhaug Mar 3, 2025
8bfdc2a
Refactor: Streamline test results processing by removing unnecessary …
MariusStorhaug Mar 3, 2025
eab1b6b
Refactor: Sort test results files and enhance output formatting with …
MariusStorhaug Mar 3, 2025
2924e7f
Refactor: Improve test results summary output formatting and enhance …
MariusStorhaug Mar 3, 2025
d93311a
Refactor: Simplify logging output and improve test results summary fo…
MariusStorhaug Mar 3, 2025
2275cdb
Refactor: Update default working directory in action.yml for improved…
MariusStorhaug Mar 6, 2025
3cb6ec9
Refactor: Remove unnecessary environment variable from Auto-Release w…
MariusStorhaug Mar 8, 2025
26d2b11
Refactor: Enhance error handling in test results processing and impro…
MariusStorhaug Mar 22, 2025
f6856dc
Refactor: Improve code structure by consolidating error handling for …
MariusStorhaug Mar 22, 2025
43968e1
Refactor: Enhance failure condition checks in test results summary logic
MariusStorhaug Mar 22, 2025
a5642a7
Refactor: Improve test results processing by adding lists for failed …
MariusStorhaug Mar 22, 2025
84d0b03
Refactor: Simplify output for failed and unexecuted test files by rep…
MariusStorhaug Mar 22, 2025
447dd4b
Refactor: Improve output formatting for failed and unexecuted test fi…
MariusStorhaug Mar 22, 2025
caad80c
Refactor: Consolidate error handling and improve output for test resu…
MariusStorhaug Mar 22, 2025
7aadee5
Refactor: Reorder test result processing to handle unexecuted tests b…
MariusStorhaug Mar 22, 2025
e1d6572
Refactor: Add ShowInfo parameter to action.yml to control output verb…
MariusStorhaug Mar 22, 2025
8be1366
Refactor: Change output formatting to use Format-List for better read…
MariusStorhaug Mar 22, 2025
b4a5e46
Refactor: Add input parameters for SourceCodeTestSuites, PSModuleTest…
MariusStorhaug Mar 23, 2025
40fa088
Refactor: Add support for SourceCodeTestSuites, PSModuleTestSuites, a…
MariusStorhaug Mar 23, 2025
b330fec
Refactor: Wrap test suite listing in LogGroup for better logging context
MariusStorhaug Mar 23, 2025
9ad597e
Refactor: Validate presence of expected artifact files and handle mis…
MariusStorhaug Mar 23, 2025
52e6e64
Refactor: Enhance expected test suite validation and improve error ha…
MariusStorhaug Mar 23, 2025
c6c1756
Refactor: Improve logging and structure for expected test suites in m…
MariusStorhaug Mar 23, 2025
6235e46
Refactor: Simplify test suite logging and improve file validation log…
MariusStorhaug Mar 23, 2025
b9a025f
Refactor: Rename expected test suite file properties for consistency …
MariusStorhaug Mar 23, 2025
e84daa1
Refactor: Update expected test suite naming conventions for consisten…
MariusStorhaug Mar 23, 2025
097c738
Refactor: Sort test suite inputs by name for improved consistency and…
MariusStorhaug Mar 23, 2025
09c20c2
Refactor: Streamline test suite input processing and enhance failure …
MariusStorhaug Mar 23, 2025
49f304a
Refactor: Sort expected test suites by category and name for improved…
MariusStorhaug Mar 24, 2025
4d13111
Refactor: Enhance artifact download process by using GitHub API for i…
MariusStorhaug Mar 30, 2025
de154a4
Refactor: Improve artifact download process by adding authentication …
MariusStorhaug Mar 30, 2025
f250d13
Refactor: Update artifact output to include additional properties and…
MariusStorhaug Mar 30, 2025
0ce62a5
Refactor: Improve artifact extraction process by ensuring proper unzi…
MariusStorhaug Mar 30, 2025
ecbef6a
Refactor: Simplify test result evaluation and improve failure handlin…
MariusStorhaug Apr 2, 2025
1137cb5
Refactor: Streamline artifact handling by replacing manual download a…
MariusStorhaug Apr 2, 2025
dc3e3e1
Refactor: Remove unnecessary cleanup option from Save-GitHubArtifact …
MariusStorhaug Apr 17, 2025
cbb8491
Refactor: Update Get-GitHubArtifact call to use WorkflowRunId and imp…
MariusStorhaug Apr 17, 2025
aea7aa4
Refactor: Correct WorkflowRunId casing in Get-GitHubArtifact call for…
MariusStorhaug Apr 17, 2025
698a121
Refactor: Add Force parameter to Save-GitHubArtifact for improved fil…
MariusStorhaug Apr 17, 2025
7008402
Refactor: Update README.md to enhance documentation for Get-PesterTes…
MariusStorhaug Apr 17, 2025
f216752
Add test result reports for PSModuleTest-Module and PSModuleTest-Sour…
MariusStorhaug Apr 17, 2025
f0233d5
Refactor: Update artifact paths in Action-Test.yml to correct directo…
MariusStorhaug Apr 17, 2025
5c11ad5
Refactor: Add if-no-files-found error handling for artifact uploads i…
MariusStorhaug Apr 17, 2025
1c3c518
Refactor: Update comment formatting and suppress Write-Host warnings …
MariusStorhaug Apr 17, 2025
45 changes: 44 additions & 1 deletion .github/workflows/Action-Test.yml
@@ -25,7 +25,50 @@ jobs:
- name: Checkout repo
uses: actions/checkout@v4

# Upload artifact from tests:
- name: Upload artifact [PATH-Windows-TestResults]
uses: actions/upload-artifact@v4
with:
name: PATH-Windows-TestResults
path: ./tests/TestResults/PATH-Windows-TestResults
retention-days: 1
if-no-files-found: error

- name: Upload artifact [PSModuleLint-Module-Windows-TestResults]
uses: actions/upload-artifact@v4
with:
name: PSModuleLint-Module-Windows-TestResults
path: ./tests/TestResults/PSModuleLint-Module-Windows-TestResults
retention-days: 1
if-no-files-found: error

- name: Upload artifact [PSModuleLint-SourceCode-Windows-TestResults]
uses: actions/upload-artifact@v4
with:
name: PSModuleLint-SourceCode-Windows-TestResults
path: ./tests/TestResults/PSModuleLint-SourceCode-Windows-TestResults
retention-days: 1
if-no-files-found: error

- name: Upload artifact [PSModuleTest-Module-Windows-TestResults]
uses: actions/upload-artifact@v4
with:
name: PSModuleTest-Module-Windows-TestResults
path: ./tests/TestResults/PSModuleTest-Module-Windows-TestResults
retention-days: 1
if-no-files-found: error

- name: Upload artifact [PSModuleTest-SourceCode-Windows-TestResults]
uses: actions/upload-artifact@v4
with:
name: PSModuleTest-SourceCode-Windows-TestResults
path: ./tests/TestResults/PSModuleTest-SourceCode-Windows-TestResults
retention-days: 1
if-no-files-found: error

- name: Action-Test
uses: ./
with:
WorkingDirectory: ./tests
SourceCodeTestSuites: '[{"OSName": "Windows"}]'
PSModuleTestSuites: '[{"OSName": "Windows"}]'
ModuleTestSuites: '[{"TestName": "PATH", "OSName": "Windows"}]'
3 changes: 1 addition & 2 deletions .github/workflows/Auto-Release.yml
@@ -30,5 +30,4 @@ jobs:

- name: Auto-Release
uses: PSModule/Auto-Release@v1
env:
GITHUB_TOKEN: ${{ github.token }}

43 changes: 40 additions & 3 deletions README.md
@@ -1,17 +1,54 @@
# Template-Action
# Get-PesterTestResults Action

A template repository for GitHub Actions
A GitHub Action that gathers Pester test results for the PSModule process by analyzing test artifacts from the workflow run.
It validates test execution and results, providing a summary and failing if any tests are unsuccessful.

This GitHub Action is a part of the [PSModule framework](https://github.com/PSModule). It is recommended to use the
[Process-PSModule workflow](https://github.com/PSModule/Process-PSModule) to automate the whole process of managing the PowerShell module.

## Usage

This action retrieves test artifacts named `*-TestResults`, processes the contained JSON files, and checks for test failures, unexecuted tests,
or missing results. It supports three categories of test suites: Source Code, PSModule, and Module tests.

### Inputs

| Input | Description | Required | Default |
|-------------------------|-------------------------------------------------------------------------------------------------------------------------------|----------|-----------|
| `SourceCodeTestSuites` | JSON array specifying OS names for Source Code test suites. Example: `[{"OSName": "Windows"}]` | Yes | |
| `PSModuleTestSuites` | JSON array specifying OS names for PSModule test suites. Example: `[{"OSName": "Linux"}]` | Yes | |
| `ModuleTestSuites` | JSON array specifying TestName and OSName for Module test suites. Example: `[{"TestName": "Integration", "OSName": "MacOS"}]` | Yes | |
| `Debug` | Enable debug output (`true`/`false`). | No | `false` |
| `Verbose` | Enable verbose output (`true`/`false`). | No | `false` |
| `Version` | Exact version of the GitHub module to install (e.g., `1.0.0`). | No | Latest |
| `Prerelease` | Allow installing prerelease module versions (`true`/`false`). | No | `false` |
| `WorkingDirectory` | Working directory for the script. | No | `.` |

### Secrets

No secrets are required if the action runs in the same repository. The action uses the default `GITHUB_TOKEN` provided by GitHub Actions to access workflow artifacts.

### Outputs

This action does not define explicit outputs. Instead:

- If any tests fail or errors occur, the action exits with a non-zero code, marking the workflow step as failed.
- Detailed results are logged in the workflow run's output.

### Example

```yaml
Example here
- name: Run and Collect Pester Tests
uses: PSModule/Get-PesterTestResults@v1
with:
SourceCodeTestSuites: '[{"OSName": "Windows"}, {"OSName": "Linux"}]'
PSModuleTestSuites: '[{"OSName": "Windows"}]'
ModuleTestSuites: '[{"TestName": "Integration", "OSName": "Windows"}]'
```

### Notes
- **Test Suite Inputs**: Must be valid JSON arrays.
- `SourceCodeTestSuites` and `PSModuleTestSuites` require `OSName`.
- `ModuleTestSuites` requires both `TestName` and `OSName`.
- **Artifact Names**: The action expects artifacts named `*-TestResults` containing Pester JSON reports (see the naming sketch below).
- **Failure Conditions**: The action fails if tests are unexecuted, explicitly failed, or if result files are missing.
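
For reference, here is a minimal sketch (based on the naming logic in `scripts/main.ps1`, illustrative only) of the result-report base names the action looks for, given the example inputs above:

```powershell
# Illustrative only: expected JSON report base names derived from the inputs.
$os = 'Windows'           # from an OSName entry
$testName = 'Integration' # from a ModuleTestSuites TestName entry
@(
    "PSModuleTest-SourceCode-$os-TestResult-Report"   # SourceCodeTestSuites
    "PSModuleLint-SourceCode-$os-TestResult-Report"
    "PSModuleTest-Module-$os-TestResult-Report"       # PSModuleTestSuites
    "PSModuleLint-Module-$os-TestResult-Report"
    "$testName-$os-TestResult-Report"                 # ModuleTestSuites
)
```

Each name is matched against the base names of the JSON files found in the downloaded `*-TestResults` artifacts; a missing match is treated as a failure.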
18 changes: 16 additions & 2 deletions action.yml
@@ -6,6 +6,15 @@ branding:
color: white

inputs:
SourceCodeTestSuites:
description: The test suites to run for the source code.
required: true
PSModuleTestSuites:
description: The test suites to run for the PSModule.
required: true
ModuleTestSuites:
description: The test suites to run for the module.
required: true
Debug:
description: Enable debug output.
required: false
@@ -24,19 +33,24 @@ inputs:
WorkingDirectory:
description: The working directory where the script will run from.
required: false
default: ${{ github.workspace }}
default: '.'

runs:
using: composite
steps:
- name: Get-PesterTestResults
uses: PSModule/GitHub-Script@v1
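# Inputs are surfaced to scripts/main.ps1 as PSMODULE_GET_PESTERTESTRESULTS_INPUT_* environment variables.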
env:
PSMODULE_GET_PESTERTESTRESULTS_INPUT_SourceCodeTestSuites: ${{ inputs.SourceCodeTestSuites }}
PSMODULE_GET_PESTERTESTRESULTS_INPUT_PSModuleTestSuites: ${{ inputs.PSModuleTestSuites }}
PSMODULE_GET_PESTERTESTRESULTS_INPUT_ModuleTestSuites: ${{ inputs.ModuleTestSuites }}
with:
Name: Get-PesterTestResults
Debug: ${{ inputs.Debug }}
Prerelease: ${{ inputs.Prerelease }}
Verbose: ${{ inputs.Verbose }}
Version: ${{ inputs.Version }}
WorkingDirectory: ${{ inputs.WorkingDirectory }}
ShowInfo: false
Script: |
# Get-PesterTestResults
${{ github.action_path }}/scripts/main.ps1
187 changes: 176 additions & 11 deletions scripts/main.ps1
@@ -1,21 +1,186 @@
#Requires -Modules GitHub

[Diagnostics.CodeAnalysis.SuppressMessageAttribute(
'PSAvoidUsingWriteHost', '',
Justification = 'Outputs to GitHub Actions logs.'
)]
[CmdletBinding()]
param()

begin {
$scriptName = $MyInvocation.MyCommand.Name
Write-Debug "[$scriptName] - Start"
$owner = $env:GITHUB_REPOSITORY_OWNER
$repo = $env:GITHUB_REPOSITORY_NAME
$runId = $env:GITHUB_RUN_ID

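# Download every '*-TestResults' artifact from this workflow run, expand the archives
# under ./TestResults, and collect the unique JSON report files they contain.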
$files = Get-GitHubArtifact -Owner $owner -Repository $repo -WorkflowRunID $runId -Name '*-TestResults' |
    Save-GitHubArtifact -Path 'TestResults' -Force -Expand -PassThru |
    Get-ChildItem -Recurse -Filter *.json |
    Sort-Object Name -Unique

LogGroup 'List files' {
$files.Name | Out-String
}

process {
try {
Write-Output "Hello, $Subject!"
} catch {
throw $_
$sourceCodeTestSuites = $env:PSMODULE_GET_PESTERTESTRESULTS_INPUT_SourceCodeTestSuites | ConvertFrom-Json
$psModuleTestSuites = $env:PSMODULE_GET_PESTERTESTRESULTS_INPUT_PSModuleTestSuites | ConvertFrom-Json
$moduleTestSuites = $env:PSMODULE_GET_PESTERTESTRESULTS_INPUT_ModuleTestSuites | ConvertFrom-Json

LogGroup 'Expected test suites' {

# Build an array of expected test suite objects
$expectedTestSuites = @()

# SourceCodeTestSuites: expected file names start with "SourceCode-"
foreach ($suite in $sourceCodeTestSuites) {
$expectedTestSuites += [pscustomobject]@{
Name = "PSModuleTest-SourceCode-$($suite.OSName)-TestResult-Report"
Category = 'SourceCode'
OSName = $suite.OSName
TestName = $null
}
$expectedTestSuites += [pscustomobject]@{
Name = "PSModuleLint-SourceCode-$($suite.OSName)-TestResult-Report"
Category = 'SourceCode'
OSName = $suite.OSName
TestName = $null
}
}

# PSModuleTestSuites: expected file names start with "Module-"
foreach ($suite in $psModuleTestSuites) {
$expectedTestSuites += [pscustomobject]@{
Name = "PSModuleTest-Module-$($suite.OSName)-TestResult-Report"
Category = 'PSModuleTest'
OSName = $suite.OSName
TestName = $null
}
$expectedTestSuites += [pscustomobject]@{
Name = "PSModuleLint-Module-$($suite.OSName)-TestResult-Report"
Category = 'PSModuleTest'
OSName = $suite.OSName
TestName = $null
}
}

# ModuleTestSuites: expected file names use the TestName as prefix
foreach ($suite in $moduleTestSuites) {
$expectedTestSuites += [pscustomobject]@{
Name = "$($suite.TestName)-$($suite.OSName)-TestResult-Report"
Category = 'ModuleTest'
OSName = $suite.OSName
TestName = $suite.TestName
}
}

$expectedTestSuites = $expectedTestSuites | Sort-Object Category, Name
$expectedTestSuites | Format-Table | Out-String
}
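
# Track the overall outcome plus per-suite results and lists of failed/unexecuted suites.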
$isFailure = $false

$testResults = [System.Collections.Generic.List[psobject]]::new()
$failedTests = [System.Collections.Generic.List[psobject]]::new()
$unexecutedTests = [System.Collections.Generic.List[psobject]]::new()
$totalErrors = 0

foreach ($expected in $expectedTestSuites) {
$file = $files | Where-Object { $_.BaseName -eq $expected.Name }
$result = if ($file) {
$object = $file | Get-Content | ConvertFrom-Json
[pscustomobject]@{
Result = $object.Result
Executed = $object.Executed
ResultFilePresent = $true
Tests = [int]([math]::Round(($object | Measure-Object -Sum -Property TotalCount).Sum))
Passed = [int]([math]::Round(($object | Measure-Object -Sum -Property PassedCount).Sum))
Failed = [int]([math]::Round(($object | Measure-Object -Sum -Property FailedCount).Sum))
NotRun = [int]([math]::Round(($object | Measure-Object -Sum -Property NotRunCount).Sum))
Inconclusive = [int]([math]::Round(($object | Measure-Object -Sum -Property InconclusiveCount).Sum))
Skipped = [int]([math]::Round(($object | Measure-Object -Sum -Property SkippedCount).Sum))
}
} else {
[pscustomobject]@{
Result = $null
Executed = $null
ResultFilePresent = $false
Tests = $null
Passed = $null
Failed = $null
NotRun = $null
Inconclusive = $null
Skipped = $null
}
}

# Determine if there’s any failure for this single test file
$testFailure = (
$result.Result -ne 'Passed' -or
$result.Executed -ne $true -or
$result.ResultFilePresent -eq $false -or
$result.Tests -eq 0 -or
$result.Passed -eq 0 -or
$result.Failed -gt 0 -or
$result.Inconclusive -gt 0
)

if ($testFailure) {
$conclusion = 'Failed'
$color = $PSStyle.Foreground.Red
$isFailure = $true
} else {
$conclusion = 'Passed'
$color = $PSStyle.Foreground.Green
}
$result | Add-Member -NotePropertyName 'Conclusion' -NotePropertyValue $conclusion

$reset = $PSStyle.Reset
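# Drop the '-TestResult-Report' suffix so the log group header shows only the suite name.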
$logGroupName = $expected.Name -replace '-TestResult-Report.*', ''

LogGroup " - $color$logGroupName$reset" {
if ($result.Executed -eq $false) {
$unexecutedTests.Add($expected.Name)
Write-GitHubError "Test was not executed as reported in file: $($expected.Name)"
$totalErrors++
} elseif ($result.Result -eq 'Failed') {
$failedTests.Add($expected.Name)
Write-GitHubError "Test result explicitly marked as Failed in file: $($expected.Name)"
$totalErrors++
}
$result | Format-Table | Out-String
}

if ($result.ResultFilePresent) {
$testResults.Add($result)
}
}

end {
Write-Debug "[$scriptName] - End"
Write-Output ('─' * 50)
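# Aggregate totals across all suites that produced a result file.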
$total = [pscustomobject]@{
Tests = [int]([math]::Round(($testResults | Measure-Object -Sum -Property Tests).Sum))
Passed = [int]([math]::Round(($testResults | Measure-Object -Sum -Property Passed).Sum))
Failed = [int]([math]::Round(($testResults | Measure-Object -Sum -Property Failed).Sum))
NotRun = [int]([math]::Round(($testResults | Measure-Object -Sum -Property NotRun).Sum))
Inconclusive = [int]([math]::Round(($testResults | Measure-Object -Sum -Property Inconclusive).Sum))
Skipped = [int]([math]::Round(($testResults | Measure-Object -Sum -Property Skipped).Sum))
}


$color = if ($isFailure) { $PSStyle.Foreground.Red } else { $PSStyle.Foreground.Green }
$reset = $PSStyle.Reset
LogGroup " - $color`Summary$reset" {
$total | Format-Table | Out-String
if ($total.Failed -gt 0) {
Write-GitHubError "There are $($total.Failed) failed tests of $($total.Tests) tests"
$totalErrors += $total.Failed
}
if ($total.Inconclusive -gt 0) {
Write-GitHubError "There are $($total.Inconclusive) inconclusive tests of $($total.Tests) tests"
$totalErrors += $total.Inconclusive
}
if ($failedTests.Count -gt 0) {
Write-Host 'Failed Test Files'
$failedTests | ForEach-Object { Write-Host " - $_" }
}
if ($unexecutedTests.Count -gt 0) {
Write-Host 'Unexecuted Test Files'
$unexecutedTests | ForEach-Object { Write-Host " - $_" }
}
}

exit $totalErrors