Mirror of https://github.com/pyTooling/Actions.git, commit v4.0.0
.github/dependabot.yml (2 changed lines, vendored)

@@ -10,9 +10,7 @@ updates:
- Dependencies
assignees:
- Paebbels
- umarcor
reviewers:
- Paebbels
- umarcor
schedule:
interval: "daily" # Checks on Monday trough Friday.
.github/workflows/ApplicationTesting.yml (4 changed lines, vendored)

@@ -231,10 +231,10 @@ jobs:
cd "${{ inputs.root_directory || '.' }}"
[ -n '${{ inputs.apptest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS
if [ -n '${{ inputs.coverage_config }}' ]; then
echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
else
echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
fi
.github/workflows/BuildTheDocs.yml (2 changed lines, vendored)

@@ -38,7 +38,7 @@ jobs:
steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'."
run: printf "%s\n" "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v4
.github/workflows/CheckDocumentation.yml (6 changed lines, vendored)

@@ -59,14 +59,14 @@ jobs:
- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U docstr_coverage interrogate
python -m pip install --disable-pip-version-check -U docstr_coverage interrogate[png]

- name: Run 'interrogate' Documentation Coverage Check
continue-on-error: true
run: |
interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && echo "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"
interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && printf "%s\n" "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"

- name: Run 'docstr_coverage' Documentation Coverage Check
continue-on-error: true
run: |
docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && echo "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"
docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && printf "%s\n" "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"
.github/workflows/CompletePipeline.yml (45 changed lines, vendored)

@@ -103,13 +103,13 @@ on:
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r4
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}

UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}

@@ -121,7 +121,7 @@ jobs:
disable_list: ${{ inputs.unittest_disable_list }}

AppTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}

@@ -133,18 +133,22 @@ jobs:
disable_list: ${{ inputs.apptest_disable_list }}

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4
needs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
requirements: "-r tests/unit/requirements.txt"
# pacboy: "msys/git python-lxml:p"
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}

StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4
needs:
- ConfigParams
- UnitTestingParams

@@ -157,7 +161,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r4
needs:
- ConfigParams
- UnitTestingParams

@@ -167,7 +171,7 @@ jobs:
# fail_below: 70

Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r2
uses: pyTooling/Actions/.github/workflows/Package.yml@r4
needs:
- UnitTestingParams
- UnitTesting

@@ -176,7 +180,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r2
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r4
# needs:
# - AppTestingParams
# - UnitTestingParams

@@ -187,7 +191,7 @@ jobs:
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r4
needs:
- UnitTestingParams
- UnitTesting

@@ -200,22 +204,23 @@ jobs:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4
needs:
- UnitTestingParams
- UnitTesting
with:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r4
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r4
needs:
- ConfigParams
- UnitTestingParams

@@ -231,7 +236,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r4
needs:
- UnitTestingParams
- PublishCoverageResults

@@ -242,7 +247,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

# PDFDocumentation:
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r4
# needs:
# - UnitTestingParams
# - Documentation

@@ -252,7 +257,7 @@ jobs:
# pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4
needs:
- UnitTestingParams
- Documentation

@@ -265,7 +270,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@r2
uses: pyTooling/Actions/.github/workflows/Release.yml@r4
if: startsWith(github.ref, 'refs/tags')
needs:
- Package

@@ -273,7 +278,7 @@ jobs:
- PublishToGitHubPages

PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams

@@ -286,7 +291,7 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4
needs:
- UnitTestingParams
- UnitTesting
.github/workflows/CoverageCollection.yml (4 changed lines, vendored)

@@ -72,7 +72,7 @@ jobs:
steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'."
run: printf "%s\n" "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v4

@@ -150,7 +150,7 @@ jobs:
ABSDIR=$(pwd)
cd "${{ inputs.tests_directory || '.' }}"
[ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
echo "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes

- name: Convert to cobertura format
.github/workflows/ExtractConfiguration.yml (59 changed lines, vendored)

@@ -59,18 +59,33 @@ on:
mypy_prepare_command:
description: ""
value: ${{ jobs.Extract.outputs.mypy_prepare_command }}
unittest_report_xml_directory:
description: ""
value: ${{ jobs.Extract.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename:
description: ""
value: ${{ jobs.Extract.outputs.unittest_report_xml_filename }}
unittest_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.unittest_report_xml }}
coverage_report_html_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
coverage_report_xml_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml_directory }}
coverage_report_xml_filename:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml_filename }}
coverage_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml }}
coverage_report_json_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json_directory }}
coverage_report_json_filename:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json_filename }}
coverage_report_json:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json }}

@@ -83,10 +98,15 @@ jobs:
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }}
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}

steps:

@@ -144,10 +164,11 @@ jobs:
from tomli import load as tomli_load

htmlDirectory = Path("htmlcov")
xmlFile = Path("./coverage.xml")
jsonFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()
unittestXMLFile = Path("./unittest.xml")
coverageHTMLDirectory = Path("htmlcov")
coverageXMLFile = Path("./coverage.xml")
coverageJSONFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":

@@ -156,9 +177,10 @@ jobs:
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
unittestXMLFile = Path(pyProjectSettings["tool"]["pytest"]["junit_xml"])
coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")

@@ -171,9 +193,9 @@ jobs:
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = Path(coverageRCSettings["html"]["directory"])
xmlFile = Path(coverageRCSettings["xml"]["output"])
jsonFile = Path(coverageRCSettings["json"]["output"])
coverageHTMLDirectory = Path(coverageRCSettings["html"]["directory"])
coverageXMLFile = Path(coverageRCSettings["xml"]["output"])
coverageJSONFile = Path(coverageRCSettings["json"]["output"])
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")

@@ -184,11 +206,16 @@ jobs:
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory.as_posix()}
coverage_report_xml_directory={xmlFile.parent.as_posix()}
coverage_report_xml={xmlFile.as_posix()}
coverage_report_json_directory={jsonFile.parent.as_posix()}
coverage_report_json={jsonFile.as_posix()}
unittest_report_xml_directory={unittestXMLFile.parent.as_posix()}
unittest_report_xml_filename={unittestXMLFile.name}
unittest_report_xml={unittestXMLFile.as_posix()}
coverage_report_html_directory={coverageHTMLDirectory.as_posix()}
coverage_report_xml_directory={coverageXMLFile.parent.as_posix()}
coverage_report_xml_filename={coverageXMLFile.name}
coverage_report_xml={coverageXMLFile.as_posix()}
coverage_report_json_directory={coverageJSONFile.parent.as_posix()}
coverage_report_json_filename={coverageJSONFile.name}
coverage_report_json={coverageJSONFile.as_posix()}
"""))

print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}")
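Note: the getVariables step above hands the extracted paths to later jobs by appending key=value lines to the file named by GITHUB_OUTPUT. A minimal shell sketch of that same mechanism, with made-up paths that are not the workflow's actual values:

#!/usr/bin/env bash
# Hedged illustration only: the paths below are invented examples.
{
  printf "unittest_report_xml=%s\n"  "report/unit/TestReportSummary.xml"
  printf "coverage_report_xml=%s\n"  "report/coverage/coverage.xml"
  printf "coverage_report_json=%s\n" "report/coverage/coverage.json"
} >> "$GITHUB_OUTPUT"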
.github/workflows/LaTeXDocumentation.yml (19 changed lines, vendored)

@@ -55,17 +55,28 @@ jobs:
name: ${{ inputs.latex_artifact }}
path: latex

- name: Compile LaTeX document
uses: xu-cheng/latex-action@master
- name: Debug
run: |
tree -pash .

- name: Build LaTeX document using 'pytooling/miktex:sphinx'
uses: addnab/docker-run-action@v3
with:
working_directory: latex
root_file: ${{ inputs.document }}.tex
image: pytooling/miktex:sphinx
options: -v ${{ github.workspace }}/latex:/latex --workdir /latex
run: |
which pdflatex
pwd
ls -lAh

latexmk -xelatex ${{ inputs.document }}.tex

- name: 📤 Upload 'PDF Documentation' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}
working-directory: latex
path: ${{ inputs.document }}.pdf
if-no-files-found: error
retention-days: 1
.github/workflows/NightlyRelease.yml (291 changed lines, vendored)

@@ -68,6 +68,18 @@ on:
description: 'Multi-line string containing artifact:file:title asset descriptions.'
required: true
type: string
inventory-json:
type: string
required: false
default: ''
inventory-version:
type: string
required: false
default: ''
inventory-categories:
type: string
required: false
default: ''
tarball-name:
type: string
required: false

@@ -97,23 +109,23 @@ jobs:
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}

echo -n "Deleting release '${{ inputs.nightly_name }}' ... "
printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... "
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
elif [[ "${message}" == "release not found" ]]; then
echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
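Note: the hunk above swaps echo -e for printf and defines the colour codes with ANSI-C quoting, so each variable holds a real escape byte instead of a literal backslash sequence that only echo -e would interpret. A minimal, self-contained sketch of that pattern (the colour name and message are illustrative only):

#!/usr/bin/env bash
# $'...' expands \x1b at assignment time, so plain printf emits colour.
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'

# printf "%s\n" prints its argument verbatim plus a newline; unlike echo -e,
# it never re-interprets backslashes inside the message itself.
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"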
@@ -122,19 +134,19 @@ jobs:
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}

addDraft="--draft"

if ${{ inputs.prerelease }}; then
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi

if ! ${{ inputs.latest }}; then
if [[ "${{ inputs.latest }}" == "false" ]]; then
addLatest="--latest=false"
fi

@@ -166,14 +178,14 @@ jobs:
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
EOF

echo "Creating release '${{ inputs.nightly_name }}' ... "
printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi

@@ -182,10 +194,11 @@ jobs:
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE="\e[94m"
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}

@@ -199,9 +212,42 @@ jobs:
replacement="${patternLine#*=}"
line="${line//"%$pattern%"/"$replacement"}"
done <<<'${{ inputs.replacements }}'
echo "$line"
printf "%s\n" "$line"
}

# Create JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
VERSION="1.0"

# Split categories by ',' into a Bash array.
# See https://stackoverflow.com/a/45201229/3719459
if [[ "${{ inputs.inventory-categories }}" != "" ]]; then
readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }},"
unset 'inventoryCategories[-1]'
declare -p inventoryCategories
else
inventoryCategories=""
fi

jsonInventory=$(jq -c -n \
--arg version "${VERSION}" \
--arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
--argjson jsonMeta "$(jq -c -n \
--arg tag "${{ inputs.nightly_name }}" \
--arg version "${{ inputs.inventory-version }}" \
--arg hash "${{ github.sha }}" \
--arg repo "${{ github.server_url }}/${{ github.repository }}" \
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
--argjson categories "$(jq -c -n \
'$ARGS.positional' \
--args "${inventoryCategories[@]}" \
)" \
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \
)" \
'{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
)
fi

ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
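Note: two techniques carry the new inventory code above: readarray -td, splits a comma-separated input into a Bash array, and jq -n with --arg/--argjson composes nested JSON without manual string escaping. A small sketch of both with made-up values (the variable names and categories are illustrative, not the workflow's):

#!/usr/bin/env bash
categoriesInput="doc,linux,windows"

# Append a trailing ',' so the last field is delimiter-terminated, then drop
# the empty element readarray produces for the final newline.
readarray -td, categories <<<"${categoriesInput},"
unset 'categories[-1]'

# Build {"meta": {...}, "files": {}} from shell values; --arg passes plain
# strings, --args exposes trailing arguments as $ARGS.positional, and
# --argjson splices pre-built JSON back in.
meta=$(jq -c -n --arg tag "nightly" --args '{tag: $tag, categories: $ARGS.positional}' "${categories[@]}")
jq -n --argjson meta "${meta}" '{"meta": $meta, "files": {}}'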
@@ -214,184 +260,243 @@ jobs:

# split assetLine colon separated triple: artifact:asset:title
artifact="${assetLine%%:*}"
remaining="${assetLine#*:}"
asset="${remaining%%:*}"
title="${remaining##*:}"
assetLine="${assetLine#*:}"
asset="${assetLine%%:*}"
assetLine="${assetLine#*:}"
if [[ "${{ inputs.inventory-json }}" == "" ]]; then
categories=""
title="${assetLine##*:}"
else
categories="${assetLine%%:*}"
title="${assetLine##*:}"
fi

# remove leading whitespace
asset="${asset#"${asset%%[![:space:]]*}"}"
categories="${categories#"${categories%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"

# apply replacements
asset="$(Replace "${asset}")"
title="$(Replace "${title}")"

echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
echo -n " Checked asset for duplicates ... "
printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
printf " %s" "Checked asset for duplicates ... "
if [[ -n "${assetFilenames[$asset]}" ]]; then
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
assetFilenames[$asset]=1
fi

# Download artifact by artifact name
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
else
echo " downloading '${artifact}' ... "
echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
ERRORS=1
continue
fi
downloadedArtifacts[$artifact]=1

echo -n " Checking for embedded tarball ... "
printf " %s" "Checking for embedded tarball ... "
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
echo -e "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"

pushd "${artifact}" > /dev/null

echo -n " Extracting embedded tarball ... "
printf " %s" "Extracting embedded tarball ... "
tar -xf "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi

echo -n " Removing temporary tarball ... "
printf " %s" "Removing temporary tarball ... "
rm -f "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi

popd > /dev/null
else
echo -e "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi

# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
echo -n " checking asset '${artifact}/${asset}' ... "
printf " %s" "checking asset '${artifact}/${asset}' ... "
if [[ "${asset}" == !*.zip ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
if [[ $? -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"

if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --gzip --file="../${asset}" *
tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi

if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"

if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --zstd --file="../${asset}" *
tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi

if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=1
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${artifact}/${asset}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=1
continue
fi

# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..."
category=""
jsonEntry=$(jq -c -n \
--arg title "${title}" \
--arg file "${uploadFile}" \
'{"file": $file, "title": $title}' \
)

while [[ "${categories}" != "${category}" ]]; do
category="${categories##*;}"
categories="${categories%;*}"
jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
done

jsonInventory=$(jq -c -n \
--argjson inventory "${jsonInventory}" \
--argjson file "${jsonEntry}" \
'$inventory * {"files": $file}' \
)
else
printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi

# Upload asset to existing release page
echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
fi
done <<<'${{ inputs.assets }}'

echo "Inspecting downloaded artifacts ..."
tree -L 3 .
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
inventoryTitle="Release Inventory (JSON)"

printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
cat "${{ inputs.inventory-json }}"
printf "::endgroup::\n"

# Upload inventory asset to existing release page
printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
fi
fi

printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
tree -pash -L 3 .
printf "::endgroup::\n"

if [[ $ERROR -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi
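Note: the reworked parsing at the top of the hunk above splits each asset line with plain parameter expansions instead of a temporary variable: %%:* keeps the text before the first colon, #*: drops that field, ##*: keeps the text after the last colon. A standalone sketch with an invented asset line:

#!/usr/bin/env bash
# Invented example in artifact:asset:categories:title form.
assetLine="nightly-artifact: build.tgz: tools;linux: Nightly build"

artifact="${assetLine%%:*}"    # text before the first ':'  -> nightly-artifact
assetLine="${assetLine#*:}"    # drop the consumed field
asset="${assetLine%%:*}"
assetLine="${assetLine#*:}"
categories="${assetLine%%:*}"
title="${assetLine##*:}"       # text after the last ':'

# trim leading whitespace, as the workflow does
asset="${asset#"${asset%%[![:space:]]*}"}"
printf "%s | %s | %s | %s\n" "$artifact" "$asset" "$categories" "$title"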
@@ -400,19 +505,19 @@ jobs:
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}

# Remove draft-state from release page
echo -n "Remove draft-state from release '${title}' ... "
printf "%s" "Remove draft-state from release '${title}' ... "
gh release edit --draft=false "${{ inputs.nightly_name }}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi
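Note: the compression hunks in NightlyRelease.yml above add --owner=0 --group=0 so the release tarballs no longer embed the runner's UID/GID, and --transform gives the archive a stable top-level directory name. A minimal sketch of that combination for GNU tar, with placeholder paths:

#!/usr/bin/env bash
# Pack ./my-artifact into my-asset.tar.gz with neutral ownership and a
# renamed top-level directory ("my-asset" instead of ".").
tar -c --gzip --owner=0 --group=0 \
    --file="my-asset.tar.gz" \
    --directory="my-artifact" \
    --transform "s|^\.|my-asset|" .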
.github/workflows/Parameters.yml (10 changed lines, vendored)

@@ -147,7 +147,7 @@ jobs:
else:
name = f"{package_namespace}.{package_name}"

currentMSYS2Version = "3.11"
currentMSYS2Version = "3.12"
currentAlphaVersion = "3.14"
currentAlphaRelease = "3.14.0-alpha.1"

@@ -337,7 +337,7 @@ jobs:
- name: Verify out parameters
id: verify
run: |
echo 'python_version: ${{ steps.params.outputs.python_version }}'
echo 'python_jobs: ${{ steps.params.outputs.python_jobs }}'
echo 'artifact_names: ${{ steps.params.outputs.artifact_names }}'
echo 'params: ${{ steps.params.outputs.params }}'
printf "python_version: %s\n" '${{ steps.params.outputs.python_version }}'
printf "python_jobs: %s\n" '${{ steps.params.outputs.python_jobs }}'
printf "artifact_names: %s\n" '${{ steps.params.outputs.artifact_names }}'
printf "params: %s\n" '${{ steps.params.outputs.params }}'
.github/workflows/PublishCoverageResults.yml (12 changed lines, vendored)

@@ -31,7 +31,7 @@ on:
type: string
coverage_artifacts_pattern:
required: false
default: '*-CodeCoverage-*'
default: '*-CodeCoverage-SQLite-*'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'

@@ -84,7 +84,7 @@ jobs:
- name: 🔎 Inspect extracted artifact (tarball)
run: |
tree -psh artifacts
tree -pash artifacts

- name: 🔧 Install coverage and tomli
run: |

@@ -150,13 +150,11 @@ jobs:
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")

- name: Rename .coverage files and collect them all to coverage/
- name: Rename .coverage files and move them all into 'coverage/'
run: |
ls -lAh artifacts/
ls -lAh artifacts/*/.coverage
mkdir -p coverage
find artifacts/ -type f -path "*SQLite*.coverage" -exec sh -c 'cp -v $0 "coverage/$(basename $0).$(basename $(dirname $0))"' {} ';'
tree -a coverage
tree -pash coverage

- name: Combine SQLite files (using Coverage.py)
run: coverage combine --data-file=.coverage coverage/

@@ -177,7 +175,7 @@ jobs:
run: |
coverage html --data-file=.coverage -d report/coverage/html
rm report/coverage/html/.gitignore
tree -a report/coverage/html
tree -pash report/coverage/html

- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
.github/workflows/PublishTestResults.yml (11 changed lines, vendored)

@@ -32,7 +32,7 @@ on:
type: string
unittest_artifacts_pattern:
required: false
default: '*-UnitTestReportSummary-*'
default: '*-UnitTestReportSummary-XML-*'
type: string
merged_junit_artifact:
description: 'Name of the merged JUnit Test Summary artifact.'

@@ -73,23 +73,22 @@ jobs:
- name: 🔎 Inspect extracted artifact (tarball)
run: |
tree -psh artifacts
tree -pash artifacts

- name: 🔧 Install pyEDAA.Reports (JUunit Parser and Merger)
run: |
python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports

- name: Move JUnit files and collect them all to junit/
- name: Rename JUnit files and move them all into 'junit/'
run: |
mkdir -p junit
ls -lAh artifacts/*/*.xml
find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
tree -a junit
tree -pash junit

- name: 🔁 Merge JUnit Unit Test Summaries
run: |
pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
echo "cat Unittesting.xml"
printf "%s\n" "cat Unittesting.xml"
cat Unittesting.xml

- name: 📊 Publish Unit Test Results
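Note: both publish workflows above collect per-job report files into one directory while tagging each copy with the name of its artifact directory, so files from different jobs cannot collide. A standalone sketch of that find -exec pattern with invented paths:

#!/usr/bin/env bash
mkdir -p junit
# For every matching XML below artifacts/, copy it to junit/ and prepend the
# name of its parent directory (the artifact name) to the file name.
find artifacts/ -type f -name "*.xml" \
  -exec sh -c 'cp -v "$0" "junit/$(basename "$(dirname "$0")").$(basename "$0")"' {} ';'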
.github/workflows/PublishToGitHubPages.yml (2 changed lines, vendored)

@@ -55,7 +55,7 @@ jobs:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.doc }}
.github/workflows/SphinxDocumentation.yml (82 changed lines, vendored)

@@ -180,6 +180,88 @@ jobs:
sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex
# --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . _build/html

- name: Workaround I - https://github.com/sphinx-doc/sphinx/issues/13190
if: inputs.latex_artifact != ''
run: |
printf "Changing directory to 'doc/_build/latex' ...\n"
cd doc/_build/latex

MIMETYPE_EXTENSIONS=(
"image/png:png"
"image/jpeg:jpg"
"image/svg+xml:svg"
)

printf "Changing file extension according to MIME type ...\n"
while IFS=$'\n' read -r file; do
printf " Checking '%s' ... " "${file}"
mime="$(file --mime-type -b "${file}")"
printf "[%s]\n" "${mime}"

found=0
for MIME in "${MIMETYPE_EXTENSIONS[@]}"; do
mimetype="${MIME%%:*}"
extension="${MIME#*:}"

if [[ "${mime}" == "${mimetype}" && "${file##*.}" != "${extension}" ]]; then
printf " Rename file to '%s' " "${file}.${extension}"
mv "${file}" "${file}.${extension}"
if [[ $? -eq 0 ]]; then
printf "[OK]\n"
else
printf "[FAILED]\n"
fi

printf " Patching LaTeX file for '%s' " "${file}"
sed -i "s:{{${file%.*}}\.${file##*.}}:{{${file}}.${extension}}:g" *.tex
if [[ $? -eq 0 ]]; then
printf "[OK]\n"
else
printf "[FAILED]\n"
fi

found=1
break
fi
done
if [[ $found -eq 0 ]]; then
printf "[SKIPPED]\n"
fi
done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')

- name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189
if: inputs.latex_artifact != ''
run: |
printf "Changing directory to 'doc/_build/latex' ...\n"
cd doc/_build/latex

printf "Searching for downloaded images, that need normalization ...\n"
for imageExt in png svg jpg jpeg; do
printf " Processing '%s' ...\n" "${imageExt}"
while IFS=$'\n' read -r imageFile; do
newFile="${imageFile//%/_}";

printf " %s\n" "$imageFile";
if [[ "${imageFile}" != "${newFile}" ]]; then
printf " Rename file to '%s' " "${newFile}"
mv "${imageFile}" "${newFile}"
if [[ $? -eq 0 ]]; then
printf "[OK]\n"
else
printf "[FAILED]\n"
fi

printf " Patching LaTeX file for '%s' " "${newFile}"
sed -i "s:{{${imageFile%.*}}\.${imageFile##*.}}:{{${newFile%.*}}.${newFile##*.}}:g" *.tex
if [[ $? -eq 0 ]]; then
printf "[OK]\n"
else
printf "[FAILED]\n"
fi
fi
done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::')
done

- name: 📤 Upload 'LaTeX Documentation' artifact
if: inputs.latex_artifact != ''
continue-on-error: true
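Note: Workaround I above keys the rename on the detected MIME type rather than on the current file extension. A condensed sketch of that detection step, using a made-up file name:

#!/usr/bin/env bash
file="downloaded-image"                      # hypothetical name without extension
mime="$(file --mime-type -b "${file}")"      # e.g. image/png

case "${mime}" in
  image/png)     mv "${file}" "${file}.png" ;;
  image/jpeg)    mv "${file}" "${file}.jpg" ;;
  image/svg+xml) mv "${file}" "${file}.svg" ;;
  *)             printf "[SKIPPED] %s\n" "${mime}" ;;
esac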
.github/workflows/TestReleaser.yml (20 changed lines, vendored)

@@ -64,7 +64,7 @@ jobs:
steps:
- uses: actions/checkout@v4

- run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt
- run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt

- name: Single
uses: ./releaser/composite

@@ -84,7 +84,7 @@ jobs:
- name: Add artifacts/*.txt
run: |
mkdir artifacts
echo "Build some tool and generate some artifacts" > artifacts/artifact.txt
printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt
touch artifacts/empty_file.txt

- name: Single in subdir

@@ -95,8 +95,8 @@ jobs:
- name: Add artifacts/*.md
run: |
echo "releaser hello" > artifacts/hello.md
echo "releaser world" > artifacts/world.md
printf "%s\n" "releaser hello" > artifacts/hello.md
printf "%s\n" "releaser world" > artifacts/world.md

- name: Directory wildcard
uses: ./releaser/composite

@@ -107,7 +107,7 @@ jobs:
- name: Add artifacts/subdir
run: |
mkdir artifacts/subdir
echo "Test recursive glob" > artifacts/subdir/deep_file.txt
printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt

- name: Directory wildcard (recursive)
uses: ./releaser/composite

@@ -124,7 +124,7 @@ jobs:
steps:
- uses: actions/checkout@v4

- run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt
- run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt

- name: Single
uses: ./releaser

@@ -144,7 +144,7 @@ jobs:
- name: Add artifacts/*.txt
run: |
mkdir artifacts
echo "Build some tool and generate some artifacts" > artifacts/artifact.txt
printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt
touch artifacts/empty_file.txt

- name: Single in subdir

@@ -155,8 +155,8 @@ jobs:
- name: Add artifacts/*.md
run: |
echo "releaser hello" > artifacts/hello.md
echo "releaser world" > artifacts/world.md
printf "%s\n" "releaser hello" > artifacts/hello.md
printf "%s\n" "releaser world" > artifacts/world.md

- name: Directory wildcard
uses: ./releaser

@@ -167,7 +167,7 @@ jobs:
- name: Add artifacts/subdir
run: |
mkdir artifacts/subdir
echo "Test recursive glob" > artifacts/subdir/deep_file.txt
printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt

- name: Directory wildcard (recursive)
uses: ./releaser
96
.github/workflows/UnitTesting.yml
vendored
96
.github/workflows/UnitTesting.yml
vendored
@@ -94,11 +94,26 @@ on:
required: false
default: 'unit'
type: string
unittest_report_xml_directory:
description: 'Path where to save the unittest summary report XML.'
required: false
default: 'report/unit'
type: string
unittest_report_xml_filename:
description: 'Filename of the unittest summary report XML.'
required: false
default: 'TestReportSummary.xml'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
default: 'pyproject.toml'
type: string
coverage_report_html_directory:
description: ''
required: false
default: 'report/coverage/html'
type: string
unittest_xml_artifact:
description: "Generate unit test report with junitxml and upload results as an artifact."
required: false
@@ -323,69 +338,6 @@ jobs:
if: matrix.system == 'msys2' && matrix.runtime == 'UCRT64' && inputs.ucrt64_before_script != ''
run: ${{ inputs.ucrt64_before_script }}

# Read pyproject.toml

- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
from textwrap import dedent

print(f"Python: {version}")

from tomli import load as tomli_load

htmlDirectory = Path("htmlcov")
xmlFile = Path("./coverage.xml")
jsonFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)

# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = Path(coverageRCSettings["html"]["directory"])
xmlFile = Path(coverageRCSettings["xml"]["output"])
jsonFile = Path(coverageRCSettings["json"]["output"])
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
unittest_report_html_directory={htmlDirectory}
coverage_report_html_directory={htmlDirectory.as_posix()}
coverage_report_xml={xmlFile}
coverage_report_json={jsonFile}
"""))

print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")

# Run pytests

- name: ✅ Run unit tests (Ubuntu/macOS)
@@ -395,12 +347,12 @@ jobs:
export PYTHONPATH=$(pwd)

cd "${{ inputs.root_directory || '.' }}"
[ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS
[ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}' || unset PYTEST_ARGS
if [ -n '${{ inputs.coverage_config }}' ]; then
echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
else
echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
fi

@@ -411,7 +363,7 @@ jobs:
$env:PYTHONPATH = (Get-Location).ToString()

cd "${{ inputs.root_directory || '.' }}"
$PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" }
$PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}" } else { "" }
if ("${{ inputs.coverage_config }}") {
Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
@@ -434,19 +386,19 @@ jobs:
if: inputs.coverage_html_artifact != ''
continue-on-error: true
run: |
coverage html --data-file=.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore
coverage html --data-file=.coverage -d ${{ inputs.coverage_report_html_directory }}
rm ${{ inputs.coverage_report_html_directory }}/.gitignore

# Upload artifacts

- name: 📤 Upload 'TestReportSummary.xml' artifact
- name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit
path: TestReportSummary.xml
working-directory: ${{ inputs.unittest_report_xml_directory }}
path: ${{ inputs.unittest_report_xml_filename }}
if-no-files-found: error
retention-days: 1

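For orientation only: the '🔁 Extract configurations from pyproject.toml' step removed above read the coverage output locations via tomli. A pyproject.toml serving those lookups might look roughly like the minimal sketch below; the table and key names follow what the deleted code accessed, while the concrete paths are illustrative placeholders rather than values taken from this commit.

# hypothetical pyproject.toml excerpt (placeholder values)
[tool.coverage.html]
directory = "report/coverage/html"

[tool.coverage.xml]
output = "report/coverage/coverage.xml"

[tool.coverage.json]
output = "report/coverage/coverage.json"

In v4.0.0 these locations are supplied through the new unittest_report_xml_directory, unittest_report_xml_filename and coverage_report_html_directory inputs instead, typically fed by the ExtractConfiguration.yml job template as shown in the _Checking_JobTemplates.yml example further below.
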
@@ -6,7 +6,7 @@ on:

jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.12 3.13"
@@ -22,7 +22,7 @@ jobs:
include: ${{ fromJson(needs.Params.outputs.python_jobs) }}
steps:
- name: Content creation for ${{ matrix.system }}-${{ matrix.python }}
run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt

- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v4
@@ -39,7 +39,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Package creation
run: echo "Package" >> package.txt
run: printf "%s\n" "Package" >> package.txt

- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v4
@@ -50,7 +50,7 @@ jobs:
retention-days: 1

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4
needs:
- Params
- Testing

104
.github/workflows/_Checking_JobTemplates.yml
vendored
@@ -6,64 +6,62 @@ on:

jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r3
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r4
with:
package_name: pyDummy

UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r3
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: pyDummy
python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
# disable_list: "windows:pypy-3.10"

PlatformTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r3
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Platform
python_version_list: ""
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
system_list: "ubuntu windows macos mingw64 clang64 ucrt64"

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r3
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4
needs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}

PlatformTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r3
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4
needs:
- ConfigParams
- PlatformTestingParams
with:
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
# tests_directory: ""
unittest_directory: platform
unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}
unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}
coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}

# Coverage:
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r3
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
# artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
# secrets:
# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}
unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}
coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}

StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r3
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4
needs:
- ConfigParams
- UnitTestingParams
@@ -76,7 +74,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r4
needs:
- ConfigParams
- UnitTestingParams
@@ -86,48 +84,48 @@ jobs:
# fail_below: 70

Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r3
uses: pyTooling/Actions/.github/workflows/Package.yml@r4
needs:
- UnitTestingParams
- UnitTesting
# - Coverage
- PlatformTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r3
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r4
needs:
- UnitTestingParams
- UnitTesting
- PlatformTesting
# - Coverage
with:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r3
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4
needs:
- UnitTestingParams
- UnitTesting
- PlatformTesting
with:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r3
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r4
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r3
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r4
needs:
- ConfigParams
- UnitTestingParams
@@ -137,13 +135,13 @@ jobs:
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
# unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r1
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r4
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -151,25 +149,24 @@ jobs:
- Documentation
with:
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r3
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r4
needs:
- UnitTestingParams
- Documentation
with:
document: actions
document: Actions
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r3
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4
needs:
- UnitTestingParams
- Documentation
# - PDFDocumentation
# - Coverage
- PDFDocumentation
- PublishCoverageResults
- StaticTypeCheck
with:
@@ -178,18 +175,17 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@r3
uses: pyTooling/Actions/.github/workflows/Release.yml@r4
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTesting
- PlatformTesting
# - Coverage
# - StaticTypeCheck
- Package
- PublishToGitHubPages

PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r3
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams
@@ -203,16 +199,15 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r3
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4
needs:
- UnitTestingParams
- PlatformTestingParams
- UnitTesting
# - Coverage
- StaticTypeCheck
- PlatformTesting
- Documentation
# - PDFDocumentation
- PDFDocumentation
- PublishTestResults
- PublishCoverageResults
- PublishToGitHubPages
@@ -222,7 +217,6 @@ jobs:
remaining: |
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
@@ -234,9 +228,9 @@ jobs:
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}-*
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}-*
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}-*
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}-*
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}-*
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}-*

@@ -6,7 +6,7 @@ on:

jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r4
with:
package_namespace: pyExamples
package_name: Extensions

83
.github/workflows/_Checking_Nightly.yml
vendored
@@ -12,9 +12,9 @@ jobs:
steps:
- name: 🖉 Build 1
run: |
echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
printf "%s\n" "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
printf "%s\n" "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log

- name: 📤 Upload artifact
uses: pyTooling/upload-artifact@v4
@@ -28,8 +28,8 @@ jobs:

- name: 🖉 Program
run: |
echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py

- name: 📤 Upload artifact
uses: actions/upload-artifact@v4
@@ -42,7 +42,7 @@ jobs:
retention-days: 1

NightlyPage:
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@r4
needs:
- Build
secrets: inherit
@@ -56,35 +56,13 @@ jobs:
version=4.2.0
tool=myTool
prog=program
nightly_title: "Nightly Release"
nightly_title: "Nightly Test Release"
nightly_description: |
This *nightly* release contains all latest and important artifacts created by GHDL's CI pipeline.
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.

# GHDL %version%
# %tool% %version%

GHDL offers the simulator and synthesis tool for VHDL. GHDL can be built for various backends:
* `gcc` - using the GCC compiler framework
* `mcode` - in memory code generation
* `llvm` - using the LLVM compiler framework
* `llvm-jit` - using the LLVM compiler framework, but in memory

The following asset categories are provided for GHDL:
* macOS x86-64 builds as TAR/GZ file
* macOS aarch64 builds as TAR/GZ file
* Ubuntu 24.04 LTS builds as TAR/GZ file
* Windows builds for standalone usage (without MSYS2) as ZIP file
* MSYS2 packages as TAR/ZST file

# pyGHDL %version%

The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
In addition to the low-level binding layer, pyGHDL offers:
* a Language Server Protocol (LSP) instance for e.g. live code checking by editors
* a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)

The following asset categories are provided for pyGHDL:
* Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so`
* Platform specific Python wheel package for Windows incl. `pyGHDL...dll`
* %prog%
assets: |
document: document1.txt: Documentation
document: build.log: Logfile - %tool% - %tool%
@@ -99,3 +77,44 @@ jobs:
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
document:$archive8.tzst: Archive 8 - tzst + dir
document:$archive9.tar.zst:Archive 9 - tar.zst + dir

NightlyPageWithInventory:
uses: ./.github/workflows/NightlyRelease.yml
needs:
- Build
secrets: inherit
permissions:
contents: write
actions: write
# attestations: write
with:
replacements: |
version=4.2.0
tool=myTool
prog=program
nightly_name: inventory
nightly_title: "Nightly Test Release with Inventory"
nightly_description: |
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.

# %tool% %version%

* %prog%
* inventory.json
inventory-json: "inventory.json"
inventory-version: 4.2.5
inventory-categories: "kind1,kind2"
assets: |
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%
other: %prog%.py: app,binary:Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz
document:!archive3.tar.gz: Archive 3 - tar.gz
document:!archive4.tzst: Archive 4 - tzst
document:!archive5.tar.zst: Archive 5 - tar.zst
document:$archive6.tgz: Archive 6 - tgz + dir
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
document:$archive8.tzst: Archive 8 - tzst + dir
document:$archive9.tar.zst: Archive 9 - tar.zst + dir

20
.github/workflows/_Checking_Parameters.yml
vendored
@@ -6,24 +6,24 @@ on:

jobs:
Params_Default:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example

Params_PythonVersions:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"

Params_Systems:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
system_list: "windows mingw32 mingw64"

Params_Include:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.11"
@@ -31,7 +31,7 @@ jobs:
include_list: "ubuntu:3.12 ubuntu:3.13"

Params_Exclude:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.12"
@@ -39,7 +39,7 @@ jobs:
exclude_list: "windows:3.12 windows:3.13"

Params_Disable:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.12"
@@ -47,7 +47,7 @@ jobs:
disable_list: "windows:3.12 windows:3.13"

Params_All:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4
with:
name: Example
python_version_list: "3.12 3.13"
@@ -83,7 +83,7 @@ jobs:
expectedPythonVersion = "3.13"
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"]
expectedName = "Example"
expectedArtifacts = {
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
@@ -146,7 +146,7 @@ jobs:
expectedPythonVersion = "3.13"
expectedPythons = ["3.11", "3.12", "pypy-3.9", "pypy-3.10"]
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"]
expectedName = "Example"
expectedArtifacts = {
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
@@ -209,7 +209,7 @@ jobs:
expectedPythonVersion = "3.13"
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
expectedSystems = ["windows"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.11", "mingw64:3.11"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.12", "mingw64:3.11"]
expectedName = "Example"
expectedArtifacts = {
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",

@@ -6,7 +6,7 @@ on:

jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r4
with:
package_name: pyDummy
secrets: