diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 67b1a2a..e91b107 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,9 +10,7 @@ updates: - Dependencies assignees: - Paebbels - - umarcor reviewers: - Paebbels - - umarcor schedule: interval: "daily" # Checks on Monday trough Friday. diff --git a/.github/workflows/ApplicationTesting.yml b/.github/workflows/ApplicationTesting.yml index 5196901..266c412 100644 --- a/.github/workflows/ApplicationTesting.yml +++ b/.github/workflows/ApplicationTesting.yml @@ -231,10 +231,10 @@ jobs: cd "${{ inputs.root_directory || '.' }}" [ -n '${{ inputs.apptest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS if [ -n '${{ inputs.coverage_config }}' ]; then - echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }} else - echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' 
}}/${{ inputs.apptest_directory }} fi diff --git a/.github/workflows/BuildTheDocs.yml b/.github/workflows/BuildTheDocs.yml index 330cd3c..1507b48 100644 --- a/.github/workflows/BuildTheDocs.yml +++ b/.github/workflows/BuildTheDocs.yml @@ -38,7 +38,7 @@ jobs: steps: - name: '❗ Deprecation message' - run: echo "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'." + run: printf "%s\n" "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'." - name: ⏬ Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/CheckDocumentation.yml b/.github/workflows/CheckDocumentation.yml index 3a9fa1f..f634582 100644 --- a/.github/workflows/CheckDocumentation.yml +++ b/.github/workflows/CheckDocumentation.yml @@ -59,14 +59,14 @@ jobs: - name: 🔧 Install wheel,tomli and pip dependencies (native) run: | - python -m pip install --disable-pip-version-check -U docstr_coverage interrogate + python -m pip install --disable-pip-version-check -U docstr_coverage interrogate[png] - name: Run 'interrogate' Documentation Coverage Check continue-on-error: true run: | - interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && echo "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && printf "%s\n" "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" - name: Run 'docstr_coverage' Documentation Coverage Check continue-on-error: true run: | - docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && echo "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + docstr-coverage -v 2 --fail-under=${{ 
inputs.fail_under }} ${{ inputs.directory }} && printf "%s\n" "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" diff --git a/.github/workflows/CompletePipeline.yml b/.github/workflows/CompletePipeline.yml index 7412c82..28f9eab 100644 --- a/.github/workflows/CompletePipeline.yml +++ b/.github/workflows/CompletePipeline.yml @@ -103,13 +103,13 @@ on: jobs: ConfigParams: - uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2 + uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r4 with: package_namespace: ${{ inputs.package_namespace }} package_name: ${{ inputs.package_name }} UnitTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: package_namespace: ${{ inputs.package_namespace }} package_name: ${{ inputs.package_name }} @@ -121,7 +121,7 @@ jobs: disable_list: ${{ inputs.unittest_disable_list }} AppTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: package_namespace: ${{ inputs.package_namespace }} package_name: ${{ inputs.package_name }} @@ -133,18 +133,22 @@ jobs: disable_list: ${{ inputs.apptest_disable_list }} UnitTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2 + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: + - ConfigParams - UnitTestingParams with: jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }} requirements: "-r tests/unit/requirements.txt" # pacboy: "msys/git python-lxml:p" - unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} - coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} + unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }} + unittest_report_xml_filename: ${{ 
needs.ConfigParams.outputs.unittest_report_xml_filename }} + coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }} + unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} + coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} StaticTypeCheck: - uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2 + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -157,7 +161,7 @@ jobs: html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} DocCoverage: - uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r2 + uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -167,7 +171,7 @@ jobs: # fail_below: 70 Package: - uses: pyTooling/Actions/.github/workflows/Package.yml@r2 + uses: pyTooling/Actions/.github/workflows/Package.yml@r4 needs: - UnitTestingParams - UnitTesting @@ -176,7 +180,7 @@ jobs: artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} # AppTesting: -# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r2 +# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r4 # needs: # - AppTestingParams # - UnitTestingParams @@ -187,7 +191,7 @@ jobs: # apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }} PublishCoverageResults: - uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2 + uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r4 needs: - UnitTestingParams - UnitTesting @@ -200,22 +204,23 @@ jobs: codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} PublishTestResults: - uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2 + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4 
needs: - UnitTestingParams - UnitTesting with: + additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"' merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} # VerifyDocs: -# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2 +# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r4 # needs: # - UnitTestingParams # with: # python_version: ${{ needs.UnitTestingParams.outputs.python_version }} Documentation: - uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2 + uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -231,7 +236,7 @@ jobs: latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }} IntermediateCleanUp: - uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r2 + uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r4 needs: - UnitTestingParams - PublishCoverageResults @@ -242,7 +247,7 @@ jobs: xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}- # PDFDocumentation: -# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2 +# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r4 # needs: # - UnitTestingParams # - Documentation @@ -252,7 +257,7 @@ jobs: # pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }} PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2 + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - UnitTestingParams - Documentation @@ -265,7 +270,7 @@ jobs: typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} ReleasePage: - uses: pyTooling/Actions/.github/workflows/Release.yml@r2 + uses: pyTooling/Actions/.github/workflows/Release.yml@r4 if: startsWith(github.ref, 
'refs/tags') needs: - Package @@ -273,7 +278,7 @@ jobs: - PublishToGitHubPages PublishOnPyPI: - uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2 + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - UnitTestingParams @@ -286,7 +291,7 @@ jobs: PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2 + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 needs: - UnitTestingParams - UnitTesting diff --git a/.github/workflows/CoverageCollection.yml b/.github/workflows/CoverageCollection.yml index 3bdf272..a9df9dc 100644 --- a/.github/workflows/CoverageCollection.yml +++ b/.github/workflows/CoverageCollection.yml @@ -72,7 +72,7 @@ jobs: steps: - name: '❗ Deprecation message' - run: echo "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'." + run: printf "%s\n" "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'." - name: ⏬ Checkout repository uses: actions/checkout@v4 @@ -150,7 +150,7 @@ jobs: ABSDIR=$(pwd) cd "${{ inputs.tests_directory || '.' 
}}" [ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS - echo "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes" + printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes" python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes - name: Convert to cobertura format diff --git a/.github/workflows/ExtractConfiguration.yml b/.github/workflows/ExtractConfiguration.yml index cf23938..781cbff 100644 --- a/.github/workflows/ExtractConfiguration.yml +++ b/.github/workflows/ExtractConfiguration.yml @@ -59,18 +59,33 @@ on: mypy_prepare_command: description: "" value: ${{ jobs.Extract.outputs.mypy_prepare_command }} + unittest_report_xml_directory: + description: "" + value: ${{ jobs.Extract.outputs.unittest_report_xml_directory }} + unittest_report_xml_filename: + description: "" + value: ${{ jobs.Extract.outputs.unittest_report_xml_filename }} + unittest_report_xml: + description: "" + value: ${{ jobs.Extract.outputs.unittest_report_xml }} coverage_report_html_directory: description: "" value: ${{ jobs.Extract.outputs.coverage_report_html_directory }} coverage_report_xml_directory: description: "" value: ${{ jobs.Extract.outputs.coverage_report_xml_directory }} + coverage_report_xml_filename: + description: "" + value: ${{ jobs.Extract.outputs.coverage_report_xml_filename }} coverage_report_xml: description: "" value: ${{ jobs.Extract.outputs.coverage_report_xml }} coverage_report_json_directory: description: "" value: ${{ jobs.Extract.outputs.coverage_report_json_directory }} + coverage_report_json_filename: + description: "" + value: ${{ jobs.Extract.outputs.coverage_report_json_filename }} coverage_report_json: description: "" value: ${{ jobs.Extract.outputs.coverage_report_json }} @@ -83,10 +98,15 @@ jobs: package_fullname: ${{ 
steps.getPackageName.outputs.package_fullname }} package_directory: ${{ steps.getPackageName.outputs.package_directory }} mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }} + unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }} + unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }} + unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }} coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }} coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }} + coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }} coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }} coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }} + coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }} coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }} steps: @@ -144,10 +164,11 @@ jobs: from tomli import load as tomli_load - htmlDirectory = Path("htmlcov") - xmlFile = Path("./coverage.xml") - jsonFile = Path("./coverage.json") - coverageRC = "${{ inputs.coverage_config }}".strip() + unittestXMLFile = Path("./unittest.xml") + coverageHTMLDirectory = Path("htmlcov") + coverageXMLFile = Path("./coverage.xml") + coverageJSONFile = Path("./coverage.json") + coverageRC = "${{ inputs.coverage_config }}".strip() # Read output paths from 'pyproject.toml' file if coverageRC == "pyproject.toml": @@ -156,9 +177,10 @@ jobs: with pyProjectFile.open("rb") as file: pyProjectSettings = tomli_load(file) - htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) - xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) - jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) + unittestXMLFile = 
Path(pyProjectSettings["tool"]["pytest"]["junit_xml"]) + coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) + coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) + coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) else: print(f"File '{pyProjectFile}' not found.") print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.") @@ -171,9 +193,9 @@ jobs: with coverageRCFile.open("rb") as file: coverageRCSettings = tomli_load(file) - htmlDirectory = Path(coverageRCSettings["html"]["directory"]) - xmlFile = Path(coverageRCSettings["xml"]["output"]) - jsonFile = Path(coverageRCSettings["json"]["output"]) + coverageHTMLDirectory = Path(coverageRCSettings["html"]["directory"]) + coverageXMLFile = Path(coverageRCSettings["xml"]["output"]) + coverageJSONFile = Path(coverageRCSettings["json"]["output"]) else: print(f"File '{coverageRCFile}' not found.") print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.") @@ -184,11 +206,16 @@ jobs: print(f"GITHUB_OUTPUT: {github_output}") with github_output.open("a+", encoding="utf-8") as f: f.write(dedent(f"""\ - coverage_report_html_directory={htmlDirectory.as_posix()} - coverage_report_xml_directory={xmlFile.parent.as_posix()} - coverage_report_xml={xmlFile.as_posix()} - coverage_report_json_directory={jsonFile.parent.as_posix()} - coverage_report_json={jsonFile.as_posix()} + unittest_report_xml_directory={unittestXMLFile.parent.as_posix()} + unittest_report_xml_filename={unittestXMLFile.name} + unittest_report_xml={unittestXMLFile.as_posix()} + coverage_report_html_directory={coverageHTMLDirectory.as_posix()} + coverage_report_xml_directory={coverageXMLFile.parent.as_posix()} + coverage_report_xml_filename={coverageXMLFile.name} + coverage_report_xml={coverageXMLFile.as_posix()} + coverage_report_json_directory={coverageJSONFile.parent.as_posix()} + coverage_report_json_filename={coverageJSONFile.name} + 
coverage_report_json={coverageJSONFile.as_posix()} """)) - print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") + print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}") diff --git a/.github/workflows/LaTeXDocumentation.yml b/.github/workflows/LaTeXDocumentation.yml index 47d3933..9027bc2 100644 --- a/.github/workflows/LaTeXDocumentation.yml +++ b/.github/workflows/LaTeXDocumentation.yml @@ -55,17 +55,28 @@ jobs: name: ${{ inputs.latex_artifact }} path: latex - - name: Compile LaTeX document - uses: xu-cheng/latex-action@master + - name: Debug + run: | + tree -pash . + + - name: Build LaTeX document using 'pytooling/miktex:sphinx' + uses: addnab/docker-run-action@v3 with: - working_directory: latex - root_file: ${{ inputs.document }}.tex + image: pytooling/miktex:sphinx + options: -v ${{ github.workspace }}/latex:/latex --workdir /latex + run: | + which pdflatex + pwd + ls -lAh + + latexmk -xelatex ${{ inputs.document }}.tex - name: 📤 Upload 'PDF Documentation' artifact uses: pyTooling/upload-artifact@v4 if: inputs.pdf_artifact != '' with: name: ${{ inputs.pdf_artifact }} + working-directory: latex path: ${{ inputs.document }}.pdf if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/NightlyRelease.yml b/.github/workflows/NightlyRelease.yml index d16056e..2ff8726 100644 --- a/.github/workflows/NightlyRelease.yml +++ b/.github/workflows/NightlyRelease.yml @@ -68,6 +68,18 @@ on: description: 'Multi-line string containing artifact:file:title asset descriptions.' 
required: true type: string + inventory-json: + type: string + required: false + default: '' + inventory-version: + type: string + required: false + default: '' + inventory-categories: + type: string + required: false + default: '' tarball-name: type: string required: false @@ -97,23 +109,23 @@ jobs: run: | set +e - ANSI_LIGHT_RED="\e[91m" - ANSI_LIGHT_GREEN="\e[92m" - ANSI_LIGHT_YELLOW="\e[93m" - ANSI_NOCOLOR="\e[0m" + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_NOCOLOR=$'\x1b[0m' export GH_TOKEN=${{ github.token }} - echo -n "Deleting release '${{ inputs.nightly_name }}' ... " + printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... " message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)" if [[ $? -eq 0 ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" elif [[ "${message}" == "release not found" ]]; then - echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" - echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'." + printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'." 
exit 1 fi @@ -122,19 +134,19 @@ jobs: run: | set +e - ANSI_LIGHT_RED="\e[91m" - ANSI_LIGHT_GREEN="\e[92m" - ANSI_NOCOLOR="\e[0m" + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_NOCOLOR=$'\x1b[0m' export GH_TOKEN=${{ github.token }} addDraft="--draft" - if ${{ inputs.prerelease }}; then + if [[ "${{ inputs.prerelease }}" == "true" ]]; then addPreRelease="--prerelease" fi - if ! ${{ inputs.latest }}; then + if [[ "${{ inputs.latest }}" == "false" ]]; then addLatest="--latest=false" fi @@ -166,14 +178,14 @@ jobs: Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S'). EOF - echo "Creating release '${{ inputs.nightly_name }}' ... " + printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... " message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)" if [[ $? -eq 0 ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" - echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'." + printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'." 
exit 1 fi @@ -182,10 +194,11 @@ jobs: run: | set +e - ANSI_LIGHT_RED="\e[91m" - ANSI_LIGHT_GREEN="\e[92m" - ANSI_LIGHT_YELLOW="\e[93m" - ANSI_NOCOLOR="\e[0m" + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE="\e[94m" + ANSI_NOCOLOR=$'\x1b[0m' export GH_TOKEN=${{ github.token }} @@ -199,9 +212,42 @@ jobs: replacement="${patternLine#*=}" line="${line//"%$pattern%"/"$replacement"}" done <<<'${{ inputs.replacements }}' - echo "$line" + printf "%s\n" "$line" } + # Create JSON inventory + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + VERSION="1.0" + + # Split categories by ',' into a Bash array. + # See https://stackoverflow.com/a/45201229/3719459 + if [[ "${{ inputs.inventory-categories }}" != "" ]]; then + readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }}," + unset 'inventoryCategories[-1]' + declare -p inventoryCategories + else + inventoryCategories="" + fi + + jsonInventory=$(jq -c -n \ + --arg version "${VERSION}" \ + --arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \ + --argjson jsonMeta "$(jq -c -n \ + --arg tag "${{ inputs.nightly_name }}" \ + --arg version "${{ inputs.inventory-version }}" \ + --arg hash "${{ github.sha }}" \ + --arg repo "${{ github.server_url }}/${{ github.repository }}" \ + --arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \ + --argjson categories "$(jq -c -n \ + '$ARGS.positional' \ + --args "${inventoryCategories[@]}" \ + )" \ + '{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \ + )" \ + '{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}' + ) + fi + ERRORS=0 # A dictionary of 0/1 to avoid duplicate downloads declare -A downloadedArtifacts @@ -214,184 +260,243 @@ jobs: # split assetLine colon separated triple: artifact:asset:title artifact="${assetLine%%:*}" - remaining="${assetLine#*:}" - 
asset="${remaining%%:*}" - title="${remaining##*:}" + assetLine="${assetLine#*:}" + asset="${assetLine%%:*}" + assetLine="${assetLine#*:}" + if [[ "${{ inputs.inventory-json }}" == "" ]]; then + categories="" + title="${assetLine##*:}" + else + categories="${assetLine%%:*}" + title="${assetLine##*:}" + fi # remove leading whitespace asset="${asset#"${asset%%[![:space:]]*}"}" + categories="${categories#"${categories%%[![:space:]]*}"}" title="${title#"${title%%[![:space:]]*}"}" # apply replacements asset="$(Replace "${asset}")" title="$(Replace "${title}")" - echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'" - echo -n " Checked asset for duplicates ... " + printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'" + printf " %s" "Checked asset for duplicates ... " if [[ -n "${assetFilenames[$asset]}" ]]; then - echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'." + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'." ERRORS=1 continue else - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" assetFilenames[$asset]=1 fi # Download artifact by artifact name if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then - echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" + printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" else echo " downloading '${artifact}' ... 
" - echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" " + printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" " gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}" if [[ $? -eq 0 ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}" - echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'." + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'." ERRORS=1 continue fi downloadedArtifacts[$artifact]=1 - echo -n " Checking for embedded tarball ... " + printf " %s" "Checking for embedded tarball ... " if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then - echo -e "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}" pushd "${artifact}" > /dev/null - echo -n " Extracting embedded tarball ... " + printf " %s" "Extracting embedded tarball ... " tar -xf "${{ inputs.tarball-name }}" if [[ $? -ne 0 ]]; then - echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" fi - echo -n " Removing temporary tarball ... " + printf " %s" "Removing temporary tarball ... " rm -f "${{ inputs.tarball-name }}" if [[ $? 
-ne 0 ]]; then - echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" fi popd > /dev/null else - echo -e "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" fi fi # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact. - echo -n " checking asset '${artifact}/${asset}' ... " + printf " %s" "checking asset '${artifact}/${asset}' ... " if [[ "${asset}" == !*.zip ]]; then - echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}" asset="${asset##*!}" - echo " Compressing artifact '${artifact}' to '${asset}' ..." + printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." ( cd "${artifact}" && \ zip -r "../${asset}" * ) if [[ $? -eq 0 ]]; then - echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" uploadFile="${asset}" else - echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}" - echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'." + printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'." 
ERRORS=1 continue fi elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then - echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}" if [[ "${asset:0:1}" == "\$" ]]; then asset="${asset##*$}" dirName="${asset%.*}" - echo " Compressing artifact '${artifact}' to '${asset}' ..." - tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." + tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . retCode=$? else asset="${asset##*!}" - echo " Compressing artifact '${artifact}' to '${asset}' ..." + printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." ( cd "${artifact}" && \ - tar -c --gzip --file="../${asset}" * + tar -c --gzip --owner=0 --group=0 --file="../${asset}" * ) retCode=$? fi if [[ $retCode -eq 0 ]]; then - echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" uploadFile="${asset}" else - echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}" - echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'." + printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'." 
ERRORS=1 continue fi elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then - echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}" if [[ "${asset:0:1}" == "\$" ]]; then asset="${asset##*$}" dirName="${asset%.*}" - echo " Compressing artifact '${artifact}' to '${asset}' ..." - tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." + tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . retCode=$? else asset="${asset##*!}" - echo " Compressing artifact '${artifact}' to '${asset}' ..." + printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." ( cd "${artifact}" && \ - tar -c --zstd --file="../${asset}" * + tar -c --zstd --owner=0 --group=0 --file="../${asset}" * ) retCode=$? fi if [[ $retCode -eq 0 ]]; then - echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" uploadFile="${asset}" else - echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}" - echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'." + printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'." 
ERRORS=1 continue fi elif [[ -e "${artifact}/${asset}" ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" uploadFile="${artifact}/${asset}" else - echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}" - echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'." + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'." ERRORS=1 continue fi + # Add asset to JSON inventory + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + if [[ "${categories}" != "${title}" ]]; then + printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..." + category="" + jsonEntry=$(jq -c -n \ + --arg title "${title}" \ + --arg file "${uploadFile}" \ + '{"file": $file, "title": $title}' \ + ) + + while [[ "${categories}" != "${category}" ]]; do + category="${categories##*;}" + categories="${categories%;*}" + jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}') + done + + jsonInventory=$(jq -c -n \ + --argjson inventory "${jsonInventory}" \ + --argjson file "${jsonEntry}" \ + '$inventory * {"files": $file}' \ + ) + else + printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}" + fi + fi + # Upload asset to existing release page - echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... " + printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... " gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber if [[ $? 
-eq 0 ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" - echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'." + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'." ERRORS=1 continue fi done <<<'${{ inputs.assets }}' - echo "Inspecting downloaded artifacts ..." - tree -L 3 . + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + inventoryTitle="Release Inventory (JSON)" + + printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'" + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...." + printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}" + cat "${{ inputs.inventory-json }}" + printf "::endgroup::\n" + + # Upload inventory asset to existing release page + printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... " + gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber + if [[ $? 
-eq 0 ]]; then + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + else + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'." + ERRORS=1 + continue + fi + fi + + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..." + tree -pash -L 3 . + printf "::endgroup::\n" if [[ $ERROR -ne 0 ]]; then - echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}" exit 1 fi @@ -400,19 +505,19 @@ jobs: run: | set +e - ANSI_LIGHT_RED="\e[91m" - ANSI_LIGHT_GREEN="\e[92m" - ANSI_NOCOLOR="\e[0m" + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_NOCOLOR=$'\x1b[0m' export GH_TOKEN=${{ github.token }} # Remove draft-state from release page - echo -n "Remove draft-state from release '${title}' ... " + printf "%s" "Remove draft-state from release '${title}' ... " gh release edit --draft=false "${{ inputs.nightly_name }}" if [[ $? -eq 0 ]]; then - echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}" else - echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" - echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'." + printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" + printf "%s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" + printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'." 
fi diff --git a/.github/workflows/Parameters.yml b/.github/workflows/Parameters.yml index 82cb950..179a726 100644 --- a/.github/workflows/Parameters.yml +++ b/.github/workflows/Parameters.yml @@ -147,7 +147,7 @@ jobs: else: name = f"{package_namespace}.{package_name}" - currentMSYS2Version = "3.11" + currentMSYS2Version = "3.12" currentAlphaVersion = "3.14" currentAlphaRelease = "3.14.0-alpha.1" @@ -337,7 +337,7 @@ jobs: - name: Verify out parameters id: verify run: | - echo 'python_version: ${{ steps.params.outputs.python_version }}' - echo 'python_jobs: ${{ steps.params.outputs.python_jobs }}' - echo 'artifact_names: ${{ steps.params.outputs.artifact_names }}' - echo 'params: ${{ steps.params.outputs.params }}' + printf "python_version: %s\n" '${{ steps.params.outputs.python_version }}' + printf "python_jobs: %s\n" '${{ steps.params.outputs.python_jobs }}' + printf "artifact_names: %s\n" '${{ steps.params.outputs.artifact_names }}' + printf "params: %s\n" '${{ steps.params.outputs.params }}' diff --git a/.github/workflows/PublishCoverageResults.yml b/.github/workflows/PublishCoverageResults.yml index 03b0cc3..efb2042 100644 --- a/.github/workflows/PublishCoverageResults.yml +++ b/.github/workflows/PublishCoverageResults.yml @@ -31,7 +31,7 @@ on: type: string coverage_artifacts_pattern: required: false - default: '*-CodeCoverage-*' + default: '*-CodeCoverage-SQLite-*' type: string coverage_config: description: 'Path to the .coveragerc file. Use pyproject.toml by default.' 
@@ -84,7 +84,7 @@ jobs: - name: 🔎 Inspect extracted artifact (tarball) run: | - tree -psh artifacts + tree -pash artifacts - name: 🔧 Install coverage and tomli run: | @@ -150,13 +150,11 @@ jobs: print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") - - name: Rename .coverage files and collect them all to coverage/ + - name: Rename .coverage files and move them all into 'coverage/' run: | - ls -lAh artifacts/ - ls -lAh artifacts/*/.coverage mkdir -p coverage find artifacts/ -type f -path "*SQLite*.coverage" -exec sh -c 'cp -v $0 "coverage/$(basename $0).$(basename $(dirname $0))"' {} ';' - tree -a coverage + tree -pash coverage - name: Combine SQLite files (using Coverage.py) run: coverage combine --data-file=.coverage coverage/ @@ -177,7 +175,7 @@ jobs: run: | coverage html --data-file=.coverage -d report/coverage/html rm report/coverage/html/.gitignore - tree -a report/coverage/html + tree -pash report/coverage/html - name: 📤 Upload 'Coverage SQLite Database' artifact if: inputs.coverage_sqlite_artifact != '' diff --git a/.github/workflows/PublishTestResults.yml b/.github/workflows/PublishTestResults.yml index 91fca35..cf97de0 100644 --- a/.github/workflows/PublishTestResults.yml +++ b/.github/workflows/PublishTestResults.yml @@ -32,7 +32,7 @@ on: type: string unittest_artifacts_pattern: required: false - default: '*-UnitTestReportSummary-*' + default: '*-UnitTestReportSummary-XML-*' type: string merged_junit_artifact: description: 'Name of the merged JUnit Test Summary artifact.' 
@@ -73,23 +73,22 @@ jobs: - name: 🔎 Inspect extracted artifact (tarball) run: | - tree -psh artifacts + tree -pash artifacts - name: 🔧 Install pyEDAA.Reports (JUunit Parser and Merger) run: | python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports - - name: Move JUnit files and collect them all to junit/ + - name: Rename JUnit files and move them all into 'junit/' run: | mkdir -p junit - ls -lAh artifacts/*/*.xml find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';' - tree -a junit + tree -pash junit - name: 🔁 Merge JUnit Unit Test Summaries run: | pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml" - echo "cat Unittesting.xml" + printf "%s\n" "cat Unittesting.xml" cat Unittesting.xml - name: 📊 Publish Unit Test Results diff --git a/.github/workflows/PublishToGitHubPages.yml b/.github/workflows/PublishToGitHubPages.yml index ef53cf2..fec7b73 100644 --- a/.github/workflows/PublishToGitHubPages.yml +++ b/.github/workflows/PublishToGitHubPages.yml @@ -55,7 +55,7 @@ jobs: - name: ⏬ Checkout repository uses: actions/checkout@v4 - - name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job + - name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job uses: pyTooling/download-artifact@v4 with: name: ${{ inputs.doc }} diff --git a/.github/workflows/SphinxDocumentation.yml b/.github/workflows/SphinxDocumentation.yml index 8a6e730..d6fe396 100644 --- a/.github/workflows/SphinxDocumentation.yml +++ b/.github/workflows/SphinxDocumentation.yml @@ -180,6 +180,88 @@ jobs: sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex # --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . 
_build/html + - name: Workaround I - https://github.com/sphinx-doc/sphinx/issues/13190 + if: inputs.latex_artifact != '' + run: | + printf "Changing directory to 'doc/_build/latex' ...\n" + cd doc/_build/latex + + MIMETYPE_EXTENSIONS=( + "image/png:png" + "image/jpeg:jpg" + "image/svg+xml:svg" + ) + + printf "Changing file extension according to MIME type ...\n" + while IFS=$'\n' read -r file; do + printf " Checking '%s' ... " "${file}" + mime="$(file --mime-type -b "${file}")" + printf "[%s]\n" "${mime}" + + found=0 + for MIME in "${MIMETYPE_EXTENSIONS[@]}"; do + mimetype="${MIME%%:*}" + extension="${MIME#*:}" + + if [[ "${mime}" == "${mimetype}" && "${file##*.}" != "${extension}" ]]; then + printf " Rename file to '%s' " "${file}.${extension}" + mv "${file}" "${file}.${extension}" + if [[ $? -eq 0 ]]; then + printf "[OK]\n" + else + printf "[FAILED]\n" + fi + + printf " Patching LaTeX file for '%s' " "${file}" + sed -i "s:{{${file%.*}}\.${file##*.}}:{{${file}}.${extension}}:g" *.tex + if [[ $? -eq 0 ]]; then + printf "[OK]\n" + else + printf "[FAILED]\n" + fi + + found=1 + break + fi + done + if [[ $found -eq 0 ]]; then + printf "[SKIPPED]\n" + fi + done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::') + + - name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189 + if: inputs.latex_artifact != '' + run: | + printf "Changing directory to 'doc/_build/latex' ...\n" + cd doc/_build/latex + + printf "Searching for downloaded images, that need normalization ...\n" + for imageExt in png svg jpg jpeg; do + printf " Processing '%s' ...\n" "${imageExt}" + while IFS=$'\n' read -r imageFile; do + newFile="${imageFile//%/_}"; + + printf " %s\n" "$imageFile"; + if [[ "${imageFile}" != "${newFile}" ]]; then + printf " Rename file to '%s' " "${newFile}" + mv "${imageFile}" "${newFile}" + if [[ $? 
-eq 0 ]]; then + printf "[OK]\n" + else + printf "[FAILED]\n" + fi + + printf " Patching LaTeX file for '%s' " "${newFile}" + sed -i "s:{{${imageFile%.*}}\.${imageFile##*.}}:{{${newFile%.*}}.${newFile##*.}}:g" *.tex + if [[ $? -eq 0 ]]; then + printf "[OK]\n" + else + printf "[FAILED]\n" + fi + fi + done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::') + done + - name: 📤 Upload 'LaTeX Documentation' artifact if: inputs.latex_artifact != '' continue-on-error: true diff --git a/.github/workflows/TestReleaser.yml b/.github/workflows/TestReleaser.yml index 8b7056a..b874555 100644 --- a/.github/workflows/TestReleaser.yml +++ b/.github/workflows/TestReleaser.yml @@ -64,7 +64,7 @@ jobs: steps: - uses: actions/checkout@v4 - - run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt + - run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt - name: Single uses: ./releaser/composite @@ -84,7 +84,7 @@ jobs: - name: Add artifacts/*.txt run: | mkdir artifacts - echo "Build some tool and generate some artifacts" > artifacts/artifact.txt + printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt touch artifacts/empty_file.txt - name: Single in subdir @@ -95,8 +95,8 @@ jobs: - name: Add artifacts/*.md run: | - echo "releaser hello" > artifacts/hello.md - echo "releaser world" > artifacts/world.md + printf "%s\n" "releaser hello" > artifacts/hello.md + printf "%s\n" "releaser world" > artifacts/world.md - name: Directory wildcard uses: ./releaser/composite @@ -107,7 +107,7 @@ jobs: - name: Add artifacts/subdir run: | mkdir artifacts/subdir - echo "Test recursive glob" > artifacts/subdir/deep_file.txt + printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt - name: Directory wildcard (recursive) uses: ./releaser/composite @@ -124,7 +124,7 @@ jobs: steps: - uses: actions/checkout@v4 - - run: echo "Build some 
tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt + - run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt - name: Single uses: ./releaser @@ -144,7 +144,7 @@ jobs: - name: Add artifacts/*.txt run: | mkdir artifacts - echo "Build some tool and generate some artifacts" > artifacts/artifact.txt + printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt touch artifacts/empty_file.txt - name: Single in subdir @@ -155,8 +155,8 @@ jobs: - name: Add artifacts/*.md run: | - echo "releaser hello" > artifacts/hello.md - echo "releaser world" > artifacts/world.md + printf "%s\n" "releaser hello" > artifacts/hello.md + printf "%s\n" "releaser world" > artifacts/world.md - name: Directory wildcard uses: ./releaser @@ -167,7 +167,7 @@ jobs: - name: Add artifacts/subdir run: | mkdir artifacts/subdir - echo "Test recursive glob" > artifacts/subdir/deep_file.txt + printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt - name: Directory wildcard (recursive) uses: ./releaser diff --git a/.github/workflows/UnitTesting.yml b/.github/workflows/UnitTesting.yml index 7b32f23..c05c6f5 100644 --- a/.github/workflows/UnitTesting.yml +++ b/.github/workflows/UnitTesting.yml @@ -94,11 +94,26 @@ on: required: false default: 'unit' type: string + unittest_report_xml_directory: + description: 'Path where to save the unittest summary report XML.' + required: false + default: 'report/unit' + type: string + unittest_report_xml_filename: + description: 'Filename of the unittest summary report XML.' + required: false + default: 'TestReportSummary.xml' + type: string coverage_config: description: 'Path to the .coveragerc file. Use pyproject.toml by default.' 
required: false default: 'pyproject.toml' type: string + coverage_report_html_directory: + description: '' + required: false + default: 'report/coverage/html' + type: string unittest_xml_artifact: description: "Generate unit test report with junitxml and upload results as an artifact." required: false @@ -323,69 +338,6 @@ jobs: if: matrix.system == 'msys2' && matrix.runtime == 'UCRT64' && inputs.ucrt64_before_script != '' run: ${{ inputs.ucrt64_before_script }} -# Read pyproject.toml - - - name: 🔁 Extract configurations from pyproject.toml - id: getVariables - shell: python - run: | - from os import getenv - from pathlib import Path - from sys import version - from textwrap import dedent - - print(f"Python: {version}") - - from tomli import load as tomli_load - - htmlDirectory = Path("htmlcov") - xmlFile = Path("./coverage.xml") - jsonFile = Path("./coverage.json") - coverageRC = "${{ inputs.coverage_config }}".strip() - - # Read output paths from 'pyproject.toml' file - if coverageRC == "pyproject.toml": - pyProjectFile = Path("pyproject.toml") - if pyProjectFile.exists(): - with pyProjectFile.open("rb") as file: - pyProjectSettings = tomli_load(file) - - htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) - xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) - jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) - else: - print(f"File '{pyProjectFile}' not found.") - print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.") - exit(1) - - # Read output paths from '.coveragerc' file - elif len(coverageRC) > 0: - coverageRCFile = Path(coverageRC) - if coverageRCFile.exists(): - with coverageRCFile.open("rb") as file: - coverageRCSettings = tomli_load(file) - - htmlDirectory = Path(coverageRCSettings["html"]["directory"]) - xmlFile = Path(coverageRCSettings["xml"]["output"]) - jsonFile = Path(coverageRCSettings["json"]["output"]) - else: - print(f"File '{coverageRCFile}' not 
found.") - print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.") - exit(1) - - # Write jobs to special file - github_output = Path(getenv("GITHUB_OUTPUT")) - print(f"GITHUB_OUTPUT: {github_output}") - with github_output.open("a+", encoding="utf-8") as f: - f.write(dedent(f"""\ - unittest_report_html_directory={htmlDirectory} - coverage_report_html_directory={htmlDirectory.as_posix()} - coverage_report_xml={xmlFile} - coverage_report_json={jsonFile} - """)) - - print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") - # Run pytests - name: ✅ Run unit tests (Ubuntu/macOS) @@ -395,12 +347,12 @@ jobs: export PYTHONPATH=$(pwd) cd "${{ inputs.root_directory || '.' }}" - [ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS + [ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}' || unset PYTEST_ARGS if [ -n '${{ inputs.coverage_config }}' ]; then - echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }} else - echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' 
}}/${{ inputs.unittest_directory }} fi @@ -411,7 +363,7 @@ jobs: $env:PYTHONPATH = (Get-Location).ToString() cd "${{ inputs.root_directory || '.' }}" - $PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" } + $PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}" } else { "" } if ("${{ inputs.coverage_config }}") { Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }} @@ -434,19 +386,19 @@ jobs: if: inputs.coverage_html_artifact != '' continue-on-error: true run: | - coverage html --data-file=.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }} - rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore + coverage html --data-file=.coverage -d ${{ inputs.coverage_report_html_directory }} + rm ${{ inputs.coverage_report_html_directory }}/.gitignore # Upload artifacts - - name: 📤 Upload 'TestReportSummary.xml' artifact + - name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact if: inputs.unittest_xml_artifact != '' continue-on-error: true uses: pyTooling/upload-artifact@v4 with: name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} - working-directory: report/unit - path: TestReportSummary.xml + working-directory: ${{ inputs.unittest_report_xml_directory }} + path: ${{ inputs.unittest_report_xml_filename }} if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/_Checking_ArtifactCleanup.yml b/.github/workflows/_Checking_ArtifactCleanup.yml index 4b0b584..4614637 100644 --- 
a/.github/workflows/_Checking_ArtifactCleanup.yml +++ b/.github/workflows/_Checking_ArtifactCleanup.yml @@ -6,7 +6,7 @@ on: jobs: Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.12 3.13" @@ -22,7 +22,7 @@ jobs: include: ${{ fromJson(needs.Params.outputs.python_jobs) }} steps: - name: Content creation for ${{ matrix.system }}-${{ matrix.python }} - run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt + run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }} uses: pyTooling/upload-artifact@v4 @@ -39,7 +39,7 @@ jobs: runs-on: ubuntu-24.04 steps: - name: Package creation - run: echo "Package" >> package.txt + run: printf "%s\n" "Package" >> package.txt - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }} uses: pyTooling/upload-artifact@v4 @@ -50,7 +50,7 @@ jobs: retention-days: 1 ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2 + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 needs: - Params - Testing diff --git a/.github/workflows/_Checking_JobTemplates.yml b/.github/workflows/_Checking_JobTemplates.yml index 39e63e3..4d5524c 100644 --- a/.github/workflows/_Checking_JobTemplates.yml +++ b/.github/workflows/_Checking_JobTemplates.yml @@ -6,64 +6,62 @@ on: jobs: ConfigParams: - uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r3 + uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r4 with: package_name: pyDummy UnitTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r3 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyDummy python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10" # disable_list: "windows:pypy-3.10" PlatformTestingParams: - uses: 
pyTooling/Actions/.github/workflows/Parameters.yml@r3 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Platform python_version_list: "" - system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64" + system_list: "ubuntu windows macos mingw64 clang64 ucrt64" UnitTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r3 + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: + - ConfigParams - UnitTestingParams with: jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }} - unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} - unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }} -# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} + unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }} + unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }} + coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }} + unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} + unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }} + coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} # coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} # coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} # coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} PlatformTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r3 + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: + - ConfigParams - PlatformTestingParams with: 
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }} # tests_directory: "" unittest_directory: platform - unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }} - unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }} - coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }} - coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }} - coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }} - coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }} - -# Coverage: -# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r3 -# needs: -# - UnitTestingParams -# with: -# python_version: ${{ needs.UnitTestingParams.outputs.python_version }} -# artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} -# secrets: -# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }} + unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }} + coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }} + unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }} + unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }} + coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }} + coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }} + coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }} + 
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }} StaticTypeCheck: - uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r3 + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -76,7 +74,7 @@ jobs: html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} DocCoverage: - uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1 + uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -86,48 +84,48 @@ jobs: # fail_below: 70 Package: - uses: pyTooling/Actions/.github/workflows/Package.yml@r3 + uses: pyTooling/Actions/.github/workflows/Package.yml@r4 needs: - UnitTestingParams - UnitTesting -# - Coverage - PlatformTesting with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} PublishCoverageResults: - uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r3 + uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r4 needs: - UnitTestingParams - UnitTesting - PlatformTesting -# - Coverage with: coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} - coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} - coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} - coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} + coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} + coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} + coverage_html_artifact: ${{ 
fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} secrets: codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} PublishTestResults: - uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r3 + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4 needs: + - UnitTestingParams - UnitTesting - PlatformTesting with: additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"' + merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} # VerifyDocs: -# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r3 +# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r4 # needs: # - UnitTestingParams # with: # python_version: ${{ needs.UnitTestingParams.outputs.python_version }} Documentation: - uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r3 + uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r4 needs: - ConfigParams - UnitTestingParams @@ -137,13 +135,13 @@ jobs: with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }} -# unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} -# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} - html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} + coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} + html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }} IntermediateCleanUp: - 
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r1 + uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r4 needs: - UnitTestingParams - PublishCoverageResults @@ -151,25 +149,24 @@ jobs: - Documentation with: sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}- - xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}- + xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}- PDFDocumentation: - uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r3 + uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r4 needs: - UnitTestingParams - Documentation with: - document: actions + document: Actions latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }} pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }} PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r3 + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - UnitTestingParams - Documentation -# - PDFDocumentation -# - Coverage + - PDFDocumentation - PublishCoverageResults - StaticTypeCheck with: @@ -178,18 +175,17 @@ jobs: typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} ReleasePage: - uses: pyTooling/Actions/.github/workflows/Release.yml@r3 + uses: pyTooling/Actions/.github/workflows/Release.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - UnitTesting - PlatformTesting -# - Coverage # - StaticTypeCheck - Package - PublishToGitHubPages PublishOnPyPI: - uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r3 + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - UnitTestingParams @@ -203,16 +199,15 @@ jobs: PYPI_TOKEN: ${{ 
secrets.PYPI_TOKEN }} ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r3 + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 needs: - UnitTestingParams - PlatformTestingParams - UnitTesting -# - Coverage - StaticTypeCheck - PlatformTesting - Documentation -# - PDFDocumentation + - PDFDocumentation - PublishTestResults - PublishCoverageResults - PublishToGitHubPages @@ -222,7 +217,6 @@ jobs: remaining: | ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-* ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-* - ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-* ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-* ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-* ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-* @@ -234,9 +228,9 @@ jobs: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }} ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}-* ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}-* - ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}-* ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}-* ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}-* ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}-* diff --git a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml index fb5516f..71d95e8 
100644 --- a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml +++ b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml @@ -6,7 +6,7 @@ on: jobs: NamespacePackage: - uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2 + uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r4 with: package_namespace: pyExamples package_name: Extensions diff --git a/.github/workflows/_Checking_Nightly.yml b/.github/workflows/_Checking_Nightly.yml index cd436d3..28bedf0 100644 --- a/.github/workflows/_Checking_Nightly.yml +++ b/.github/workflows/_Checking_Nightly.yml @@ -12,9 +12,9 @@ jobs: steps: - name: 🖉 Build 1 run: | - echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt - echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log - echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log + printf "%s\n" "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt + printf "%s\n" "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log + printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log - name: 📤 Upload artifact uses: pyTooling/upload-artifact@v4 @@ -28,8 +28,8 @@ jobs: - name: 🖉 Program run: | - echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt - echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py + printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt + printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py - name: 📤 Upload artifact uses: actions/upload-artifact@v4 @@ -42,7 +42,7 @@ jobs: retention-days: 1 NightlyPage: - uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main + uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@r4 needs: - Build secrets: inherit @@ -56,35 +56,13 @@ jobs: version=4.2.0 tool=myTool prog=program - nightly_title: "Nightly Release" + nightly_title: "Nightly Test Release" nightly_description: | - This *nightly* release contains 
all latest and important artifacts created by GHDL's CI pipeline. + This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline. - # GHDL %version% + # %tool% %version% - GHDL offers the simulator and synthesis tool for VHDL. GHDL can be build for various backends: - * `gcc` - using the GCC compiler framework - * `mcode` - in memory code generation - * `llvm` - using the LLVM compiler framework - * `llvm-jit` - using the LLVM compiler framework, but in memory - - The following asset categories are provided for GHDL: - * macOS x64-64 builds as TAR/GZ file - * macOS aarch64 builds as TAR/GZ file - * Ubuntu 24.04 LTS builds as TAR/GZ file - * Windows builds for standalone usage (without MSYS2) as ZIP file - * MSYS2 packages as TAR/ZST file - - # pyGHDL %version% - - The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`). - In addition to the low-level binding layer, pyGHDL offers: - * a Language Server Protocol (LSP) instance for e.g. live code checking by editors - * a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel) - - The following asset categories are provided for pyGHDL: - * Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so` - * Platform specific Python wheel package for Windows incl. 
`pyGHDL...dll` + * %prog% assets: | document: document1.txt: Documentation document: build.log: Logfile - %tool% - %tool% @@ -99,3 +77,44 @@ jobs: document:$archive7.tar.gz: Archive 7 - tar.gz + dir document:$archive8.tzst: Archive 8 - tzst + dir document:$archive9.tar.zst:Archive 9 - tar.zst + dir + + NightlyPageWithInventory: + uses: ./.github/workflows/NightlyRelease.yml + needs: + - Build + secrets: inherit + permissions: + contents: write + actions: write +# attestations: write + with: + replacements: | + version=4.2.0 + tool=myTool + prog=program + nightly_name: inventory + nightly_title: "Nightly Test Release with Inventory" + nightly_description: | + This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline. + + # %tool% %version% + + * %prog% + * inventory.json + inventory-json: "inventory.json" + inventory-version: 4.2.5 + inventory-categories: "kind1,kind2" + assets: | + document: document1.txt: doc,html: Documentation + document: build.log: build,log: Logfile - %tool% - %tool% + other: document1.txt: build,SBOM:SBOM - %version% + other: %prog%.py: app,binary:Application - %tool% - %version% + document:!archive1.zip: Archive 1 - zip + document:!archive2.tgz: Archive 2 - tgz + document:!archive3.tar.gz: Archive 3 - tar.gz + document:!archive4.tzst: Archive 4 - tzst + document:!archive5.tar.zst: Archive 5 - tar.zst + document:$archive6.tgz: Archive 6 - tgz + dir + document:$archive7.tar.gz: Archive 7 - tar.gz + dir + document:$archive8.tzst: Archive 8 - tzst + dir + document:$archive9.tar.zst: Archive 9 - tar.zst + dir diff --git a/.github/workflows/_Checking_Parameters.yml b/.github/workflows/_Checking_Parameters.yml index 6c9f2bf..4e15236 100644 --- a/.github/workflows/_Checking_Parameters.yml +++ b/.github/workflows/_Checking_Parameters.yml @@ -6,24 +6,24 @@ on: jobs: Params_Default: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with:
name: Example Params_PythonVersions: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10" Params_Systems: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example system_list: "windows mingw32 mingw64" Params_Include: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.11" @@ -31,7 +31,7 @@ jobs: include_list: "ubuntu:3.12 ubuntu:3.13" Params_Exclude: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.12" @@ -39,7 +39,7 @@ jobs: exclude_list: "windows:3.12 windows:3.13" Params_Disable: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.12" @@ -47,7 +47,7 @@ jobs: disable_list: "windows:3.12 windows:3.13" Params_All: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: Example python_version_list: "3.12 3.13" @@ -83,7 +83,7 @@ jobs: expectedPythonVersion = "3.13" expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"] expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"] - expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"] expectedName = "Example" expectedArtifacts = { "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", @@ -146,7 +146,7 @@ jobs: expectedPythonVersion = "3.13" 
expectedPythons = ["3.11", "3.12", "pypy-3.9", "pypy-3.10"] expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"] - expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"] expectedName = "Example" expectedArtifacts = { "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", @@ -209,7 +209,7 @@ jobs: expectedPythonVersion = "3.13" expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"] expectedSystems = ["windows"] - expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.11", "mingw64:3.11"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.12", "mingw64:3.11"] expectedName = "Example" expectedArtifacts = { "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", diff --git a/.github/workflows/_Checking_SimplePackage_Pipeline.yml b/.github/workflows/_Checking_SimplePackage_Pipeline.yml index 9e1b41a..b5eb585 100644 --- a/.github/workflows/_Checking_SimplePackage_Pipeline.yml +++ b/.github/workflows/_Checking_SimplePackage_Pipeline.yml @@ -6,7 +6,7 @@ on: jobs: SimplePackage: - uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2 + uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r4 with: package_name: pyDummy secrets: diff --git a/ExamplePipeline.yml b/ExamplePipeline.yml index 15c08c0..f8e7723 100644 --- a/ExamplePipeline.yml +++ b/ExamplePipeline.yml @@ -30,7 +30,7 @@ jobs: # This job is a workaround for global variables # See https://github.com/actions/runner/issues/480 Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@main + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: ToolName # Optional @@ -39,7 +39,7 @@ jobs: python_version_list: '3.8 3.9 
3.10' UnitTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: - Params with: @@ -57,7 +57,7 @@ jobs: artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }} Coverage: - uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main + uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r4 needs: - Params with: @@ -71,7 +71,7 @@ jobs: codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} StaticTypeCheck: - uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4 needs: - Params with: @@ -87,7 +87,7 @@ jobs: allow_failure: true PublishTestResults: - uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4 needs: - UnitTesting - StaticTypeCheck @@ -96,7 +96,7 @@ jobs: report_files: artifacts/**/*.xml Package: - uses: pyTooling/Actions/.github/workflows/Package.yml@main + uses: pyTooling/Actions/.github/workflows/Package.yml@r4 needs: - Params - Coverage @@ -107,7 +107,7 @@ jobs: requirements: 'wheel' Release: - uses: pyTooling/Actions/.github/workflows/Release.yml@main + uses: pyTooling/Actions/.github/workflows/Release.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - UnitTesting @@ -116,7 +116,7 @@ jobs: - Package PublishOnPyPI: - uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - Params @@ -131,7 +131,7 @@ jobs: PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} VerifyDocs: - uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main + uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r4 needs: - Params with: @@ -139,7 +139,7 @@ jobs: python_version: ${{ needs..Params.outputs.python_version }} BuildTheDocs: - uses: 
pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main + uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r4 needs: - Params - VerifyDocs @@ -147,7 +147,7 @@ jobs: artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }} PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - Params - BuildTheDocs @@ -160,7 +160,7 @@ jobs: typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }} ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 needs: - Params - PublishTestResults diff --git a/ExamplePipeline_dark.png b/ExamplePipeline_dark.png deleted file mode 100644 index 9e4779c..0000000 Binary files a/ExamplePipeline_dark.png and /dev/null differ diff --git a/ExamplePipeline_light.png b/ExamplePipeline_light.png deleted file mode 100644 index 56bd965..0000000 Binary files a/ExamplePipeline_light.png and /dev/null differ diff --git a/README.md b/README.md index ef17387..2227e78 100644 --- a/README.md +++ b/README.md @@ -10,48 +10,93 @@ This repository gathers reusable CI tooling for testing, packaging and distribut See [GitHub Actions and GitHub Reusable Workflows](https://pytooling.github.io/Actions/Background.html) for more background information. -## Reusable workflows +## Reusable Actions + +- **Artifacts:** + [**pyTooling/upload-artifact**](https://github.com/pyTooling/upload-artifact): The upload-artifact action will + preserve file attributes like permissions. + + [**pyTooling/download-artifact**](https://github.com/pyTooling/download-artifact): The download-artifact action will + preserve file attributes like permissions. 
+ +## Predefined Docker Images + +- **Documentation:** + [**MikTeX**](https://github.com/pyTooling/MikTeX): A predefined MikTeX image based on Debian Bookworm + Python 3.13 + with specific tools for documentation generation using e.g. Sphinx and related extensions. + +## Reusable Workflows This repository provides 10+ *Reusable Workflows* based on the CI pipelines of the repos in this GitHub organisation, [EDA²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others. By combining them, Python packages can be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI. Optionally, coverage and static type check reports can be gathered and integrated into the online documentation. -[![](ExamplePipeline_dark.png)](ExamplePipeline_dark.png) - -[![](ExamplePipeline_light.png)](ExamplePipeline_light.png) +[![](doc/_static/pyTooling-Actions-SimplePackage.png)](doc/_static/pyTooling-Actions-SimplePackage.png) As shown in the screenshots above, the expected order is: -- Global: - - [Parameters](.github/workflows/Parameters.yml): a workaround for the limitations to handle global variables in - GitHub Actions workflows (see [actions/runner#480](https://github.com/actions/runner/issues/480)). - It generates outputs with artifact names and job matrices to be used in later running jobs. -- Code testing/analysis: - - [UnitTesting](.github/workflows/UnitTesting.yml): run unit test with `pytest` using multiple versions of Python, and - optionally upload results as XML reports. Configuration options to `pytest` should be given via section - `[tool.pytest.ini_options]` in a `pyproject.toml` file. - - [CoverageCollection](.github/workflows/CoverageCollection.yml): collect code coverage data (incl. branch coverage) - with `pytest`/`pytest-cov`/`coverage.py` using a single version of Python (latest). 
It generates HTML and Cobertura - (XML)reports, upload the HTML report as an artifact, and upload the test results to Codecov and Codacy. Configuration - options to `pytest` and `coverage.py` should be given via section `[tool.pytest.ini_options]` and `[tool.coverage.*]` - in a `pyproject.toml` file. - - [StaticTypeCheck](.github/workflows/StaticTypeCheck.yml): collect static type check result with `mypy`, and - optionally upload results as an HTML report. - Example `commands`: +- **Global:** + [**Parameters**](.github/workflows/Parameters.yml): It generates output parameters with artifact names and job matrices + to be used in later running jobs. + It's a workaround for the limitations to handle global variables in GitHub Actions workflows (see + [actions/runner#480](https://github.com/actions/runner/issues/480)). + + [**ExtractConfiguration**](.github/workflows/ExtractConfiguration.yml): extracts configuration values from + `pyproject.toml` and exposes configured paths and filenames as job output parameters. +- **Predefined pipelines:** + [**CompletePipeline**](.github/workflows/CompletePipeline.yml): is a predefined pipeline for typical Python projects + using all predefined job templates of pyTooling at once: (unit testing, code coverage, static typing, documentation + report generation and publishing, packaging, releasing, ...) +- **Code testing/analysis:** + [**ApplicationTesting**](.github/workflows/ApplicationTesting.yml): like UnitTesting, but running tests using an + installed Python package. + + [**UnitTesting**](.github/workflows/UnitTesting.yml): run unit test with `pytest` using multiple versions of Python, and + optionally upload results as XML reports. Configuration options to `pytest` should be given via section + `[tool.pytest.ini_options]` in a `pyproject.toml` file. + Besides test results, also code coverage data (incl. branch coverage) can be collected using + `pytest`/`pytest-cov`/`coverage.py`. 
Configuration options to `coverage.py` should be given via section + `[tool.coverage.*]` in a `pyproject.toml` file. + While multiple report formats can be created in the job, it's recommended to use `PublishTestResults` and/or + `PublishCoverageResults` to merge results from matrix runs and then generate final reports as XML, JSON or HTML. + Finally, reports can be published to GitHub Pages or cloud services like Codecov and Codacy. + + [**StaticTypeCheck**](.github/workflows/StaticTypeCheck.yml): collect static type check result with `mypy`, and + optionally upload results as an HTML report. + + [**VerifyDocs**](.github/workflows/VerifyDocs.yml): extract code examples from the README and test these code snippets. +- **Packaging and releasing:** + [**Package**](.github/workflows/Package.yml): generate source and wheel packages, and upload them as an artifact. + + [**PublishOnPyPI**](.github/workflows/PublishOnPyPI.yml): publish source and wheel packages to PyPI. + + [**PublishTestResults**](.github/workflows/PublishTestResults.yml): publish unit test results through GH action `dorny/test-reporter`. + + [**PublishCoverageResults**](.github/workflows/PublishCoverageResults.yml): publish code coverage results. + + [**NightlyRelease**](.github/workflows/NightlyRelease.yml): publish GitHub Release. + + [**Release**](.github/workflows/Release.yml): publish GitHub Release. +- **Documentation:** + [**SphinxDocumentation**](.github/workflows/SphinxDocumentation.yml): create HTML and LaTeX documentation using + Sphinx. + + [**LaTeXDocumentation**](.github/workflows/LaTeXDocumentation.yml): compile LaTeX documentation to a PDF file using + MikTeX. + + [**PublishToGitHubPages**](.github/workflows/PublishToGitHubPages.yml): publish HTML documentation to GitHub Pages. +- **Cleanup:** + [**IntermediateCleanUp**](.github/workflows/IntermediateCleanUp.yml): delete intermediate artifacts. + + [**ArtifactCleanUp**](.github/workflows/ArtifactCleanUp.yml): delete artifacts.
+- **⚠ Deprecated ⚠:** + [**CoverageCollection**](.github/workflows/CoverageCollection.yml): Use `UnitTesting`, because it can collect code + coverage too. This avoids code duplication in job templates. + + [**BuildTheDocs**](.github/workflows/BuildTheDocs.yml): Use `SphinxDocumentation`, `LaTeXDocumentation` and + `PublishToGitHubPages`. BuildTheDocs isn't maintained anymore. - - [VerifyDocs](.github/workflows/VerifyDocs.yml): extract code examples from the README and test these code snippets. -- Packaging and releasing: - - [Release](.github/workflows/Release.yml): publish GitHub Release. - - [Package](.github/workflows/Package.yml): generate source and wheel packages, and upload them as an artifact. - - [PublishOnPyPI](.github/workflows/PublishOnPyPI.yml): publish source and wheel packages to PyPI. - - [PublishTestResults](.github/workflows/PublishTestResults.yml): publish unit test results through GH action `dorny/test-reporter`. -- Documentation: - - [BuildTheDocs](.github/workflows/BuildTheDocs.yml): build Sphinx documentation with BuildTheDocs, and upload HTML as - an artifact. - - [PublishToGitHubPages](.github/workflows/PublishToGitHubPages.yml): publish HTML documentation to GitHub Pages. -- Cleanup: - - [ArtifactCleanUp](.github/workflows/ArtifactCleanUp.yml): delete artifacts. ### Example pipeline diff --git a/dist/requirements.txt b/dist/requirements.txt index 918a0be..8586c51 100644 --- a/dist/requirements.txt +++ b/dist/requirements.txt @@ -1,2 +1,2 @@ wheel ~= 0.45 -twine ~= 5.1 +twine ~= 6.0 diff --git a/doc/Action/Releaser.rst b/doc/Action/Releaser.rst index c53c99b..87fb766 100644 --- a/doc/Action/Releaser.rst +++ b/doc/Action/Releaser.rst @@ -95,7 +95,7 @@ The following block shows a minimal YAML workflow file: # Update tag and pre-release # - Update (force-push) tag to the commit that is used in the workflow. # - Upload artifacts defined by the user.
- - uses: pyTooling/Actions/releaser@r0 + - uses: pyTooling/Actions/releaser@r4 with: token: ${{ secrets.GITHUB_TOKEN }} files: | diff --git a/doc/Instantiation.rst b/doc/Instantiation.rst index 3b18faf..1a91657 100644 --- a/doc/Instantiation.rst +++ b/doc/Instantiation.rst @@ -60,12 +60,12 @@ Documentation Only (Sphinx) jobs: BuildTheDocs: - uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0 + uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r4 with: artifact: Documentation PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - BuildTheDocs with: diff --git a/doc/JobTemplate/ArtifactCleanUp.rst b/doc/JobTemplate/ArtifactCleanUp.rst index c02b83d..581755f 100644 --- a/doc/JobTemplate/ArtifactCleanUp.rst +++ b/doc/JobTemplate/ArtifactCleanUp.rst @@ -27,7 +27,7 @@ The simplest variant just uses the artifact name for the package. jobs: ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0 + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 with: package: Package @@ -39,7 +39,7 @@ Complex Example jobs: ArtifactCleanUp: - uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0 + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r4 needs: - Params - UnitTesting diff --git a/doc/JobTemplate/BuildTheDocs.rst b/doc/JobTemplate/BuildTheDocs.rst index 8972d40..f9a3c88 100644 --- a/doc/JobTemplate/BuildTheDocs.rst +++ b/doc/JobTemplate/BuildTheDocs.rst @@ -30,7 +30,7 @@ Simple Example jobs: BuildTheDocs: - uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0 + uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r4 Complex Example @@ -40,7 +40,7 @@ Complex Example jobs: BuildTheDocs: - uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0 + uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r4 needs: - Params with: diff --git 
a/doc/JobTemplate/CoverageCollection.rst b/doc/JobTemplate/CoverageCollection.rst index 8b9c3dc..3ef5974 100644 --- a/doc/JobTemplate/CoverageCollection.rst +++ b/doc/JobTemplate/CoverageCollection.rst @@ -52,7 +52,7 @@ Simple Example jobs: Coverage: - uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0 + uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r4 with: artifact: Coverage secrets: @@ -65,7 +65,7 @@ Complex Example jobs: Coverage: - uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0 + uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r4 needs: - Params with: diff --git a/doc/JobTemplate/Package.rst b/doc/JobTemplate/Package.rst index b0ccbb1..af05e41 100644 --- a/doc/JobTemplate/Package.rst +++ b/doc/JobTemplate/Package.rst @@ -33,7 +33,7 @@ Simple Example jobs: Package: - uses: pyTooling/Actions/.github/workflows/Package.yml@r0 + uses: pyTooling/Actions/.github/workflows/Package.yml@r4 with: artifact: Package @@ -45,7 +45,7 @@ Complex Example jobs: Package: - uses: pyTooling/Actions/.github/workflows/Package.yml@r0 + uses: pyTooling/Actions/.github/workflows/Package.yml@r4 needs: - Params - Coverage diff --git a/doc/JobTemplate/Parameters.rst b/doc/JobTemplate/Parameters.rst index bdaf3cf..398d095 100644 --- a/doc/JobTemplate/Parameters.rst +++ b/doc/JobTemplate/Parameters.rst @@ -35,7 +35,7 @@ requires a `name` parameter to create the artifact names. 
jobs: Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling @@ -81,7 +81,7 @@ over resulting in the following combinations: jobs: UnitTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling python_version_list: "3.8 3.9 3.10 3.11 pypy-3.9 pypy-3.10" @@ -89,14 +89,14 @@ over resulting in the following combinations: exclude_list: "windows:pypy-3.9 windows:pypy-3.10" PerformanceTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling python_version_list: "3.11 3.12" system_list: "ubuntu windows macos" PlatformTestingParams: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling python_version_list: "3.12" @@ -297,12 +297,12 @@ variables. 
Thus, this job is used to compute an output parameter that can be reu jobs: Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling CodeCoverage: - uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0 + uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r4 needs: - Params with: @@ -330,12 +330,12 @@ A job description contains the following key-value pairs: jobs: Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling UnitTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: - Params with: @@ -389,12 +389,12 @@ The supported artifacts are: jobs: Params: - uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0 + uses: pyTooling/Actions/.github/workflows/Parameters.yml@r4 with: name: pyTooling Coverage: - uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@dev + uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r4 needs: - Params with: diff --git a/doc/JobTemplate/PublishOnPyPI.rst b/doc/JobTemplate/PublishOnPyPI.rst index 0bad1ed..73ae2bf 100644 --- a/doc/JobTemplate/PublishOnPyPI.rst +++ b/doc/JobTemplate/PublishOnPyPI.rst @@ -42,7 +42,7 @@ by a Git tag. A secret is forwarded from GitHub secrets to a job secret. # ... PublishOnPyPI: - uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4 if: startsWith(github.ref, 'refs/tags') with: artifact: Package @@ -66,7 +66,7 @@ by that job. Finally, the list of requirements is overwritten to load a list of # ... 
PublishOnPyPI: - uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - Params diff --git a/doc/JobTemplate/PublishTestResults.rst b/doc/JobTemplate/PublishTestResults.rst index 71e43be..ee595ac 100644 --- a/doc/JobTemplate/PublishTestResults.rst +++ b/doc/JobTemplate/PublishTestResults.rst @@ -34,7 +34,7 @@ Simple Example jobs: PublishTestResults: - uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4 Complex Example =============== @@ -49,7 +49,7 @@ Complex Example # ... PublishTestResults: - uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r4 needs: - CodeCoverage - UnitTesting diff --git a/doc/JobTemplate/PublishToGitHubPages.rst b/doc/JobTemplate/PublishToGitHubPages.rst index 849125b..c6705bf 100644 --- a/doc/JobTemplate/PublishToGitHubPages.rst +++ b/doc/JobTemplate/PublishToGitHubPages.rst @@ -29,7 +29,7 @@ Simple Example # ... 
PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - BuildTheDocs with: @@ -43,7 +43,7 @@ Complex Example jobs: PublishToGitHubPages: - uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0 + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r4 needs: - Params - BuildTheDocs diff --git a/doc/JobTemplate/Release.rst b/doc/JobTemplate/Release.rst index 149ea0d..8fd72b9 100644 --- a/doc/JobTemplate/Release.rst +++ b/doc/JobTemplate/Release.rst @@ -62,7 +62,7 @@ Simple Example jobs: Release: - uses: pyTooling/Actions/.github/workflows/Release.yml@r0 + uses: pyTooling/Actions/.github/workflows/Release.yml@r4 Complex Example @@ -72,7 +72,7 @@ Complex Example jobs: Release: - uses: pyTooling/Actions/.github/workflows/Release.yml@r0 + uses: pyTooling/Actions/.github/workflows/Release.yml@r4 if: startsWith(github.ref, 'refs/tags') needs: - Package diff --git a/doc/JobTemplate/StaticTypeCheck.rst b/doc/JobTemplate/StaticTypeCheck.rst index a4e4dba..153c9b2 100644 --- a/doc/JobTemplate/StaticTypeCheck.rst +++ b/doc/JobTemplate/StaticTypeCheck.rst @@ -29,7 +29,7 @@ Simple Example jobs: StaticTypeCheck: - uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0 + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4 with: commands: | touch pyTooling/__init__.py @@ -44,7 +44,7 @@ Complex Example jobs: StaticTypeCheck: - uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0 + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r4 needs: - Params with: diff --git a/doc/JobTemplate/UnitTesting.rst b/doc/JobTemplate/UnitTesting.rst index db77b5a..da55621 100644 --- a/doc/JobTemplate/UnitTesting.rst +++ b/doc/JobTemplate/UnitTesting.rst @@ -36,7 +36,7 @@ Simple Example # ... 
UnitTesting: - uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0 + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r4 needs: - Params with: diff --git a/doc/_static/pyTooling-Actions-SimplePackage.png b/doc/_static/pyTooling-Actions-SimplePackage.png new file mode 100644 index 0000000..f0ee4d8 Binary files /dev/null and b/doc/_static/pyTooling-Actions-SimplePackage.png differ diff --git a/doc/requirements.txt b/doc/requirements.txt index 35b7576..ede9f77 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -11,7 +11,7 @@ docutils_stubs ~= 0.0.22 sphinx_rtd_theme ~= 3.0 # Sphinx Extenstions -sphinxcontrib-mermaid>=0.9.2 +sphinxcontrib-mermaid ~= 1.0 autoapi >= 2.0.1 sphinx_design ~= 0.6.1 sphinx-copybutton >= 0.5.2 diff --git a/doc/shields.inc b/doc/shields.inc index 08ea106..9498bc7 100644 --- a/doc/shields.inc +++ b/doc/shields.inc @@ -4,11 +4,11 @@ # percent encoding so that the URL is properly parsed. .. # Sourcecode link to GitHub -.. |SHIELD:svg:pyTooling-github| image:: https://img.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub +.. |SHIELD:svg:pyTooling-github| image:: https://img.shields.io/badge/pyTooling-Actions-63bf7f?longCache=true&style=flat-square&longCache=true&logo=GitHub :alt: Sourcecode on GitHub :height: 22 :target: https://GitHub.com/pyTooling/Actions -.. |SHIELD:png:pyTooling-github| image:: https://raster.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub +.. |SHIELD:png:pyTooling-github| image:: https://raster.shields.io/badge/pyTooling-Actions-63bf7f?longCache=true&style=flat-square&longCache=true&logo=GitHub :alt: Sourcecode on GitHub :height: 22 :target: https://GitHub.com/pyTooling/Actions @@ -18,7 +18,7 @@ :alt: Code license :height: 22 :target: Code-License.html -.. 
|SHIELD:png:pyTooling-src-license| image:: https://img.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code +.. |SHIELD:png:pyTooling-src-license| image:: https://raster.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code :alt: Code license :height: 22 :target: https://GitHub.com/pyTooling/Actions/blob/main/LICENSE.md @@ -64,11 +64,11 @@ :target: https://pyTooling.github.io/pyTooling/ .. # Gitter -.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.svg?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef +.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef :alt: Documentation License :height: 22 :target: https://gitter.im/hdl/community -.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.svg?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef +.. 
|SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef :alt: Documentation License :height: 22 :target: https://gitter.im/hdl/community diff --git a/pyproject.toml b/pyproject.toml index f76c66f..ea7bdb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,9 @@ show_error_codes = true namespace_packages = true html_report = "report/typing" +[tool.pytest] +junit_xml = "report/unit/TestReportSummary.xml" + [tool.pytest.ini_options] addopts = "--tb=native" # Don't set 'python_classes = *' otherwise, pytest doesn't search for classes @@ -30,6 +33,7 @@ filterwarnings = [ "error::DeprecationWarning", "error::PendingDeprecationWarning" ] +junit_logging = "all" [tool.interrogate] color = true diff --git a/run.ps1 b/run.ps1 index 9e1ffd4..e18144f 100644 --- a/run.ps1 +++ b/run.ps1 @@ -88,7 +88,7 @@ if ($build) rm -Force .\build\bdist.win-amd64 rm -Force .\build\lib Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..." - py -3.12 -m build --wheel + py -3.13 -m build --wheel Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished" } @@ -102,9 +102,9 @@ if ($install) } else { Write-Host -ForegroundColor Cyan "[ADMIN][UNINSTALL] Uninstalling $PackageName ..." - py -3.12 -m pip uninstall -y $PackageName + py -3.13 -m pip uninstall -y $PackageName Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Installing $PackageName from wheel ..." - py -3.12 -m pip install .\dist\$PackageName-6.7.0-py3-none-any.whl + py -3.13 -m pip install .\dist\$PackageName-8.1.0-py3-none-any.whl Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Closing window in 5 seconds ..." Start-Sleep -Seconds 5