Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-15 04:26:55 +08:00)

Compare commits (35 commits):

- 33edd82e6f
- d08f312904
- 731d0d2369
- de400ae2db
- cb3c338df6
- 79c8526437
- 0f7d062c38
- d5635a8842
- 6aa6af95ee
- 15bf375fe6
- a1b03cfe2a
- 79620e267d
- fffef5c814
- 42e17fae05
- 9b7032a585
- 9110c85738
- c81d139080
- c64e054bcd
- 78fdb584aa
- a456635686
- befc59f22d
- d6fc0efd47
- c018acc3c1
- d74c610bb4
- edc4ab3e86
- 0a338ae8b7
- 4069da0a74
- 679ec24c80
- 3a13486ea6
- 34fb9c9869
- 7523c4adca
- 530ad7a4a1
- bd3f2afaf3
- b1e4cb961f
- 1e6b71e87b

.github/workflows/ApplicationTesting.yml (vendored, 4 changed lines)

@@ -188,8 +188,8 @@ jobs:
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
if: matrix.system == 'msys2'
with:
msystem: ${{ matrix.runtime }}
update: true
@@ -198,8 +198,8 @@ jobs:
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
if: matrix.system != 'msys2'
uses: actions/setup-python@v5
if: matrix.system != 'msys2'
with:
python-version: ${{ matrix.python }}

.github/workflows/ArtifactCleanUp.yml (vendored, 4 changed lines)

@@ -47,13 +47,13 @@ jobs:

steps:
- name: 🗑️ Delete package Artifacts
if: ${{ ! startsWith(github.ref, 'refs/tags') }}
uses: geekyeggo/delete-artifact@v5
if: ${{ ! startsWith(github.ref, 'refs/tags') }}
with:
name: ${{ inputs.package }}

- name: 🗑️ Delete remaining Artifacts
if: ${{ inputs.remaining != '' }}
uses: geekyeggo/delete-artifact@v5
if: ${{ inputs.remaining != '' }}
with:
name: ${{ inputs.remaining }}

.github/workflows/BuildTheDocs.yml (vendored, 2 changed lines)

@@ -49,8 +49,8 @@ jobs:
skip-deploy: true

- name: 📤 Upload 'documentation' artifacts
if: inputs.artifact != ''
uses: pyTooling/upload-artifact@v4
if: inputs.artifact != ''
with:
name: ${{ inputs.artifact }}
working-directory: doc/_build/html

.github/workflows/CheckDocumentation.yml (vendored, 2 changed lines)

@@ -32,7 +32,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
directory:
description: 'Source code directory to check.'

.github/workflows/CompletePipeline.yml (vendored, 84 changed lines)

@@ -93,10 +93,33 @@ on:
required: false
default: ''
type: string
codecov:
description: 'Publish merged coverage and unittest reports to Codecov.'
required: false
default: false
type: boolean
codacy:
description: 'Publish merged coverage report to Codacy.'
required: false
default: false
type: boolean
dorny:
description: 'Publish merged unittest report via Dorny Test-Reporter.'
required: false
default: false
type: boolean
cleanup:
description: 'Cleanup artifacts afterwards.'
required: false
default: true
type: boolean
secrets:
PYPI_TOKEN:
description: "Token for pushing releases to PyPI."
required: false
CODECOV_TOKEN:
description: "Token for pushing coverage and unittest results to Codecov."
required: false
CODACY_PROJECT_TOKEN:
description: "Token for pushing coverage results to Codacy."
required: false
@@ -127,10 +150,10 @@ jobs:
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.apptest_python_version }}
python_version_list: ${{ inputs.apptest_python_version_list }}
system_list: ${{ inputs.apptest_system_list }}
include_list: ${{ inputs.apptest_include_list }}
exclude_list: ${{ inputs.apptest_exclude_list }}
disable_list: ${{ inputs.apptest_disable_list }}
system_list: ${{ inputs.apptest_system_list }}
include_list: ${{ inputs.apptest_include_list }}
exclude_list: ${{ inputs.apptest_exclude_list }}
disable_list: ${{ inputs.apptest_disable_list }}

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
@@ -167,7 +190,7 @@ jobs:
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
# fail_below: 70

Package:
@@ -177,7 +200,7 @@ jobs:
- UnitTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
@@ -193,24 +216,41 @@ jobs:
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- ConfigParams
- UnitTestingParams
- UnitTesting
with:
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
# coverage_report_xml_directory: ${{ needs.ConfigParams.outputs.coverage_report_xml_directory }}
# coverage_report_xml_filename: ${{ needs.ConfigParams.outputs.coverage_report_xml_filename }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
coverage_report_json_filename: ${{ needs.ConfigParams.outputs.coverage_report_json_filename }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
codecov: ${{ inputs.codecov }}
codacy: ${{ inputs.codacy }}
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- ConfigParams
- UnitTestingParams
- UnitTesting
with:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
testsuite-summary-name: ${{ inputs.package_name }}
merged_junit_filename: ${{ needs.ConfigParams.outputs.unittest_merged_report_xml_filename }}
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
dorny: ${{ inputs.dorny }}
codecov: ${{ inputs.codecov }}

secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
@@ -228,12 +268,12 @@ jobs:
- PublishCoverageResults
# - VerifyDocs
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-ubuntu-native-3.12
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
@@ -242,9 +282,10 @@ jobs:
- PublishCoverageResults
- PublishTestResults
- Documentation
if: ${{ inputs.cleanup }}
with:
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

# PDFDocumentation:
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
@@ -265,9 +306,9 @@ jobs:
- PublishCoverageResults
- StaticTypeCheck
with:
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@main
@@ -303,6 +344,7 @@ jobs:
- PublishToGitHubPages
# - PublishOnPyPI
- IntermediateCleanUp
if: ${{ inputs.cleanup }}
with:
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
remaining: |

.github/workflows/ExtractConfiguration.yml (vendored, 46 changed lines)

@@ -32,7 +32,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
@@ -68,6 +68,15 @@ on:
unittest_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.unittest_report_xml }}
unittest_merged_report_xml_directory:
description: ""
value: ${{ jobs.Extract.outputs.unittest_merged_report_xml_directory }}
unittest_merged_report_xml_filename:
description: ""
value: ${{ jobs.Extract.outputs.unittest_merged_report_xml_filename }}
unittest_merged_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.unittest_merged_report_xml }}
coverage_report_html_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
@@ -95,19 +104,22 @@ jobs:
name: 📓 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }}
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }}
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
unittest_merged_report_xml_directory: ${{ steps.getVariables.outputs.unittest_merged_report_xml_directory }}
unittest_merged_report_xml_filename: ${{ steps.getVariables.outputs.unittest_merged_report_xml_filename }}
unittest_merged_report_xml: ${{ steps.getVariables.outputs.unittest_merged_report_xml }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}

steps:
- name: ⏬ Checkout repository
@@ -178,6 +190,7 @@ jobs:
pyProjectSettings = tomli_load(file)

unittestXMLFile = Path(pyProjectSettings["tool"]["pytest"]["junit_xml"])
mergedUnittestXMLFile = Path(pyProjectSettings["tool"]["pyedaa-reports"]["junit_xml"])
coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
@@ -209,6 +222,9 @@ jobs:
unittest_report_xml_directory={unittestXMLFile.parent.as_posix()}
unittest_report_xml_filename={unittestXMLFile.name}
unittest_report_xml={unittestXMLFile.as_posix()}
unittest_merged_report_xml_directory={mergedUnittestXMLFile.parent.as_posix()}
unittest_merged_report_xml_filename={mergedUnittestXMLFile.name}
unittest_merged_report_xml={mergedUnittestXMLFile.as_posix()}
coverage_report_html_directory={coverageHTMLDirectory.as_posix()}
coverage_report_xml_directory={coverageXMLFile.parent.as_posix()}
coverage_report_xml_filename={coverageXMLFile.name}
@@ -218,4 +234,4 @@ jobs:
coverage_report_json={coverageJSONFile.as_posix()}
"""))

print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}")
print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n merged unittest xml: {mergedUnittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}")

.github/workflows/NightlyRelease.yml (vendored, 67 changed lines)

@@ -84,11 +84,16 @@ on:
type: string
required: false
default: '__pyTooling_upload_artifact__.tar'
can-fail:
type: boolean
required: false
default: false

jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
continue-on-error: ${{ inputs.can-fail }}
permissions:
contents: write
actions: write
@@ -124,7 +129,7 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
@@ -175,7 +180,7 @@ jobs:
cat <<EOF >> __NoTeS__.md

--------
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
EOF

printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
@@ -184,7 +189,7 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
@@ -197,7 +202,7 @@ jobs:
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE="\e[94m"
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}
@@ -254,7 +259,7 @@ jobs:
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" ]]; then
if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
continue
fi

@@ -285,7 +290,7 @@ jobs:
if [[ -n "${assetFilenames[$asset]}" ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
@@ -303,9 +308,9 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
downloadedArtifacts[$artifact]=1
@@ -343,19 +348,21 @@ jobs:
if [[ "${asset}" == !*.zip ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
if [[ $? -eq 0 ]]; then
retCode=$?
printf "::endgroup::\n"
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
@@ -382,9 +389,9 @@ jobs:
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
@@ -411,9 +418,9 @@ jobs:
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
@@ -421,26 +428,26 @@ jobs:
uploadFile="${artifact}/${asset}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi

# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..."
printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
category=""
jsonEntry=$(jq -c -n \
--arg title "${title}" \
--arg file "${uploadFile}" \
--arg file "${uploadFile#*/}" \
'{"file": $file, "title": $title}' \
)

while [[ "${categories}" != "${category}" ]]; do
category="${categories##*;}"
categories="${categories%;*}"
category="${categories##*,}"
categories="${categories%,*}"
jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
done

@@ -450,7 +457,7 @@ jobs:
'$inventory * {"files": $file}' \
)
else
printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi

@@ -461,9 +468,9 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
done <<<'${{ inputs.assets }}'
@@ -484,9 +491,9 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
fi
@@ -495,8 +502,8 @@ jobs:
tree -pash -L 3 .
printf "::endgroup::\n"

if [[ $ERROR -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
if [[ $ERRORS -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi

@@ -518,6 +525,6 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi
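
The NightlyRelease hunks above repeatedly replace "ERRORS=1" with "ERRORS=$((ERRORS + 1))" and change the final check from "$ERROR" to "$ERRORS", so the job now counts failures instead of overwriting a flag (and fixes the misspelled variable). A minimal sketch of that error-accumulation pattern, with illustrative names not taken from the workflow:

```bash
#!/usr/bin/env bash
# Accumulate failures, report each one, and fail the step exactly once at the end.
ERRORS=0

process_asset() {
  # Placeholder for the real per-asset work (download, compress, upload, ...).
  [[ -e "$1" ]]
}

for asset in "$@"; do
  if ! process_asset "${asset}"; then
    printf "%s\n" "::error title=AssetError::Processing '${asset}' failed."
    ERRORS=$((ERRORS + 1))   # count every failure instead of setting ERRORS=1
    continue
  fi
done

if [[ $ERRORS -ne 0 ]]; then
  printf "%s\n" "${ERRORS} errors detected in previous steps."
  exit 1
fi
```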

.github/workflows/Package.yml (vendored, 2 changed lines)

@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.'

.github/workflows/Parameters.yml (vendored, 7 changed lines)

@@ -197,9 +197,10 @@ jobs:
"3.13": { "icon": "🟢", "until": "2029.10" },
"3.14": { "icon": "🟣", "until": "2030.10" },
"pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
"pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
"pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
"pypy-3.10": { "icon": "⟲🟡", "until": "????.??" },
"pypy-3.8": { "icon": "⟲⚫", "until": "????.??" },
"pypy-3.9": { "icon": "⟲🔴", "until": "????.??" },
"pypy-3.10": { "icon": "⟲🟠", "until": "????.??" },
"pypy-3.11": { "icon": "⟲🟡", "until": "????.??" },
},
# Runner systems (runner images) supported by GitHub Actions
"sys": {

.github/workflows/PublishCoverageResults.yml (vendored, 139 changed lines)

@@ -48,19 +48,57 @@ on:
required: false
default: ''
type: string
coverage_report_xml_directory:
description: 'Directory containing the XML coverage report file.'
required: false
default: 'report/coverage'
type: string
coverage_report_xml_filename:
description: 'Filename of the XML coverage report file.'
required: false
default: 'coverage.xml'
type: string
coverage_json_artifact:
description: 'Name of the JSON coverage artifact.'
required: false
default: ''
type: string
coverage_report_json_directory:
description: 'Directory containing the JSON coverage report file.'
required: false
default: 'report/coverage'
type: string
coverage_report_json_filename:
description: 'Filename of the JSON coverage report file.'
required: false
default: 'coverage.json'
type: string
coverage_html_artifact:
description: 'Name of the HTML coverage artifact.'
required: false
default: ''
type: string
coverage_report_html_directory:
description: 'HTML root directory of the generated coverage report.'
required: false
default: 'report/coverage/html'
type: string
codecov:
description: 'Publish merged coverage report to Codecov.'
required: false
default: false
type: boolean
codacy:
description: 'Publish merged coverage report to Codacy.'
required: false
default: false
type: boolean
secrets:
codacy_token:
description: 'Token to push result to codacy.'
CODECOV_TOKEN:
description: 'Token to push result to Codecov.'
required: true
CODACY_TOKEN:
description: 'Token to push result to Codacy.'
required: true

jobs:
@@ -76,7 +114,7 @@ jobs:
lfs: true
submodules: true

- name: Download Artifacts
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}
@@ -90,66 +128,6 @@ jobs:
run: |
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli

- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
from textwrap import dedent

print(f"Python: {version}")

from tomli import load as tomli_load

htmlDirectory = Path("htmlcov")
xmlFile = Path("./coverage.xml")
jsonFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)

# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = Path(coverageRCSettings["html"]["directory"])
xmlFile = Path(coverageRCSettings["xml"]["output"])
jsonFile = Path(coverageRCSettings["json"]["output"])
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory.as_posix()}
coverage_report_xml={xmlFile}
coverage_report_json={jsonFile}
"""))

print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")

- name: Rename .coverage files and move them all into 'coverage/'
run: |
mkdir -p coverage
@@ -163,7 +141,7 @@ jobs:
run: coverage report --rcfile=pyproject.toml --data-file=.coverage

- name: Convert to XML format (Cobertura)
if: inputs.coverage_xml_artifact != ''
if: inputs.coverage_xml_artifact != '' || inputs.codecov || inputs.codacy
run: coverage xml --data-file=.coverage

- name: Convert to JSON format
@@ -178,9 +156,9 @@ jobs:
tree -pash report/coverage/html

- name: 📤 Upload 'Coverage SQLite Database' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_sqlite_artifact }}
path: .coverage
@@ -188,49 +166,54 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Coverage XML Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_xml_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
working-directory: ${{ inputs.coverage_report_xml_directory }}
path: ${{ inputs.coverage_report_xml_filename }}
if-no-files-found: error
retention-days: 1

- name: 📤 Upload 'Coverage JSON Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_json_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_json }}
working-directory: ${{ inputs.coverage_report_json_directory }}
path: ${{ inputs.coverage_report_json_filename }}
if-no-files-found: error
retention-days: 1

- name: 📤 Upload 'Coverage HTML Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_html_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_html_artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
working-directory: ${{ inputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1

- name: 📊 Publish code coverage at CodeCov
if: inputs.CodeCov == true
continue-on-error: true
uses: codecov/codecov-action@v5
if: inputs.codecov
continue-on-error: true
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
token: ${{ secrets.CODECOV_TOKEN }}
disable_search: true
files: ${{ inputs.coverage_report_xml_directory }}/${{ inputs.coverage_report_xml_filename }}
flags: unittests
env_vars: PYTHON
fail_ci_if_error: true

- name: 📉 Publish code coverage at Codacy
if: inputs.Codacy == true
continue-on-error: true
uses: codacy/codacy-coverage-reporter-action@v1
if: inputs.codacy
continue-on-error: true
with:
project-token: ${{ secrets.codacy_token }}
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
project-token: ${{ secrets.CODACY_TOKEN }}
coverage-reports: ${{ inputs.coverage_report_xml_directory }}/${{ inputs.coverage_report_xml_filename }}

.github/workflows/PublishOnPyPI.yml (vendored, 2 changed lines)

@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'

.github/workflows/PublishTestResults.yml (vendored, 56 changed lines)

@@ -34,6 +34,11 @@ on:
required: false
default: '*-UnitTestReportSummary-XML-*'
type: string
merged_junit_filename:
description: 'Filename of the merged JUnit Test Summary.'
required: false
default: 'Unittesting.xml'
type: string
merged_junit_artifact:
description: 'Name of the merged JUnit Test Summary artifact.'
required: false
@@ -44,6 +49,11 @@ on:
required: false
default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
type: string
testsuite-summary-name:
description: 'Set TestsuiteSummary name.'
required: false
default: ''
type: string
publish:
description: 'Publish test report summary via Dorny Test-Reporter'
required: false
@@ -54,6 +64,20 @@ on:
required: false
default: 'Unit Test Results'
type: string
dorny:
description: 'Publish merged unittest results via Dorny Test-Reporter.'
required: false
default: true
type: boolean
codecov:
description: 'Publish merged unittest results to Codecov.'
required: false
default: true
type: boolean
secrets:
CODECOV_TOKEN:
description: 'Token to push result to Codecov.'
required: true

jobs:
PublishTestResults:
@@ -65,11 +89,11 @@ jobs:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: Download Artifacts
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}
path: artifacts
path: artifacts

- name: 🔎 Inspect extracted artifact (tarball)
run: |
@@ -82,28 +106,38 @@ jobs:
- name: Rename JUnit files and move them all into 'junit/'
run: |
mkdir -p junit
find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
find artifacts/ -type f -path "*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
tree -pash junit

- name: 🔁 Merge JUnit Unit Test Summaries
run: |
pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
printf "%s\n" "cat Unittesting.xml"
cat Unittesting.xml
pyedaa-reports -v unittest "--name=${{ inputs.testsuite-summary-name }}" "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:${{ inputs.merged_junit_filename }}"
printf "%s\n" "cat ${{ inputs.merged_junit_filename }}"
cat ${{ inputs.merged_junit_filename }}

- name: 📊 Publish Unit Test Results
uses: dorny/test-reporter@v1
if: inputs.publish && inputs.report_title != ''
uses: dorny/test-reporter@v2
if: (inputs.dorny || inputs.publish) && inputs.report_title != ''
with:
name: ${{ inputs.report_title }}
path: Unittesting.xml
path: ${{ inputs.merged_junit_filename }}
reporter: java-junit

- name: 📊 Publish unittest results at CodeCov
uses: codecov/test-results-action@v1
if: inputs.codecov
with:
token: ${{ secrets.CODECOV_TOKEN }}
disable_search: true
files: ${{ inputs.merged_junit_filename }}
fail_ci_if_error: true

- name: 📤 Upload merged 'JUnit Test Summary' artifact
if: inputs.merged_junit_artifact != ''
uses: pyTooling/upload-artifact@v4
if: inputs.merged_junit_artifact != ''
with:
name: ${{ inputs.merged_junit_artifact }}
path: Unittesting.xml
path: ${{ inputs.merged_junit_filename }}
if-no-files-found: error
retention-days: 1
investigate: true

.github/workflows/PublishToGitHubPages.yml (vendored, 4 changed lines)

@@ -62,15 +62,15 @@ jobs:
path: public

- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
if: ${{ inputs.coverage != '' }}
uses: pyTooling/download-artifact@v4
if: ${{ inputs.coverage != '' }}
with:
name: ${{ inputs.coverage }}
path: public/coverage

- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
if: ${{ inputs.typing != '' }}
uses: pyTooling/download-artifact@v4
if: ${{ inputs.typing != '' }}
with:
name: ${{ inputs.typing }}
path: public/typing

.github/workflows/Release.yml (vendored, 8 changed lines)

@@ -44,13 +44,13 @@ jobs:
RELEASE_VERSION=${GIT_TAG#v}
RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')"
# write to step outputs
echo "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
echo "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT
printf "%s\n" "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
printf "%s\n" "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
printf "%s\n" "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT

- name: 📑 Create Release Page
id: createReleasePage
uses: actions/create-release@v1
id: createReleasePage
env:
GITHUB_TOKEN: ${{ github.token }}
with:
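
The Release.yml hunk above swaps echo for printf when writing step outputs. The underlying mechanism is unchanged: every "name=value" line appended to the file behind $GITHUB_OUTPUT becomes an output of the step and can later be read as steps.<id>.outputs.<name>. A small sketch of the pattern outside a workflow, with a made-up tag value:

```bash
#!/usr/bin/env bash
# Emulate the step locally by pointing GITHUB_OUTPUT at a temporary file.
GITHUB_OUTPUT="${GITHUB_OUTPUT:-$(mktemp)}"

GIT_TAG="v1.2.3"                      # in the workflow this comes from the pushed tag
RELEASE_VERSION="${GIT_TAG#v}"        # strip the leading 'v'
RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')"

# printf avoids echo's portability quirks (option parsing, escape handling).
printf "%s\n" "gitTag=${GIT_TAG}"            >> "$GITHUB_OUTPUT"
printf "%s\n" "version=${RELEASE_VERSION}"   >> "$GITHUB_OUTPUT"
printf "%s\n" "datetime=${RELEASE_DATETIME}" >> "$GITHUB_OUTPUT"

cat "$GITHUB_OUTPUT"   # gitTag=v1.2.3 / version=1.2.3 / datetime=<UTC timestamp>
```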

.github/workflows/SphinxDocumentation.yml (vendored, 32 changed lines)

@@ -32,7 +32,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
@@ -46,7 +46,7 @@ on:
type: string
coverage_report_json_directory:
description: ''
required: true
required: false
type: string
coverage_json_artifact:
description: 'Name of the coverage JSON artifact.'
@@ -100,18 +100,20 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
investigate: true

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
investigate: true

- name: ☑ Generate HTML documentation
if: inputs.html_artifact != ''
@@ -122,9 +124,9 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html

- name: 📤 Upload 'HTML Documentation' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.html_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/html
@@ -157,18 +159,20 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
investigate: true

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
investigate: true

- name: ☑ Generate LaTeX documentation
if: inputs.latex_artifact != ''
@@ -183,8 +187,8 @@ jobs:
- name: Workaround I - https://github.com/sphinx-doc/sphinx/issues/13190
if: inputs.latex_artifact != ''
run: |
printf "Changing directory to 'doc/_build/latex' ...\n"
cd doc/_build/latex
printf "Changing directory to '${{ inputs.doc_directory || '.' }}/_build/latex' ...\n"
cd ${{ inputs.doc_directory || '.' }}/_build/latex

MIMETYPE_EXTENSIONS=(
"image/png:png"
@@ -227,13 +231,13 @@ jobs:
if [[ $found -eq 0 ]]; then
printf "[SKIPPED]\n"
fi
done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')
done < <(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')

- name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189
if: inputs.latex_artifact != ''
run: |
printf "Changing directory to 'doc/_build/latex' ...\n"
cd doc/_build/latex
printf "Changing directory to '${{ inputs.doc_directory || '.' }}/_build/latex' ...\n"
cd ${{ inputs.doc_directory || '.' }}/_build/latex

printf "Searching for downloaded images, that need normalization ...\n"
for imageExt in png svg jpg jpeg; do
@@ -259,13 +263,13 @@ jobs:
printf "[FAILED]\n"
fi
fi
done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::')
done < <(find . -type f -iname "*.$imageExt" | sed 's:./::')
done

- name: 📤 Upload 'LaTeX Documentation' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.latex_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/latex
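
Both LaTeX workaround steps change "done <<<$(find ...)" into "done < <(find ...)". A here-string first expands the whole command substitution into a single string and, when find matches nothing, still feeds one empty line into the loop body; process substitution streams the output and yields zero iterations for an empty result. A minimal comparison under that assumption, using an empty scratch directory:

```bash
#!/usr/bin/env bash
# Compare loop iteration counts when the command produces no output.
scratch="$(mktemp -d)"
cd "${scratch}"

count_here_string=0
while read -r file; do
  count_here_string=$((count_here_string + 1))
done <<<$(find . -type f -iname "*.png")

count_process_subst=0
while read -r file; do
  count_process_subst=$((count_process_subst + 1))
done < <(find . -type f -iname "*.png")

# Expected: here-string runs the body once (empty line), process substitution not at all.
printf "here-string: %d, process substitution: %d\n" "${count_here_string}" "${count_process_subst}"
```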

.github/workflows/StaticTypeCheck.yml (vendored, 6 changed lines)

@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
@@ -87,9 +87,9 @@ jobs:
run: ${{ inputs.commands }}

- name: 📤 Upload 'Static Typing Report' HTML artifact
uses: pyTooling/upload-artifact@v4
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.html_report }}
@@ -98,9 +98,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Static Typing Report' JUnit artifact
uses: pyTooling/upload-artifact@v4
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.junit_artifact }}
path: ${{ inputs.junit_report }}

.github/workflows/UnitTesting.yml (vendored, 7 changed lines)

@@ -282,8 +282,8 @@ jobs:
# Python setup

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
if: matrix.system == 'msys2'
with:
msystem: ${{ matrix.runtime }}
update: true
@@ -292,8 +292,8 @@ jobs:
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
if: matrix.system != 'msys2'
uses: actions/setup-python@v5
if: matrix.system != 'msys2'
with:
python-version: ${{ matrix.python }}

@@ -358,6 +358,7 @@ jobs:

- name: ✅ Run unit tests (Windows)
if: matrix.system == 'windows'
continue-on-error: true
run: |
$env:ENVIRONMENT_NAME = "${{ matrix.envname }}"
$env:PYTHONPATH = (Get-Location).ToString()
@@ -392,9 +393,9 @@ jobs:
# Upload artifacts

- name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ inputs.unittest_report_xml_directory }}

.github/workflows/VerifyDocs.yml (vendored, 2 changed lines)

@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
default: '3.12'
default: '3.13'
type: string

jobs:

.github/workflows/_Checking_JobTemplates.yml (vendored, 45 changed lines)

@@ -30,7 +30,7 @@ jobs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
@@ -47,9 +47,9 @@ jobs:
- ConfigParams
- PlatformTestingParams
with:
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
# tests_directory: ""
unittest_directory: platform
unittest_directory: platform
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
@@ -70,17 +70,17 @@ jobs:
commands: |
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'htmlmypy'
html_report: 'htmlmypy'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@dev
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: ${{ needs.ConfigParams.outputs.package_directors }}
directory : ${{ needs.ConfigParams.outputs.package_directors }}
# fail_below: 70

Package:
@@ -91,31 +91,46 @@ jobs:
- PlatformTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- ConfigParams
- UnitTestingParams
- UnitTesting
- PlatformTesting
with:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_report_xml_directory: ${{ needs.ConfigParams.outputs.coverage_report_xml_directory }}
coverage_report_xml_filename: ${{ needs.ConfigParams.outputs.coverage_report_xml_filename }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
coverage_report_json_filename: ${{ needs.ConfigParams.outputs.coverage_report_json_filename }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
codecov: true
codacy: true
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- ConfigParams
- UnitTestingParams
- UnitTesting
- PlatformTesting
with:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
testsuite-summary-name: ${{ needs.ConfigParams.outputs.package_fullname }}
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
codecov: true
dorny: true
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
@@ -141,7 +156,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@dev
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
needs:
- UnitTestingParams
- PublishCoverageResults

@@ -10,6 +10,10 @@ jobs:
with:
package_namespace: pyExamples
package_name: Extensions
codecov: true
codacy: true
dorny: true
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
3 .github/workflows/_Checking_Nightly.yml vendored
@@ -51,6 +51,7 @@ jobs:
actions: write
# attestations: write
with:
can-fail: true
prerelease: true
replacements: |
version=4.2.0
@@ -88,6 +89,7 @@ jobs:
actions: write
# attestations: write
with:
can-fail: true
replacements: |
version=4.2.0
tool=myTool
@@ -105,6 +107,7 @@ jobs:
inventory-version: 4.2.5
inventory-categories: "kind1,kind2"
assets: |
# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%

@@ -9,6 +9,11 @@ jobs:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
with:
package_name: pyDummy
codecov: true
codacy: true
dorny: true
cleanup: false
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

@@ -1,178 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Pipeline

on:
workflow_dispatch:

jobs:

# This job is a workaround for global variables
# See https://github.com/actions/runner/issues/480
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: ToolName
# Optional
system_list: 'ubuntu windows msys2 macos'
python_version: '3.10'
python_version_list: '3.8 3.9 3.10'

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- Params
with:
jobs: ${{ needs.Params.outputs.python_jobs }}
# Optional
requirements: '-r tests/requirements.txt'
pacboy: >-
python-pip:p
python-wheel:p
python-coverage:p
python-lxml:p
mingw_requirements: '-r tests/requirements.mingw.txt'
tests_directory: 'tests'
unittest_directory: 'unit'
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}

Coverage:
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
needs:
- Params
with:
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
# Optional
python_version: ${{ needs.Params.outputs.python_version }}
requirements: '-r tests/requirements.txt'
tests_directory: 'tests'
unittest_directory: 'unit'
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
needs:
- Params
with:
commands: |
mypy --junit-xml StaticTypingSummary.xml --html-report htmlmypy -p ToolName
html_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
junit_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
# Optional
python_version: ${{ needs.Params.outputs.python_version }}
requirements: '-r tests/requirements.txt'
html_report: 'htmlmypy'
junit_report: 'StaticTypingSummary.xml'
allow_failure: true

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- UnitTesting
- StaticTypeCheck
with:
# Optional
report_files: artifacts/**/*.xml

Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@main
needs:
- Params
- Coverage
with:
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
# Optional
python_version: ${{ needs.Params.outputs.python_version }}
requirements: 'wheel'

Release:
uses: pyTooling/Actions/.github/workflows/Release.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTesting
- Coverage
- StaticTypeCheck
- Package

PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- Params
- Release
- Package
with:
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
# Optional
python_version: ${{ needs.Params.outputs.python_version }}
requirements: 'wheel twine'
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

VerifyDocs:
uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
needs:
- Params
with:
# Optional
python_version: ${{ needs.Params.outputs.python_version }}

BuildTheDocs:
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main
needs:
- Params
- VerifyDocs
with:
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}

PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
needs:
- Params
- BuildTheDocs
- Coverage
- StaticTypeCheck
with:
doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
# Optional
coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- Params
- PublishTestResults
- Coverage
- StaticTypeCheck
- BuildTheDocs
- PublishToGitHubPages
with:
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
remaining: |
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
@@ -100,7 +100,7 @@ As shown in the screenshots above, the expected order is:

### Example pipeline

[ExamplePipeline.yml](ExamplePipeline.yml) is an example Workflow which uses all of the Reusable Workflows.
ExamplePipeline.yml is an example Workflow which uses all of the Reusable Workflows.
Python package/tool developers can copy it into their repos to use all the reusable workflows straightaway.
Minimal required modifications are the following:

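As a rough orientation, a minimal caller of the `CompletePipeline` template used elsewhere in this comparison might look like the sketch below; the package name `myPackage` and the push/dispatch triggers are assumptions, and the referenced secrets must already exist in the calling repository.

```yaml
name: Pipeline

on:
  push:
  workflow_dispatch:

jobs:
  Pipeline:
    # Delegate the whole pipeline to the reusable CompletePipeline workflow.
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_name: myPackage   # hypothetical package name
      codecov: true             # publish merged coverage/unittest reports to Codecov
      codacy: true              # publish merged coverage report to Codacy
      dorny: true               # publish merged unittest report via Dorny Test-Reporter
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```

Note that enabling `codecov`, `codacy`, or `dorny` only switches on report publishing; the matching tokens still have to be passed through `secrets:` as shown in the pyDummy and Extensions examples above.
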
2 dist/requirements.txt vendored
@@ -1,2 +1,2 @@
wheel ~= 0.45
twine ~= 6.0
twine ~= 6.1

@@ -1,9 +1,9 @@
-r ../requirements.txt

pyTooling ~= 8.0
pyTooling ~= 8.4

# Enforce latest version on ReadTheDocs
sphinx ~= 8.1
sphinx ~= 8.2
docutils ~= 0.21
docutils_stubs ~= 0.0.22

@@ -15,5 +15,5 @@ sphinxcontrib-mermaid ~= 1.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
sphinx_autodoc_typehints ~= 2.5
sphinx_autodoc_typehints ~= 3.1
sphinx_reports ~= 0.7

@@ -64,11 +64,11 @@
:target: https://pyTooling.github.io/pyTooling/

.. # Gitter
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.4.4"
__keywords__ = ["GitHub Actions"]

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.14.8"
__keywords__ = ["GitHub Actions"]

@@ -1,8 +1,8 @@
[build-system]
requires = [
"setuptools ~= 75.5",
"setuptools ~= 78.1",
"wheel ~= 0.45",
"pyTooling ~= 8.0"
"pyTooling ~= 8.4"
]
build-backend = "setuptools.build_meta"

@@ -21,7 +21,10 @@ namespace_packages = true
html_report = "report/typing"

[tool.pytest]
junit_xml = "report/unit/TestReportSummary.xml"
junit_xml = "report/unit/UnittestReportSummary.xml"

[tool.pyedaa-reports]
junit_xml = "report/unit/unittest.xml"

[tool.pytest.ini_options]
addopts = "--tb=native"

@@ -5,7 +5,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -1 +1 @@
pyTooling ~= 8.0
pyTooling ~= 8.4

2 setup.py
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -1,13 +1,13 @@
-r ../requirements.txt

# Coverage collection
Coverage ~= 7.6
Coverage ~= 7.8

# Test Runner
pytest ~= 8.3
pytest-cov ~= 6.0
pytest-cov ~= 6.1

# Static Type Checking
mypy ~= 1.13
typing_extensions ~= 4.12
mypy ~= 1.15
typing_extensions ~= 4.13
lxml ~= 5.3

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
