Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-16 13:06:57 +08:00)

Compare commits: 35 commits
| Author | SHA1 | Date |
|---|---|---|
| | 33edd82e6f | |
| | d08f312904 | |
| | 731d0d2369 | |
| | de400ae2db | |
| | cb3c338df6 | |
| | 79c8526437 | |
| | 0f7d062c38 | |
| | d5635a8842 | |
| | 6aa6af95ee | |
| | 15bf375fe6 | |
| | a1b03cfe2a | |
| | 79620e267d | |
| | fffef5c814 | |
| | 42e17fae05 | |
| | 9b7032a585 | |
| | 9110c85738 | |
| | c81d139080 | |
| | c64e054bcd | |
| | 78fdb584aa | |
| | a456635686 | |
| | befc59f22d | |
| | d6fc0efd47 | |
| | c018acc3c1 | |
| | d74c610bb4 | |
| | edc4ab3e86 | |
| | 0a338ae8b7 | |
| | 4069da0a74 | |
| | 679ec24c80 | |
| | 3a13486ea6 | |
| | 34fb9c9869 | |
| | 7523c4adca | |
| | 530ad7a4a1 | |
| | bd3f2afaf3 | |
| | b1e4cb961f | |
| | 1e6b71e87b | |
.github/workflows/ApplicationTesting.yml (vendored): 4 changed lines

```diff
@@ -188,8 +188,8 @@ jobs:
             f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")
 
       - name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
-        if: matrix.system == 'msys2'
         uses: msys2/setup-msys2@v2
+        if: matrix.system == 'msys2'
         with:
           msystem: ${{ matrix.runtime }}
           update: true
@@ -198,8 +198,8 @@ jobs:
             ${{ inputs.pacboy }}
 
       - name: 🐍 Setup Python ${{ matrix.python }}
-        if: matrix.system != 'msys2'
         uses: actions/setup-python@v5
+        if: matrix.system != 'msys2'
         with:
           python-version: ${{ matrix.python }}
 
```
.github/workflows/ArtifactCleanUp.yml (vendored): 4 changed lines

```diff
@@ -47,13 +47,13 @@ jobs:
 
     steps:
       - name: 🗑️ Delete package Artifacts
-        if: ${{ ! startsWith(github.ref, 'refs/tags') }}
         uses: geekyeggo/delete-artifact@v5
+        if: ${{ ! startsWith(github.ref, 'refs/tags') }}
         with:
           name: ${{ inputs.package }}
 
       - name: 🗑️ Delete remaining Artifacts
-        if: ${{ inputs.remaining != '' }}
         uses: geekyeggo/delete-artifact@v5
+        if: ${{ inputs.remaining != '' }}
         with:
           name: ${{ inputs.remaining }}
```
.github/workflows/BuildTheDocs.yml (vendored): 2 changed lines

```diff
@@ -49,8 +49,8 @@ jobs:
           skip-deploy: true
 
       - name: 📤 Upload 'documentation' artifacts
-        if: inputs.artifact != ''
         uses: pyTooling/upload-artifact@v4
+        if: inputs.artifact != ''
         with:
           name: ${{ inputs.artifact }}
           working-directory: doc/_build/html
```
.github/workflows/CheckDocumentation.yml (vendored): 2 changed lines

```diff
@@ -32,7 +32,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.12'
+        default: '3.13'
         type: string
       directory:
         description: 'Source code directory to check.'
```
.github/workflows/CompletePipeline.yml (vendored): 84 changed lines

```diff
@@ -93,10 +93,33 @@ on:
         required: false
         default: ''
         type: string
+      codecov:
+        description: 'Publish merged coverage and unittest reports to Codecov.'
+        required: false
+        default: false
+        type: boolean
+      codacy:
+        description: 'Publish merged coverage report to Codacy.'
+        required: false
+        default: false
+        type: boolean
+      dorny:
+        description: 'Publish merged unittest report via Dorny Test-Reporter.'
+        required: false
+        default: false
+        type: boolean
+      cleanup:
+        description: 'Cleanup artifacts afterwards.'
+        required: false
+        default: true
+        type: boolean
     secrets:
       PYPI_TOKEN:
         description: "Token for pushing releases to PyPI."
         required: false
+      CODECOV_TOKEN:
+        description: "Token for pushing coverage and unittest results to Codecov."
+        required: false
       CODACY_PROJECT_TOKEN:
         description: "Token for pushing coverage results to Codacy."
         required: false
@@ -127,10 +150,10 @@ jobs:
       package_name: ${{ inputs.package_name }}
       python_version: ${{ inputs.apptest_python_version }}
       python_version_list: ${{ inputs.apptest_python_version_list }}
       system_list: ${{ inputs.apptest_system_list }}
       include_list: ${{ inputs.apptest_include_list }}
       exclude_list: ${{ inputs.apptest_exclude_list }}
       disable_list: ${{ inputs.apptest_disable_list }}
 
   UnitTesting:
     uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
@@ -167,7 +190,7 @@ jobs:
       - UnitTestingParams
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
       # fail_below: 70
 
   Package:
@@ -177,7 +200,7 @@ jobs:
       - UnitTesting
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
 
 # AppTesting:
 #   uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
@@ -193,24 +216,41 @@ jobs:
   PublishCoverageResults:
     uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
     needs:
+      - ConfigParams
       - UnitTestingParams
       - UnitTesting
     with:
       # coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
       # coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
-      coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
-      coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
+      # coverage_report_xml_directory: ${{ needs.ConfigParams.outputs.coverage_report_xml_directory }}
+      # coverage_report_xml_filename: ${{ needs.ConfigParams.outputs.coverage_report_xml_filename }}
+      coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
+      coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
+      coverage_report_json_filename: ${{ needs.ConfigParams.outputs.coverage_report_json_filename }}
+      coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
+      coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
+      codecov: ${{ inputs.codecov }}
+      codacy: ${{ inputs.codacy }}
     secrets:
-      codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
+      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+      CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
 
   PublishTestResults:
     uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
     needs:
+      - ConfigParams
       - UnitTestingParams
       - UnitTesting
     with:
       additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
-      merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
+      testsuite-summary-name: ${{ inputs.package_name }}
+      merged_junit_filename: ${{ needs.ConfigParams.outputs.unittest_merged_report_xml_filename }}
+      merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
+      dorny: ${{ inputs.dorny }}
+      codecov: ${{ inputs.codecov }}
 
+    secrets:
+      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+
 # VerifyDocs:
 #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
@@ -228,12 +268,12 @@ jobs:
       - PublishCoverageResults
       # - VerifyDocs
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
-      unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-ubuntu-native-3.12
+      unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
       coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
       html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
       latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
 
   IntermediateCleanUp:
     uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
@@ -242,9 +282,10 @@ jobs:
       - PublishCoverageResults
       - PublishTestResults
       - Documentation
+    if: ${{ inputs.cleanup }}
     with:
       sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
       xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
 
 # PDFDocumentation:
 #   uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
@@ -265,9 +306,9 @@ jobs:
       - PublishCoverageResults
       - StaticTypeCheck
     with:
       doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
       coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
       typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
 
   ReleasePage:
     uses: pyTooling/Actions/.github/workflows/Release.yml@main
@@ -303,6 +344,7 @@ jobs:
       - PublishToGitHubPages
       # - PublishOnPyPI
       - IntermediateCleanUp
+    if: ${{ inputs.cleanup }}
     with:
       package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
       remaining: |
```
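The four new toggles (`codecov`, `codacy`, `dorny`, `cleanup`) and the `CODECOV_TOKEN` secret are forwarded from the caller of this reusable workflow. A minimal sketch of a downstream caller, assuming a placeholder package name; only the input, secret, and workflow names come from the diff above:

```yaml
# Hypothetical caller workflow in a consuming repository (file and job names are placeholders).
name: Pipeline

on:
  push:
  workflow_dispatch:

jobs:
  Pipeline:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_name: myPackage   # placeholder package name
      codecov: true             # publish merged coverage and unittest reports to Codecov
      codacy: true              # publish merged coverage report to Codacy
      dorny: true               # publish merged unittest report via Dorny Test-Reporter
      cleanup: false            # keep intermediate artifacts, e.g. for debugging
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```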
.github/workflows/ExtractConfiguration.yml (vendored): 46 changed lines

```diff
@@ -32,7 +32,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.12'
+        default: '3.13'
         type: string
       package_namespace:
         description: 'Name of the tool''s namespace.'
@@ -68,6 +68,15 @@ on:
       unittest_report_xml:
         description: ""
         value: ${{ jobs.Extract.outputs.unittest_report_xml }}
+      unittest_merged_report_xml_directory:
+        description: ""
+        value: ${{ jobs.Extract.outputs.unittest_merged_report_xml_directory }}
+      unittest_merged_report_xml_filename:
+        description: ""
+        value: ${{ jobs.Extract.outputs.unittest_merged_report_xml_filename }}
+      unittest_merged_report_xml:
+        description: ""
+        value: ${{ jobs.Extract.outputs.unittest_merged_report_xml }}
       coverage_report_html_directory:
         description: ""
         value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
@@ -95,19 +104,22 @@ jobs:
     name: 📓 Extract configurations from pyproject.toml
     runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
     outputs:
       package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
       package_directory: ${{ steps.getPackageName.outputs.package_directory }}
       mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
       unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }}
       unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }}
       unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
-      coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
-      coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
-      coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
-      coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
-      coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
-      coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
-      coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}
+      unittest_merged_report_xml_directory: ${{ steps.getVariables.outputs.unittest_merged_report_xml_directory }}
+      unittest_merged_report_xml_filename: ${{ steps.getVariables.outputs.unittest_merged_report_xml_filename }}
+      unittest_merged_report_xml: ${{ steps.getVariables.outputs.unittest_merged_report_xml }}
+      coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
+      coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
+      coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
+      coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
+      coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
+      coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
+      coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}
 
     steps:
       - name: ⏬ Checkout repository
@@ -178,6 +190,7 @@ jobs:
             pyProjectSettings = tomli_load(file)
 
           unittestXMLFile = Path(pyProjectSettings["tool"]["pytest"]["junit_xml"])
+          mergedUnittestXMLFile = Path(pyProjectSettings["tool"]["pyedaa-reports"]["junit_xml"])
           coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
           coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
           coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
@@ -209,6 +222,9 @@ jobs:
             unittest_report_xml_directory={unittestXMLFile.parent.as_posix()}
             unittest_report_xml_filename={unittestXMLFile.name}
             unittest_report_xml={unittestXMLFile.as_posix()}
+            unittest_merged_report_xml_directory={mergedUnittestXMLFile.parent.as_posix()}
+            unittest_merged_report_xml_filename={mergedUnittestXMLFile.name}
+            unittest_merged_report_xml={mergedUnittestXMLFile.as_posix()}
             coverage_report_html_directory={coverageHTMLDirectory.as_posix()}
             coverage_report_xml_directory={coverageXMLFile.parent.as_posix()}
             coverage_report_xml_filename={coverageXMLFile.name}
@@ -218,4 +234,4 @@ jobs:
             coverage_report_json={coverageJSONFile.as_posix()}
             """))
 
-          print(f"DEBUG:\n  unittest xml: {unittestXMLFile}\n  coverage html: {coverageHTMLDirectory}\n  coverage xml: {coverageXMLFile}\n  coverage json: {coverageJSONFile}")
+          print(f"DEBUG:\n  unittest xml: {unittestXMLFile}\n  merged unittest xml: {mergedUnittestXMLFile}\n  coverage html: {coverageHTMLDirectory}\n  coverage xml: {coverageXMLFile}\n  coverage json: {coverageJSONFile}")
```
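The new `unittest_merged_report_xml*` outputs mirror the `[tool.pyedaa-reports] junit_xml` setting that the script now reads from pyproject.toml. A hedged sketch of wiring one of these outputs into a downstream reusable workflow, following the job naming used in CompletePipeline.yml; the package name is a placeholder:

```yaml
# Sketch only: expose ExtractConfiguration's merged-report filename to the publish job.
jobs:
  ConfigParams:
    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
    with:
      package_name: myPackage   # placeholder, see the workflow's inputs

  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
    needs:
      - ConfigParams
    with:
      # new output: directory/filename of the merged JUnit report from pyproject.toml
      merged_junit_filename: ${{ needs.ConfigParams.outputs.unittest_merged_report_xml_filename }}
```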
.github/workflows/NightlyRelease.yml (vendored): 67 changed lines

```diff
@@ -84,11 +84,16 @@ on:
         type: string
         required: false
         default: '__pyTooling_upload_artifact__.tar'
+      can-fail:
+        type: boolean
+        required: false
+        default: false
 
 jobs:
   Release:
     name: 📝 Update 'Nightly Page' on GitHub
     runs-on: ${{ inputs.ubuntu_image }}
+    continue-on-error: ${{ inputs.can-fail }}
     permissions:
       contents: write
       actions: write
@@ -124,7 +129,7 @@ jobs:
             printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
           else
             printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
-            printf "%s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
+            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
             printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
             exit 1
           fi
@@ -175,7 +180,7 @@ jobs:
           cat <<EOF >> __NoTeS__.md
 
           --------
-          Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
+          Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
           EOF
 
           printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
@@ -184,7 +189,7 @@ jobs:
             printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
           else
             printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
-            printf "%s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
+            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
             printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
             exit 1
           fi
@@ -197,7 +202,7 @@ jobs:
           ANSI_LIGHT_RED=$'\x1b[91m'
           ANSI_LIGHT_GREEN=$'\x1b[92m'
           ANSI_LIGHT_YELLOW=$'\x1b[93m'
-          ANSI_LIGHT_BLUE="\e[94m"
+          ANSI_LIGHT_BLUE=$'\x1b[94m'
           ANSI_NOCOLOR=$'\x1b[0m'
 
           export GH_TOKEN=${{ github.token }}
@@ -254,7 +259,7 @@ jobs:
           # A dictionary to check for duplicate asset files in release
           declare -A assetFilenames
           while IFS=$'\r\n' read -r assetLine; do
-            if [[ "${assetLine}" == "" ]]; then
+            if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
               continue
             fi
 
@@ -285,7 +290,7 @@ jobs:
             if [[ -n "${assetFilenames[$asset]}" ]]; then
               printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
               printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
-              ERRORS=1
+              ERRORS=$((ERRORS + 1))
               continue
             else
               printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
@@ -303,9 +308,9 @@ jobs:
               printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
             else
               printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-              printf "%s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
+              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
               printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
-              ERRORS=1
+              ERRORS=$((ERRORS + 1))
               continue
             fi
             downloadedArtifacts[$artifact]=1
@@ -343,19 +348,21 @@ jobs:
             if [[ "${asset}" == !*.zip ]]; then
               printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
               asset="${asset##*!}"
-              printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
+              printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
               (
                 cd "${artifact}" && \
                 zip -r "../${asset}" *
               )
-              if [[ $? -eq 0 ]]; then
+              retCode=$?
+              printf "::endgroup::\n"
+              if [[ $retCode -eq 0 ]]; then
                 printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                 uploadFile="${asset}"
               else
                 printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-                printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
+                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
                 printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
-                ERRORS=1
+                ERRORS=$((ERRORS + 1))
                 continue
               fi
             elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
@@ -382,9 +389,9 @@ jobs:
                 uploadFile="${asset}"
               else
                 printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-                printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
+                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
                 printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
-                ERRORS=1
+                ERRORS=$((ERRORS + 1))
                 continue
               fi
             elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
@@ -411,9 +418,9 @@ jobs:
                 uploadFile="${asset}"
               else
                 printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-                printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
+                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
                 printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
-                ERRORS=1
+                ERRORS=$((ERRORS + 1))
                 continue
               fi
             elif [[ -e "${artifact}/${asset}" ]]; then
@@ -421,26 +428,26 @@ jobs:
               uploadFile="${artifact}/${asset}"
             else
               printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-              printf "%s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
+              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
               printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
-              ERRORS=1
+              ERRORS=$((ERRORS + 1))
               continue
             fi
 
             # Add asset to JSON inventory
             if [[ "${{ inputs.inventory-json }}" != "" ]]; then
               if [[ "${categories}" != "${title}" ]]; then
-                printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..."
+                printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
                 category=""
                 jsonEntry=$(jq -c -n \
                   --arg title "${title}" \
-                  --arg file "${uploadFile}" \
+                  --arg file "${uploadFile#*/}" \
                   '{"file": $file, "title": $title}' \
                 )
 
                 while [[ "${categories}" != "${category}" ]]; do
-                  category="${categories##*;}"
-                  categories="${categories%;*}"
+                  category="${categories##*,}"
+                  categories="${categories%,*}"
                   jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
                 done
 
@@ -450,7 +457,7 @@ jobs:
                   '$inventory * {"files": $file}' \
                 )
               else
-                printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
+                printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
               fi
             fi
 
@@ -461,9 +468,9 @@ jobs:
               printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
             else
               printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-              printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
+              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
               printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
-              ERRORS=1
+              ERRORS=$((ERRORS + 1))
               continue
             fi
           done <<<'${{ inputs.assets }}'
@@ -484,9 +491,9 @@ jobs:
             printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
           else
             printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-            printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
+            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
             printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
-            ERRORS=1
+            ERRORS=$((ERRORS + 1))
             continue
           fi
         fi
@@ -495,8 +502,8 @@ jobs:
           tree -pash -L 3 .
           printf "::endgroup::\n"
 
-          if [[ $ERROR -ne 0 ]]; then
-            printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
+          if [[ $ERRORS -ne 0 ]]; then
+            printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
             exit 1
           fi
 
@@ -518,6 +525,6 @@ jobs:
             printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
           else
             printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
-            printf "%s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
+            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
             printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
           fi
```
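The release script now counts failures with `ERRORS=$((ERRORS + 1))` instead of overwriting a flag, and wraps noisy compression output in `::group::`/`::endgroup::`. A minimal sketch of that pattern in isolation; the step name, the artifact names, and the `process` command are placeholders, not part of the workflow:

```yaml
# Sketch only: accumulate errors across a loop, then fail the step once at the end.
steps:
  - name: Example error counting           # illustrative step, not from NightlyRelease.yml
    run: |
      ERRORS=0
      for artifact in artifact-a artifact-b; do            # placeholder artifact names
        printf "::group:: Processing '%s'\n" "${artifact}"  # fold per-artifact output in the log
        process "${artifact}" || ERRORS=$((ERRORS + 1))     # 'process' is a stand-in command
        printf "::endgroup::\n"
      done
      if [[ $ERRORS -ne 0 ]]; then
        printf "%s\n" "${ERRORS} errors detected in previous steps."
        exit 1
      fi
```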
.github/workflows/Package.yml (vendored): 2 changed lines

```diff
@@ -33,7 +33,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.12'
+        default: '3.13'
         type: string
       requirements:
         description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.'
```
.github/workflows/Parameters.yml (vendored): 7 changed lines

```diff
@@ -197,9 +197,10 @@ jobs:
             "3.13": { "icon": "🟢", "until": "2029.10" },
             "3.14": { "icon": "🟣", "until": "2030.10" },
             "pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
-            "pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
-            "pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
-            "pypy-3.10": { "icon": "⟲🟡", "until": "????.??" },
+            "pypy-3.8": { "icon": "⟲⚫", "until": "????.??" },
+            "pypy-3.9": { "icon": "⟲🔴", "until": "????.??" },
+            "pypy-3.10": { "icon": "⟲🟠", "until": "????.??" },
+            "pypy-3.11": { "icon": "⟲🟡", "until": "????.??" },
           },
           # Runner systems (runner images) supported by GitHub Actions
           "sys": {
```
.github/workflows/PublishCoverageResults.yml (vendored): 139 changed lines

```diff
@@ -48,19 +48,57 @@ on:
         required: false
         default: ''
         type: string
+      coverage_report_xml_directory:
+        description: 'Directory containing the XML coverage report file.'
+        required: false
+        default: 'report/coverage'
+        type: string
+      coverage_report_xml_filename:
+        description: 'Filename of the XML coverage report file.'
+        required: false
+        default: 'coverage.xml'
+        type: string
       coverage_json_artifact:
         description: 'Name of the JSON coverage artifact.'
         required: false
         default: ''
         type: string
+      coverage_report_json_directory:
+        description: 'Directory containing the JSON coverage report file.'
+        required: false
+        default: 'report/coverage'
+        type: string
+      coverage_report_json_filename:
+        description: 'Filename of the JSON coverage report file.'
+        required: false
+        default: 'coverage.json'
+        type: string
       coverage_html_artifact:
         description: 'Name of the HTML coverage artifact.'
         required: false
         default: ''
         type: string
+      coverage_report_html_directory:
+        description: 'HTML root directory of the generated coverage report.'
+        required: false
+        default: 'report/coverage/html'
+        type: string
+      codecov:
+        description: 'Publish merged coverage report to Codecov.'
+        required: false
+        default: false
+        type: boolean
+      codacy:
+        description: 'Publish merged coverage report to Codacy.'
+        required: false
+        default: false
+        type: boolean
     secrets:
-      codacy_token:
-        description: 'Token to push result to codacy.'
+      CODECOV_TOKEN:
+        description: 'Token to push result to Codecov.'
+        required: true
+      CODACY_TOKEN:
+        description: 'Token to push result to Codacy.'
         required: true
 
 jobs:
@@ -76,7 +114,7 @@ jobs:
           lfs: true
           submodules: true
 
-      - name: Download Artifacts
+      - name: 📥 Download Artifacts
         uses: pyTooling/download-artifact@v4
         with:
           pattern: ${{ inputs.coverage_artifacts_pattern }}
@@ -90,66 +128,6 @@ jobs:
         run: |
           python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
 
-      - name: 🔁 Extract configurations from pyproject.toml
-        id: getVariables
-        shell: python
-        run: |
-          from os import getenv
-          from pathlib import Path
-          from sys import version
-          from textwrap import dedent
-
-          print(f"Python: {version}")
-
-          from tomli import load as tomli_load
-
-          htmlDirectory = Path("htmlcov")
-          xmlFile = Path("./coverage.xml")
-          jsonFile = Path("./coverage.json")
-          coverageRC = "${{ inputs.coverage_config }}".strip()
-
-          # Read output paths from 'pyproject.toml' file
-          if coverageRC == "pyproject.toml":
-            pyProjectFile = Path("pyproject.toml")
-            if pyProjectFile.exists():
-              with pyProjectFile.open("rb") as file:
-                pyProjectSettings = tomli_load(file)
-
-              htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
-              xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
-              jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
-            else:
-              print(f"File '{pyProjectFile}' not found.")
-              print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
-              exit(1)
-
-          # Read output paths from '.coveragerc' file
-          elif len(coverageRC) > 0:
-            coverageRCFile = Path(coverageRC)
-            if coverageRCFile.exists():
-              with coverageRCFile.open("rb") as file:
-                coverageRCSettings = tomli_load(file)
-
-              htmlDirectory = Path(coverageRCSettings["html"]["directory"])
-              xmlFile = Path(coverageRCSettings["xml"]["output"])
-              jsonFile = Path(coverageRCSettings["json"]["output"])
-            else:
-              print(f"File '{coverageRCFile}' not found.")
-              print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
-              exit(1)
-
-          # Write jobs to special file
-          github_output = Path(getenv("GITHUB_OUTPUT"))
-          print(f"GITHUB_OUTPUT: {github_output}")
-          with github_output.open("a+", encoding="utf-8") as f:
-            f.write(dedent(f"""\
-              coverage_report_html_directory={htmlDirectory.as_posix()}
-              coverage_report_xml={xmlFile}
-              coverage_report_json={jsonFile}
-            """))
-
-          print(f"DEBUG:\n  html={htmlDirectory}\n  xml={xmlFile}\n  json={jsonFile}")
-
       - name: Rename .coverage files and move them all into 'coverage/'
         run: |
           mkdir -p coverage
@@ -163,7 +141,7 @@ jobs:
         run: coverage report --rcfile=pyproject.toml --data-file=.coverage
 
       - name: Convert to XML format (Cobertura)
-        if: inputs.coverage_xml_artifact != ''
+        if: inputs.coverage_xml_artifact != '' || inputs.codecov || inputs.codacy
         run: coverage xml --data-file=.coverage
 
       - name: Convert to JSON format
@@ -178,9 +156,9 @@ jobs:
           tree -pash report/coverage/html
 
       - name: 📤 Upload 'Coverage SQLite Database' artifact
+        uses: pyTooling/upload-artifact@v4
         if: inputs.coverage_sqlite_artifact != ''
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
         with:
           name: ${{ inputs.coverage_sqlite_artifact }}
           path: .coverage
@@ -188,49 +166,54 @@ jobs:
           retention-days: 1
 
       - name: 📤 Upload 'Coverage XML Report' artifact
+        uses: pyTooling/upload-artifact@v4
         if: inputs.coverage_xml_artifact != ''
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
         with:
           name: ${{ inputs.coverage_xml_artifact }}
-          path: ${{ steps.getVariables.outputs.coverage_report_xml }}
+          working-directory: ${{ inputs.coverage_report_xml_directory }}
+          path: ${{ inputs.coverage_report_xml_filename }}
           if-no-files-found: error
           retention-days: 1
 
       - name: 📤 Upload 'Coverage JSON Report' artifact
+        uses: pyTooling/upload-artifact@v4
         if: inputs.coverage_json_artifact != ''
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
         with:
           name: ${{ inputs.coverage_json_artifact }}
-          path: ${{ steps.getVariables.outputs.coverage_report_json }}
+          working-directory: ${{ inputs.coverage_report_json_directory }}
+          path: ${{ inputs.coverage_report_json_filename }}
           if-no-files-found: error
           retention-days: 1
 
       - name: 📤 Upload 'Coverage HTML Report' artifact
+        uses: pyTooling/upload-artifact@v4
         if: inputs.coverage_html_artifact != ''
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
         with:
           name: ${{ inputs.coverage_html_artifact }}
-          working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
+          working-directory: ${{ inputs.coverage_report_html_directory }}
           path: '*'
           if-no-files-found: error
           retention-days: 1
 
       - name: 📊 Publish code coverage at CodeCov
-        if: inputs.CodeCov == true
-        continue-on-error: true
         uses: codecov/codecov-action@v5
+        if: inputs.codecov
+        continue-on-error: true
         with:
-          files: ${{ steps.getVariables.outputs.coverage_report_xml }}
+          token: ${{ secrets.CODECOV_TOKEN }}
+          disable_search: true
+          files: ${{ inputs.coverage_report_xml_directory }}/${{ inputs.coverage_report_xml_filename }}
           flags: unittests
           env_vars: PYTHON
+          fail_ci_if_error: true
 
       - name: 📉 Publish code coverage at Codacy
-        if: inputs.Codacy == true
-        continue-on-error: true
         uses: codacy/codacy-coverage-reporter-action@v1
+        if: inputs.codacy
+        continue-on-error: true
         with:
-          project-token: ${{ secrets.codacy_token }}
-          coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
+          project-token: ${{ secrets.CODACY_TOKEN }}
+          coverage-reports: ${{ inputs.coverage_report_xml_directory }}/${{ inputs.coverage_report_xml_filename }}
```
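Report locations are now plain inputs with defaults instead of paths extracted from pyproject.toml at runtime, and the secrets are renamed to `CODECOV_TOKEN` and `CODACY_TOKEN`. A hedged sketch of a caller after this change; the artifact name is a placeholder:

```yaml
# Sketch only: pass report paths explicitly and forward both tokens under the new names.
jobs:
  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
    with:
      coverage_xml_artifact: myPackage-CodeCoverage-XML   # placeholder artifact name
      coverage_report_xml_directory: report/coverage      # default value
      coverage_report_xml_filename: coverage.xml          # default value
      codecov: true
      codacy: true
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```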
.github/workflows/PublishOnPyPI.yml (vendored): 2 changed lines

```diff
@@ -33,7 +33,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.12'
+        default: '3.13'
         type: string
       requirements:
         description: 'Python dependencies to be installed through pip.'
```
.github/workflows/PublishTestResults.yml (vendored): 56 changed lines

```diff
@@ -34,6 +34,11 @@ on:
         required: false
         default: '*-UnitTestReportSummary-XML-*'
         type: string
+      merged_junit_filename:
+        description: 'Filename of the merged JUnit Test Summary.'
+        required: false
+        default: 'Unittesting.xml'
+        type: string
       merged_junit_artifact:
         description: 'Name of the merged JUnit Test Summary artifact.'
         required: false
@@ -44,6 +49,11 @@ on:
         required: false
         default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
         type: string
+      testsuite-summary-name:
+        description: 'Set TestsuiteSummary name.'
+        required: false
+        default: ''
+        type: string
       publish:
         description: 'Publish test report summary via Dorny Test-Reporter'
         required: false
@@ -54,6 +64,20 @@ on:
         required: false
         default: 'Unit Test Results'
         type: string
+      dorny:
+        description: 'Publish merged unittest results via Dorny Test-Reporter.'
+        required: false
+        default: true
+        type: boolean
+      codecov:
+        description: 'Publish merged unittest results to Codecov.'
+        required: false
+        default: true
+        type: boolean
+    secrets:
+      CODECOV_TOKEN:
+        description: 'Token to push result to Codecov.'
+        required: true
 
 jobs:
   PublishTestResults:
@@ -65,11 +89,11 @@ jobs:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
 
-      - name: Download Artifacts
+      - name: 📥 Download Artifacts
        uses: pyTooling/download-artifact@v4
         with:
           pattern: ${{ inputs.unittest_artifacts_pattern }}
           path: artifacts
 
       - name: 🔎 Inspect extracted artifact (tarball)
         run: |
@@ -82,28 +106,38 @@ jobs:
       - name: Rename JUnit files and move them all into 'junit/'
         run: |
           mkdir -p junit
-          find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
+          find artifacts/ -type f -path "*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
           tree -pash junit
 
       - name: 🔁 Merge JUnit Unit Test Summaries
         run: |
-          pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
-          printf "%s\n" "cat Unittesting.xml"
-          cat Unittesting.xml
+          pyedaa-reports -v unittest "--name=${{ inputs.testsuite-summary-name }}" "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:${{ inputs.merged_junit_filename }}"
+          printf "%s\n" "cat ${{ inputs.merged_junit_filename }}"
+          cat ${{ inputs.merged_junit_filename }}
 
       - name: 📊 Publish Unit Test Results
-        uses: dorny/test-reporter@v1
-        if: inputs.publish && inputs.report_title != ''
+        uses: dorny/test-reporter@v2
+        if: (inputs.dorny || inputs.publish) && inputs.report_title != ''
         with:
           name: ${{ inputs.report_title }}
-          path: Unittesting.xml
+          path: ${{ inputs.merged_junit_filename }}
           reporter: java-junit
 
+      - name: 📊 Publish unittest results at CodeCov
+        uses: codecov/test-results-action@v1
+        if: inputs.codecov
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          disable_search: true
+          files: ${{ inputs.merged_junit_filename }}
+          fail_ci_if_error: true
+
       - name: 📤 Upload merged 'JUnit Test Summary' artifact
-        if: inputs.merged_junit_artifact != ''
         uses: pyTooling/upload-artifact@v4
+        if: inputs.merged_junit_artifact != ''
         with:
           name: ${{ inputs.merged_junit_artifact }}
-          path: Unittesting.xml
+          path: ${{ inputs.merged_junit_filename }}
           if-no-files-found: error
           retention-days: 1
+          investigate: true
```
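A hedged sketch of calling the updated workflow with the new inputs: a named testsuite summary, a configurable merged-report filename, and separate Dorny/Codecov toggles. The package and artifact names are placeholders:

```yaml
# Sketch only: caller of PublishTestResults.yml after this change.
jobs:
  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
    with:
      testsuite-summary-name: myPackage                            # placeholder name
      merged_junit_filename: Unittesting.xml                       # default value
      merged_junit_artifact: myPackage-UnitTestReportSummary-XML   # placeholder artifact name
      dorny: true
      codecov: true
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
```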
.github/workflows/PublishToGitHubPages.yml (vendored): 4 changed lines

```diff
@@ -62,15 +62,15 @@ jobs:
           path: public
 
       - name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
-        if: ${{ inputs.coverage != '' }}
         uses: pyTooling/download-artifact@v4
+        if: ${{ inputs.coverage != '' }}
         with:
           name: ${{ inputs.coverage }}
           path: public/coverage
 
       - name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
-        if: ${{ inputs.typing != '' }}
         uses: pyTooling/download-artifact@v4
+        if: ${{ inputs.typing != '' }}
         with:
           name: ${{ inputs.typing }}
           path: public/typing
```
8 .github/workflows/Release.yml vendored
@@ -44,13 +44,13 @@ jobs:
RELEASE_VERSION=${GIT_TAG#v}
RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')"
# write to step outputs
-echo "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
+printf "%s\n" "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
-echo "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
+printf "%s\n" "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
-echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT
+printf "%s\n" "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT

- name: 📑 Create Release Page
-id: createReleasePage
uses: actions/create-release@v1
+id: createReleasePage
env:
GITHUB_TOKEN: ${{ github.token }}
with:
32 .github/workflows/SphinxDocumentation.yml vendored
@@ -32,7 +32,7 @@ on:
python_version:
description: 'Python version.'
required: false
-default: '3.12'
+default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
@@ -46,7 +46,7 @@ on:
type: string
coverage_report_json_directory:
description: ''
-required: true
+required: false
type: string
coverage_json_artifact:
description: 'Name of the coverage JSON artifact.'
@@ -100,18 +100,20 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
-if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
+if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
+investigate: true

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
-if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
+if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
+investigate: true

- name: ☑ Generate HTML documentation
if: inputs.html_artifact != ''
@@ -122,9 +124,9 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html

- name: 📤 Upload 'HTML Documentation' artifact
+uses: pyTooling/upload-artifact@v4
if: inputs.html_artifact != ''
continue-on-error: true
-uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/html
@@ -157,18 +159,20 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
-if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
+if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
+investigate: true

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
-if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
+if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
+investigate: true

- name: ☑ Generate LaTeX documentation
if: inputs.latex_artifact != ''
@@ -183,8 +187,8 @@ jobs:
- name: Workaround I - https://github.com/sphinx-doc/sphinx/issues/13190
if: inputs.latex_artifact != ''
run: |
-printf "Changing directory to 'doc/_build/latex' ...\n"
+printf "Changing directory to '${{ inputs.doc_directory || '.' }}/_build/latex' ...\n"
-cd doc/_build/latex
+cd ${{ inputs.doc_directory || '.' }}/_build/latex

MIMETYPE_EXTENSIONS=(
"image/png:png"
@@ -227,13 +231,13 @@ jobs:
if [[ $found -eq 0 ]]; then
printf "[SKIPPED]\n"
fi
-done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')
+done < <(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')

- name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189
if: inputs.latex_artifact != ''
run: |
-printf "Changing directory to 'doc/_build/latex' ...\n"
+printf "Changing directory to '${{ inputs.doc_directory || '.' }}/_build/latex' ...\n"
-cd doc/_build/latex
+cd ${{ inputs.doc_directory || '.' }}/_build/latex

printf "Searching for downloaded images, that need normalization ...\n"
for imageExt in png svg jpg jpeg; do
@@ -259,13 +263,13 @@ jobs:
printf "[FAILED]\n"
fi
fi
-done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::')
+done < <(find . -type f -iname "*.$imageExt" | sed 's:./::')
done

- name: 📤 Upload 'LaTeX Documentation' artifact
+uses: pyTooling/upload-artifact@v4
if: inputs.latex_artifact != ''
continue-on-error: true
-uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/latex
6 .github/workflows/StaticTypeCheck.yml vendored
@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
-default: '3.12'
+default: '3.13'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
@@ -87,9 +87,9 @@ jobs:
run: ${{ inputs.commands }}

- name: 📤 Upload 'Static Typing Report' HTML artifact
+uses: pyTooling/upload-artifact@v4
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
-uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.html_report }}
@@ -98,9 +98,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Static Typing Report' JUnit artifact
+uses: pyTooling/upload-artifact@v4
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
-uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.junit_artifact }}
path: ${{ inputs.junit_report }}
7 .github/workflows/UnitTesting.yml vendored
@@ -282,8 +282,8 @@ jobs:
# Python setup

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
-if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
+if: matrix.system == 'msys2'
with:
msystem: ${{ matrix.runtime }}
update: true
@@ -292,8 +292,8 @@ jobs:
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
-if: matrix.system != 'msys2'
uses: actions/setup-python@v5
+if: matrix.system != 'msys2'
with:
python-version: ${{ matrix.python }}

@@ -358,6 +358,7 @@ jobs:

- name: ✅ Run unit tests (Windows)
if: matrix.system == 'windows'
+continue-on-error: true
run: |
$env:ENVIRONMENT_NAME = "${{ matrix.envname }}"
$env:PYTHONPATH = (Get-Location).ToString()
@@ -392,9 +393,9 @@ jobs:
# Upload artifacts

- name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact
+uses: pyTooling/upload-artifact@v4
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
-uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ inputs.unittest_report_xml_directory }}
2 .github/workflows/VerifyDocs.yml vendored
@@ -33,7 +33,7 @@ on:
python_version:
description: 'Python version.'
required: false
-default: '3.12'
+default: '3.13'
type: string

jobs:
45 .github/workflows/_Checking_JobTemplates.yml vendored
@@ -30,7 +30,7 @@ jobs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
@@ -47,9 +47,9 @@ jobs:
- ConfigParams
- PlatformTestingParams
with:
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
# tests_directory: ""
unittest_directory: platform
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
@@ -70,17 +70,17 @@ jobs:
commands: |
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'htmlmypy'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
-uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@dev
+uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
-directory: ${{ needs.ConfigParams.outputs.package_directors }}
+directory : ${{ needs.ConfigParams.outputs.package_directors }}
# fail_below: 70

Package:
@@ -91,31 +91,46 @@ jobs:
- PlatformTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
+- ConfigParams
- UnitTestingParams
- UnitTesting
- PlatformTesting
with:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
-coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
-coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
+coverage_report_xml_directory: ${{ needs.ConfigParams.outputs.coverage_report_xml_directory }}
+coverage_report_xml_filename: ${{ needs.ConfigParams.outputs.coverage_report_xml_filename }}
+coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
+coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
+coverage_report_json_filename: ${{ needs.ConfigParams.outputs.coverage_report_json_filename }}
+coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
+coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
+codecov: true
+codacy: true
secrets:
-codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
+CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
+- ConfigParams
- UnitTestingParams
- UnitTesting
- PlatformTesting
with:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
-merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
+testsuite-summary-name: ${{ needs.ConfigParams.outputs.package_fullname }}
+merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
+codecov: true
+dorny: true
+secrets:
+CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
@@ -141,7 +156,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
-uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@dev
+uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -10,6 +10,10 @@ jobs:
with:
package_namespace: pyExamples
package_name: Extensions
+codecov: true
+codacy: true
+dorny: true
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
3 .github/workflows/_Checking_Nightly.yml vendored
@@ -51,6 +51,7 @@ jobs:
actions: write
# attestations: write
with:
+can-fail: true
prerelease: true
replacements: |
version=4.2.0
@@ -88,6 +89,7 @@ jobs:
actions: write
# attestations: write
with:
+can-fail: true
replacements: |
version=4.2.0
tool=myTool
@@ -105,6 +107,7 @@ jobs:
inventory-version: 4.2.5
inventory-categories: "kind1,kind2"
assets: |
+# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%
@@ -9,6 +9,11 @@ jobs:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
with:
package_name: pyDummy
+codecov: true
+codacy: true
+dorny: true
+cleanup: false
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
@@ -1,178 +0,0 @@
-# ==================================================================================================================== #
-# Authors: #
-# Patrick Lehmann #
-# Unai Martinez-Corral #
-# #
-# ==================================================================================================================== #
-# Copyright 2020-2024 The pyTooling Authors #
-# #
-# Licensed under the Apache License, Version 2.0 (the "License"); #
-# you may not use this file except in compliance with the License. #
-# You may obtain a copy of the License at #
-# #
-# http://www.apache.org/licenses/LICENSE-2.0 #
-# #
-# Unless required by applicable law or agreed to in writing, software #
-# distributed under the License is distributed on an "AS IS" BASIS, #
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
-# See the License for the specific language governing permissions and #
-# limitations under the License. #
-# #
-# SPDX-License-Identifier: Apache-2.0 #
-# ==================================================================================================================== #
-name: Pipeline
-
-on:
-workflow_dispatch:
-
-jobs:
-
-# This job is a workaround for global variables
-# See https://github.com/actions/runner/issues/480
-Params:
-uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
-with:
-name: ToolName
-# Optional
-system_list: 'ubuntu windows msys2 macos'
-python_version: '3.10'
-python_version_list: '3.8 3.9 3.10'
-
-UnitTesting:
-uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
-needs:
-- Params
-with:
-jobs: ${{ needs.Params.outputs.python_jobs }}
-# Optional
-requirements: '-r tests/requirements.txt'
-pacboy: >-
-python-pip:p
-python-wheel:p
-python-coverage:p
-python-lxml:p
-mingw_requirements: '-r tests/requirements.mingw.txt'
-tests_directory: 'tests'
-unittest_directory: 'unit'
-artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}
-
-Coverage:
-uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
-needs:
-- Params
-with:
-artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
-# Optional
-python_version: ${{ needs..Params.outputs.python_version }}
-requirements: '-r tests/requirements.txt'
-tests_directory: 'tests'
-unittest_directory: 'unit'
-secrets:
-codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
-
-StaticTypeCheck:
-uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
-needs:
-- Params
-with:
-commands: |
-mypy --junit-xml StaticTypingSummary.xml --html-report htmlmypy -p ToolName
-html_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
-junit_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
-# Optional
-python_version: ${{ needs..Params.outputs.python_version }}
-requirements: '-r tests/requirements.txt'
-html_report: 'htmlmypy'
-junit_report: 'StaticTypingSummary.xml'
-allow_failure: true
-
-PublishTestResults:
-uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
-needs:
-- UnitTesting
-- StaticTypeCheck
-with:
-# Optional
-report_files: artifacts/**/*.xml
-
-Package:
-uses: pyTooling/Actions/.github/workflows/Package.yml@main
-needs:
-- Params
-- Coverage
-with:
-artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
-# Optional
-python_version: ${{ needs..Params.outputs.python_version }}
-requirements: 'wheel'
-
-Release:
-uses: pyTooling/Actions/.github/workflows/Release.yml@main
-if: startsWith(github.ref, 'refs/tags')
-needs:
-- UnitTesting
-- Coverage
-- StaticTypeCheck
-- Package
-
-PublishOnPyPI:
-uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
-if: startsWith(github.ref, 'refs/tags')
-needs:
-- Params
-- Release
-- Package
-with:
-artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
-# Optional
-python_version: ${{ needs..Params.outputs.python_version }}
-requirements: 'wheel twine'
-secrets:
-PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
-
-VerifyDocs:
-uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
-needs:
-- Params
-with:
-# Optional
-python_version: ${{ needs..Params.outputs.python_version }}
-
-BuildTheDocs:
-uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main
-needs:
-- Params
-- VerifyDocs
-with:
-artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
-
-PublishToGitHubPages:
-uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
-needs:
-- Params
-- BuildTheDocs
-- Coverage
-- StaticTypeCheck
-with:
-doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
-# Optional
-coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
-typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
-
-ArtifactCleanUp:
-uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
-needs:
-- Params
-- PublishTestResults
-- Coverage
-- StaticTypeCheck
-- BuildTheDocs
-- PublishToGitHubPages
-with:
-package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
-remaining: |
-${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
-${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
-${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
-${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
-${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
@@ -100,7 +100,7 @@ As shown in the screenshots above, the expected order is:

### Example pipeline

-[ExamplePipeline.yml](ExamplePipeline.yml) is an example Workflow which uses all of the Reusable Workflows.
+ExamplePipeline.yml is an example Workflow which uses all of the Reusable Workflows.
Python package/tool developers can copy it into their repos, in order to use al the reusable workflows straightaway.
Minimal required modifications are the following:

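For orientation, a caller repository's pipeline built on these reusable workflows might start roughly like the following sketch. It is illustrative only: the job names, the ToolName placeholder, and the selected inputs mirror the removed ExamplePipeline.yml shown above and do not form a complete pipeline.

name: Pipeline
on:
  workflow_dispatch:
jobs:
  # Workaround for global variables, see https://github.com/actions/runner/issues/480
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      name: ToolName
  UnitTesting:
    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
    needs:
      - Params
    with:
      jobs: ${{ needs.Params.outputs.python_jobs }}
      requirements: '-r tests/requirements.txt'
      artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}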
2 dist/requirements.txt vendored
@@ -1,2 +1,2 @@
wheel ~= 0.45
-twine ~= 6.0
+twine ~= 6.1
@@ -1,9 +1,9 @@
-r ../requirements.txt

-pyTooling ~= 8.0
+pyTooling ~= 8.4

# Enforce latest version on ReadTheDocs
-sphinx ~= 8.1
+sphinx ~= 8.2
docutils ~= 0.21
docutils_stubs ~= 0.0.22

@@ -15,5 +15,5 @@ sphinxcontrib-mermaid ~= 1.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
-sphinx_autodoc_typehints ~= 2.5
+sphinx_autodoc_typehints ~= 3.1
sphinx_reports ~= 0.7
@@ -64,11 +64,11 @@
:target: https://pyTooling.github.io/pyTooling/

.. # Gitter
-.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
+.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community
-.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
+.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
-__copyright__ = "2017-2024, Patrick Lehmann"
+__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.4.4"
__keywords__ = ["GitHub Actions"]
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
-__copyright__ = "2017-2024, Patrick Lehmann"
+__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.14.8"
__keywords__ = ["GitHub Actions"]
@@ -1,8 +1,8 @@
[build-system]
requires = [
-"setuptools ~= 75.5",
+"setuptools ~= 78.1",
"wheel ~= 0.45",
-"pyTooling ~= 8.0"
+"pyTooling ~= 8.4"
]
build-backend = "setuptools.build_meta"

@@ -21,7 +21,10 @@ namespace_packages = true
html_report = "report/typing"

[tool.pytest]
-junit_xml = "report/unit/TestReportSummary.xml"
+junit_xml = "report/unit/UnittestReportSummary.xml"

+[tool.pyedaa-reports]
+junit_xml = "report/unit/unittest.xml"
+
[tool.pytest.ini_options]
addopts = "--tb=native"
@@ -5,7 +5,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
-# Copyright 2020-2024 The pyTooling Authors #
+# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -1 +1 @@
-pyTooling ~= 8.0
+pyTooling ~= 8.4
2 setup.py
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -1,13 +1,13 @@
-r ../requirements.txt

# Coverage collection
-Coverage ~= 7.6
+Coverage ~= 7.8

# Test Runner
pytest ~= 8.3
-pytest-cov ~= 6.0
+pytest-cov ~= 6.1

# Static Type Checking
-mypy ~= 1.13
+mypy ~= 1.15
-typing_extensions ~= 4.12
+typing_extensions ~= 4.13
lxml ~= 5.3
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
-# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #