Mirror of https://github.com/pyTooling/Actions.git, synced 2026-02-14 20:16:56 +08:00.
Compare commits
23 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1c42072471 | |
| | 74afc5a42a | |
| | 2e5a79e0c2 | |
| | db99e35dec | |
| | 6cfc6e0f8f | |
| | 5adddda1a1 | |
| | 91289c4257 | |
| | 527e94b245 | |
| | f11c335674 | |
| | 5bed864443 | |
| | 37ec436eb4 | |
| | 6a7a4212c3 | |
| | f5b6f17d4e | |
| | 883238547a | |
| | 7cd852db58 | |
| | ce0d30fe3f | |
| | 34dacf7bcf | |
| | 48090e113d | |
| | e082d77e7a | |
| | 181035b0ba | |
| | 643f95bbb6 | |
| | 424b75ca96 | |
| | f0610331b9 | |
.btd.yml (new file): 9 lines added
@@ -0,0 +1,9 @@
input: doc
output: _build
requirements: requirements.txt
target: gh-pages
formats: [ html ]
images:
base: btdi/sphinx:pytooling
latex: btdi/latex
theme: https://codeload.GitHub.com/buildthedocs/sphinx.theme/tar.gz/v1
.github/workflows/ApplicationTesting.yml (vendored): 7 lines changed
@@ -89,7 +89,7 @@ jobs:
uses: actions/checkout@v4

- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v4
uses: actions/download-artifact@v4
with:
name: ${{ inputs.wheel }}
path: install
@@ -255,10 +255,9 @@ jobs:

- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
uses: pyTooling/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit
path: TestReportSummary.xml
path: report/unit/TestReportSummary.xml
if-no-files-found: error
retention-days: 1
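For context, ApplicationTesting.yml is a reusable workflow (`workflow_call`), so the inputs touched in these hunks are supplied by a caller job. A minimal, hypothetical caller sketch (the package and artifact names are placeholders, not taken from this diff):

```yaml
# Hypothetical caller; 'jobs' comes from Parameters.yml, 'wheel' names a previously uploaded package artifact.
name: Pipeline
on: [ push ]
jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_name: myPackage                              # placeholder
  AppTesting:
    uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
    needs:
      - Params
    with:
      jobs: ${{ needs.Params.outputs.python_jobs }}
      wheel: myPackage-packages                            # placeholder artifact name
      apptest_xml_artifact: myPackage-AppTestReports-XML   # placeholder artifact name
```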
.github/workflows/ArtifactCleanUp.yml (vendored): 7 lines changed
@@ -25,11 +25,6 @@ name: ArtifactCleanUp
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
package:
description: 'Artifacts to be removed on not tagged runs.'
required: true
@@ -43,7 +38,7 @@ on:
jobs:
ArtifactCleanUp:
name: 🗑️ Artifact Cleanup
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04

steps:
- name: 🗑️ Delete package Artifacts
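The `package` and `remaining` inputs referenced above are passed by a caller such as CompletePipeline.yml. A trimmed, hypothetical example (artifact names are placeholders):

```yaml
name: Pipeline
on: [ push ]
jobs:
  ArtifactCleanUp:
    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
    with:
      package: myPackage-packages          # deleted on non-tagged runs (placeholder name)
      remaining: |
        myPackage-UnitTestReports-XML-*
        myPackage-CodeCoverage-HTML-*
```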
.github/workflows/BuildTheDocs.yml (vendored): 8 lines changed
@@ -37,9 +37,6 @@ jobs:
runs-on: ubuntu-24.04

steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v4

@@ -50,11 +47,10 @@ jobs:

- name: 📤 Upload 'documentation' artifacts
if: inputs.artifact != ''
uses: pyTooling/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
working-directory: doc/_build/html
path: '*'
path: doc/_build/html
retention-days: 1

- name: '📓 Publish site to GitHub Pages'
.github/workflows/CheckDocumentation.yml (vendored): 7 lines changed
@@ -24,11 +24,6 @@ name: Check Documentation
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
@@ -47,7 +42,7 @@ on:
jobs:
DocCoverage:
name: 👀 Check documentation coverage
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
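CheckDocumentation.yml runs a documentation-coverage check against a package directory; the caller supplies `python_version` and `directory`. A hypothetical invocation (the directory name is a placeholder):

```yaml
name: Pipeline
on: [ push ]
jobs:
  DocCoverage:
    uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
    with:
      python_version: '3.13'
      directory: myNamespace/myPackage   # placeholder package directory
```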
.github/workflows/CompletePipeline.yml (vendored): 318 lines, removed in this comparison
@@ -1,318 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Namespace Package

on:
workflow_call:
inputs:
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: true
type: string
unittest_python_version:
description: 'Python version.'
required: false
default: '3.13'
type: string
unittest_python_version_list:
description: 'Space separated list of Python versions to run tests with.'
required: false
default: '3.9 3.10 3.11 3.12 3.13'
type: string
unittest_system_list:
description: 'Space separated list of systems to run tests on.'
required: false
default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
type: string
unittest_include_list:
description: 'Space separated list of system:python items to be included into the list of test.'
required: false
default: ''
type: string
unittest_exclude_list:
description: 'Space separated list of system:python items to be excluded from the list of test.'
required: false
default: ''
type: string
unittest_disable_list:
description: 'Space separated list of system:python items to be disabled from the list of test.'
required: false
default: ''
type: string
apptest_python_version:
description: 'Python version.'
required: false
default: '3.13'
type: string
apptest_python_version_list:
description: 'Space separated list of Python versions to run tests with.'
required: false
default: ""
type: string
apptest_system_list:
description: 'Space separated list of systems to run tests on.'
required: false
default: 'ubuntu windows macos macos-arm ucrt64'
type: string
apptest_include_list:
description: 'Space separated list of system:python items to be included into the list of test.'
required: false
default: ''
type: string
apptest_exclude_list:
description: 'Space separated list of system:python items to be excluded from the list of test.'
required: false
default: ''
type: string
apptest_disable_list:
description: 'Space separated list of system:python items to be disabled from the list of test.'
required: false
default: ''
type: string
secrets:
PYPI_TOKEN:
description: "Token for pushing releases to PyPI."
required: false
CODACY_PROJECT_TOKEN:
description: "Token for pushing coverage results to Codacy."
required: false

jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}

UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.unittest_python_version }}
python_version_list: ${{ inputs.unittest_python_version_list }}
system_list: ${{ inputs.unittest_system_list }}
include_list: ${{ inputs.unittest_include_list }}
exclude_list: ${{ inputs.unittest_exclude_list }}
disable_list: ${{ inputs.unittest_disable_list }}

AppTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.apptest_python_version }}
python_version_list: ${{ inputs.apptest_python_version_list }}
system_list: ${{ inputs.apptest_system_list }}
include_list: ${{ inputs.apptest_include_list }}
exclude_list: ${{ inputs.apptest_exclude_list }}
disable_list: ${{ inputs.apptest_disable_list }}

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
requirements: "-r tests/unit/requirements.txt"
# pacboy: "msys/git python-lxml:p"
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}

StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
commands: |
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'report/typing'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
# fail_below: 70

Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
# needs:
# - AppTestingParams
# - UnitTestingParams
# - Package
# with:
# jobs: ${{ needs.AppTestingParams.outputs.python_jobs }}
# wheel: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
- PublishTestResults
- PublishCoverageResults
# - VerifyDocs
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-ubuntu-native-3.12
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
needs:
- UnitTestingParams
- PublishCoverageResults
- PublishTestResults
- Documentation
with:
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

# PDFDocumentation:
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
# needs:
# - UnitTestingParams
# - Documentation
# with:
# document: pyEDAA.ProjectModel
# latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
# pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
needs:
- UnitTestingParams
- Documentation
# - PDFDocumentation
- PublishCoverageResults
- StaticTypeCheck
with:
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- Package
# - AppTesting
- PublishToGitHubPages

PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams
- ReleasePage
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
requirements: -r dist/requirements.txt
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- UnitTestingParams
- UnitTesting
- StaticTypeCheck
- Documentation
# - PDFDocumentation
- PublishTestResults
- PublishCoverageResults
- PublishToGitHubPages
# - PublishOnPyPI
- IntermediateCleanUp
with:
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
remaining: |
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
# ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}-*
# ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
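CompletePipeline.yml (removed in this comparison) bundled the whole job graph above behind a single `workflow_call` interface, so a consuming repository only had to provide the package coordinates and, optionally, matrix overrides. Roughly, and with placeholder names:

```yaml
name: Pipeline
on: [ push ]
jobs:
  CompletePipeline:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_namespace: myNamespace                      # optional; empty for plain packages
      package_name: myPackage
      unittest_python_version_list: '3.10 3.11 3.12 3.13' # overrides the default list
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```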
.github/workflows/CoverageCollection.yml (vendored): 20 lines changed
@@ -25,11 +25,6 @@ name: Coverage Collection
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
@@ -68,17 +63,11 @@ jobs:

Coverage:
name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04

steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
@@ -163,17 +152,16 @@ jobs:

- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
uses: pyTooling/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
if-no-files-found: error
retention-days: 1

- name: 📊 Publish coverage at CodeCov
continue-on-error: true
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v3
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests
.github/workflows/ExtractConfiguration.yml (vendored): 194 lines, removed in this comparison
@@ -1,194 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Extract Configuration

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.12'
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: true
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
default: 'pyproject.toml'
type: string

outputs:
package_fullname:
description: ""
value: ${{ jobs.Extract.outputs.package_fullname }}
package_directory:
description: ""
value: ${{ jobs.Extract.outputs.package_directory }}
mypy_prepare_command:
description: ""
value: ${{ jobs.Extract.outputs.mypy_prepare_command }}
coverage_report_html_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
coverage_report_xml_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml_directory }}
coverage_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml }}
coverage_report_json_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json_directory }}
coverage_report_json:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json }}

jobs:
Extract:
name: 📓 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U wheel tomli

- name: 🔁 Full package name and directory
id: getPackageName
shell: python
run: |
from os import getenv
from pathlib import Path
from textwrap import dedent

namespace = "${{ inputs.package_namespace }}".strip()
name = "${{ inputs.package_name }}".strip()

if namespace == "" or namespace == ".":
fullname = f"{name}"
directory = f"{name}"
mypy_prepare_command = ""
else:
fullname = f"{namespace}.{name}"
directory = f"{namespace}/{name}"
mypy_prepare_command = f"touch {namespace}/__init__.py"

github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
package_fullname={fullname}
package_directory={directory}
mypy_prepare_command={mypy_prepare_command}
"""))

- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
from textwrap import dedent

print(f"Python: {version}")

from tomli import load as tomli_load

htmlDirectory = Path("htmlcov")
xmlFile = Path("./coverage.xml")
jsonFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)

# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = Path(coverageRCSettings["html"]["directory"])
xmlFile = Path(coverageRCSettings["xml"]["output"])
jsonFile = Path(coverageRCSettings["json"]["output"])
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory.as_posix()}
coverage_report_xml_directory={xmlFile.parent.as_posix()}
coverage_report_xml={xmlFile.as_posix()}
coverage_report_json_directory={jsonFile.parent.as_posix()}
coverage_report_json={jsonFile.as_posix()}
"""))

print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
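ExtractConfiguration.yml (also removed here) turned `package_namespace`/`package_name` plus the coverage configuration into job outputs consumed by later stages. A hypothetical caller, together with one typical consumer of its outputs (all names are placeholders):

```yaml
name: Pipeline
on: [ push ]
jobs:
  ConfigParams:
    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
    with:
      package_namespace: myNamespace   # placeholder
      package_name: myPackage          # placeholder
  StaticTypeCheck:
    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
    needs:
      - ConfigParams
    with:
      commands: |
        ${{ needs.ConfigParams.outputs.mypy_prepare_command }}
        mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
      html_report: 'report/typing'
      html_artifact: myPackage-StaticTyping-HTML   # placeholder artifact name
```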
.github/workflows/IntermediateCleanUp.yml (vendored): 9 lines changed
@@ -24,11 +24,6 @@ name: Intermediate Cleanup
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
sqlite_coverage_artifacts_prefix:
description: 'Prefix for SQLite coverage artifacts'
required: false
@@ -41,7 +36,7 @@ on:
jobs:
IntermediateCleanUp:
name: 🗑️ Intermediate Artifact Cleanup
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04
steps:
- name: 🗑️ Delete SQLite coverage artifacts from matrix jobs
uses: geekyeggo/delete-artifact@v5
@@ -50,7 +45,7 @@ jobs:
with:
name: ${{ inputs.sqlite_coverage_artifacts_prefix }}*

- name: 🗑️ Delete JUnit XML artifacts from matrix jobs
- name: 🗑️ Delete XML coverage artifacts from matrix jobs
uses: geekyeggo/delete-artifact@v5
if: inputs.xml_unittest_artifacts_prefix != ''
continue-on-error: true
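The two prefix inputs referenced above identify per-matrix artifacts that can be deleted once they have been merged. A hypothetical caller (prefixes are placeholders):

```yaml
name: Pipeline
on: [ push ]
jobs:
  IntermediateCleanUp:
    uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
    with:
      sqlite_coverage_artifacts_prefix: myPackage-CodeCoverage-SQLite-   # placeholder
      xml_unittest_artifacts_prefix: myPackage-UnitTestReports-XML-      # placeholder
```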
.github/workflows/LaTeXDocumentation.yml (vendored): 11 lines changed
@@ -24,11 +24,6 @@ name: LaTeX Documentation
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
document:
description: 'LaTeX root document without *.tex extension.'
required: true
@@ -47,10 +42,10 @@ on:
jobs:
PDFDocumentation:
name: 📓 Converting LaTeX Documentation to PDF
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04
steps:
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v4
uses: actions/download-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
path: latex
@@ -62,7 +57,7 @@ jobs:
root_file: ${{ inputs.document }}.tex

- name: 📤 Upload 'PDF Documentation' artifact
uses: pyTooling/upload-artifact@v4
uses: actions/upload-artifact@v4
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}
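LaTeXDocumentation.yml consumes the LaTeX sources produced by SphinxDocumentation.yml and republishes the rendered PDF. A hypothetical call (document and artifact names are placeholders):

```yaml
name: Pipeline
on: [ push ]
jobs:
  PDFDocumentation:
    uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
    with:
      document: myPackage                             # LaTeX root document without the *.tex extension
      latex_artifact: myPackage-Documentation-LaTeX   # placeholder artifact name
      pdf_artifact: myPackage-Documentation-PDF       # placeholder artifact name
```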
.github/workflows/NightlyRelease.yml (vendored): 410 lines, removed in this comparison
@@ -1,410 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Nightly

on:
workflow_call:
inputs:
ubuntu_image:
description: 'Name of the Ubuntu image.'
required: false
default: 'ubuntu-24.04'
type: string
nightly_name:
description: 'Name of the nightly release.'
required: false
default: 'nightly'
type: string
nightly_title:
description: 'Title of the nightly release.'
required: false
default: ''
type: string
nightly_description:
description: 'Description of the nightly release.'
required: false
default: 'Release of artifacts from latest CI pipeline.'
type: string
draft:
description: 'Specify if this is a draft.'
required: false
default: false
type: boolean
prerelease:
description: 'Specify if this is a pre-release.'
required: false
default: false
type: boolean
latest:
description: 'Specify if this is the latest release.'
required: false
default: false
type: boolean
replacements:
description: 'Multi-line string containing search=replace patterns.'
required: false
default: ''
type: string
assets:
description: 'Multi-line string containing artifact:file:title asset descriptions.'
required: true
type: string
tarball-name:
type: string
required: false
default: '__pyTooling_upload_artifact__.tar'

jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
permissions:
contents: write
actions: write
# attestations: write

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

- name: 🔧 Install zstd
run: sudo apt-get install -y --no-install-recommends zstd

- name: 📑 Delete (old) Release Page
id: deleteReleasePage
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"

export GH_TOKEN=${{ github.token }}

echo -n "Deleting release '${{ inputs.nightly_name }}' ... "
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
elif [[ "${message}" == "release not found" ]]; then
echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi

- name: 📑 (Re)create (new) Release Page
id: createReleasePage
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"

export GH_TOKEN=${{ github.token }}

addDraft="--draft"

if ${{ inputs.prerelease }}; then
addPreRelease="--prerelease"
fi

if ! ${{ inputs.latest }}; then
addLatest="--latest=false"
fi

if [[ "${{ inputs.nightly_title }}" != "" ]]; then
addTitle=("--title" "${{ inputs.nightly_title }}")
fi

cat <<'EOF' > __NoTeS__.md
${{ inputs.nightly_description }}
EOF
if [[ -s __NoTeS__.md ]]; then
addNotes=("--notes-file" "__NoTeS__.md")
fi

# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue

pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
done <<<'${{ inputs.replacements }}'

# Add footer line
cat <<EOF >> __NoTeS__.md

--------
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
EOF

echo "Creating release '${{ inputs.nightly_name }}' ... "
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi

- name: 📥 Download artifacts and upload as assets
id: uploadAssets
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"

export GH_TOKEN=${{ github.token }}

Replace() {
line="$1"
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue

pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
line="${line//"%$pattern%"/"$replacement"}"
done <<<'${{ inputs.replacements }}'
echo "$line"
}

ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" ]]; then
continue
fi

# split assetLine colon separated triple: artifact:asset:title
artifact="${assetLine%%:*}"
remaining="${assetLine#*:}"
asset="${remaining%%:*}"
title="${remaining##*:}"

# remove leading whitespace
asset="${asset#"${asset%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"

# apply replacements
asset="$(Replace "${asset}")"
title="$(Replace "${title}")"

echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
echo -n " Checked asset for duplicates ... "
if [[ -n "${assetFilenames[$asset]}" ]]; then
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
assetFilenames[$asset]=1
fi

# Download artifact by artifact name
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
else
echo " downloading '${artifact}' ... "
echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
ERRORS=1
continue
fi
downloadedArtifacts[$artifact]=1

echo -n " Checking for embedded tarball ... "
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
echo -e "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"

pushd "${artifact}" > /dev/null

echo -n " Extracting embedded tarball ... "
tar -xf "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi

popd > /dev/null
else
echo -e "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi

# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
echo -n " checking asset '${artifact}/${asset}' ... "
if [[ "${asset}" == !*.zip ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
if [[ $? -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"

if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --gzip --file="../${asset}" *
)
retCode=$?
fi

if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"

if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --zstd --file="../${asset}" *
)
retCode=$?
fi

if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=1
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${artifact}/${asset}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=1
continue
fi

# Upload asset to existing release page
echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
fi
done <<<'${{ inputs.assets }}'

echo "Inspecting downloaded artifacts ..."
tree -L 3 .

if [[ $ERROR -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi

- name: 📑 Remove draft state from Release Page
if: ${{ ! inputs.draft }}
run: |
set +e

ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"

export GH_TOKEN=${{ github.token }}

# Remove draft-state from release page
echo -n "Remove draft-state from release '${title}' ... "
gh release edit --draft=false "${{ inputs.nightly_name }}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi
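NightlyRelease.yml (removed in this comparison) rebuilt a rolling release from pipeline artifacts. Judging from the script above, each non-empty line of the `assets` input is an `artifact:file:title` triple, and the file part may carry a `!` or `$` prefix to have the whole artifact compressed into a zip/tgz/zst archive first, with `%pattern%` placeholders substituted via `replacements`. A hypothetical invocation (artifact, file, and pattern names are placeholders):

```yaml
name: Nightly
on:
  schedule:
    - cron: '0 2 * * *'
jobs:
  NightlyPage:
    uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
    permissions:
      contents: write
      actions: write
    with:
      prerelease: true
      nightly_title: 'Nightly build'
      replacements: |
        version=4.2.0
      assets: |
        myPackage-packages: myPackage-%version%.tar.gz: Source distribution
        myPackage-Documentation-HTML: !myPackage-doc-%version%.zip: HTML documentation (zipped)
```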
.github/workflows/Package.yml (vendored): 15 lines changed
@@ -25,11 +25,6 @@ name: Package
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
@@ -49,14 +44,11 @@ jobs:

Package:
name: 📦 Package in Source and Wheel Format
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
@@ -106,10 +98,9 @@ jobs:
run: python setup.py bdist_wheel

- name: 📤 Upload wheel artifact
uses: pyTooling/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
working-directory: dist
path: '*'
path: dist/
if-no-files-found: error
retention-days: 1
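Package.yml builds the source and wheel distributions and uploads them under a caller-chosen artifact name, which later jobs (PublishOnPyPI, ApplicationTesting) download again. A hypothetical call:

```yaml
name: Pipeline
on: [ push ]
jobs:
  Package:
    uses: pyTooling/Actions/.github/workflows/Package.yml@main
    with:
      python_version: '3.13'
      artifact: myPackage-packages   # placeholder artifact name
```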
.github/workflows/Parameters.yml (vendored): 80 lines changed
@@ -25,40 +25,24 @@ name: Parameters
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
name:
description: 'Name of the tool.'
required: false
default: ''
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: false
default: ''
required: true
type: string
python_version:
description: 'Python version.'
required: false
default: '3.13'
default: '3.12'
type: string
python_version_list:
description: 'Space separated list of Python versions to run tests with.'
required: false
default: '3.9 3.10 3.11 3.12 3.13'
default: '3.8 3.9 3.10 3.11 3.12'
type: string
system_list:
description: 'Space separated list of systems to run tests on.'
required: false
default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
default: 'ubuntu windows macos-arm mingw64 ucrt64'
type: string
include_list:
description: 'Space separated list of system:python items to be included into the list of test.'
@@ -83,17 +67,17 @@ on:
windows_image:
description: 'The used GitHub Action image for Windows based jobs.'
required: false
default: 'windows-2022'
default: 'windows-latest'
type: string
macos_intel_image:
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
required: false
default: 'macos-13'
default: 'macos-latest-large'
type: string
macos_arm_image:
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
description: 'The used GitHub Action image for macOS (ARM arm64) based jobs.'
required: false
default: 'macos-14'
default: 'macos-latest'
type: string

outputs:
@@ -112,8 +96,7 @@ on:

jobs:
Parameters:
name: ✎ Generate pipeline parameters
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
runs-on: ubuntu-24.04
outputs:
python_version: ${{ steps.params.outputs.python_version }}
python_jobs: ${{ steps.params.outputs.python_jobs }}
@@ -131,9 +114,7 @@ jobs:
from textwrap import dedent
from typing import Iterable

package_namespace = "${{ inputs.package_namespace }}".strip()
package_name = "${{ inputs.package_name }}".strip()
name = "${{ inputs.name }}".strip()
name = "${{ inputs.name }}".strip()
python_version = "${{ inputs.python_version }}".strip()
systems = "${{ inputs.system_list }}".strip()
versions = "${{ inputs.python_version_list }}".strip()
@@ -141,15 +122,9 @@ jobs:
exclude_list = "${{ inputs.exclude_list }}".strip()
disable_list = "${{ inputs.disable_list }}".strip()

if name == "":
if package_namespace == "" or package_namespace == ".":
name = f"{package_name}"
else:
name = f"{package_namespace}.{package_name}"

currentMSYS2Version = "3.11"
currentAlphaVersion = "3.14"
currentAlphaRelease = "3.14.0-alpha.1"
currentAlphaVersion = "3.13"
currentAlphaRelease = "3.13.0-alpha.1"

if systems == "":
print("::error title=Parameter::system_list is empty.")
@@ -176,8 +151,8 @@ jobs:
else:
disabled = [disable.strip() for disable in disable_list.split(" ")]

if "3.8" in versions:
print("::warning title=Deprecated::Support for Python 3.8 ended in 2024.10.")
if "3.7" in versions:
print("::warning title=Deprecated::Support for Python 3.7 ended in 2023.06.27.")
if "msys2" in systems:
print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.")
if currentAlphaVersion in versions:
@@ -189,13 +164,13 @@ jobs:
data = {
# Python and PyPy versions supported by "setup-python" action
"python": {
"3.8": { "icon": "⚫", "until": "2024.10" },
"3.9": { "icon": "🔴", "until": "2025.10" },
"3.10": { "icon": "🟠", "until": "2026.10" },
"3.11": { "icon": "🟡", "until": "2027.10" },
"3.7": { "icon": "⚫", "until": "2023.06.27" },
"3.8": { "icon": "🔴", "until": "2024.10" },
"3.9": { "icon": "🟠", "until": "2025.10" },
"3.10": { "icon": "🟡", "until": "2026.10" },
"3.11": { "icon": "🟢", "until": "2027.10" },
"3.12": { "icon": "🟢", "until": "2028.10" },
"3.13": { "icon": "🟢", "until": "2029.10" },
"3.14": { "icon": "🟣", "until": "2030.10" },
# "3.13": { "icon": "🟣", "until": "2028.10" },
"pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
"pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
"pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
@@ -206,7 +181,7 @@ jobs:
"ubuntu": { "icon": "🐧", "runs-on": "${{ inputs.ubuntu_image }}", "shell": "bash", "name": "Linux (x86-64)" },
"windows": { "icon": "🪟", "runs-on": "${{ inputs.windows_image }}", "shell": "pwsh", "name": "Windows (x86-64)" },
"macos": { "icon": "🍎", "runs-on": "${{ inputs.macos_intel_image }}", "shell": "bash", "name": "macOS (x86-64)" },
"macos-arm": { "icon": "🍏", "runs-on": "${{ inputs.macos_arm_image }}", "shell": "bash", "name": "macOS (aarch64)" },
"macos-arm": { "icon": "🍏", "runs-on": "${{ inputs.macos_arm_image }}", "shell": "bash", "name": "macOS (arm64)" },
},
# Runtimes provided by MSYS2
"runtime": {
@@ -315,6 +290,18 @@ jobs:
"documentation_pdf": f"{name}-Documentation-PDF",
}

# Deprecated structure
params = {
"python_version": python_version,
"artifacts": {
"unittesting": f"{artifact_names['unittesting_xml']}",
"coverage": f"{artifact_names['codecoverage_html']}",
"typing": f"{artifact_names['statictyping_html']}",
"package": f"{artifact_names['package_all']}",
"doc": f"{artifact_names['documentation_html']}",
}
}

print("Parameters:")
print(f" python_version: {python_version}")
print(f" python_jobs ({len(jobs)}):\n" +
@@ -332,6 +319,7 @@ jobs:
python_version={python_version}
python_jobs={json_dumps(jobs)}
artifact_names={json_dumps(artifact_names)}
params={json_dumps(params)}
"""))

- name: Verify out parameters
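Parameters.yml computes the test matrix and canonical artifact names once and exposes them as job outputs; downstream jobs read them through `needs`. A hypothetical caller pairing it with UnitTesting.yml (package and override values are placeholders):

```yaml
name: Pipeline
on: [ push ]
jobs:
  UnitTestingParams:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_name: myPackage                      # placeholder
      python_version_list: '3.10 3.11 3.12 3.13'   # overrides the default list
  UnitTesting:
    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
    needs:
      - UnitTestingParams
    with:
      jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
      unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
```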
34
.github/workflows/PublishCoverageResults.yml
vendored
34
.github/workflows/PublishCoverageResults.yml
vendored
@@ -24,15 +24,6 @@ name: Publish Code Coverage Results
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
coverage_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-CodeCoverage-*'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
|
||||
required: false
|
||||
@@ -66,26 +57,18 @@ on:
|
||||
jobs:
|
||||
PublishCoverageResults:
|
||||
name: 📊 Publish Code Coverage Results
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
runs-on: ubuntu-24.04
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: Download Artifacts
|
||||
uses: pyTooling/download-artifact@v4
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.coverage_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -psh artifacts
|
||||
|
||||
- name: 🔧 Install coverage and tomli
|
||||
run: |
|
||||
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
|
||||
@@ -182,7 +165,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}
|
||||
path: .coverage
|
||||
@@ -192,7 +175,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
@@ -202,7 +185,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
@@ -212,18 +195,17 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📊 Publish code coverage at CodeCov
|
||||
if: inputs.CodeCov == true
|
||||
continue-on-error: true
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
flags: unittests
|
||||
|
||||
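For context, a typical instantiation of this reusable workflow, modelled on the pipeline checks further down in this compare; the artifact key `codecoverage_html` comes from the Parameters job, the remaining values are placeholders:

  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
    needs:
      - UnitTestingParams
      - UnitTesting
    with:
      coverage_artifacts_pattern: '*-CodeCoverage-*'
      coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
    secrets: inherit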
11
.github/workflows/PublishOnPyPI.yml
vendored
@@ -25,11 +25,6 @@ name: Publish on PyPI
on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
@@ -53,14 +48,14 @@ jobs:

  PublishOnPyPI:
    name: 🚀 Publish to PyPI
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    runs-on: ubuntu-24.04

    steps:
      - name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
        uses: pyTooling/download-artifact@v4
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.artifact }}
          path: dist
          path: dist/

      - name: 🐍 Setup Python ${{ inputs.python_version }}
        uses: actions/setup-python@v5
28
.github/workflows/PublishTestResults.yml
vendored
@@ -25,15 +25,6 @@ name: Publish Unit Test Results
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
unittest_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-UnitTestReportSummary-*'
|
||||
type: string
|
||||
merged_junit_artifact:
|
||||
description: 'Name of the merged JUnit Test Summary artifact.'
|
||||
required: false
|
||||
@@ -44,11 +35,6 @@ on:
|
||||
required: false
|
||||
default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
|
||||
type: string
|
||||
publish:
|
||||
description: 'Publish test report summary via Dorny Test-Reporter'
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
report_title:
|
||||
description: 'Title of the summary report in the pipeline''s sidebar'
|
||||
required: false
|
||||
@@ -58,7 +44,7 @@ on:
|
||||
jobs:
|
||||
PublishTestResults:
|
||||
name: 📊 Publish Test Results
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
runs-on: ubuntu-24.04
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
@@ -66,15 +52,10 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download Artifacts
|
||||
uses: pyTooling/download-artifact@v4
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.unittest_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -psh artifacts
|
||||
|
||||
- name: 🔧 Install pyEDAA.Reports (JUunit Parser and Merger)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports
|
||||
@@ -88,13 +69,12 @@ jobs:
|
||||
|
||||
- name: 🔁 Merge JUnit Unit Test Summaries
|
||||
run: |
|
||||
pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
|
||||
pyedaa-reports -v unittest "--merge=pytest-junit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=ant-junit:Unittesting.xml"
|
||||
echo "cat Unittesting.xml"
|
||||
cat Unittesting.xml
|
||||
|
||||
- name: 📊 Publish Unit Test Results
|
||||
uses: dorny/test-reporter@v1
|
||||
if: inputs.publish && inputs.report_title != ''
|
||||
with:
|
||||
name: ${{ inputs.report_title }}
|
||||
path: Unittesting.xml
|
||||
@@ -102,7 +82,7 @@ jobs:
|
||||
|
||||
- name: 📤 Upload merged 'JUnit Test Summary' artifact
|
||||
if: inputs.merged_junit_artifact != ''
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.merged_junit_artifact }}
|
||||
path: Unittesting.xml
|
||||
|
||||
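For reference, the pipeline check further below wires this workflow up roughly as follows; the merge arguments are the ones shown there, while the artifact name and report title are placeholders:

  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
    needs:
      - UnitTesting
      - PlatformTesting
    with:
      additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
      merged_junit_artifact: MyProject-MergedJUnit    # placeholder artifact name
      report_title: Unit Test Results                 # placeholder sidebar title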
13
.github/workflows/PublishToGitHubPages.yml
vendored
@@ -25,11 +25,6 @@ name: Publish to GitHub Pages
on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      doc:
        description: 'Name of the documentation artifact.'
        required: true
@@ -49,28 +44,28 @@ jobs:

  PublishToGitHubPages:
    name: 📚 Publish to GH-Pages
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    runs-on: ubuntu-24.04

    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4

      - name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
        uses: pyTooling/download-artifact@v4
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.doc }}
          path: public

      - name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
        if: ${{ inputs.coverage != '' }}
        uses: pyTooling/download-artifact@v4
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.coverage }}
          path: public/coverage

      - name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
        if: ${{ inputs.typing != '' }}
        uses: pyTooling/download-artifact@v4
        uses: actions/download-artifact@v4
        with:
          name: ${{ inputs.typing }}
          path: public/typing
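A sketch of how a pipeline feeds the three artifacts above into this job, following the CompletePipeline verification later in this compare; only `doc` is required, `coverage` and `typing` may be left empty:

  PublishToGitHubPages:
    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
    needs:
      - UnitTestingParams
      - Documentation
      - PublishCoverageResults
      - StaticTypeCheck
    with:
      doc:      ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
      coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
      typing:   ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}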
9
.github/workflows/Release.yml
vendored
@@ -24,17 +24,12 @@ name: Release

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string

jobs:

  Release:
    name: 📝 Create 'Release Page' on GitHub
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    runs-on: ubuntu-24.04

    steps:
      - name: 🔁 Extract Git tag from GITHUB_REF
157
.github/workflows/SphinxDocumentation.yml
vendored
@@ -24,11 +24,6 @@ name: Documentation
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
@@ -39,15 +34,16 @@ on:
|
||||
required: false
|
||||
default: '-r doc/requirements.txt'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
|
||||
required: false
|
||||
default: 'pyproject.toml'
|
||||
type: string
|
||||
doc_directory:
|
||||
description: 'Path to the directory containing documentation (Sphinx working directory).'
|
||||
required: false
|
||||
default: 'doc'
|
||||
type: string
|
||||
coverage_report_json_directory:
|
||||
description: ''
|
||||
required: true
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the coverage JSON artifact.'
|
||||
required: false
|
||||
@@ -75,16 +71,13 @@ on:
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
Sphinx-HTML:
|
||||
name: 📓 HTML Documentation using Sphinx and Python ${{ inputs.python_version }}
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
Sphinx:
|
||||
name: 📓 Documentation generation using Sphinx and Python ${{ inputs.python_version }}
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🔧 Install graphviz
|
||||
run: sudo apt-get install -y --no-install-recommends graphviz
|
||||
@@ -96,22 +89,84 @@ jobs:
|
||||
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check -U wheel
|
||||
python -m pip install --disable-pip-version-check -U wheel tomli
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 🔁 Extract configurations from pyproject.toml
|
||||
id: getVariables
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from sys import version
|
||||
from textwrap import dedent
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
from tomli import load as tomli_load
|
||||
|
||||
htmlDirectory = Path("htmlcov")
|
||||
xmlFile = Path("./coverage.xml")
|
||||
jsonFile = Path("./coverage.json")
|
||||
coverageRC = "${{ inputs.coverage_config }}".strip()
|
||||
|
||||
# Read output paths from 'pyproject.toml' file
|
||||
if coverageRC == "pyproject.toml":
|
||||
pyProjectFile = Path("pyproject.toml")
|
||||
if pyProjectFile.exists():
|
||||
with pyProjectFile.open("rb") as file:
|
||||
pyProjectSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
|
||||
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
|
||||
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
|
||||
else:
|
||||
print(f"File '{pyProjectFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Read output paths from '.coveragerc' file
|
||||
elif len(coverageRC) > 0:
|
||||
coverageRCFile = Path(coverageRC)
|
||||
if coverageRCFile.exists():
|
||||
with coverageRCFile.open("rb") as file:
|
||||
coverageRCSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(coverageRCSettings["html"]["directory"])
|
||||
xmlFile = Path(coverageRCSettings["xml"]["output"])
|
||||
jsonFile = Path(coverageRCSettings["json"]["output"])
|
||||
else:
|
||||
print(f"File '{coverageRCFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
coverage_report_html_directory={htmlDirectory.as_posix()}
|
||||
coverage_report_xml_directory={xmlFile.parent.as_posix()}
|
||||
coverage_report_xml={xmlFile.as_posix()}
|
||||
coverage_report_json_directory={jsonFile.parent.as_posix()}
|
||||
coverage_report_json={jsonFile.as_posix()}
|
||||
"""))
|
||||
|
||||
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
uses: pyTooling/download-artifact@v4
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.unittest_xml_artifact }}
|
||||
path: ${{ inputs.unittest_xml_directory }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
uses: pyTooling/download-artifact@v4
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ inputs.coverage_report_json_directory }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
|
||||
|
||||
- name: ☑ Generate HTML documentation
|
||||
if: inputs.html_artifact != ''
|
||||
@@ -121,55 +176,6 @@ jobs:
|
||||
cd "${{ inputs.doc_directory || '.' }}"
|
||||
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
|
||||
|
||||
- name: 📤 Upload 'HTML Documentation' artifact
|
||||
if: inputs.html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.html_artifact }}
|
||||
working-directory: ${{ inputs.doc_directory }}/_build/html
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
Sphinx-LaTeX:
|
||||
name: 📓 LaTeX Documentation using Sphinx and Python ${{ inputs.python_version }}
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🔧 Install graphviz
|
||||
run: sudo apt-get install -y --no-install-recommends graphviz
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check -U wheel
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.unittest_xml_artifact }}
|
||||
path: ${{ inputs.unittest_xml_directory }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ inputs.coverage_report_json_directory }}
|
||||
|
||||
- name: ☑ Generate LaTeX documentation
|
||||
if: inputs.latex_artifact != ''
|
||||
# continue-on-error: true
|
||||
@@ -180,13 +186,22 @@ jobs:
|
||||
sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex
|
||||
# --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . _build/html
|
||||
|
||||
- name: 📤 Upload 'HTML Documentation' artifact
|
||||
if: inputs.html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.html_artifact }}
|
||||
path: ${{ inputs.doc_directory }}/_build/html
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'LaTeX Documentation' artifact
|
||||
if: inputs.latex_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.latex_artifact }}
|
||||
working-directory: ${{ inputs.doc_directory }}/_build/latex
|
||||
path: '*'
|
||||
path: ${{ inputs.doc_directory }}/_build/latex
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
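As a usage sketch, the CompletePipeline verification later in this compare instantiates the documentation job roughly like this; the HTML and LaTeX artifacts then feed the PDF and GitHub Pages jobs:

  Documentation:
    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
    needs:
      - ConfigParams
      - UnitTestingParams
      - PublishTestResults
      - PublishCoverageResults
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
      html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
      latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}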
14
.github/workflows/StaticTypeCheck.yml
vendored
@@ -25,11 +25,6 @@ name: Static Type Check
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
@@ -68,7 +63,7 @@ jobs:
|
||||
|
||||
StaticTypeCheck:
|
||||
name: 👀 Check Static Typing using Python ${{ inputs.python_version }}
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
@@ -89,18 +84,17 @@ jobs:
|
||||
- name: 📤 Upload 'Static Typing Report' HTML artifact
|
||||
if: ${{ inputs.html_artifact != '' }}
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.html_artifact }}
|
||||
working-directory: ${{ inputs.html_report }}
|
||||
path: '*'
|
||||
path: ${{ inputs.html_report }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Static Typing Report' JUnit artifact
|
||||
if: ${{ inputs.junit_artifact != '' }}
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.junit_artifact }}
|
||||
path: ${{ inputs.junit_report }}
|
||||
|
||||
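Usage note: the `commands` input is a free-form script, so the mypy invocation must write its HTML report into the directory named by `html_report`. A sketch following the pipeline check below (`pyDummy` is that example's package, not a fixed value):

  StaticTypeCheck:
    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
    needs:
      - UnitTestingParams
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      commands: |
        mypy --html-report htmlmypy -p pyDummy
      html_report: 'htmlmypy'
      html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}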
30
.github/workflows/UnitTesting.yml
vendored
@@ -147,9 +147,6 @@ jobs:
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
# Package Manager steps
|
||||
- name: 🔧 Install homebrew dependencies on macOS
|
||||
@@ -210,7 +207,7 @@ jobs:
|
||||
|
||||
packages = {
|
||||
"coverage": "python-coverage:p",
|
||||
"docstr_coverage": "python-pyaml:p",
|
||||
"docstr_coverage": "python-pyyaml:p",
|
||||
"igraph": "igraph:p",
|
||||
"jinja2": "python-markupsafe:p",
|
||||
"lxml": "python-lxml:p",
|
||||
@@ -218,13 +215,12 @@ jobs:
|
||||
"markupsafe": "python-markupsafe:p",
|
||||
"pip": "python-pip:p",
|
||||
"pyyaml": "python-pyyaml:p",
|
||||
"ruamel.yaml": "python-ruamel-yaml:p",
|
||||
# "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
"ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
"sphinx": "python-markupsafe:p",
|
||||
"tomli": "python-tomli:p",
|
||||
"wheel": "python-wheel:p",
|
||||
"pyedaa.projectmodel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyedaa.reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
}
|
||||
subPackages = {
|
||||
"pytooling": {
|
||||
@@ -442,18 +438,17 @@ jobs:
|
||||
- name: 📤 Upload 'TestReportSummary.xml' artifact
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: report/unit
|
||||
path: TestReportSummary.xml
|
||||
path: report/unit/TestReportSummary.xml
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
|
||||
# if: inputs.unittest_html_artifact != ''
|
||||
# continue-on-error: true
|
||||
# uses: pyTooling/upload-artifact@v4
|
||||
# uses: actions/upload-artifact@v4
|
||||
# with:
|
||||
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
# path: ${{ steps.getVariables.outputs.unittest_report_html_directory }}
|
||||
@@ -463,7 +458,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: .coverage
|
||||
@@ -474,7 +469,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
@@ -484,7 +479,7 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
@@ -494,10 +489,9 @@ jobs:
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
7
.github/workflows/VerifyDocs.yml
vendored
@@ -25,11 +25,6 @@ name: Verify examples
on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
@@ -40,7 +35,7 @@ jobs:

  VerifyDocs:
    name: 👍 Verify example snippets using Python ${{ inputs.python_version }}
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    runs-on: ubuntu-24.04

    steps:
      - name: ⏬ Checkout repository
10
.github/workflows/_Checking_ArtifactCleanup.yml
vendored
@@ -6,10 +6,10 @@ on:

jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
    with:
      name: Example
      python_version_list: "3.12 3.13"
      python_version_list: "3.10 3.11"
      system_list: "ubuntu windows"

  Testing:
@@ -25,7 +25,7 @@ jobs:
        run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt

      - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
        uses: pyTooling/upload-artifact@v4
        uses: actions/upload-artifact@v4
        with:
          name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
          path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
        run: echo "Package" >> package.txt

      - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
        uses: pyTooling/upload-artifact@v4
        uses: actions/upload-artifact@v4
        with:
          name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
          path: package.txt
@@ -50,7 +50,7 @@ jobs:
          retention-days: 1

  ArtifactCleanUp:
    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r1
    needs:
      - Params
      - Testing
@@ -1,15 +0,0 @@
name: Verification of Pipeline Templates (Namespace Package)

on:
  push:
  workflow_dispatch:

jobs:
  NamespacePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_namespace: pyExamples
      package_name: Extensions
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
101
.github/workflows/_Checking_Nightly.yml
vendored
@@ -1,101 +0,0 @@
|
||||
name: Verification of Nightly Releases
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Build:
|
||||
name: Build something
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: 🖉 Build 1
|
||||
run: |
|
||||
echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
|
||||
echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: document
|
||||
path: |
|
||||
document1.txt
|
||||
*.log
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 🖉 Program
|
||||
run: |
|
||||
echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: other
|
||||
path: |
|
||||
*.txt
|
||||
*.py
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
NightlyPage:
|
||||
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
|
||||
needs:
|
||||
- Build
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
with:
|
||||
prerelease: true
|
||||
replacements: |
|
||||
version=4.2.0
|
||||
tool=myTool
|
||||
prog=program
|
||||
nightly_title: "Nightly Release"
|
||||
nightly_description: |
|
||||
This *nightly* release contains all latest and important artifacts created by GHDL's CI pipeline.
|
||||
|
||||
# GHDL %version%
|
||||
|
||||
GHDL offers the simulator and synthesis tool for VHDL. GHDL can be build for various backends:
|
||||
* `gcc` - using the GCC compiler framework
|
||||
* `mcode` - in memory code generation
|
||||
* `llvm` - using the LLVM compiler framework
|
||||
* `llvm-jit` - using the LLVM compiler framework, but in memory
|
||||
|
||||
The following asset categories are provided for GHDL:
|
||||
* macOS x64-64 builds as TAR/GZ file
|
||||
* macOS aarch64 builds as TAR/GZ file
|
||||
* Ubuntu 24.04 LTS builds as TAR/GZ file
|
||||
* Windows builds for standalone usage (without MSYS2) as ZIP file
|
||||
* MSYS2 packages as TAR/ZST file
|
||||
|
||||
# pyGHDL %version%
|
||||
|
||||
The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
|
||||
In addition to the low-level binding layer, pyGHDL offers:
|
||||
* a Language Server Protocol (LSP) instance for e.g. live code checking by editors
|
||||
* a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)
|
||||
|
||||
The following asset categories are provided for pyGHDL:
|
||||
* Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so`
|
||||
* Platform specific Python wheel package for Windows incl. `pyGHDL...dll`
|
||||
assets: |
|
||||
document: document1.txt: Documentation
|
||||
document: build.log: Logfile - %tool% - %tool%
|
||||
other: document1.txt: SBOM - %version%
|
||||
other: %prog%.py: Application - %tool% - %version%
|
||||
document:!archive1.zip: Archive 1 - zip
|
||||
document:!archive2.tgz: Archive 2 - tgz
|
||||
document:!archive3.tar.gz: Archive 3 - tar.gz
|
||||
document:!archive4.tzst: Archive 4 - tzst
|
||||
document:!archive5.tar.zst:Archive 5 - tar.zst
|
||||
document:$archive6.tgz: Archive 6 - tgz + dir
|
||||
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
|
||||
document:$archive8.tzst: Archive 8 - tzst + dir
|
||||
document:$archive9.tar.zst:Archive 9 - tar.zst + dir
|
||||
124
.github/workflows/_Checking_Parameters.yml
vendored
@@ -6,54 +6,54 @@ on:
|
||||
|
||||
jobs:
|
||||
Params_Default:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
|
||||
Params_PythonVersions:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"
|
||||
python_version_list: "3.9 3.10 pypy-3.8 pypy-3.9"
|
||||
|
||||
Params_Systems:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
system_list: "windows mingw32 mingw64"
|
||||
|
||||
Params_Include:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "ubuntu:3.12 ubuntu:3.13"
|
||||
python_version_list: "3.10"
|
||||
system_list: "ubuntu windows macos"
|
||||
include_list: "ubuntu:3.11 ubuntu:3.12"
|
||||
|
||||
Params_Exclude:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
exclude_list: "windows:3.12 windows:3.13"
|
||||
python_version_list: "3.10"
|
||||
system_list: "ubuntu windows macos"
|
||||
exclude_list: "windows:3.10 windows:3.11"
|
||||
|
||||
Params_Disable:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
disable_list: "windows:3.12 windows:3.13"
|
||||
python_version_list: "3.10"
|
||||
system_list: "ubuntu windows macos"
|
||||
disable_list: "windows:3.10 windows:3.11"
|
||||
|
||||
Params_All:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12 3.13"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "windows:3.10 windows:3.11 windows:3.13"
|
||||
exclude_list: "macos:3.12 macos:3.13"
|
||||
python_version_list: "3.10 3.11"
|
||||
system_list: "ubuntu windows macos"
|
||||
include_list: "windows:3.8 windows:3.9 windows:3.12"
|
||||
exclude_list: "macos:3.10 macos:3.11"
|
||||
|
||||
Params_Check:
|
||||
needs:
|
||||
@@ -80,10 +80,12 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
|
||||
expectedJobs.remove("macos:3.8")
|
||||
expectedJobs.remove("macos:3.9")
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
@@ -114,10 +116,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -143,10 +142,13 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.11", "3.12", "pypy-3.9", "pypy-3.10"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.9", "3.10", "pypy-3.8", "pypy-3.9"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
|
||||
expectedJobs.remove("macos:3.9")
|
||||
expectedJobs.remove("macos:pypy-3.8")
|
||||
expectedJobs.remove("macos:pypy-3.9")
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
@@ -177,10 +179,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -206,8 +205,8 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.8", "3.9", "3.10", "3.11", "3.12"]
|
||||
expectedSystems = ["windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.11", "mingw64:3.11"]
|
||||
expectedName = "Example"
|
||||
@@ -240,10 +239,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -269,9 +265,9 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.10"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["ubuntu:3.11", "ubuntu:3.12"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
@@ -303,10 +299,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -332,9 +325,9 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.10"]
|
||||
expectedSystems = ["ubuntu", "macos"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
@@ -366,10 +359,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -395,9 +385,9 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.10"]
|
||||
expectedSystems = ["ubuntu", "macos"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
@@ -429,10 +419,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
@@ -458,10 +445,10 @@ jobs:
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "macos-arm", "windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.10", "windows:3.11", "windows:3.13"]
|
||||
expectedPythonVersion = "3.12"
|
||||
expectedPythons = ["3.10", "3.11"]
|
||||
expectedSystems = ["ubuntu", "windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.8", "windows:3.9", "windows:3.12"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
@@ -492,10 +479,7 @@ jobs:
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
|
||||
@@ -1,31 +1,26 @@
|
||||
name: Verification of Job Templates
|
||||
name: Verification of Complete Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ConfigParams:
|
||||
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
|
||||
with:
|
||||
package_name: pyDummy
|
||||
|
||||
UnitTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: pyDummy
|
||||
python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
|
||||
# disable_list: "windows:pypy-3.10"
|
||||
python_version_list: "3.8 3.9 3.10 3.11 3.12 pypy-3.8 pypy-3.9 pypy-3.10"
|
||||
disable_list: "windows:pypy-3.10"
|
||||
|
||||
PlatformTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r1
|
||||
with:
|
||||
name: Platform
|
||||
python_version_list: ""
|
||||
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
with:
|
||||
@@ -38,7 +33,7 @@ jobs:
|
||||
# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
PlatformTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r1
|
||||
needs:
|
||||
- PlatformTestingParams
|
||||
with:
|
||||
@@ -53,7 +48,7 @@ jobs:
|
||||
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
# Coverage:
|
||||
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
|
||||
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r1
|
||||
# needs:
|
||||
# - UnitTestingParams
|
||||
# with:
|
||||
@@ -63,41 +58,18 @@ jobs:
|
||||
# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r1
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
commands: |
|
||||
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
|
||||
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
|
||||
mypy --html-report htmlmypy -p pyDummy
|
||||
html_report: 'htmlmypy'
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
DocCoverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
directory: ${{ needs.ConfigParams.outputs.package_directors }}
|
||||
# fail_below: 70
|
||||
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
# - Coverage
|
||||
- PlatformTesting
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
|
||||
PublishCoverageResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
@@ -112,63 +84,44 @@ jobs:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@dev
|
||||
needs:
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
with:
|
||||
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
|
||||
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
# - Coverage
|
||||
- PlatformTesting
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
|
||||
# VerifyDocs:
|
||||
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
|
||||
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r1
|
||||
# needs:
|
||||
# - UnitTestingParams
|
||||
# with:
|
||||
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
|
||||
Documentation:
|
||||
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r1
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
# - VerifyDocs
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
|
||||
# unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
|
||||
IntermediateCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PublishCoverageResults
|
||||
- PublishTestResults
|
||||
- Documentation
|
||||
with:
|
||||
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
|
||||
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
|
||||
|
||||
PDFDocumentation:
|
||||
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
with:
|
||||
document: actions
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
# - PDFDocumentation
|
||||
- BuildTheDocs
|
||||
# - Coverage
|
||||
- PublishCoverageResults
|
||||
- StaticTypeCheck
|
||||
@@ -178,7 +131,7 @@ jobs:
|
||||
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
ReleasePage:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@r1
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTesting
|
||||
@@ -189,7 +142,7 @@ jobs:
|
||||
- PublishToGitHubPages
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r1
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
@@ -203,20 +156,18 @@ jobs:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PlatformTestingParams
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
# - Coverage
|
||||
- StaticTypeCheck
|
||||
- PlatformTesting
|
||||
- Documentation
|
||||
# - PDFDocumentation
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
# - BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
- IntermediateCleanUp
|
||||
- PublishCoverageResults
|
||||
- PublishTestResults
|
||||
with:
|
||||
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
@@ -1,14 +0,0 @@
name: Verification of Pipeline Templates (Simple Package)

on:
  push:
  workflow_dispatch:

jobs:
  SimplePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_name: pyDummy
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
2
.gitignore
vendored
@@ -31,7 +31,7 @@ doc/pyDummy/**/*.*
# BuildTheDocs
doc/_theme/**/*.*

# PyCharm project files
# IntelliJ project files
/.idea/workspace.xml

# Git files
2
.idea/Actions.iml
generated
@@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
2
dist/requirements.txt
vendored
@@ -1,2 +1,2 @@
wheel ~= 0.45
wheel ~= 0.44
twine ~= 5.1
@@ -44,7 +44,7 @@ Complex Example

The following instantiation example creates 3 jobs from the same template, but with differing input parameters. The
first job `UnitTestingParams` might be used to create a job matrix of unit tests. It creates the cross of default
systems (Windows, Ubuntu, macOS, MinGW64, UCRT64) and the given list of Python versions including some mypy versions. In
systems (Windows, Ubuntu, MacOS, MinGW64, UCRT64) and the given list of Python versions including some mypy versions. In
addition a list of excludes (marked as :deletion:`deletions`) and includes (marked as :addition:`additions`) is handed
over resulting in the following combinations:

@@ -55,7 +55,7 @@ over resulting in the following combinations:
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
| Ubuntu 🐧 | ubuntu:3.8 | ubuntu:3.9 | ubuntu:3.10 | ubuntu:3.11 | :addition:`ubuntu:3.12` | | | ubuntu:pypy-3.9 | ubuntu:pypy-3.10 |
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
| macOS 🍎 | macos:3.8 | macos:3.9 | macos:3.10 | macos:3.11 | :addition:`macos:3.12` | | | macos:pypy-3.9 | macos:pypy-3.10 |
| MacOS 🍎 | macos:3.8 | macos:3.9 | macos:3.10 | macos:3.11 | :addition:`macos:3.12` | | | macos:pypy-3.9 | macos:pypy-3.10 |
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
| MSYS 🟪 | | | | | | | | | |
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
@@ -138,7 +138,7 @@ python_version_list
| Parameter Name | Required | Type | Default |
+======================+==========+==========+============================+
| python_version_list | optional | string | ``3.8 3.9 3.10 3.11 3.12`` |
+----------------------+----------+----------+----------------------------+
+----------------------+----------+----------+-------------------------- -+

Space separated list of CPython versions and/or mypy version to run tests with.

@@ -197,7 +197,7 @@ Space separated list of systems to run tests on.
+------+-----------+------------------------------+-----------------------------------------------------------------+
| 🐧 | Ubuntu | Ubuntu 22.04 (LTS) (latest) | |
+------+-----------+------------------------------+-----------------------------------------------------------------+
| 🍎 | macOS | macOS Monterey 12 (latest) | While this marked latest, macOS Ventura 13 is already provided. |
| 🍎 | MacOS | macOS Monterey 12 (latest) | While this marked latest, macOS Ventura 13 is already provided. |
+------+-----------+------------------------------+-----------------------------------------------------------------+
| 🟪 | MSYS | | |
+------+-----------+------------------------------+-----------------------------------------------------------------+
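To make the "Complex Example" above concrete, a minimal instantiation with include and exclude lists might look as follows; the project name and the version and system lists are placeholders, only the input names are taken from the documentation above:

  UnitTestingParams:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      name: MyProject                                   # placeholder
      python_version_list: "3.9 3.10 3.11 3.12 3.13"    # placeholder version list
      system_list: "ubuntu windows macos macos-arm"
      include_list: "ubuntu:pypy-3.10"
      exclude_list: "windows:3.9"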
115
doc/_static/css/override.css
vendored
@@ -1,115 +0,0 @@
|
||||
/* theme overrides */
|
||||
.rst-content h1,
|
||||
.rst-content h2 {
|
||||
margin-top: 24px;
|
||||
margin-bottom: 6px;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.rst-content h3,
|
||||
.rst-content h4,
|
||||
.rst-content h5,
|
||||
.rst-content h6 {
|
||||
margin-top: 12px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p {
|
||||
margin-bottom: 6px
|
||||
}
|
||||
|
||||
/* general overrides */
|
||||
html {
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
footer {
|
||||
font-size: 95%;
|
||||
text-align: center
|
||||
}
|
||||
|
||||
footer p {
|
||||
margin-bottom: 0px /* 12px */;
|
||||
font-size: 95%
|
||||
}
|
||||
|
||||
section > p,
|
||||
.section p,
|
||||
.simple li {
|
||||
text-align: justify
|
||||
}
|
||||
|
||||
.rst-content .topic-title {
|
||||
font-size: larger;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p.rubric {
|
||||
text-decoration: underline;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
/* wyrm overrides */
|
||||
.wy-menu-vertical header,
|
||||
.wy-menu-vertical p.caption {
|
||||
color: #9b9b9b /* #55a5d9 */;
|
||||
padding: 0 0.809em /* 0 1.618em */;
|
||||
margin: 6px 0 0 0 /* 12px 0 0 */;
|
||||
border-top: 1px solid #9b9b9b;
|
||||
}
|
||||
|
||||
.wy-side-nav-search {
|
||||
margin-bottom: 0 /* .809em */;
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc*/
|
||||
}
|
||||
|
||||
.wy-side-nav-search input[type=text] {
|
||||
border-radius: 0px /* 50px */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search .wy-dropdown > a, .wy-side-nav-search > a {
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc;*/
|
||||
margin-bottom: 0.404em /* .809em */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search > div.version {
|
||||
margin: 0 0 6px 0;
|
||||
/* BTD: */
|
||||
/*margin-top: -.4045em;*/
|
||||
}
|
||||
|
||||
.wy-nav .wy-menu-vertical a:hover {
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: 1600px /* 800px */ ;
|
||||
}
|
||||
|
||||
.wy-nav-top {
|
||||
background: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
/* Sphinx Design */
|
||||
.sd-tab-set {
|
||||
margin: 0
|
||||
}
|
||||
|
||||
.sd-tab-set > label {
|
||||
padding-top: .5em;
|
||||
padding-right: 1em;
|
||||
padding-bottom: .5em;
|
||||
padding-left: 1em
|
||||
}
|
||||
|
||||
.sd-container-fluid {
|
||||
padding-left: 0;
|
||||
padding-right: 0;
|
||||
}
|
||||
113
doc/conf.py
@@ -14,7 +14,7 @@ ROOT = Path(__file__).resolve().parent
sys_path.insert(0, abspath("."))
sys_path.insert(0, abspath(".."))
sys_path.insert(0, abspath("../pyDummy"))
# sys_path.insert(0, abspath("_extensions"))
sys_path.insert(0, abspath("_extensions"))


# ==============================================================================
@@ -23,11 +23,9 @@ sys_path.insert(0, abspath("../pyDummy"))
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
githubNamespace = "pyTooling"
githubProject = "Actions"
project = "pyDummy"
project = "Actions"

packageInformationFile = Path(f"../{project}/__init__.py")
packageInformationFile = Path(f"../pyDummy/__init__.py")
versionInformation = extractVersionInformation(packageInformationFile)

author = versionInformation.Author
@@ -75,15 +73,30 @@ except Exception as ex:
# ==============================================================================
# Options for HTML output
# ==============================================================================
html_theme = "sphinx_rtd_theme"
html_theme_options = {
    "logo_only": True,
    "vcs_pageview_mode": 'blob',
    "navigation_depth": 5,
}
html_css_files = [
    'css/override.css',
]
html_context = {}
ctx = ROOT / "context.json"
if ctx.is_file():
    html_context.update(loads(ctx.open('r').read()))

if (ROOT / "_theme").is_dir():
    html_theme_path = ["."]
    html_theme = "_theme"
    html_theme_options = {
        "logo_only": True,
        "home_breadcrumbs": False,
        "vcs_pageview_mode": 'blob',
        # "body_max_width": None
        # "navigation_depth": 5,
    }
elif find_spec("sphinx_rtd_theme") is not None:
    html_theme = "sphinx_rtd_theme"
    html_theme_options = {
        "logo_only": True,
        "vcs_pageview_mode": 'blob',
        # "navigation_depth": 5,
    }
else:
    html_theme = "alabaster"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
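
For illustration: the theme fallback above probes for an installed theme with find_spec, which presumably comes from importlib.util elsewhere in this conf.py. A minimal sketch of that probing pattern:

    from importlib.util import find_spec

    # find_spec() returns a ModuleSpec if the package can be imported and None otherwise,
    # so an optional theme can be detected without actually importing it.
    if find_spec("sphinx_rtd_theme") is not None:
        html_theme = "sphinx_rtd_theme"   # mirrors the elif branch above
    else:
        html_theme = "alabaster"          # Sphinx's built-in fallback theme
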
@@ -94,7 +107,7 @@ html_logo = str(Path(html_static_path[0]) / "logo.png")
html_favicon = str(Path(html_static_path[0]) / "icon.png")

# Output file base name for HTML help builder.
htmlhelp_basename = f"{githubProject}Doc"
htmlhelp_basename = "ActionsDoc"

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
@@ -147,10 +160,10 @@ latex_elements = {
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ( master_doc,
        f"{githubProject}.tex",
        f"The {githubProject} Documentation",
        f"Patrick Lehmann",
        f"manual"
        "Actions.tex",
        "The pyTooling Actions Documentation",
        "Patrick Lehmann",
        "manual"
    ),
]

@@ -161,6 +174,7 @@ latex_documents = [
extensions = [
# Standard Sphinx extensions
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
    "sphinx.ext.extlinks",
    "sphinx.ext.intersphinx",
    "sphinx.ext.inheritance_diagram",
@@ -172,12 +186,10 @@ extensions = [
# SphinxContrib extensions
    "sphinxcontrib.mermaid",
# Other extensions
    "sphinx_design",
    "sphinx_copybutton",
    "sphinx_fontawesome",
    "sphinx_autodoc_typehints",
    "sphinx_inline_tabs",
    "autoapi.sphinx",
    "sphinx_reports",
# User defined extensions
]


@@ -209,11 +221,11 @@ autodoc_typehints = "both"
# Sphinx.Ext.ExtLinks
# ==============================================================================
extlinks = {
    "gh": (f"https://GitHub.com/%s", "gh:%s"),
    "ghissue": (f"https://GitHub.com/{githubNamespace}/{githubProject}/issues/%s", "issue #%s"),
    "ghpull": (f"https://GitHub.com/{githubNamespace}/{githubProject}/pull/%s", "pull request #%s"),
    "ghsrc": (f"https://GitHub.com/{githubNamespace}/{githubProject}/blob/main/%s", None),
    "wiki": (f"https://en.wikipedia.org/wiki/%s", None),
    "gh": ("https://GitHub.com/%s", "gh:%s"),
    "ghissue": ("https://GitHub.com/pyTooling/Actions/issues/%s", "issue #%s"),
    "ghpull": ("https://GitHub.com/pyTooling/Actions/pull/%s", "pull request #%s"),
    "ghsrc": ("https://GitHub.com/pyTooling/Actions/blob/main/%s", None),
    "wiki": ("https://en.wikipedia.org/wiki/%s", None),
}

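
For illustration: Sphinx's extlinks extension maps each role to a (URL template, caption template) pair and substitutes the role argument into the %s placeholders. A minimal sketch of that expansion, reusing one entry from the mapping above (the issue number is arbitrary):

    extlinks = {
        "ghissue": ("https://GitHub.com/pyTooling/Actions/issues/%s", "issue #%s"),
    }

    url_template, caption_template = extlinks["ghissue"]
    print(url_template % "42")      # https://GitHub.com/pyTooling/Actions/issues/42
    print(caption_template % "42")  # issue #42 -- roughly what :ghissue:`42` renders as
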
@@ -253,53 +265,18 @@ todo_link_only = True


# ==============================================================================
# sphinx-reports
# Sphinx.Ext.Coverage
# ==============================================================================
# report_unittest_testsuites = {
#     "src": {
#         "name": f"{project}",
#         "xml_report": "../report/unit/unittest.xml",
#     }
# }
# report_codecov_packages = {
#     "src": {
#         "name": f"{project}",
#         "json_report": "../report/coverage/coverage.json",
#         "fail_below": 80,
#         "levels": "default"
#     }
# }
# report_doccov_packages = {
#     "src": {
#         "name": f"{project}",
#         "directory": f"../{project}",
#         "fail_below": 80,
#         "levels": "default"
#     }
# }


# ==============================================================================
# Sphinx_Design
# ==============================================================================
# sd_fontawesome_latex = True
coverage_show_missing_items = True


# ==============================================================================
# AutoAPI.Sphinx
# ==============================================================================
autoapi_modules = {
    f"{project}": {
        "template": "package",
        "output": project,
    "pyDummy": {
        "template": "module",
        "output": "pyDummy",
        "override": True
    }
}

for directory in [mod for mod in Path(f"../{project}").iterdir() if mod.is_dir() and mod.name != "__pycache__"]:
    print(f"Adding module rule for '{project}.{directory.name}'")
    autoapi_modules[f"{project}.{directory.name}"] = {
        "template": "module",
        "output": project,
        "override": True
    }

@@ -3,5 +3,5 @@ Code Coverage Report

Code coverage report generated with `pytest <https://github.com/pytest-dev/pytest>`__ and `Coverage.py <https://github.com/nedbat/coveragepy/tree/master>`__.

.. #report:code-coverage::
.. report:code-coverage::
   :packageid: src

@@ -1,19 +1,20 @@
-r ../requirements.txt

pyTooling ~= 8.0
pyTooling ~= 6.6

# Enforce latest version on ReadTheDocs
sphinx ~= 8.1
docutils ~= 0.21
docutils_stubs ~= 0.0.22

# ReadTheDocs Theme
sphinx_rtd_theme ~= 3.0
sphinx ~= 7.4
docutils ~= 0.20

# Sphinx Extenstions
#sphinx.ext.coverage
#sphinxcontrib-actdiag>=0.8.5
sphinxcontrib-mermaid>=0.9.2
#sphinxcontrib-seqdiag>=0.8.5
#sphinxcontrib-textstyle>=0.2.1
#sphinxcontrib-spelling>=2.2.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
sphinx_autodoc_typehints ~= 2.5
sphinx_reports ~= 0.7
sphinx_fontawesome >= 0.0.6
sphinx-inline-tabs >= 2023.4.21
sphinx_autodoc_typehints ~= 2.3
# changelog>=0.3.5

@@ -7,11 +7,11 @@
.. |SHIELD:svg:pyTooling-github| image:: https://img.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub
   :alt: Sourcecode on GitHub
   :height: 22
   :target: https://GitHub.com/pyTooling/Actions
   :target: https://GitHub.com/pyTooling/pyTooling
.. |SHIELD:png:pyTooling-github| image:: https://raster.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub
   :alt: Sourcecode on GitHub
   :height: 22
   :target: https://GitHub.com/pyTooling/Actions
   :target: https://GitHub.com/pyTooling/pyTooling

.. # Sourcecode license
.. |SHIELD:svg:pyTooling-src-license| image:: https://img.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code

@@ -3,5 +3,5 @@ Unittest Summary Report

Unittest report generated with `pytest <https://github.com/pytest-dev/pytest>`__.

.. #report:unittest-summary::
.. report:unittest-summary::
   :reportid: src

@@ -1,101 +0,0 @@
# ==================================================================================================================== #
# _____ _ _ _ _ _ #
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
# |_| |___/ |___/ #
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
#
"""
A module for a set of dummy classes.
"""

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.14.8"
__keywords__ = ["GitHub Actions"]
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"

from pyTooling.Decorators import export, readonly
from pyTooling.Platform import Platform


@export
class Base:
    """
    A base-class for dummy applications.
    """

    _value: int  #: An internal value.

    def __init__(self) -> None:
        """
        Initializes the base-class.
        """
        self._value = 0

    @readonly
    def Value(self) -> int:
        """
        Read-only property to return the internal value.

        :return: Internal value.
        """
        return self._value


@export
class Application(Base):
    """
    A dummy application for demonstration purposes.
    """

    def __init__(self) -> None:
        """
        Initializes the dummy application.
        """
        super().__init__()

        platform = Platform()
        if platform.IsNativeLinux:
            self._value += 1
        elif platform.IsNativeMacOS:
            self._value += 2
        elif platform.IsNativeWindows:
            self._value += 3
        elif platform.IsMSYSOnWindows:
            self._value += 11
        elif platform.IsMinGW32OnWindows:
            self._value += 12
        elif platform.IsMinGW64OnWindows:
            self._value += 13
        elif platform.IsUCRT64OnWindows:
            self._value += 14
        elif platform.IsClang32OnWindows:
            self._value += 15
        elif platform.IsClang64OnWindows:
            self._value += 16

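For illustration, a hedged usage sketch of the removed pyDummy package listed above; the printed value depends on what pyTooling.Platform reports for the current system (e.g. 1 on native Linux, 2 on native macOS, 3 on native Windows, per the branches above):

    from pyDummy import Application

    app = Application()   # Base.__init__ sets the internal value to 0, Application adds a platform offset
    print(app.Value)      # read-only property defined on Base
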
@@ -1,8 +1,8 @@
[build-system]
requires = [
    "setuptools ~= 75.5",
    "wheel ~= 0.45",
    "pyTooling ~= 8.0"
    "setuptools ~= 75.1",
    "wheel ~= 0.44",
    "pyTooling ~= 6.6"
]
build-backend = "setuptools.build_meta"

@@ -21,7 +21,6 @@ namespace_packages = true
html_report = "report/typing"

[tool.pytest.ini_options]
addopts = "--tb=native"
# Don't set 'python_classes = *' otherwise, pytest doesn't search for classes
# derived from unittest.Testcase
python_files = "*"

@@ -1,4 +1,4 @@
FROM python:3.12-slim-bookworm
FROM python:3.9-slim-bullseye
COPY releaser.py /releaser.py
RUN pip install PyGithub --progress-bar off \
    && apt update -qq \

@@ -1 +1 @@
pyTooling ~= 8.0
pyTooling ~= 6.6

316
run.ps1
@@ -1,316 +0,0 @@
[CmdletBinding()]
Param(
  # Clean up all files and directories
  [switch]$clean,

  # Commands
  [switch]$all,
  [switch]$copyall,

  [switch]$doc,
  [switch]$livedoc,
  [switch]$doccov,

  [switch]$unit,
  [switch]$liveunit,
  [switch]$copyunit,

  [switch]$cov,
  [switch]$livecov,
  [switch]$copycov,

  [switch]$type,
  [switch]$livetype,
  [switch]$copytype,

  [switch]$nooutput,

  [switch]$build,
  [switch]$install,

  # Display this help"
  [switch]$help
)

$PackageName = "Actions"

# set default values
$EnableDebug = [bool]$PSCmdlet.MyInvocation.BoundParameters["Debug"]
$EnableVerbose = [bool]$PSCmdlet.MyInvocation.BoundParameters["Verbose"] -or $EnableDebug

# Display help if no command was selected
$help = $help -or ( -not(
  $all -or $copyall -or
  $clean -or
  $doc -or $livedoc -or $doccov -or
  $unit -or $liveunit -or $copyunit -or
  $cov -or $livecov -or $copycov -or
  $type -or $livetype -or $copytype -or
  $build -or $install
  )
)

Write-Host "================================================================================" -ForegroundColor Magenta
Write-Host "$PackageName Documentation Compilation and Assembly Tool" -ForegroundColor Magenta
Write-Host "================================================================================" -ForegroundColor Magenta

if ($help)
{ Get-Help $MYINVOCATION.MyCommand.Path -Detailed
  exit 0
}

if ($all)
{ $doc = $true
  $unit = $true
  # $copyunit = $true
  $cov = $true
  # $copycov = $true
  $type = $true
  $copytype = $true
}
if ($copyall)
{ # $copyunit = $true
  # $copycov = $true
  $copytype = $true
}

if ($clean)
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Cleaning documentation directories ..."
  rm -Force .\doc\$PackageName\*
  .\doc\make.bat clean
  Write-Host -ForegroundColor DarkYellow "[live][BUILD] Cleaning build directories ..."
  rm -Force .\build\bdist.win-amd64
  rm -Force .\build\lib
}

if ($build)
{ Write-Host -ForegroundColor Yellow "[live][BUILD] Cleaning build directories ..."
  rm -Force .\build\bdist.win-amd64
  rm -Force .\build\lib
  Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..."
  py -3.12 -m build --wheel

  Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished"
}
if ($install)
{ if (!([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator"))
  { Write-Host -ForegroundColor Yellow "[live][INSTALL] Installing $PackageName with administrator rights ..."
    $proc = Start-Process pwsh.exe "-NoProfile -ExecutionPolicy Bypass -WorkingDirectory `"$PSScriptRoot`" -File `"$PSCommandPath`" `"-install`"" -Verb RunAs -Wait

    # Write-Host -ForegroundColor Yellow "[live][INSTALL] Wait on administrator console ..."
    # Wait-Process -Id $proc.Id
  }
  else
  { Write-Host -ForegroundColor Cyan "[ADMIN][UNINSTALL] Uninstalling $PackageName ..."
    py -3.12 -m pip uninstall -y $PackageName
    Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Installing $PackageName from wheel ..."
    py -3.12 -m pip install .\dist\$PackageName-6.7.0-py3-none-any.whl

    Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Closing window in 5 seconds ..."
    Start-Sleep -Seconds 5
  }
}

$jobs = @()

if ($livedoc)
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Building documentation using Sphinx ..."

  .\doc\make.bat html --verbose

  Write-Host -ForegroundColor DarkYellow "[live][DOC] Documentation finished"
}
elseif ($doc)
{ Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Building documentation using Sphinx ..."
  Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting Documentation job ..."

  # Compile documentation
  $compileDocFunc = {
    .\doc\make.bat html --verbose
  }
  $docJob = Start-Job -Name "Documentation" -ScriptBlock $compileDocFunc
  # $jobs += $docJob
}


if ($doccov)
{
  .\doc\make.bat coverage
}

if ($liveunit)
{ Write-Host -ForegroundColor DarkYellow "[live][UNIT] Running Unit Tests using pytest ..."

  $env:ENVIRONMENT_NAME = "Windows (x86-64)"
  pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit

  if ($copyunit)
  { cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
    Write-Host -ForegroundColor DarkBlue "[live][UNIT] Copyed unit testing report to 'unittests' directory in HTML directory"
  }

  Write-Host -ForegroundColor DarkYellow "[live][UNIT] Unit Tests finished"
}
elseif ($unit)
{ Write-Host -ForegroundColor DarkYellow "[Job2][UNIT] Running Unit Tests using pytest ..."
  Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting UnitTests jobs ..."

  # Run unit tests
  $runUnitFunc = {
    $env:ENVIRONMENT_NAME = "Windows (x86-64)"
    pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit
  }
  $unitJob = Start-Job -Name "UnitTests" -ScriptBlock $runUnitFunc
  $jobs += $unitJob
}

if ($livecov)
{ Write-Host -ForegroundColor DarkMagenta "[live][COV] Running Unit Tests with coverage ..."

  $env:ENVIRONMENT_NAME = "Windows (x86-64)"
  coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit

  Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to HTML ..."
  coverage html

  Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to XML (Cobertura) ..."
  coverage xml

  Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to JSON ..."
  coverage json

  Write-Host -ForegroundColor DarkMagenta "[live][COV] Write coverage report to console ..."
  coverage report

  if ($copycov)
  { cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
    Write-Host -ForegroundColor DarkMagenta "[live][COV] Copyed code coverage report to 'coverage' directory in HTML directory"
  }

  Write-Host -ForegroundColor DarkMagenta "[live][COV] Coverage finished"
}
elseif ($cov)
{ Write-Host -ForegroundColor DarkMagenta "[live][COV] Running Unit Tests with coverage ..."
  Write-Host -ForegroundColor DarkMagenta "[SCRIPT] Starting Coverage jobs ..."

  # Collect coverage
  $collectCovFunc = {
    $env:ENVIRONMENT_NAME = "Windows (x86-64)"
    coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit

    Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to HTML ..."
    coverage html

    Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to XML (Cobertura) ..."
    coverage xml

    Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to JSON ..."
    coverage json
  }
  $covJob = Start-Job -Name "Coverage" -ScriptBlock $collectCovFunc
  $jobs += $covJob
}

if ($livetype)
{ Write-Host -ForegroundColor DarkCyan "[live][TYPE] Running static type analysis using mypy ..."

  $env:MYPY_FORCE_COLOR = 1
  mypy.exe -p $PackageName

  if ($copytype)
  { cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
    Write-Host -ForegroundColor DarkCyan "[live][TYPE] Copyed typing report to 'typing' directory in HTML directory."
  }

  Write-Host -ForegroundColor DarkCyan "[live][TYPE] Static type analysis finished"
}
elseif ($type)
{ Write-Host -ForegroundColor DarkCyan "[live][TYPE] Running static type analysis using mypy ..."
  Write-Host -ForegroundColor DarkCyan "[SCRIPT] Starting Typing jobs ..."

  # Analyze types
  $analyzeTypesFunc = {
    $env:MYPY_FORCE_COLOR = 1
    mypy.exe -p $PackageName
  }
  $typeJob = Start-Job -Name "Typing" -ScriptBlock $analyzeTypesFunc
  $jobs += $typeJob
}


if ($doc)
{ Write-Host -ForegroundColor DarkGreen "[SCRIPT] Waiting on Documentation job ..."
  Wait-Job -Job $docJob
  Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Documentation finished"
}
if ($jobs.Count -ne 0)
{
  Write-Host -ForegroundColor DarkGreen ( "[SCRIPT] Waiting on {0} jobs ({1}) ..." -f $jobs.Count, (($jobs | %{ $_.Name }) -join ", "))
  Wait-Job -Job $jobs
}


if (-not $liveunit -and $copyunit)
{
  # if ($unit)
  # { Wait-Job -Job $unitJob
  #   Write-Host -ForegroundColor DarkBlue "[Job2][UNIT] Unit tests finished"
  # }
  cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
  Write-Host -ForegroundColor DarkBlue "[post][UNIT] Copyed unit testing report to 'unittests' directory in HTML directory"
}
if (-not ($livecov -or $cov) -and $copycov)
{
  # if ($cov)
  # { Wait-Job -Job $unitJob
  #   Write-Host -ForegroundColor DarkMagenta "[Job3][UNIT] Coverage collection finished"
  # }
  cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
  Write-Host -ForegroundColor DarkMagenta "[post][COV] Copyed code coverage report to 'coverage' directory in HTML directory"
}
if (-not $livetype -and $copytype)
{
  # if ($type)
  # { Wait-Job -Job $typeJob
  #   Write-Host -ForegroundColor DarkCyan "[Job4][UNIT] Static type analysis finished"
  # }
  cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
  Write-Host -ForegroundColor DarkCyan "[post][TYPE] Copyed typing report to 'typing' directory in HTML directory."
}


if ($type)
{ Write-Host -ForegroundColor DarkCyan "================================================================================"
  if (-not $nooutput)
  { Receive-Job -Job $typeJob
  }
  Remove-Job -Job $typeJob
}
if ($doc)
{ Write-Host -ForegroundColor DarkYellow "================================================================================"
  if (-not $nooutput)
  { Receive-Job -Job $docJob
  }
  Remove-Job -Job $docJob
}
if ($unit)
{ Write-Host -ForegroundColor DarkBlue "================================================================================"
  if (-not $nooutput)
  { Receive-Job -Job $unitJob
  }
  Remove-Job -Job $unitJob
}
if ($cov)
{ Write-Host -ForegroundColor DarkMagenta "================================================================================"
  if (-not $nooutput)
  { Receive-Job -Job $covJob
  }
  Remove-Job -Job $covJob

  if ($copycov)
  { cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
    Write-Host -ForegroundColor DarkMagenta "[post][COV] Copyed code coverage report to 'coverage' directory in HTML directory"
  }
}
Write-Host -ForegroundColor DarkGreen "================================================================================"
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Finished"

@@ -8,7 +8,7 @@ print(f"Python: {version}")

def loadRequirementsFile(requirementsFile: Path):
    requirements = []
    with requirementsFile.open("r", encoding="utf-8") as file:
    with requirementsFile.open("r") as file:
        for line in file.readlines():
            line = line.strip()
            if line.startswith("#") or line.startswith("https") or line == "":
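
The hunk above shows only the start of loadRequirementsFile; a hedged sketch of such a loader, assuming the elided remainder simply collects every line that is not a comment, a URL, or blank (handling of nested "-r" includes is omitted here):

    from pathlib import Path

    def loadRequirementsFile(requirementsFile: Path):
        requirements = []
        # Explicit utf-8 keeps the result independent of the runner's locale, as in the changed line above.
        with requirementsFile.open("r", encoding="utf-8") as file:
            for line in file.readlines():
                line = line.strip()
                if line.startswith("#") or line.startswith("https") or line == "":
                    continue
                requirements.append(line)
        return requirements

    print(loadRequirementsFile(Path("requirements.txt")))
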
@@ -84,7 +84,7 @@ for dependency in dependencies:
# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
with github_output.open("a+") as f:
    f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

print(f"GITHUB_OUTPUT:")

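For illustration: GitHub Actions step outputs are key=value lines appended to the file named by the GITHUB_OUTPUT environment variable, which is what the script above does for pacboy_packages. A minimal sketch of the same mechanism (the value below is a made-up placeholder):

    from os import getenv
    from pathlib import Path

    github_output = Path(getenv("GITHUB_OUTPUT"))
    # Append rather than overwrite, so outputs written earlier in the step survive.
    with github_output.open("a+", encoding="utf-8") as f:
        f.write("pacboy_packages=python-pip:p python-lxml:p\n")
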
@@ -71,7 +71,7 @@ data = {
        "ubuntu": {"icon": "🐧", "runs-on": "ubuntu-24.04", "shell": "bash", "name": "Linux (x86-64)"},
        "windows": {"icon": "🪟", "runs-on": "windows-latest", "shell": "pwsh", "name": "Windows (x86-64)"},
        "macos": {"icon": "🍎", "runs-on": "macos-latest-large", "shell": "bash", "name": "macOS (x86-64)"},
        "macos-arm": {"icon": "🍏", "runs-on": "macos-latest", "shell": "bash", "name": "macOS (aarch64)"},
        "macos-arm": {"icon": "🍏", "runs-on": "macos-latest", "shell": "bash", "name": "macOS (arm64)"},
    },
    # Runtimes provided by MSYS2
    "runtime": {

@@ -5,9 +5,9 @@ Coverage ~= 7.6

# Test Runner
pytest ~= 8.3
pytest-cov ~= 6.0
pytest-cov ~= 5.0

# Static Type Checking
mypy ~= 1.13
mypy ~= 1.11
typing_extensions ~= 4.12
lxml ~= 5.3