Updating r7 from v7.0.1

Patrick Lehmann committed 2025-12-20 00:31:50 +01:00
71 changed files with 654 additions and 1869 deletions

View File

@@ -86,10 +86,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install
@@ -262,7 +262,7 @@ jobs:
- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit

View File

@@ -1,72 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Documentation
on:
workflow_call:
inputs:
artifact:
description: 'Name of the documentation artifact.'
required: false
default: ''
type: string
jobs:
BuildTheDocs:
name: 📓 Run BuildTheDocs
runs-on: ubuntu-24.04
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "Deprecated" "'BuildTheDocs.yml' template is deprecated. Please switch to 'SphinxDocumentation.yml'. See https://pytooling.github.io/Actions/JobTemplate/Documentation/SphinxDocumentation.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
- name: 🛳️ Build documentation
uses: buildthedocs/btd@v0
with:
skip-deploy: true
- name: 📤 Upload 'documentation' artifacts
uses: pyTooling/upload-artifact@v5
if: inputs.artifact != ''
with:
name: ${{ inputs.artifact }}
working-directory: doc/_build/html
path: '*'
retention-days: 1
- name: '📓 Publish site to GitHub Pages'
if: inputs.artifact == '' && github.event_name != 'pull_request'
run: |
cp --recursive -T doc/_build/html public
cd public
touch .nojekyll
git init
cp ../.git/config ./.git/config
git add .
git config --local user.email "BuildTheDocs@GitHubActions"
git config --local user.name "GitHub Actions"
git commit -a -m "update ${{ github.sha }}"
git push -u origin +HEAD:gh-pages
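
As the deprecation warning above states, 'BuildTheDocs.yml' is removed in favor of 'SphinxDocumentation.yml'. A minimal caller sketch of the replacement, using only the latex_artifact input that is visible later in this commit; other inputs are omitted and the artifact name is a hypothetical placeholder:

  jobs:
    Documentation:
      uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
      with:
        latex_artifact: Documentation-LaTeX   # hypothetical artifact name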

View File

@@ -71,7 +71,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -140,7 +140,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -180,7 +180,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true

View File

@@ -50,7 +50,7 @@ jobs:
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -136,13 +136,13 @@ on:
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
@@ -154,7 +154,7 @@ jobs:
disable_list: ${{ inputs.unittest_disable_list }}
# AppTestingParams:
# uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
# uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
# with:
# package_namespace: ${{ inputs.package_namespace }}
# package_name: ${{ inputs.package_name }}
@@ -166,7 +166,7 @@ jobs:
# disable_list: ${{ inputs.apptest_disable_list }}
InstallParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
@@ -188,7 +188,7 @@ jobs:
code_version: ${{ steps.extract.outputs.code_version }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -230,15 +230,13 @@ jobs:
"""))
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
# TODO: shouldn't this be configured by a parameter? Same as directories
requirements: "-r tests/unit/requirements.txt"
# pacboy: "msys/git python-lxml:p"
unittest_report_xml: ${{ needs.ConfigParams.outputs.unittest_report_xml }}
coverage_report_xml: ${{ needs.ConfigParams.outputs.coverage_report_xml }}
coverage_report_json: ${{ needs.ConfigParams.outputs.coverage_report_json }}
@@ -247,7 +245,7 @@ jobs:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -261,7 +259,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
CodeQuality:
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r7
needs:
- UnitTestingParams
with:
@@ -272,7 +270,7 @@ jobs:
artifact: CodeQuality
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r7
needs:
- UnitTestingParams
with:
@@ -280,7 +278,7 @@ jobs:
directory: ${{ needs.UnitTestingParams.outputs.package_directory }}
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- UnitTestingParams
with:
@@ -288,7 +286,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
Install:
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r6
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r7
needs:
- UnitTestingParams
- InstallParams
@@ -299,7 +297,7 @@ jobs:
package_name: ${{ needs.UnitTestingParams.outputs.package_fullname }}
# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r6
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r7
# needs:
# - AppTestingParams
# - UnitTestingParams
@@ -310,7 +308,7 @@ jobs:
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -332,7 +330,7 @@ jobs:
CODACY_TOKEN: ${{ secrets.CODACY_TOKEN }}
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -348,14 +346,14 @@ jobs:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r6
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r7
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -372,7 +370,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r7
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -383,7 +381,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -394,7 +392,7 @@ jobs:
can-fail: 'true'
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -407,7 +405,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
TriggerTaggedRelease:
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r6
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r7
needs:
- Prepare
- UnitTesting
@@ -426,7 +424,7 @@ jobs:
secrets: inherit
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
- UnitTesting
@@ -444,7 +442,7 @@ jobs:
secrets: inherit
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
needs:
- Prepare
- UnitTestingParams
@@ -459,7 +457,7 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- UnitTestingParams
- UnitTesting
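
All template references in this file follow the same update pattern: only the tag in the uses: reference moves from r6 to r7. A downstream repository that calls these reusable workflows directly is bumped the same way; a minimal sketch, where the package name is a placeholder:

  jobs:
    UnitTestingParams:
      uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
      with:
        package_name: MyPackage              # placeholder
    UnitTesting:
      uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
      needs:
        - UnitTestingParams
      with:
        jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
        requirements: "-r tests/unit/requirements.txt"
      secrets: inherit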

View File

@@ -1,187 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Coverage Collection
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.11'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
type: string
tests_directory:
description: 'Path to the directory containing tests (test working directory).'
required: false
default: 'tests'
type: string
unittest_directory:
description: 'Path to the directory containing unit tests (relative to tests_directory).'
required: false
default: 'unit'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
default: 'pyproject.toml'
type: string
artifact:
description: 'Name of the coverage artifact.'
required: true
type: string
secrets:
codacy_token:
description: 'Token to push result to codacy.'
required: true
jobs:
Coverage:
name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "Deprecated" "'CoverageCollection.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Testing/UnitTesting.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
with:
lfs: true
submodules: true
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python_version }}
- name: 🗂 Install dependencies
run: |
python -m pip install --disable-pip-version-check tomli
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from tomli import load as tomli_load
from textwrap import dedent
htmlDirectory = 'htmlcov'
xmlFile = './coverage.xml'
coverageRC = "${{ inputs.coverage_config }}".strip()
# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)
htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)
# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)
htmlDirectory = coverageRCSettings["html"]["directory"]
xmlFile = coverageRCSettings["xml"]["output"]
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)
# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory}
coverage_report_xml={xmlFile}
"""))
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}")
- name: Collect coverage
continue-on-error: true
run: |
export ENVIRONMENT_NAME="Linux (x86-64)"
export PYTHONPATH=$(pwd)
ABSDIR=$(pwd)
cd "${{ inputs.tests_directory || '.' }}"
[ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes
- name: Convert to cobertura format
run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage
- name: Convert to HTML format
run: |
coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore
- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
uses: pyTooling/upload-artifact@v5
with:
name: ${{ inputs.artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1
- name: 📊 Publish coverage at CodeCov
continue-on-error: true
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests
env_vars: PYTHON
- name: 📉 Publish coverage at Codacy
continue-on-error: true
uses: codacy/codacy-coverage-reporter-action@v1
with:
project-token: ${{ secrets.codacy_token }}
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}

View File

@@ -68,7 +68,7 @@ on:
jobs:
Extract:
name: 📓 Extract configurations from pyproject.toml
name: 🔬 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
@@ -82,7 +82,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -53,7 +53,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install

View File

@@ -60,7 +60,7 @@ jobs:
continue-on-error: ${{ inputs.can-fail == 'true' }}
steps:
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.latex_artifact }}
path: latex
@@ -83,7 +83,7 @@ jobs:
latexmk -${{ inputs.processor }} "${{ inputs.document }}.tex"
- name: 📤 Upload 'PDF Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}

View File

@@ -1,533 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Nightly
on:
workflow_call:
inputs:
ubuntu_image:
description: 'Name of the Ubuntu image.'
required: false
default: 'ubuntu-24.04'
type: string
nightly_name:
description: 'Name of the nightly release.'
required: false
default: 'nightly'
type: string
nightly_title:
description: 'Title of the nightly release.'
required: false
default: ''
type: string
nightly_description:
description: 'Description of the nightly release.'
required: false
default: 'Release of artifacts from latest CI pipeline.'
type: string
draft:
description: 'Specify if this is a draft.'
required: false
default: false
type: boolean
prerelease:
description: 'Specify if this is a pre-release.'
required: false
default: false
type: boolean
latest:
description: 'Specify if this is the latest release.'
required: false
default: false
type: boolean
replacements:
description: 'Multi-line string containing search=replace patterns.'
required: false
default: ''
type: string
assets:
description: 'Multi-line string containing artifact:file:title asset descriptions.'
required: true
type: string
inventory-json:
type: string
required: false
default: ''
inventory-version:
type: string
required: false
default: ''
inventory-categories:
type: string
required: false
default: ''
tarball-name:
type: string
required: false
default: '__pyTooling_upload_artifact__.tar'
can-fail:
type: boolean
required: false
default: false
jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
continue-on-error: ${{ inputs.can-fail }}
permissions:
contents: write
actions: write
# attestations: write
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "NightlyRelease" "'NightlyRelease.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Release/PublishReleaseNotes.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: 🔧 Install zstd
run: sudo apt-get install -y --no-install-recommends zstd
- name: 📑 Delete (old) Release Page
id: deleteReleasePage
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... "
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
elif [[ "${message}" == "release not found" ]]; then
printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 (Re)create (new) Release Page
id: createReleasePage
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
addDraft="--draft"
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi
if [[ "${{ inputs.latest }}" == "false" ]]; then
addLatest="--latest=false"
fi
if [[ "${{ inputs.nightly_title }}" != "" ]]; then
addTitle=("--title" "${{ inputs.nightly_title }}")
fi
cat <<'EOF' > __NoTeS__.md
${{ inputs.nightly_description }}
EOF
if [[ -s __NoTeS__.md ]]; then
addNotes=("--notes-file" "__NoTeS__.md")
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
done <<<'${{ inputs.replacements }}'
# Add footer line
cat <<EOF >> __NoTeS__.md
--------
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
EOF
printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📥 Download artifacts and upload as assets
id: uploadAssets
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
Replace() {
line="$1"
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
line="${line//"%$pattern%"/"$replacement"}"
done <<<'${{ inputs.replacements }}'
printf "%s\n" "$line"
}
# Create JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
VERSION="1.0"
# Split categories by ',' into a Bash array.
# See https://stackoverflow.com/a/45201229/3719459
if [[ "${{ inputs.inventory-categories }}" != "" ]]; then
readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }},"
unset 'inventoryCategories[-1]'
declare -p inventoryCategories
else
inventoryCategories=""
fi
jsonInventory=$(jq -c -n \
--arg version "${VERSION}" \
--arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
--argjson jsonMeta "$(jq -c -n \
--arg tag "${{ inputs.nightly_name }}" \
--arg version "${{ inputs.inventory-version }}" \
--arg hash "${{ github.sha }}" \
--arg repo "${{ github.server_url }}/${{ github.repository }}" \
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
--argjson categories "$(jq -c -n \
'$ARGS.positional' \
--args "${inventoryCategories[@]}" \
)" \
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \
)" \
'{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
)
fi
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
continue
fi
# split assetLine colon separated triple: artifact:asset:title
artifact="${assetLine%%:*}"
assetLine="${assetLine#*:}"
asset="${assetLine%%:*}"
assetLine="${assetLine#*:}"
if [[ "${{ inputs.inventory-json }}" == "" ]]; then
categories=""
title="${assetLine##*:}"
else
categories="${assetLine%%:*}"
title="${assetLine##*:}"
fi
# remove leading whitespace
asset="${asset#"${asset%%[![:space:]]*}"}"
categories="${categories#"${categories%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"
# apply replacements
asset="$(Replace "${asset}")"
title="$(Replace "${title}")"
printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
printf " %s" "Checked asset for duplicates ... "
if [[ -n "${assetFilenames[$asset]}" ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "DuplicateAsset" "Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
assetFilenames[$asset]=1
fi
# Download artifact by artifact name
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
else
printf " downloading '${artifact}' ...\n"
printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "ArtifactNotFound" "Couldn't download artifact '${artifact}'."
ERRORS=$((ERRORS + 1))
continue
fi
downloadedArtifacts[$artifact]=1
printf " %s" "Checking for embedded tarball ... "
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"
pushd "${artifact}" > /dev/null
printf " %s" "Extracting embedded tarball ... "
tar -xf "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi
printf " %s" "Removing temporary tarball ... "
rm -f "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi
popd > /dev/null
else
printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi
# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
printf " %s" "checking asset '${artifact}/${asset}' ... "
if [[ "${asset}" == !*.zip ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
retCode=$?
printf "::endgroup::\n"
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${artifact}/${asset}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "FileNotFound" "Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=$((ERRORS + 1))
continue
fi
# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
category=""
jsonEntry=$(jq -c -n \
--arg title "${title}" \
--arg file "${uploadFile#*/}" \
'{"file": $file, "title": $title}' \
)
while [[ "${categories}" != "${category}" ]]; do
category="${categories##*,}"
categories="${categories%,*}"
jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
done
jsonInventory=$(jq -c -n \
--argjson inventory "${jsonInventory}" \
--argjson file "${jsonEntry}" \
'$inventory * {"files": $file}' \
)
else
printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi
# Upload asset to existing release page
printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
fi
done <<<'${{ inputs.assets }}'
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
inventoryTitle="Release Inventory (JSON)"
printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
cat "${{ inputs.inventory-json }}"
printf "::endgroup::\n"
# Upload inventory asset to existing release page
printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
fi
fi
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
tree -pash -L 3 .
printf "::endgroup::\n"
if [[ $ERRORS -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi
- name: 📑 Remove draft state from Release Page
if: ${{ ! inputs.draft }}
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Remove draft-state from release page
printf "%s" "Remove draft-state from release '${title}' ... "
gh release edit --draft=false "${{ inputs.nightly_name }}"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "ReleasePage" "Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi
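
For reference, the assets input parsed by the upload step of this (now removed) template is a multi-line list of colon-separated descriptions, artifact:file:title, or artifact:file:categories:title when inventory-json is set; a leading '!' on the file part asks the job to compress the whole artifact into that archive first. A hedged input fragment for a caller, with artifact and file names as placeholders:

      with:
        nightly_name: nightly
        assets: |
          documentation: !documentation.zip: Documentation (HTML)
          package-wheel: mypackage-1.0.0-py3-none-any.whl: Python wheel
        replacements: |
          version=1.0.0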

View File

@@ -53,7 +53,7 @@ jobs:
artifact: ${{ inputs.artifact }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -106,7 +106,7 @@ jobs:
run: python setup.py bdist_wheel
- name: 📤 Upload wheel artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.artifact }}
working-directory: dist

View File

@@ -103,7 +103,7 @@ on:
macos_intel_image:
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
required: false
default: 'macos-13'
default: 'macos-15-intel'
type: string
macos_arm_image:
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
@@ -154,7 +154,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -131,7 +131,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -109,13 +109,13 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
tree -pash ${{ fromJson(inputs.coverage_report_html).directory }}
- name: 📤 Upload 'Coverage SQLite Database' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
with:
@@ -166,7 +166,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage XML Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
with:
@@ -177,7 +177,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage JSON Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_json_artifact != ''
continue-on-error: true
with:
@@ -188,7 +188,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage HTML Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_html_artifact != ''
continue-on-error: true
with:

View File

@@ -56,7 +56,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.artifact }}
path: dist

View File

@@ -132,7 +132,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -191,198 +191,6 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
if [[ $? -ne 0 || "{FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Create new Release Page
id: createReleasePage
if: inputs.mode == 'release'
@@ -397,6 +205,15 @@ jobs:
export GH_TOKEN=${{ github.token }}
tee "__PRELIMINARY_NOTES__.md" <<EOF
Release notes for ${{ inputs.tag }} are created right now ...
1. download artifacts &rarr; (compression?) &rarr; upload as assets
2. optional: create inventory.json
3. assemble release notes &rarr; update this text
4. optional: remove draft state
EOF
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi
@@ -409,9 +226,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
fi
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -439,6 +254,14 @@ jobs:
export GH_TOKEN=${{ github.token }}
tee "__PRELIMINARY_NOTES__.md" <<EOF
Release notes for ${{ inputs.tag }} are updated right now ...
1. download artifacts &rarr; (compression?) &rarr; upload as assets
2. optional: create inventory.json
3. assemble release notes &rarr; update this text
EOF
addDraft="--draft"
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
@@ -452,9 +275,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
fi
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -553,6 +374,10 @@ jobs:
)
fi
# Write Markdown table header
printf "| Asset Name | File Size | SHA256 |\n" > __ASSETS__.md
printf "|------------|-----------|--------|\n" >> __ASSETS__.md
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
@@ -741,6 +566,13 @@ jobs:
sha256Checksums[$asset]="sha256:${sha256}"
printf "${ANSI_LIGHT_BLUE}${sha256}${ANSI_NOCOLOR}\n"
# Add asset to Markdown table
printf "| %s | %s | %s |\n" \
"[${title}](${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}/${uploadFile#*/})" \
"$(stat --printf="%s" "${uploadFile}" | numfmt --format "%.1f" --suffix=B --to=iec-i)" \
"\`${sha256}\`" \
>> __ASSETS__.md
# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
@@ -775,7 +607,7 @@ jobs:
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf " checking assets SHA256 checksum ... \n"
printf " checking assets SHA256 checksum ... "
ghSHA256=$(gh release view --json assets --jq ".assets[] | select(.name == \"${asset}\") | .digest" ${{ inputs.tag }})
if [[ "${ghSHA256}" == "${sha256Checksums[$asset]}" ]]; then
printf "${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n"
@@ -829,6 +661,245 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# Inline Files table
if [[ -s __ASSETS__.md ]]; then
NOTES="${NOTES//%%ASSETS%%/$(<__ASSETS__.md)}"
else
NOTES="${NOTES//%%ASSETS%%/}"
fi
# Inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __ASSETS__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__ASSETS__.md' ($(stat --printf="%s" "__ASSETS__.md") B) ...."
cat __ASSETS__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__ASSETS__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Update release notes
id: updateReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
else
printf " ${ANSI_LIGHT_RED}File '%s' not found.${ANSI_NOCOLOR}\n" "__NOTES__.md"
printf "::error title=%s::%s\n" "InternalError" "File '__NOTES__.md' not found."
exit 1
fi
printf "Updating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release edit "${addNotes[@]}" "${{ inputs.tag }}" 2>&1)"
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf " Release page: %s\n" "${message}"
else
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf " ${ANSI_LIGHT_RED}Couldn't update release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't update release '${{ inputs.tag }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 Remove draft state from Release Page
id: removeDraft
if: ${{ ! inputs.draft }}

View File

@@ -102,10 +102,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
fail_ci_if_error: true
- name: 📤 Upload merged 'JUnit Test Summary' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.merged_junit_artifact != ''
with:
name: ${{ inputs.merged_junit_artifact }}

View File

@@ -45,45 +45,51 @@ on:
default: ''
type: string
outputs:
github_pages_url:
description: "URL to GitHub Pages."
value: ${{ jobs.PrepareGitHubPages.outputs.github_pages_url }}
jobs:
PublishToGitHubPages:
name: 📚 Publish to GH-Pages
PrepareGitHubPages:
name: 📖 Merge multiple contents for publishing
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
permissions:
pages: write # to deploy to Pages
id-token: write # to verify the deployment originates from an appropriate source
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
outputs:
github_pages_url: ${{ steps.deployment.outputs.page_url }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.doc }}
path: public
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: ${{ inputs.coverage != '' }}
with:
name: ${{ inputs.coverage }}
path: public/coverage
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: ${{ inputs.typing != '' }}
with:
name: ${{ inputs.typing }}
path: public/typing
- name: '📓 Publish site to GitHub Pages'
- name: 📑 Upload static files as artifact
if: github.event_name != 'pull_request'
run: |
cd public
touch .nojekyll
git init
cp ../.git/config ./.git/config
git add .
git config --local user.email "BuildTheDocs@GitHubActions"
git config --local user.name "GitHub Actions"
git commit -a -m "update ${{ github.sha }}"
git push -u origin +HEAD:gh-pages
uses: actions/upload-pages-artifact@v4
with:
path: public/
- name: 📖 Deploy to GitHub Pages
id: deployment
if: github.event_name != 'pull_request'
uses: actions/deploy-pages@v4

View File

@@ -86,7 +86,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -105,7 +105,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -113,7 +113,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -129,7 +129,7 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
- name: 📤 Upload 'HTML Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.html_artifact != ''
continue-on-error: true
with:
@@ -145,7 +145,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -164,7 +164,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -172,7 +172,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -272,7 +272,7 @@ jobs:
done
- name: 📤 Upload 'LaTeX Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.latex_artifact != ''
continue-on-error: true
with:

View File

@@ -38,7 +38,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
default: '-r tests/typing/requirements.txt'
type: string
mypy_options:
description: 'Additional mypy options.'
@@ -49,18 +49,18 @@ on:
description: 'Cobertura file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/cobertura.xml",
{ "fullpath": "report/typing/cobertura.xml",
"directory": "report/typing",
"filename": "cobertura.xml"
"filename": "cobertura.xml"
}
type: string
junit_report:
description: 'JUnit file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/StaticTypingSummary.xml",
{ "fullpath": "report/typing/StaticTypingSummary.xml",
"directory": "report/typing",
"filename": "StaticTypingSummary.xml"
"filename": "StaticTypingSummary.xml"
}
type: string
html_report:
@@ -94,7 +94,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6
@@ -142,7 +142,7 @@ jobs:
fi
- name: 📤 Upload '${{ inputs.html_artifact }}' HTML artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
with:
@@ -153,7 +153,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.junit_artifact }}' JUnit artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
with:
@@ -164,7 +164,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.cobertura_artifact }}' Cobertura artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.cobertura_artifact != '' }}
continue-on-error: true
with:

View File

@@ -47,7 +47,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
default: '-r ./requirements.txt'
type: string
mingw_requirements:
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
@@ -82,7 +82,7 @@ on:
root_directory:
description: 'Working directory for running tests.'
required: false
default: ''
default: '.'
type: string
tests_directory:
description: 'Path to the directory containing tests (relative from root_directory).'
@@ -181,7 +181,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -205,6 +205,38 @@ jobs:
run: |
py -3.9 -m pip install --disable-pip-version-check --break-system-packages -U tomli
- name: Compute path to requirements file
id: requirements
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
print(f"Python: {version}")
requirements = "${{ inputs.requirements }}"
if requirements.startswith("-r"):
requirements = requirements[2:].lstrip()
if requirements.startswith("./"):
requirementsFile = Path("${{ inputs.root_directory || '.' }}") / Path("${{ inputs.tests_directory || '.' }}") / Path("${{ inputs.unittest_directory || '.' }}") / Path(requirements[2:])
else:
requirementsFile = Path(requirements)
if not requirementsFile.exists():
print(f"::error title=FileNotFoundError::Requirements file '{requirementsFile.as_posix()}' not found.")
exit(1)
print(f"requirements file: {requirementsFile.as_posix()}")
# Write requirements path to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+") as f:
f.write(f"requirements=-r {requirementsFile.as_posix()}\n")
else:
# A plain requirements list (e.g. 'wheel tomli') is passed through unchanged.
print(f"requirements list: {requirements}")
github_output = Path(getenv("GITHUB_OUTPUT"))
with github_output.open("a+") as f:
f.write(f"requirements={requirements}\n")
- name: Compute pacman/pacboy packages
id: pacboy
if: matrix.system == 'msys2'
@@ -215,8 +247,6 @@ jobs:
from re import compile
from sys import version
print(f"Python: {version}")
def loadRequirementsFile(requirementsFile: Path):
requirements = []
with requirementsFile.open("r") as file:
@@ -232,11 +262,10 @@ jobs:
return requirements
requirements = "${{ inputs.requirements }}"
requirements = "${{ steps.requirements.outputs.requirements }}"
if requirements.startswith("-r"):
requirementsFile = Path(requirements[2:].lstrip())
try:
dependencies = loadRequirementsFile(requirementsFile)
dependencies = loadRequirementsFile(Path(requirements[2:].lstrip()))
except FileNotFoundError as ex:
print(f"::error title=FileNotFoundError::{ex}")
exit(1)
@@ -324,7 +353,7 @@ jobs:
if: matrix.system != 'msys2'
run: |
python -m pip install --disable-pip-version-check -U wheel tomli
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
python -m pip install --disable-pip-version-check ${{ steps.requirements.outputs.requirements }}
- name: 🔧 Install pip dependencies (MSYS2)
if: matrix.system == 'msys2'
@@ -332,7 +361,7 @@ jobs:
if [ -n '${{ inputs.mingw_requirements }}' ]; then
python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.mingw_requirements }}
else
python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.requirements }}
python -m pip install --disable-pip-version-check --break-system-packages ${{ steps.requirements.outputs.requirements }}
fi
# Before scripts
@@ -421,7 +450,7 @@ jobs:
# Upload artifacts
- name: 📤 Upload '${{ fromJson(inputs.unittest_report_xml).filename }}' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
with:
@@ -434,7 +463,7 @@ jobs:
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
# if: inputs.unittest_html_artifact != ''
# continue-on-error: true
# uses: pyTooling/upload-artifact@v5
# uses: pyTooling/upload-artifact@v6
# with:
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
# path: ${{ inputs.unittest_report_html_directory }}
@@ -444,7 +473,7 @@ jobs:
- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: .coverage
@@ -455,7 +484,7 @@ jobs:
- name: 📤 Upload 'Coverage XML Report' artifact
if: inputs.coverage_xml_artifact != '' && steps.convert_xml.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_xml).directory }}
@@ -466,7 +495,7 @@ jobs:
- name: 📤 Upload 'Coverage JSON Report' artifact
if: inputs.coverage_json_artifact != '' && steps.convert_json.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_json).directory }}
@@ -477,7 +506,7 @@ jobs:
- name: 📤 Upload 'Coverage HTML Report' artifact
if: inputs.coverage_html_artifact != '' && steps.convert_html.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_html).directory }}

View File

@@ -44,7 +44,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python
uses: actions/setup-python@v6

View File

@@ -6,7 +6,7 @@ on:
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13 3.14" # py-1, py-0
@@ -25,7 +25,7 @@ jobs:
run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
run: printf "%s\n" "Package" >> package.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
path: package.txt
@@ -50,7 +50,7 @@ jobs:
retention-days: 1
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- Params
- Testing

View File

@@ -16,10 +16,9 @@ jobs:
include:
- {icon: '🐧', name: 'Ubuntu 22.04 (x86-64)', image: 'ubuntu-22.04', shell: 'bash', can-fail: false}
- {icon: '🐧', name: 'Ubuntu 24.04 (x86-64)', image: 'ubuntu-24.04', shell: 'bash', can-fail: false} # latest
- {icon: '🍎', name: 'macOS-13 (x86-64)', image: 'macos-13', shell: 'bash', can-fail: false}
- {icon: '🍎', name: 'macOS-14 (x86-64)', image: 'macos-14-large', shell: 'bash', can-fail: true } # not in free plan
- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # not in free plan
- {icon: '🍏', name: 'macOS-13 (aarch64)', image: 'macos-13-xlarge', shell: 'bash', can-fail: true } # not in free plan
### - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # same as -intel; not in free plan
- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-intel', shell: 'bash', can-fail: false}
- {icon: '🍏', name: 'macOS-14 (aarch64)', image: 'macos-14', shell: 'bash', can-fail: false} # latest
- {icon: '🍏', name: 'macOS-15 (aarch64)', image: 'macos-15', shell: 'bash', can-fail: false}
- {icon: '🪟', name: 'Windows Server 2022', image: 'windows-2022', shell: 'bash', can-fail: false}

View File

@@ -6,20 +6,20 @@ on:
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
disable_list: 'windows-arm:pypy-3.11'
PlatformTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
name: 'Platform'
@@ -27,14 +27,14 @@ jobs:
system_list: 'ubuntu ubuntu-arm windows windows-arm macos mingw64 clang64 ucrt64'
InstallParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
python_version_list: ''
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -52,7 +52,7 @@ jobs:
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
PlatformTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- PlatformTestingParams
@@ -72,7 +72,7 @@ jobs:
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -82,7 +82,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
CodeQuality:
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r7
needs:
- UnitTestingParams
with:
@@ -93,7 +93,7 @@ jobs:
artifact: 'CodeQuality'
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -103,7 +103,7 @@ jobs:
# fail_below: 70
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- UnitTestingParams
# - UnitTesting
@@ -113,7 +113,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
Install:
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r6
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r7
needs:
- UnitTestingParams
- InstallParams
@@ -124,7 +124,7 @@ jobs:
package_name: ${{ needs.UnitTestingParams.outputs.package_fullname }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -143,7 +143,7 @@ jobs:
secrets: inherit
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -158,14 +158,14 @@ jobs:
secrets: inherit
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r6
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r7
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -182,7 +182,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r7
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -192,7 +192,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -202,7 +202,7 @@ jobs:
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -215,7 +215,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
TriggerTaggedRelease:
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r6
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r7
needs:
- Prepare
- UnitTesting
@@ -233,7 +233,7 @@ jobs:
secrets: inherit
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
- UnitTesting
@@ -251,7 +251,7 @@ jobs:
secrets: inherit
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
needs:
- UnitTestingParams
- ReleasePage
@@ -264,7 +264,7 @@ jobs:
secrets: inherit
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- UnitTestingParams
- PlatformTestingParams

View File

@@ -6,7 +6,7 @@ on:
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_namespace: 'myFramework'
package_name: 'Extension'

View File

@@ -17,7 +17,7 @@ jobs:
printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
- name: 📤 Upload artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: document
path: |
@@ -29,10 +29,11 @@ jobs:
- name: 🖉 Program
run: |
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document2.txt
printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
- name: 📤 Upload artifact
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: other
path: |
@@ -55,7 +56,7 @@ jobs:
version=4.2.0
tool=myTool
prog=program
tag: 4.2.0
tag: v4.2.0
title: "Nightly Test Release"
description: |
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
@@ -63,10 +64,14 @@ jobs:
# %tool% %version%
* %prog%
# Attached files:
%%ASSETS%%
assets: |
document: document1.txt: Documentation
document: build.log: Logfile - %tool% - %tool%
other: document1.txt: SBOM - %version%
other: document2.txt: SBOM - %version%
other: %prog%.py: Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz
@@ -108,7 +113,7 @@ jobs:
# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%
other: document2.txt: build,SBOM:SBOM - %version%
other: %prog%.py: app,binary:Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz

View File

@@ -6,24 +6,24 @@ on:
jobs:
Params_Default:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
Params_PythonVersions:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12 3.13 pypy-3.10 pypy-3.11" # py-2, py-1, pypy-1, pypy-0
Params_Systems:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
system_list: "windows mingw32 mingw64"
Params_Include:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12" # py-2
@@ -31,7 +31,7 @@ jobs:
include_list: "ubuntu:3.13 ubuntu:3.14 ubuntu-arm:3.12"
Params_Exclude:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13" # py-1
@@ -39,7 +39,7 @@ jobs:
exclude_list: "windows:3.13 windows:3.14"
Params_Disable:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13" # py-1
@@ -47,7 +47,7 @@ jobs:
disable_list: "windows:3.13 windows:3.14"
Params_All:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12 3.13" # py-2, py-1
@@ -64,7 +64,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Default'
uses: ./.github/actions/CheckJobMatrix
@@ -92,7 +92,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_PythonVersions'
uses: ./.github/actions/CheckJobMatrix
@@ -114,7 +114,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Systems'
uses: ./.github/actions/CheckJobMatrix
@@ -136,7 +136,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Include'
uses: ./.github/actions/CheckJobMatrix
@@ -158,7 +158,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Exclude'
uses: ./.github/actions/CheckJobMatrix
@@ -180,7 +180,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Disable'
uses: ./.github/actions/CheckJobMatrix
@@ -202,7 +202,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_All'
uses: ./.github/actions/CheckJobMatrix

View File

@@ -6,7 +6,7 @@ on:
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_name: 'myPackage'
unittest_python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'

.idea/Actions.iml
View File

@@ -8,7 +8,7 @@
<excludeFolder url="file://$MODULE_DIR$/doc/_build" />
<excludeFolder url="file://$MODULE_DIR$/report" />
</content>
<orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.14" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -75,8 +75,6 @@ As shown in the screenshots above, the expected order is:
[**PublishCoverageResults**](.github/workflows/PublishCoverageResults.yml): publish code coverage results.
[**NightlyRelease**](.github/workflows/NightlyRelease.yml): publish GitHub Release.
[**PublishReleaseNotes**](.github/workflows/PublishReleaseNotes.yml): publish GitHub Release.
- **Documentation:**
[**SphinxDocumentation**](.github/workflows/SphinxDocumentation.yml): create HTML and LaTeX documentation using
@@ -90,12 +88,11 @@ As shown in the screenshots above, the expected order is:
[**IntermediateCleanUp**](.github/workflows/IntermediateCleanUp.yml): delete intermediate artifacts.
[**ArtifactCleanUp**](.github/workflows/ArtifactCleanUp.yml): delete artifacts.
- **⚠ Deprecated ⚠:**
[**CoverageCollection**](.github/workflows/CoverageCollection.yml): Use `UnitTesting`, because it can collect code
coverage too. This avoids code duplication in job templates.
- **Removed:**
**NightlyRelease**: Use `PublishReleaseNotes`, because it's more advanced and not limited to nightly releases.
**CoverageCollection**: Use `UnitTesting`, because it can collect code coverage too.
[**BuildTheDocs**](.github/workflows/BuildTheDocs.yml): Use `SphinxDocumentation`, `LaTeXDocumentation` and
`PublishToGitHubPages`. BuildTheDocs isn't maintained anymore.
**BuildTheDocs**: Use `SphinxDocumentation`, `LaTeXDocumentation` and `PublishToGitHubPages`.
### Example pipeline
@@ -116,8 +113,8 @@ Find further usage cases in the following list of projects:
## Contributors
* [Patrick Lehmann](https://GitHub.com/Paebbels)
* [Unai Martinez-Corral](https://GitHub.com/umarcor) (Maintainer)
* [Patrick Lehmann](https://GitHub.com/Paebbels) (Maintainer)
* [Unai Martinez-Corral](https://GitHub.com/umarcor)
* [and more...](https://GitHub.com/pyTooling/Actions/graphs/contributors)

View File

@@ -42,7 +42,7 @@ to handover input parameters to the template.
jobs:
<InstanceName>:
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@r6
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@r7
with:
<Param1>: <Value>
@@ -66,12 +66,12 @@ Documentation Only (Sphinx)
jobs:
BuildTheDocs:
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r6
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r7
with:
artifact: Documentation
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- BuildTheDocs
with:

View File

@@ -289,7 +289,7 @@ Instantiation
*************
The following instantiation example creates a ``SimplePackage`` job derived from job template ``CompletePipeline``
version ``@r6``. It only requires the `package_name` parameter to run a full pipeline suitable for a Python project.
version ``@r7``. It only requires the `package_name` parameter to run a full pipeline suitable for a Python project.
.. grid:: 2
@@ -307,7 +307,7 @@ version ``@r6``. It only requires the `package_name` parameter to run a full pip
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_name: myPackage
@@ -321,7 +321,7 @@ version ``@r6``. It only requires the `package_name` parameter to run a full pip
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -457,7 +457,7 @@ package_namespace
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -505,7 +505,7 @@ package_name
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_name: myPackage

View File

@@ -41,7 +41,7 @@ The simplest variant just uses the artifact name for the package.
jobs:
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
with:
package: Package
@@ -53,7 +53,7 @@ Complex Example
jobs:
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- Params
- UnitTesting

View File

@@ -33,14 +33,14 @@ variant after test results have been merged into a single file.
Instantiation
*************
The following instantiation example creates a ``Params`` job derived from job template ``Parameters`` version ``@r6``. It only
The following instantiation example creates a ``Params`` job derived from job template ``Parameters`` version ``@r7``. It only
requires a `name` parameter to create the artifact names.
.. code-block:: yaml
jobs:
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r7
needs:
- UnitTestingParams
- PublishCoverageResults

View File

@@ -1,10 +0,0 @@
.. _JOBTMPL/BuildTheDocs:
BuildTheDocs
############
.. attention::
The ``BuildTheDocs`` job template is deprecated.
See :ref:`JOBTMPL/SphinxDocumentation` and :ref:`JOBTMPL/LaTeXDocumentation`.

View File

@@ -1,10 +0,0 @@
.. _JOBTMPL/CoverageCollection:
CoverageCollection
##################
.. attention::
The ``CoverageCollection`` job template is deprecated.
See :ref:`JOBTMPL/UnitTesting` and :ref:`JOBTMPL/PublishCoverageResults`.

View File

@@ -1,10 +0,0 @@
.. _JOBTMPL/NightlyRelease:
NightlyRelease
##############
.. attention::
The ``NightlyRelease`` job template is deprecated.
See :ref:`JOBTMPL/PublishReleaseNotes`.

View File

@@ -5,16 +5,14 @@ Deprecated
The category *deprecated* collects outdated job templates:
:ref:`JOBTMPL/CoverageCollection`
CoverageCollection
replaced by :ref:`JOBTMPL/UnitTesting`
:ref:`JOBTMPL/NightlyRelease`
NightlyRelease
replaced by :ref:`JOBTMPL/PublishReleaseNotes`
:ref:`JOBTMPL/BuildTheDocs`
BuildTheDocs
replaced by :ref:`JOBTMPL/SphinxDocumentation` and :ref:`JOBTMPL/LaTeXDocumentation`
.. toctree::
.. #toctree::
:hidden:
CoverageCollection
NightlyRelease
BuildTheDocs

View File

@@ -46,12 +46,12 @@ Instantiation
jobs:
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- UnitTestingParams
with:
@@ -60,7 +60,7 @@ Instantiation
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r7
needs:
- UnitTestingParams
- Documentation

View File

@@ -51,7 +51,7 @@ Instantiation
# ...
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- BuildTheDocs
with:
@@ -66,7 +66,7 @@ Instantiation
jobs:
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- Params
- BuildTheDocs

View File

@@ -72,12 +72,12 @@ Instantiation
jobs:
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- UnitTestingParams
with:
@@ -207,10 +207,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
with:
@@ -253,10 +253,10 @@ unittest_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
with:

View File

@@ -47,7 +47,7 @@ Instantiation
*************
The following instantiation example creates an ``Install`` job derived from job template ``InstallPackage`` version
`@r6`. It installs the Python package on various platforms using a precomputed job-matrix created by job
`@r7`. It installs the Python package on various platforms using a precomputed job-matrix created by job
``InstallParams``. This job uses the same ``Parameters`` job template as job ``UnitTestingParams``, which was used to
define parameters for the packaging job ``Package``.
@@ -55,25 +55,25 @@ define parameters for the packaging job ``Package``.
jobs:
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
InstallParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
python_version_list: ''
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- UnitTestingParams
with:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
Install:
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r6
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r7
needs:
- UnitTestingParams
- InstallParams

View File

@@ -58,7 +58,7 @@ Simple Example
jobs:
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
with:
artifact: Package
@@ -70,7 +70,7 @@ Complex Example
jobs:
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- Params
with:

View File

@@ -63,7 +63,7 @@ by a Git tag. A secret is forwarded from GitHub secrets to a job secret.
# ...
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
if: startsWith(github.ref, 'refs/tags')
with:
artifact: Package
@@ -87,7 +87,7 @@ by that job. Finally, the list of requirements is overwritten to load a list of
# ...
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
if: startsWith(github.ref, 'refs/tags')
needs:
- Params

View File

@@ -77,17 +77,17 @@ The following
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
with:
package_name: myPackage
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -96,7 +96,7 @@ The following
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -253,10 +253,10 @@ coverage_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
with:
@@ -293,10 +293,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
with:
@@ -326,10 +326,10 @@ coverage_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
with:

View File

@@ -85,7 +85,7 @@ Simple Example
jobs:
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
Complex Example
===============
@@ -100,7 +100,7 @@ Complex Example
# ...
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
needs:
- CodeCoverage
- UnitTesting

View File

@@ -41,19 +41,19 @@ The ``CheckDocumentation`` job checks the level of documentation coverage for Py
Instantiation
*************
The following instantiation example creates a ``Params`` job derived from job template ``Parameters`` version ``@r6``. It only
The following instantiation example creates a ``Params`` job derived from job template ``Parameters`` version ``@r7``. It only
requires a `name` parameter to create the artifact names.
.. code-block:: yaml
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
with:
package_name: myPackage
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r7
needs:
- ConfigParams
with:

View File

@@ -59,7 +59,7 @@ directory ``report/typing``.
jobs:
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
with:
cobertura_artifact: 'TypeChecking-Cobertura'
junit_artifact: 'TypeChecking-JUnit'
@@ -91,17 +91,17 @@ precompute the artifact's name.
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
with:
package_name: myPackage
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
- Params
@@ -221,10 +221,10 @@ cobertura_report
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
with:
@@ -261,10 +261,10 @@ junit_report
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
with:
@@ -294,10 +294,10 @@ html_report
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
with:

View File

@@ -64,10 +64,10 @@ Instantiation
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
Release:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
if: needs.Prepare.outputs.is_release_tag == 'true'
@@ -441,7 +441,7 @@ replacements
.. code-block:: yaml
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
if: needs.Prepare.outputs.is_release_tag == 'true'

View File

@@ -47,21 +47,21 @@ Instantiation
*************
The following instantiation example depicts three jobs within a larger pipeline. The ``Prepare`` job derived from job
template ``PrepareJob`` version ``@r6`` figures out if a pipeline runs for a release merge-commit, for a tag or any
template ``PrepareJob`` version ``@r7`` figures out if a pipeline runs for a release merge-commit, for a tag or any
other reason. Its outputs are used to either run a ``TriggerTaggedRelease`` job derived from job template
``TagReleaseCommit`` version ``@r6``, or alternatively run the ``ReleasePage`` job derived from job template
``PublishReleaseNotes`` version ``@r6``.
``TagReleaseCommit`` version ``@r7``, or alternatively run the ``ReleasePage`` job derived from job template
``PublishReleaseNotes`` version ``@r7``.
.. code-block:: yaml
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
# Other pipeline jobs
TriggerTaggedRelease:
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r6
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r7
needs:
- Prepare
if: needs.Prepare.outputs.is_release_commit == 'true' && github.event_name != 'schedule'
@@ -74,7 +74,7 @@ other reason. Its outputs are used to either run a ``TriggerTaggedRelease`` job
secrets: inherit
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
if: needs.Prepare.outputs.is_release_tag == 'true'

View File

@@ -58,17 +58,17 @@ Instantiation
*************
The following instantiation example creates a ``ConfigParams`` job derived from job template ``ExtractConfiguration``
version ``@r6``. It requires no special parameters to extract unit test (pytest) and code coverage (Coverage.py)
version ``@r7``. It requires no special parameters to extract unit test (pytest) and code coverage (Coverage.py)
settings.
.. code-block:: yaml
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
with:
@@ -250,10 +250,10 @@ unittest_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -265,10 +265,10 @@ unittest_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -283,10 +283,10 @@ unittest_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -333,10 +333,10 @@ unittest_merged_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -348,10 +348,10 @@ unittest_merged_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -366,10 +366,10 @@ unittest_merged_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -414,10 +414,10 @@ coverage_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -429,10 +429,10 @@ coverage_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -447,10 +447,10 @@ coverage_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -497,10 +497,10 @@ coverage_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -512,10 +512,10 @@ coverage_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -530,10 +530,10 @@ coverage_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -580,10 +580,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -595,10 +595,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -613,10 +613,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -663,10 +663,10 @@ typing_report_cobertura
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -678,10 +678,10 @@ typing_report_cobertura
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -696,10 +696,10 @@ typing_report_cobertura
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -746,10 +746,10 @@ typing_report_junit
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -761,10 +761,10 @@ typing_report_junit
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -779,10 +779,10 @@ typing_report_junit
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -826,10 +826,10 @@ typing_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -841,10 +841,10 @@ typing_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:
@@ -859,10 +859,10 @@ typing_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
OtherJob:
uses: some/path/to/a/template@r6
uses: some/path/to/a/template@r7
needs:
- ConfigParams
with:

View File

@@ -53,7 +53,7 @@ Simple Example
:columns: 5
The following instantiation example creates a ``Params`` job derived from job template ``Parameters`` version
``@r6``. It only requires a :ref:`JOBTMPL/Parameters/Input/package_name` parameter to create the artifact names.
``@r7``. It only requires a :ref:`JOBTMPL/Parameters/Input/package_name` parameter to create the artifact names.
.. grid-item::
:columns: 7
@@ -62,12 +62,12 @@ Simple Example
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- Params
with:
@@ -101,7 +101,7 @@ Complex Example
jobs:
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -111,7 +111,7 @@ Complex Example
exclude_list: 'windows:pypy-3.10 windows:pypy-3.11'
PerformanceTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -119,7 +119,7 @@ Complex Example
system_list: 'ubuntu windows macos macos-arm'
PlatformTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -272,7 +272,7 @@ package_namespace
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -321,7 +321,7 @@ package_name
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
@@ -404,7 +404,7 @@ include_list
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
include_list: "ubuntu:3.11 macos:3.11"
@@ -426,7 +426,7 @@ exclude_list
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
exclude_list: "windows:pypy-3.8 windows:pypy-3.9"
@@ -449,7 +449,7 @@ disable_list
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
disable_list: "windows:3.10 windows:3.11"
@@ -577,12 +577,12 @@ python_version
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: pyTooling
CodeCoverage:
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r6
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r7
needs:
- Params
with:
@@ -658,12 +658,12 @@ artifact_names
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: pyTooling
Coverage:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- Params
with:
@@ -692,12 +692,12 @@ python_jobs
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: pyDummy
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- Params
with:

View File

@@ -64,7 +64,7 @@ The job template generates various output parameters derived from
Instantiation
*************
The following instantiation example creates a ``Prepare`` job derived from job template ``PrepareJob`` version ``@r6``.
The following instantiation example creates a ``Prepare`` job derived from job template ``PrepareJob`` version ``@r7``.
In the default use case, no input parameters need to be specified for the job template, assuming a main/release branch
called ``main``, a development branch called ``dev``, and semantic versioning for tags and pull-request titles.
@@ -73,7 +73,7 @@ pull-request titles.
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
<ReleaseJob>:
needs:

View File

@@ -67,7 +67,7 @@
* :ref:`JOBTMPL/IntermediateCleanup`
* :ref:`JOBTMPL/ArtifactCleanup`
.. grid-item::
.. #grid-item::
:columns: 2
.. rubric:: :ref:`JOBTMPL/Deprecated`

View File

@@ -71,7 +71,7 @@ Instantiation
*************
The following instantiation example creates a ``UnitTesting`` job derived from job template ``UnitTesting`` version
`@r6`. For providing the job matrix as a JSON string, the :ref:`JOBTMPL/Parameters` job template is used. Additionally,
`@r7`. For providing the job matrix as a JSON string, the :ref:`JOBTMPL/Parameters` job template is used. Additionally,
the job needs configuration settings, which are stored in :file:`pyproject.toml`. Instead of duplicating these settings,
the :ref:`JOBTMPL/ExtractConfiguration` job template is used to extract these settings.
@@ -79,15 +79,15 @@ the :ref:`JOBTMPL/ExtractConfiguration` job template is used to extract these se
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: myPackage
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -513,10 +513,10 @@ unittest_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
with:
@@ -564,10 +564,10 @@ coverage_report_xml
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
with:
@@ -604,10 +604,10 @@ coverage_report_json
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
with:
@@ -637,10 +637,10 @@ coverage_report_html
.. code-block:: yaml
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
with:

View File

@@ -45,7 +45,7 @@ Some templates might provide output parameters, which can be used in dependent j
jobs:
<InstanceName>:
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@r6
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@r7
with:
<Param1>: <Value1>
<Param2>: <Value2>
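
Outputs of a template instance can then be consumed in a dependent job via the ``needs`` context. The following sketch
uses placeholder names only; the actual output names are documented per job template.

.. code-block:: yaml

   jobs:
     <InstanceName>:
       uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@r7
       with:
         <Param1>: <Value1>

     <DependentJob>:
       runs-on: ubuntu-24.04
       needs:
         - <InstanceName>
       steps:
         - name: Show an output parameter of the template instance
           run: echo "${{ needs.<InstanceName>.outputs.<Output1> }}"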

View File

@@ -121,7 +121,7 @@ Example Pipelines
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_name: myPackage
codecov: true
@@ -147,7 +147,7 @@ Example Pipelines
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_namespace: myFramework
package_name: Extension
@@ -232,8 +232,8 @@ References
Contributors
************
* `Patrick Lehmann <https://GitHub.com/Paebbels>`__
* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__ (Maintainer)
* `Patrick Lehmann <https://GitHub.com/Paebbels>`__ (Maintainer)
* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__
* `and more... <https://GitHub.com/pyTooling/Actions/graphs/contributors>`__

View File

@@ -11,9 +11,9 @@ docutils_stubs ~= 0.0.22
sphinx_rtd_theme ~= 3.0
# Sphinx Extensions
sphinxcontrib-mermaid ~= 1.0
sphinxcontrib-mermaid ~= 1.2
autoapi >= 2.0.1
sphinx_design ~= 0.6
sphinx-copybutton >= 0.5
sphinx_autodoc_typehints ~= 3.5
sphinx_autodoc_typehints ~= 3.5 # 3.6 conflicts with old sphinx_design and the RTD theme due to sphinx<9 and docutils<0.22
sphinx_reports ~= 0.9

View File

@@ -36,7 +36,7 @@ __author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.4.5"
__version__ = "7.0.1"
__keywords__ = ["GitHub Actions"]
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"

View File

@@ -1,8 +0,0 @@
# Releaser Development
- [pyTooling/pyAttributes](https://github.com/pyTooling/pyAttributes) or
[willmcgugan/rich](https://github.com/willmcgugan/rich) might be used to enhance the UX.
- It might be desirable to have pyTooling.Version.SemVersion handle the regular expression from
[semver.org](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string), and use
proper Python classes in **Releaser**.

View File

@@ -1,12 +0,0 @@
FROM python:3.12-slim-bookworm
COPY releaser.py /releaser.py
RUN pip install PyGithub --progress-bar off \
&& apt update -qq \
&& apt install -y curl \
&& curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | \
dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | \
tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& apt update -qq \
&& apt install -y gh
CMD ["/releaser.py"]

View File

@@ -1,181 +0,0 @@
# Releaser
**Releaser** is a Docker GitHub Action written in Python.
**Releaser** keeps a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
Combined with a periodically executed workflow, **Releaser** provides a fixed release name for users who want to use
daily/nightly artifacts of a project.
Furthermore, when any [semver](https://semver.org)-compliant tagged commit is pushed, **Releaser** can create a release
and upload assets.
## Context
GitHub provides official clients for the GitHub API through [github.com/octokit](https://github.com/octokit):
- [octokit.js](https://github.com/octokit/octokit.js) ([octokit.github.io/rest.js](https://octokit.github.io/rest.js))
- [octokit.rb](https://github.com/octokit/octokit.rb) ([octokit.github.io/octokit.rb](http://octokit.github.io/octokit.rb))
- [octokit.net](https://github.com/octokit/octokit.net) ([octokitnet.rtfd.io](https://octokitnet.rtfd.io))
When GitHub Actions was released in 2019, two Actions were made available through
[github.com/actions](https://github.com/actions) for dealing with GitHub Releases:
- [actions/create-release](https://github.com/actions/create-release)
- [actions/upload-release-asset](https://github.com/actions/upload-release-asset)
However, those Actions were contributed by an employee in spare time, not officially supported by GitHub.
Therefore, they were unmaintained before GitHub Actions was out of the private beta
(see [actions/upload-release-asset#58](https://github.com/actions/upload-release-asset/issues/58))
and, a year later, archived.
Those Actions are based on [actions/toolkit](https://github.com/actions/toolkit)'s hydrated version of octokit.js.
From a practical point of view, [actions/github-script](https://github.com/actions/github-script) is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
Still, it requires writing plain JavaScript.
Alternatively, there are non-official GitHub API libraries available in other languages (see [docs.github.com: rest/overview/libraries](https://docs.github.com/en/rest/overview/libraries)).
**Releaser** is based on [PyGithub/PyGithub](https://github.com/PyGithub/PyGithub), a Python client for the GitHub API.
**Releaser** was originally created in [eine/tip](https://github.com/eine/tip), as an enhanced alternative to using
`actions/create-release` and `actions/upload-release-asset`, in order to cover certain use cases that were being
migrated from Travis CI to GitHub Actions.
The main limitation of GitHub's Actions was/is their verbosity and the inability to dynamically define the list of assets
to be uploaded.
On the other hand, GitHub Actions artifacts do require login in order to download them.
Conversely, assets of GitHub Releases can be downloaded without login.
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
as assets.
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
Thus, the name of the Action.
GitHub provides an official CLI tool, written in golang: [cli/cli](https://github.com/cli/cli).
When the Python version of **Releaser** was written, `cli` was evaluated as an alternative to *PyGitHub*.
`gh release` was (and still is) not flexible enough to update the reference of a release without deleting and
recreating it (see [cli.github.com: manual/gh_release_create](https://cli.github.com/manual/gh_release_create)).
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
(see [eine/tip#111](https://github.com/eine/tip/issues/111)).
However, [cli.github.com: manual/gh_release_upload](https://cli.github.com/manual/gh_release_upload) handles uploading
artifacts as assets faster and with better stability for larger files than *PyGitHub*
(see [msys2/msys2-installer#36](https://github.com/msys2/msys2-installer/pull/36)).
Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
Although `gh` does not support login through SSH (see [cli/cli#3715](https://github.com/cli/cli/issues/3715)), on GitHub
Actions a token is available as `${{ github.token }}`.
Therefore, **Releaser** uses `gh release upload` internally.
## Usage
The following block shows a minimal YAML workflow file:
```yml
name: 'workflow'
on:
schedule:
- cron: '0 0 * * 5'
jobs:
mwe:
runs-on: ubuntu-24.04
steps:
# Clone repository
- uses: actions/checkout@v5
# Build your application, tool, artifacts, etc.
- name: Build
run: |
echo "Build some tool and generate some artifacts" > artifact.txt
# Update tag and pre-release
# - Update (force-push) tag to the commit that is used in the workflow.
# - Upload artifacts defined by the user.
- uses: pyTooling/Actions/releaser@r0
with:
token: ${{ secrets.GITHUB_TOKEN }}
files: |
artifact.txt
README.md
```
### Composite Action
The default implementation of **Releaser** is a Container Action.
Therefore, a pre-built container image is pulled before starting the job.
Alternatively, a Composite Action version is available: `uses: pyTooling/Actions/releaser/composite@main`.
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
users to tweak the version of Python by using [actions/setup-python](https://github.com/actions/setup-python) beforehand.
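
A minimal sketch of such a setup, assuming `actions/setup-python` (here pinned to `v5`) and the inputs described under
*Options* below:

```yml
steps:
  # Clone repository
  - uses: actions/checkout@v5

  # Select the Python version used by the Composite Action
  - uses: actions/setup-python@v5
    with:
      python-version: '3.13'

  # Build your application, tool, artifacts, etc.

  # Update tag and pre-release without pulling a container image
  - uses: pyTooling/Actions/releaser/composite@main
    with:
      token: ${{ secrets.GITHUB_TOKEN }}
      files: artifact.txt
```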
## Options
All options can be optionally provided as environment variables: `INPUT_TOKEN`, `INPUT_FILES`, `INPUT_TAG`, `INPUT_RM`
and/or `INPUT_SNAPSHOTS`.
### token (required)
Token to make authenticated API calls; can be passed in using `{{ secrets.GITHUB_TOKEN }}`.
### files (required)
Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
hierarchy.
For creating/updating a release without uploading assets, set `files: none`.
### tag
The default tag name for the tip/nightly pre-release is `tip`, but it can optionally be overridden through option `tag`.
### rm
Set option `rm` to `true` for systematically removing previous artifacts (e.g. old versions).
Otherwise (by default), previous artifacts are preserved and only those with the same name are overwritten.
Note:
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
users until the workflow is successfully run.
For instance, Action [setup-ghdl-ci](https://github.com/ghdl/setup-ghdl-ci) uses assets from [ghdl/ghdl: releases/tag/nightly](https://github.com/ghdl/ghdl/releases/tag/nightly).
Hence, it is recommended to remove only the conflicting assets, in order to maximise availability.
### snapshots
Whether to create releases from any tag or to treat some as snapshots.
By default, all the tags with non-empty `prerelease` field (see [semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string?](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string))
are considered snapshots; neither a release is created nor assets are uploaded.
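
As an illustrative sketch (values chosen for demonstration only), the `tag`, `rm` and `snapshots` defaults can be
overridden in a single step:

```yml
- uses: pyTooling/Actions/releaser@r0
  with:
    token: ${{ secrets.GITHUB_TOKEN }}
    files: |
      artifact.txt
    tag: nightly      # use 'nightly' instead of the default 'tip'
    rm: true          # remove all previously uploaded assets before uploading
    snapshots: false  # also create releases for semver pre-release tags
```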
## Advanced/complex use cases
**Releaser** is essentially a very thin wrapper to use the GitHub Actions context data along with the classes
and methods of PyGithub.
Similarly to [actions/github-script](https://github.com/actions/github-script), users with advanced/complex requirements
might find it desirable to write their own Python script, instead of using **Releaser**.
In fact, since `shell: python` is supported in GitHub Actions, using Python does *not* require any Action.
For prototyping purposes, the following job might be useful:
```yml
Release:
name: '📦 Release'
runs-on: ubuntu-24.04
needs:
- ...
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
steps:
- uses: actions/download-artifact@v3
- shell: bash
run: pip install PyGithub --progress-bar off
- name: Set list of files for uploading
id: files
shell: python
run: |
from github import Github
print("· Get GitHub API handler (authenticate)")
gh = Github('${{ github.token }}')
print("· Get Repository handler")
gh_repo = gh.get_repo('${{ github.repository }}')
```
Find a non-trivial use case at [msys2/msys2-autobuild](https://github.com/msys2/msys2-autobuild).

View File

@@ -1,45 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
token:
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
required: true
files:
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
required: true
tag:
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
required: false
default: tip
rm:
description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
required: false
default: false
snapshots:
description: 'Whether to create releases from any tag or to treat some as snapshots'
required: false
default: true
runs:
using: 'docker'
image: 'docker://ghcr.io/pytooling/releaser'

View File

@@ -1,59 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
token:
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
required: true
files:
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
required: true
tag:
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
required: false
default: tip
rm:
description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
required: false
default: false
snapshots:
description: 'Whether to create releases from any tag or to treat some as snapshots'
required: false
default: true
runs:
using: 'composite'
steps:
- shell: bash
run: |
[ "$(source /etc/os-release && echo $VERSION_ID)" == "24.04" ] && UBUNTU_2404_ARGS='--break-system-packages' || unset UBUNTU_2404_ARGS
pip install --disable-pip-version-check --progress-bar off $UBUNTU_2404_ARGS PyGithub
- shell: bash
run: '''${{ github.action_path }}/../releaser.py'''
env:
INPUT_TOKEN: ${{ inputs.token }}
INPUT_FILES: ${{ inputs.files }}
INPUT_TAG: ${{ inputs.tag }}
INPUT_RM: ${{ inputs.rm }}
INPUT_SNAPSHOTS: ${{ inputs.snapshots }}

View File

@@ -1,2 +0,0 @@
[tool.black]
line-length = 120

View File

@@ -1,193 +0,0 @@
#!/usr/bin/env python3
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
import re
from sys import argv as sys_argv, stdout, exit as sys_exit
from os import environ, getenv
from glob import glob
from pathlib import Path
from github import Github, GithubException
from subprocess import check_call
paramTag = getenv("INPUT_TAG", "tip")
paramFiles = getenv("INPUT_FILES", None).split()
paramRM = getenv("INPUT_RM", "false") == "true"
paramSnapshots = getenv("INPUT_SNAPSHOTS", "true").lower() == "true"
paramToken = (
environ["GITHUB_TOKEN"]
if "GITHUB_TOKEN" in environ
else environ["INPUT_TOKEN"]
if "INPUT_TOKEN" in environ
else None
)
paramRepo = getenv("GITHUB_REPOSITORY", None)
paramRef = getenv("GITHUB_REF", None)
paramSHA = getenv("GITHUB_SHA", None)
def GetListOfArtifacts(argv, files):
print("· Get list of artifacts to be uploaded")
args = files if files is not None else []
if len(argv) > 1:
args += argv[1:]
if len(args) == 1 and args[0].lower() == "none":
print("! Skipping 'files' because it's set to 'none'.")
return []
elif len(args) == 0:
stdout.flush()
raise (Exception("Glob patterns need to be provided as positional arguments or through envvar 'INPUT_FILES'!"))
else:
flist = []
for item in args:
print(f" glob({item!s}):")
for fname in [fname for fname in glob(item, recursive=True) if not Path(fname).is_dir()]:
if Path(fname).stat().st_size == 0:
print(f" - ! Skipping empty file {fname!s}.")
continue
print(f" - {fname!s}")
flist.append(fname)
if len(flist) < 1:
stdout.flush()
raise (Exception("Empty list of files to upload/update!"))
return sorted(flist)
def GetGitHubAPIHandler(token):
print("· Get GitHub API handler (authenticate)")
if token is not None:
return Github(token)
raise (Exception("Need credentials to authenticate! Please, provide 'GITHUB_TOKEN' or 'INPUT_TOKEN'"))
def CheckRefSemVer(gh_ref, tag, snapshots):
print("· Check SemVer compliance of the reference/tag")
env_tag = None
if gh_ref[0:10] == "refs/tags/":
env_tag = gh_ref[10:]
if env_tag != tag:
rexp = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
semver = re.search(rexp, env_tag)
if semver == None and env_tag[0] == "v":
semver = re.search(rexp, env_tag[1:])
tag = env_tag
if semver == None:
print(f"! Could not get semver from {gh_ref!s}")
print(f"! Treat tag '{tag!s}' as a release")
return (tag, env_tag, False)
else:
if semver.group("prerelease") is None:
# is a regular semver compliant tag
return (tag, env_tag, False)
elif snapshots:
# is a semver compliant prerelease tag, thus a snapshot (we skip it)
print("! Skipping snapshot prerelease.")
sys_exit()
return (tag, env_tag, True)
def GetRepositoryHandler(gh, repo):
print("· Get Repository handler")
if repo is None:
stdout.flush()
raise (Exception("Repository name not defined! Please set 'GITHUB_REPOSITORY'"))
return gh.get_repo(repo)
def GetOrCreateRelease(gh_repo, tag, sha, is_prerelease):
print("· Get Release handler")
gh_tag = None
try:
gh_tag = gh_repo.get_git_ref(f"tags/{tag!s}")
except Exception:
stdout.flush()
if gh_tag:
try:
return (gh_repo.get_release(tag), False)
except Exception:
return (gh_repo.create_git_release(tag, tag, "", draft=True, prerelease=is_prerelease), True)
else:
err_msg = f"Tag/release '{tag!s}' does not exist and could not create it!"
if sha is None:
raise (Exception(err_msg))
try:
return (
gh_repo.create_git_tag_and_release(
tag, "", tag, "", sha, "commit", draft=True, prerelease=is_prerelease
),
True,
)
except Exception:
raise (Exception(err_msg))
def UpdateReference(gh_release, tag, sha, is_prerelease, is_draft):
print("· Update Release reference (force-push tag)")
if is_draft:
# Unfortunately, it seems not possible to update fields 'created_at' or 'published_at'.
print(" > Update (pre-)release")
gh_release.update_release(
gh_release.title,
"" if gh_release.body is None else gh_release.body,
draft=False,
prerelease=is_prerelease,
tag_name=gh_release.tag_name,
target_commitish=gh_release.target_commitish,
)
if sha is not None:
print(f" > Force-push '{tag!s}' to {sha!s}")
gh_repo.get_git_ref(f"tags/{tag!s}").edit(sha)
files = GetListOfArtifacts(sys_argv, paramFiles)
stdout.flush()
[tag, env_tag, is_prerelease] = CheckRefSemVer(paramRef, paramTag, paramSnapshots)
stdout.flush()
gh_repo = GetRepositoryHandler(GetGitHubAPIHandler(paramToken), paramRepo)
stdout.flush()
[gh_release, is_draft] = GetOrCreateRelease(gh_repo, tag, paramSHA, is_prerelease)
stdout.flush()
if paramRM:
print("· RM set. All previous assets are being cleared...")
for asset in gh_release.get_assets():
print(f" - {asset.name}")
asset.delete_asset()
stdout.flush()
if len(files) > 0:
print("· Upload assets")
env = environ.copy()
env["GITHUB_TOKEN"] = paramToken
cmd = ["gh", "release", "upload", "--repo", paramRepo, "--clobber", tag] + files
print(f" > {' '.join(cmd)}")
check_call(cmd, env=env)
stdout.flush()
else:
print("! Skipping uploading assets because the file list is empty.")
UpdateReference(gh_release, tag, paramSHA if env_tag is None else None, is_prerelease, is_draft)

View File

@@ -1 +1 @@
-r ../requirements.txt
-r ../unit/requirements.txt

View File

@@ -1,13 +1,4 @@
-r ../requirements.txt
# Coverage collection
Coverage ~= 7.11
# Test Runner
pytest ~= 9.0
pytest-cov ~= 7.0
# Static Type Checking
mypy[reports] ~= 1.18
typing_extensions ~= 4.15
lxml >= 5.4, <7.0
# Collect all testing requirements
-r platform/requirements.txt
-r typing/requirements.txt
-r unit/requirements.txt

View File

@@ -0,0 +1,6 @@
-r ../../requirements.txt
# Static Type Checking
mypy[reports] ~= 1.19
typing_extensions ~= 4.15
lxml >= 5.4, <7.0

View File

@@ -1 +1,8 @@
-r ../requirements.txt
-r ../../requirements.txt
# Coverage collection
Coverage ~= 7.13
# Test Runner
pytest ~= 9.0
pytest-cov ~= 7.0