Updating r7 from v7.0.1

This commit is contained in:
Patrick Lehmann
2025-12-20 00:31:50 +01:00
71 changed files with 654 additions and 1869 deletions

View File

@@ -86,10 +86,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install
@@ -262,7 +262,7 @@ jobs:
- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit

View File

@@ -1,72 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Documentation
on:
workflow_call:
inputs:
artifact:
description: 'Name of the documentation artifact.'
required: false
default: ''
type: string
jobs:
BuildTheDocs:
name: 📓 Run BuildTheDocs
runs-on: ubuntu-24.04
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "Deprecated" "'BuildTheDocs.yml' template is deprecated. Please switch to 'SphinxDocumentation.yml'. See https://pytooling.github.io/Actions/JobTemplate/Documentation/SphinxDocumentation.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
- name: 🛳️ Build documentation
uses: buildthedocs/btd@v0
with:
skip-deploy: true
- name: 📤 Upload 'documentation' artifacts
uses: pyTooling/upload-artifact@v5
if: inputs.artifact != ''
with:
name: ${{ inputs.artifact }}
working-directory: doc/_build/html
path: '*'
retention-days: 1
- name: '📓 Publish site to GitHub Pages'
if: inputs.artifact == '' && github.event_name != 'pull_request'
run: |
cp --recursive -T doc/_build/html public
cd public
touch .nojekyll
git init
cp ../.git/config ./.git/config
git add .
git config --local user.email "BuildTheDocs@GitHubActions"
git config --local user.name "GitHub Actions"
git commit -a -m "update ${{ github.sha }}"
git push -u origin +HEAD:gh-pages

View File

@@ -71,7 +71,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -140,7 +140,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -180,7 +180,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true

View File

@@ -50,7 +50,7 @@ jobs:
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -136,13 +136,13 @@ on:
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
@@ -154,7 +154,7 @@ jobs:
disable_list: ${{ inputs.unittest_disable_list }}
# AppTestingParams:
# uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
# uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
# with:
# package_namespace: ${{ inputs.package_namespace }}
# package_name: ${{ inputs.package_name }}
@@ -166,7 +166,7 @@ jobs:
# disable_list: ${{ inputs.apptest_disable_list }}
InstallParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
@@ -188,7 +188,7 @@ jobs:
code_version: ${{ steps.extract.outputs.code_version }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -230,15 +230,13 @@ jobs:
"""))
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
# TODO: shouldn't this be configured by a parameter? Same as directories
requirements: "-r tests/unit/requirements.txt"
# pacboy: "msys/git python-lxml:p"
unittest_report_xml: ${{ needs.ConfigParams.outputs.unittest_report_xml }}
coverage_report_xml: ${{ needs.ConfigParams.outputs.coverage_report_xml }}
coverage_report_json: ${{ needs.ConfigParams.outputs.coverage_report_json }}
@@ -247,7 +245,7 @@ jobs:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -261,7 +259,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
CodeQuality:
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r7
needs:
- UnitTestingParams
with:
@@ -272,7 +270,7 @@ jobs:
artifact: CodeQuality
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r7
needs:
- UnitTestingParams
with:
@@ -280,7 +278,7 @@ jobs:
directory: ${{ needs.UnitTestingParams.outputs.package_directory }}
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- UnitTestingParams
with:
@@ -288,7 +286,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
Install:
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r6
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r7
needs:
- UnitTestingParams
- InstallParams
@@ -299,7 +297,7 @@ jobs:
package_name: ${{ needs.UnitTestingParams.outputs.package_fullname }}
# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r6
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r7
# needs:
# - AppTestingParams
# - UnitTestingParams
@@ -310,7 +308,7 @@ jobs:
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -332,7 +330,7 @@ jobs:
CODACY_TOKEN: ${{ secrets.CODACY_TOKEN }}
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -348,14 +346,14 @@ jobs:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r6
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r7
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -372,7 +370,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r7
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -383,7 +381,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -394,7 +392,7 @@ jobs:
can-fail: 'true'
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -407,7 +405,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
TriggerTaggedRelease:
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r6
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r7
needs:
- Prepare
- UnitTesting
@@ -426,7 +424,7 @@ jobs:
secrets: inherit
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
- UnitTesting
@@ -444,7 +442,7 @@ jobs:
secrets: inherit
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
needs:
- Prepare
- UnitTestingParams
@@ -459,7 +457,7 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- UnitTestingParams
- UnitTesting

View File

@@ -1,187 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Coverage Collection
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.11'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
type: string
tests_directory:
description: 'Path to the directory containing tests (test working directory).'
required: false
default: 'tests'
type: string
unittest_directory:
description: 'Path to the directory containing unit tests (relative to tests_directory).'
required: false
default: 'unit'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
default: 'pyproject.toml'
type: string
artifact:
description: 'Name of the coverage artifact.'
required: true
type: string
secrets:
codacy_token:
description: 'Token to push result to codacy.'
required: true
jobs:
Coverage:
name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "Deprecated" "'CoverageCollection.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Testing/UnitTesting.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
with:
lfs: true
submodules: true
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python_version }}
- name: 🗂 Install dependencies
run: |
python -m pip install --disable-pip-version-check tomli
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from tomli import load as tomli_load
from textwrap import dedent
htmlDirectory = 'htmlcov'
xmlFile = './coverage.xml'
coverageRC = "${{ inputs.coverage_config }}".strip()
# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)
htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)
# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)
htmlDirectory = coverageRCSettings["html"]["directory"]
xmlFile = coverageRCSettings["xml"]["output"]
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)
# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory}
coverage_report_xml={xmlFile}
"""))
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}")
- name: Collect coverage
continue-on-error: true
run: |
export ENVIRONMENT_NAME="Linux (x86-64)"
export PYTHONPATH=$(pwd)
ABSDIR=$(pwd)
cd "${{ inputs.tests_directory || '.' }}"
[ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes
- name: Convert to cobertura format
run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage
- name: Convert to HTML format
run: |
coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore
- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
uses: pyTooling/upload-artifact@v5
with:
name: ${{ inputs.artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1
- name: 📊 Publish coverage at CodeCov
continue-on-error: true
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests
env_vars: PYTHON
- name: 📉 Publish coverage at Codacy
continue-on-error: true
uses: codacy/codacy-coverage-reporter-action@v1
with:
project-token: ${{ secrets.codacy_token }}
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}

View File

@@ -68,7 +68,7 @@ on:
jobs:
Extract:
name: 📓 Extract configurations from pyproject.toml
name: 🔬 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
@@ -82,7 +82,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -53,7 +53,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install

View File

@@ -60,7 +60,7 @@ jobs:
continue-on-error: ${{ inputs.can-fail == 'true' }}
steps:
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.latex_artifact }}
path: latex
@@ -83,7 +83,7 @@ jobs:
latexmk -${{ inputs.processor }} "${{ inputs.document }}.tex"
- name: 📤 Upload 'PDF Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}

View File

@@ -1,533 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Nightly
on:
workflow_call:
inputs:
ubuntu_image:
description: 'Name of the Ubuntu image.'
required: false
default: 'ubuntu-24.04'
type: string
nightly_name:
description: 'Name of the nightly release.'
required: false
default: 'nightly'
type: string
nightly_title:
description: 'Title of the nightly release.'
required: false
default: ''
type: string
nightly_description:
description: 'Description of the nightly release.'
required: false
default: 'Release of artifacts from latest CI pipeline.'
type: string
draft:
description: 'Specify if this is a draft.'
required: false
default: false
type: boolean
prerelease:
description: 'Specify if this is a pre-release.'
required: false
default: false
type: boolean
latest:
description: 'Specify if this is the latest release.'
required: false
default: false
type: boolean
replacements:
description: 'Multi-line string containing search=replace patterns.'
required: false
default: ''
type: string
assets:
description: 'Multi-line string containing artifact:file:title asset descriptions.'
required: true
type: string
inventory-json:
type: string
required: false
default: ''
inventory-version:
type: string
required: false
default: ''
inventory-categories:
type: string
required: false
default: ''
tarball-name:
type: string
required: false
default: '__pyTooling_upload_artifact__.tar'
can-fail:
type: boolean
required: false
default: false
jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
continue-on-error: ${{ inputs.can-fail }}
permissions:
contents: write
actions: write
# attestations: write
steps:
- name: ⚠️ Deprecation Warning
run: printf "::warning title=%s::%s\n" "NightlyRelease" "'NightlyRelease.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Release/PublishReleaseNotes.html"
- name: ⏬ Checkout repository
uses: actions/checkout@v5
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: 🔧 Install zstd
run: sudo apt-get install -y --no-install-recommends zstd
- name: 📑 Delete (old) Release Page
id: deleteReleasePage
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... "
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
elif [[ "${message}" == "release not found" ]]; then
printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 (Re)create (new) Release Page
id: createReleasePage
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
addDraft="--draft"
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi
if [[ "${{ inputs.latest }}" == "false" ]]; then
addLatest="--latest=false"
fi
if [[ "${{ inputs.nightly_title }}" != "" ]]; then
addTitle=("--title" "${{ inputs.nightly_title }}")
fi
cat <<'EOF' > __NoTeS__.md
${{ inputs.nightly_description }}
EOF
if [[ -s __NoTeS__.md ]]; then
addNotes=("--notes-file" "__NoTeS__.md")
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
done <<<'${{ inputs.replacements }}'
# Add footer line
cat <<EOF >> __NoTeS__.md
--------
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
EOF
printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📥 Download artifacts and upload as assets
id: uploadAssets
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
Replace() {
line="$1"
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
line="${line//"%$pattern%"/"$replacement"}"
done <<<'${{ inputs.replacements }}'
printf "%s\n" "$line"
}
# Create JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
VERSION="1.0"
# Split categories by ',' into a Bash array.
# See https://stackoverflow.com/a/45201229/3719459
if [[ "${{ inputs.inventory-categories }}" != "" ]]; then
readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }},"
unset 'inventoryCategories[-1]'
declare -p inventoryCategories
else
inventoryCategories=""
fi
jsonInventory=$(jq -c -n \
--arg version "${VERSION}" \
--arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
--argjson jsonMeta "$(jq -c -n \
--arg tag "${{ inputs.nightly_name }}" \
--arg version "${{ inputs.inventory-version }}" \
--arg hash "${{ github.sha }}" \
--arg repo "${{ github.server_url }}/${{ github.repository }}" \
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
--argjson categories "$(jq -c -n \
'$ARGS.positional' \
--args "${inventoryCategories[@]}" \
)" \
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \
)" \
'{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
)
fi
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
continue
fi
# split assetLine colon separated triple: artifact:asset:title
artifact="${assetLine%%:*}"
assetLine="${assetLine#*:}"
asset="${assetLine%%:*}"
assetLine="${assetLine#*:}"
if [[ "${{ inputs.inventory-json }}" == "" ]]; then
categories=""
title="${assetLine##*:}"
else
categories="${assetLine%%:*}"
title="${assetLine##*:}"
fi
# remove leading whitespace
asset="${asset#"${asset%%[![:space:]]*}"}"
categories="${categories#"${categories%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"
# apply replacements
asset="$(Replace "${asset}")"
title="$(Replace "${title}")"
printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
printf " %s" "Checked asset for duplicates ... "
if [[ -n "${assetFilenames[$asset]}" ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "DuplicateAsset" "Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
assetFilenames[$asset]=1
fi
# Download artifact by artifact name
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
else
printf " downloading '${artifact}' ...\n"
printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "ArtifactNotFound" "Couldn't download artifact '${artifact}'."
ERRORS=$((ERRORS + 1))
continue
fi
downloadedArtifacts[$artifact]=1
printf " %s" "Checking for embedded tarball ... "
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"
pushd "${artifact}" > /dev/null
printf " %s" "Extracting embedded tarball ... "
tar -xf "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi
printf " %s" "Removing temporary tarball ... "
rm -f "${{ inputs.tarball-name }}"
if [[ $? -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
fi
popd > /dev/null
else
printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi
# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
printf " %s" "checking asset '${artifact}/${asset}' ... "
if [[ "${asset}" == !*.zip ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
retCode=$?
printf "::endgroup::\n"
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${artifact}/${asset}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "FileNotFound" "Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=$((ERRORS + 1))
continue
fi
# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
category=""
jsonEntry=$(jq -c -n \
--arg title "${title}" \
--arg file "${uploadFile#*/}" \
'{"file": $file, "title": $title}' \
)
while [[ "${categories}" != "${category}" ]]; do
category="${categories##*,}"
categories="${categories%,*}"
jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
done
jsonInventory=$(jq -c -n \
--argjson inventory "${jsonInventory}" \
--argjson file "${jsonEntry}" \
'$inventory * {"files": $file}' \
)
else
printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi
# Upload asset to existing release page
printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
fi
done <<<'${{ inputs.assets }}'
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
inventoryTitle="Release Inventory (JSON)"
printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
cat "${{ inputs.inventory-json }}"
printf "::endgroup::\n"
# Upload inventory asset to existing release page
printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
ERRORS=$((ERRORS + 1))
continue
fi
fi
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
tree -pash -L 3 .
printf "::endgroup::\n"
if [[ $ERRORS -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi
- name: 📑 Remove draft state from Release Page
if: ${{ ! inputs.draft }}
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Remove draft-state from release page
# Bugfix: '${title}' was never defined in this script; use the release name input,
# matching the error messages below.
printf "%s" "Remove draft-state from release '${{ inputs.nightly_name }}' ... "
gh release edit --draft=false "${{ inputs.nightly_name }}"
if [[ $? -eq 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "  %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "::error title=%s::%s\n" "ReleasePage" "Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi

View File

@@ -53,7 +53,7 @@ jobs:
artifact: ${{ inputs.artifact }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -106,7 +106,7 @@ jobs:
run: python setup.py bdist_wheel
- name: 📤 Upload wheel artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.artifact }}
working-directory: dist

View File

@@ -103,7 +103,7 @@ on:
macos_intel_image:
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
required: false
default: 'macos-13'
default: 'macos-15-intel'
type: string
macos_arm_image:
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
@@ -154,7 +154,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -131,7 +131,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -109,13 +109,13 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
tree -pash ${{ fromJson(inputs.coverage_report_html).directory }}
- name: 📤 Upload 'Coverage SQLite Database' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
with:
@@ -166,7 +166,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage XML Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
with:
@@ -177,7 +177,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage JSON Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_json_artifact != ''
continue-on-error: true
with:
@@ -188,7 +188,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage HTML Report' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.coverage_html_artifact != ''
continue-on-error: true
with:

View File

@@ -56,7 +56,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.artifact }}
path: dist

View File

@@ -132,7 +132,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -191,198 +191,6 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
# Read the second parent (merge parent) of the current commit; empty for non-merge commits.
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
# Bugfix: was '"{FATHER_SHA}"' (literal braces, never empty) — missing '$' defeated the empty-check.
if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
# Demote headlines by two levels: append two '#' after the existing run.
# Bugfix: previously appended '###' (three levels), identical to the '+3' case.
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1## /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Create new Release Page
id: createReleasePage
if: inputs.mode == 'release'
@@ -397,6 +205,15 @@ jobs:
export GH_TOKEN=${{ github.token }}
tee "__PRELIMINARY_NOTES__.md" <<EOF
Release notes for ${{ inputs.tag }} are created right now ...
1. download artifacts &rarr; (compression?) &rarr; upload as assets
2. optional: create inventory.json
3. assemble release notes &rarr; update this text
4. optional: remove draft state
EOF
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi
@@ -409,9 +226,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
fi
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -439,6 +254,14 @@ jobs:
export GH_TOKEN=${{ github.token }}
tee "__PRELIMINARY_NOTES__.md" <<EOF
Release notes for ${{ inputs.tag }} are updated right now ...
1. download artifacts &rarr; (compression?) &rarr; upload as assets
2. optional: create inventory.json
3. assemble release notes &rarr; update this text
EOF
addDraft="--draft"
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
@@ -452,9 +275,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
fi
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -553,6 +374,10 @@ jobs:
)
fi
# Write Markdown table header
printf "| Asset Name | File Size | SHA256 |\n" > __ASSETS__.md
printf "|------------|-----------|--------|\n" >> __ASSETS__.md
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
@@ -741,6 +566,13 @@ jobs:
sha256Checksums[$asset]="sha256:${sha256}"
printf "${ANSI_LIGHT_BLUE}${sha256}${ANSI_NOCOLOR}\n"
# Add asset to Markdown table
printf "| %s | %s | %s |\n" \
"[${title}](${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}/${uploadFile#*/})" \
"$(stat --printf="%s" "${uploadFile}" | numfmt --format "%.1f" --suffix=B --to=iec-i)" \
"\`${sha256}\`" \
>> __ASSETS__.md
# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
@@ -775,7 +607,7 @@ jobs:
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf " checking assets SHA256 checksum ... \n"
printf " checking assets SHA256 checksum ... "
ghSHA256=$(gh release view --json assets --jq ".assets[] | select(.name == \"${asset}\") | .digest" ${{ inputs.tag }})
if [[ "${ghSHA256}" == "${sha256Checksums[$asset]}" ]]; then
printf "${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n"
@@ -829,6 +661,245 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
# Read the second parent (merge parent) of the current commit; empty for non-merge commits.
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
# Bugfix: was '"{FATHER_SHA}"' (literal braces, never empty) — missing '$' defeated the empty-check.
if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
# Demote headlines by two levels: append two '#' after the existing run.
# Bugfix: previously appended '###' (three levels), identical to the '+3' case.
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1## /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# Inline Files table
if [[ -s __ASSETS__.md ]]; then
NOTES="${NOTES//%%ASSETS%%/$(<__ASSETS__.md)}"
else
NOTES="${NOTES//%%ASSETS%%/}"
fi
# Inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __ASSETS__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__ASSETS__.md' ($(stat --printf="%s" "__ASSETS__.md") B) ...."
cat __ASSETS__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__ASSETS__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Update release notes
id: updateReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Replace the preliminary release description with the final notes assembled by the
# preceding 'Assemble Release Notes' step, which writes __NOTES__.md (the assets
# table from __ASSETS__.md is already inlined there via the %%ASSETS%% placeholder).
# Bugfix: previously uploaded __ASSETS__.md as the notes, discarding the assembled text.
if [[ -s __NOTES__.md ]]; then
addNotes=("--notes-file" "__NOTES__.md")
else
printf "  ${ANSI_LIGHT_RED}File '%s' not found.${ANSI_NOCOLOR}\n" "__NOTES__.md"
printf "::error title=%s::%s\n" "InternalError" "File '__NOTES__.md' not found."
exit 1
fi
printf "Updating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release edit "${addNotes[@]}" "${{ inputs.tag }}" 2>&1)"
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "  Release page: %s\n" "${message}"
else
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "  ${ANSI_LIGHT_RED}Couldn't update release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't update release '${{ inputs.tag }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 Remove draft state from Release Page
id: removeDraft
if: ${{ ! inputs.draft }}

View File

@@ -102,10 +102,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
fail_ci_if_error: true
- name: 📤 Upload merged 'JUnit Test Summary' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.merged_junit_artifact != ''
with:
name: ${{ inputs.merged_junit_artifact }}

View File

@@ -45,45 +45,51 @@ on:
default: ''
type: string
outputs:
github_pages_url:
description: "URL to GitHub Pages."
value: ${{ jobs.PrepareGitHubPages.outputs.github_pages_url }}
jobs:
PublishToGitHubPages:
name: 📚 Publish to GH-Pages
PrepareGitHubPages:
name: 📖 Merge multiple contents for publishing
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
permissions:
pages: write # to deploy to Pages
id-token: write # to verify the deployment originates from an appropriate source
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
outputs:
github_pages_url: ${{ steps.deployment.outputs.page_url }}
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.doc }}
path: public
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: ${{ inputs.coverage != '' }}
with:
name: ${{ inputs.coverage }}
path: public/coverage
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: ${{ inputs.typing != '' }}
with:
name: ${{ inputs.typing }}
path: public/typing
- name: '📓 Publish site to GitHub Pages'
- name: 📑 Upload static files as artifact
if: github.event_name != 'pull_request'
run: |
cd public
touch .nojekyll
git init
cp ../.git/config ./.git/config
git add .
git config --local user.email "BuildTheDocs@GitHubActions"
git config --local user.name "GitHub Actions"
git commit -a -m "update ${{ github.sha }}"
git push -u origin +HEAD:gh-pages
uses: actions/upload-pages-artifact@v4
with:
path: public/
- name: 📖 Deploy to GitHub Pages
id: deployment
if: github.event_name != 'pull_request'
uses: actions/deploy-pages@v4

View File

@@ -86,7 +86,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -105,7 +105,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -113,7 +113,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -129,7 +129,7 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
- name: 📤 Upload 'HTML Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.html_artifact != ''
continue-on-error: true
with:
@@ -145,7 +145,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -164,7 +164,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -172,7 +172,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
uses: pyTooling/download-artifact@v6
uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -272,7 +272,7 @@ jobs:
done
- name: 📤 Upload 'LaTeX Documentation' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.latex_artifact != ''
continue-on-error: true
with:

View File

@@ -38,7 +38,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
default: '-r tests/typing/requirements.txt'
type: string
mypy_options:
description: 'Additional mypy options.'
@@ -49,18 +49,18 @@ on:
description: 'Cobertura file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/cobertura.xml",
{ "fullpath": "report/typing/cobertura.xml",
"directory": "report/typing",
"filename": "cobertura.xml"
"filename": "cobertura.xml"
}
type: string
junit_report:
description: 'JUnit file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/StaticTypingSummary.xml",
{ "fullpath": "report/typing/StaticTypingSummary.xml",
"directory": "report/typing",
"filename": "StaticTypingSummary.xml"
"filename": "StaticTypingSummary.xml"
}
type: string
html_report:
@@ -94,7 +94,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6
@@ -142,7 +142,7 @@ jobs:
fi
- name: 📤 Upload '${{ inputs.html_artifact }}' HTML artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
with:
@@ -153,7 +153,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.junit_artifact }}' JUnit artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
with:
@@ -164,7 +164,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.cobertura_artifact }}' Cobertura artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: ${{ inputs.cobertura_artifact != '' }}
continue-on-error: true
with:

View File

@@ -47,7 +47,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
default: '-r ./requirements.txt'
type: string
mingw_requirements:
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
@@ -82,7 +82,7 @@ on:
root_directory:
description: 'Working directory for running tests.'
required: false
default: ''
default: '.'
type: string
tests_directory:
description: 'Path to the directory containing tests (relative from root_directory).'
@@ -181,7 +181,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -205,6 +205,38 @@ jobs:
run: |
py -3.9 -m pip install --disable-pip-version-check --break-system-packages -U tomli
- name: Compute path to requirements file
  id: requirements
  shell: python
  run: |
    from os import getenv
    from pathlib import Path
    from sys import version

    print(f"Python: {version}")

    requirements = "${{ inputs.requirements }}"
    if requirements.startswith("-r"):
        requirements = requirements[2:].lstrip()
        # A './'-relative file is resolved against root/tests/unittest directories;
        # any other path is taken as-is (relative to the workspace root).
        if requirements.startswith("./"):
            requirementsFile = Path("${{ inputs.root_directory || '.' }}") / Path("${{ inputs.tests_directory || '.' }}") / Path("${{ inputs.unittest_directory || '.' }}") / Path(requirements[2:])
        else:
            requirementsFile = Path(requirements)

        if not requirementsFile.exists():
            # Bugfix: previously referenced the undefined name 'ex' here (NameError);
            # report the missing file path instead.
            print(f"::error title=FileNotFoundError::Requirements file '{requirementsFile}' not found.")
            exit(1)

        print(f"requirements file: {requirementsFile.as_posix()}")

        # Write requirements path to special file
        github_output = Path(getenv("GITHUB_OUTPUT"))
        print(f"GITHUB_OUTPUT: {github_output}")
        with github_output.open("a+") as f:
            f.write(f"requirements=-r {requirementsFile.as_posix()}\n")
    else:
        print(f"requirements list: {requirements}")
        # Bugfix: pass a plain requirements list through to the step output as well;
        # previously nothing was written here, so the later 'pip install' using this
        # step's 'requirements' output ran with no arguments and failed.
        github_output = Path(getenv("GITHUB_OUTPUT"))
        with github_output.open("a+") as f:
            f.write(f"requirements={requirements}\n")
- name: Compute pacman/pacboy packages
id: pacboy
if: matrix.system == 'msys2'
@@ -215,8 +247,6 @@ jobs:
from re import compile
from sys import version
print(f"Python: {version}")
def loadRequirementsFile(requirementsFile: Path):
requirements = []
with requirementsFile.open("r") as file:
@@ -232,11 +262,10 @@ jobs:
return requirements
requirements = "${{ inputs.requirements }}"
requirements = "${{ steps.requirements.outputs.requirements }}"
if requirements.startswith("-r"):
requirementsFile = Path(requirements[2:].lstrip())
try:
dependencies = loadRequirementsFile(requirementsFile)
dependencies = loadRequirementsFile(Path(requirements[2:].lstrip()))
except FileNotFoundError as ex:
print(f"::error title=FileNotFoundError::{ex}")
exit(1)
@@ -324,7 +353,7 @@ jobs:
if: matrix.system != 'msys2'
run: |
python -m pip install --disable-pip-version-check -U wheel tomli
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
python -m pip install --disable-pip-version-check ${{ steps.requirements.outputs.requirements }}
- name: 🔧 Install pip dependencies (MSYS2)
if: matrix.system == 'msys2'
@@ -332,7 +361,7 @@ jobs:
if [ -n '${{ inputs.mingw_requirements }}' ]; then
python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.mingw_requirements }}
else
python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.requirements }}
python -m pip install --disable-pip-version-check --break-system-packages ${{ steps.requirements.outputs.requirements }}
fi
# Before scripts
@@ -421,7 +450,7 @@ jobs:
# Upload artifacts
- name: 📤 Upload '${{ fromJson(inputs.unittest_report_xml).filename }}' artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
with:
@@ -434,7 +463,7 @@ jobs:
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
# if: inputs.unittest_html_artifact != ''
# continue-on-error: true
# uses: pyTooling/upload-artifact@v5
# uses: pyTooling/upload-artifact@v6
# with:
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
# path: ${{ inputs.unittest_report_html_directory }}
@@ -444,7 +473,7 @@ jobs:
- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: .coverage
@@ -455,7 +484,7 @@ jobs:
- name: 📤 Upload 'Coverage XML Report' artifact
if: inputs.coverage_xml_artifact != '' && steps.convert_xml.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_xml).directory }}
@@ -466,7 +495,7 @@ jobs:
- name: 📤 Upload 'Coverage JSON Report' artifact
if: inputs.coverage_json_artifact != '' && steps.convert_json.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_json).directory }}
@@ -477,7 +506,7 @@ jobs:
- name: 📤 Upload 'Coverage HTML Report' artifact
if: inputs.coverage_html_artifact != '' && steps.convert_html.outcome == 'success'
continue-on-error: true
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_html).directory }}

View File

@@ -44,7 +44,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: 🐍 Setup Python
uses: actions/setup-python@v6

View File

@@ -6,7 +6,7 @@ on:
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13 3.14" # py-1, py-0
@@ -25,7 +25,7 @@ jobs:
run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
run: printf "%s\n" "Package" >> package.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
path: package.txt
@@ -50,7 +50,7 @@ jobs:
retention-days: 1
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- Params
- Testing

View File

@@ -16,10 +16,9 @@ jobs:
include:
- {icon: '🐧', name: 'Ubuntu 22.04 (x86-64)', image: 'ubuntu-22.04', shell: 'bash', can-fail: false}
- {icon: '🐧', name: 'Ubuntu 24.04 (x86-64)', image: 'ubuntu-24.04', shell: 'bash', can-fail: false} # latest
- {icon: '🍎', name: 'macOS-13 (x86-64)', image: 'macos-13', shell: 'bash', can-fail: false}
- {icon: '🍎', name: 'macOS-14 (x86-64)', image: 'macos-14-large', shell: 'bash', can-fail: true } # not in free plan
- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # not in free plan
- {icon: '🍏', name: 'macOS-13 (aarch64)', image: 'macos-13-xlarge', shell: 'bash', can-fail: true } # not in free plan
### - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # same as -intel; not in free plan
- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-intel', shell: 'bash', can-fail: false}
- {icon: '🍏', name: 'macOS-14 (aarch64)', image: 'macos-14', shell: 'bash', can-fail: false} # latest
- {icon: '🍏', name: 'macOS-15 (aarch64)', image: 'macos-15', shell: 'bash', can-fail: false}
- {icon: '🪟', name: 'Windows Server 2022', image: 'windows-2022', shell: 'bash', can-fail: false}

View File

@@ -6,20 +6,20 @@ on:
jobs:
Prepare:
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r6
uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@r7
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r6
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r7
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
disable_list: 'windows-arm:pypy-3.11'
PlatformTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
name: 'Platform'
@@ -27,14 +27,14 @@ jobs:
system_list: 'ubuntu ubuntu-arm windows windows-arm macos mingw64 clang64 ucrt64'
InstallParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
package_name: 'myPackage'
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
python_version_list: ''
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -52,7 +52,7 @@ jobs:
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
PlatformTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r7
needs:
- ConfigParams
- PlatformTestingParams
@@ -72,7 +72,7 @@ jobs:
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -82,7 +82,7 @@ jobs:
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
CodeQuality:
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckCodeQuality.yml@r7
needs:
- UnitTestingParams
with:
@@ -93,7 +93,7 @@ jobs:
artifact: 'CodeQuality'
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -103,7 +103,7 @@ jobs:
# fail_below: 70
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r6
uses: pyTooling/Actions/.github/workflows/Package.yml@r7
needs:
- UnitTestingParams
# - UnitTesting
@@ -113,7 +113,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
Install:
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r6
uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@r7
needs:
- UnitTestingParams
- InstallParams
@@ -124,7 +124,7 @@ jobs:
package_name: ${{ needs.UnitTestingParams.outputs.package_fullname }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -143,7 +143,7 @@ jobs:
secrets: inherit
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -158,14 +158,14 @@ jobs:
secrets: inherit
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r6
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r7
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r7
needs:
- ConfigParams
- UnitTestingParams
@@ -182,7 +182,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r7
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -192,7 +192,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r6
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -202,7 +202,7 @@ jobs:
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r7
needs:
- UnitTestingParams
- Documentation
@@ -215,7 +215,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
TriggerTaggedRelease:
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r6
uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@r7
needs:
- Prepare
- UnitTesting
@@ -233,7 +233,7 @@ jobs:
secrets: inherit
ReleasePage:
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@r7
needs:
- Prepare
- UnitTesting
@@ -251,7 +251,7 @@ jobs:
secrets: inherit
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r6
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r7
needs:
- UnitTestingParams
- ReleasePage
@@ -264,7 +264,7 @@ jobs:
secrets: inherit
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r6
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r7
needs:
- UnitTestingParams
- PlatformTestingParams

View File

@@ -6,7 +6,7 @@ on:
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_namespace: 'myFramework'
package_name: 'Extension'

View File

@@ -17,7 +17,7 @@ jobs:
printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
- name: 📤 Upload artifact
uses: pyTooling/upload-artifact@v5
uses: pyTooling/upload-artifact@v6
with:
name: document
path: |
@@ -29,10 +29,11 @@ jobs:
- name: 🖉 Program
run: |
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document2.txt
printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
- name: 📤 Upload artifact
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: other
path: |
@@ -55,7 +56,7 @@ jobs:
version=4.2.0
tool=myTool
prog=program
tag: 4.2.0
tag: v4.2.0
title: "Nightly Test Release"
description: |
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
@@ -63,10 +64,14 @@ jobs:
# %tool% %version%
* %prog%
# Attached files:
%%ASSETS%%
assets: |
document: document1.txt: Documentation
document: build.log: Logfile - %tool% - %tool%
other: document1.txt: SBOM - %version%
other: document2.txt: SBOM - %version%
other: %prog%.py: Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz
@@ -108,7 +113,7 @@ jobs:
# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%
other: document2.txt: build,SBOM:SBOM - %version%
other: %prog%.py: app,binary:Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz

View File

@@ -6,24 +6,24 @@ on:
jobs:
Params_Default:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
Params_PythonVersions:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12 3.13 pypy-3.10 pypy-3.11" # py-2, py-1, pypy-1, pypy-0
Params_Systems:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
system_list: "windows mingw32 mingw64"
Params_Include:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12" # py-2
@@ -31,7 +31,7 @@ jobs:
include_list: "ubuntu:3.13 ubuntu:3.14 ubuntu-arm:3.12"
Params_Exclude:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13" # py-1
@@ -39,7 +39,7 @@ jobs:
exclude_list: "windows:3.13 windows:3.14"
Params_Disable:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.13" # py-1
@@ -47,7 +47,7 @@ jobs:
disable_list: "windows:3.13 windows:3.14"
Params_All:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r7
with:
name: Example
python_version_list: "3.12 3.13" # py-2, py-1
@@ -64,7 +64,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Default'
uses: ./.github/actions/CheckJobMatrix
@@ -92,7 +92,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_PythonVersions'
uses: ./.github/actions/CheckJobMatrix
@@ -114,7 +114,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Systems'
uses: ./.github/actions/CheckJobMatrix
@@ -136,7 +136,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Include'
uses: ./.github/actions/CheckJobMatrix
@@ -158,7 +158,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Exclude'
uses: ./.github/actions/CheckJobMatrix
@@ -180,7 +180,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_Disable'
uses: ./.github/actions/CheckJobMatrix
@@ -202,7 +202,7 @@ jobs:
shell: python
steps:
- name: Checkout repository to access local Action
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Checking job matrix from 'Params_All'
uses: ./.github/actions/CheckJobMatrix

View File

@@ -6,7 +6,7 @@ on:
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r6
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r7
with:
package_name: 'myPackage'
unittest_python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'