Patrick Lehmann
2025-12-17 00:21:12 +01:00
committed by GitHub
39 changed files with 437 additions and 825 deletions

View File

@@ -86,10 +86,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install
@@ -262,7 +262,7 @@ jobs:
- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit
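Taken together, these hunks are straight major-version bumps of the shared composite actions. A minimal caller sketch using the new majors is shown below; the trigger, job names, artifact name 'my-wheel', and the placeholder file are hypothetical, while the action references (actions/checkout@v6, pyTooling/upload-artifact@v6, pyTooling/download-artifact@v7) and the name/path parameters mirror the hunks above.

  # Sketch only: illustrates the bumped action majors from this commit.
  name: ExampleVersionBumps            # hypothetical workflow name
  on: workflow_dispatch
  jobs:
    Package:
      runs-on: ubuntu-24.04
      steps:
        - name: ⏬ Checkout repository
          uses: actions/checkout@v6
        - name: 🖉 Produce a placeholder file
          run: printf "%s\n" "wheel placeholder" > example.whl
        - name: 📤 Upload artifact
          uses: pyTooling/upload-artifact@v6
          with:
            name: my-wheel             # hypothetical artifact name
            path: example.whl
    Install:
      needs: Package
      runs-on: ubuntu-24.04
      steps:
        - name: 📥 Download artifacts 'my-wheel' from 'Package' job
          uses: pyTooling/download-artifact@v7
          with:
            name: my-wheel
            path: install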

View File

@@ -41,7 +41,7 @@ jobs:
run: printf "::warning title=%s::%s\n" "Deprecated" "'BuildTheDocs.yml' template is deprecated. Please switch to 'SphinxDocumentation.yml'. See https://pytooling.github.io/Actions/JobTemplate/Documentation/SphinxDocumentation.html"
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 🛳️ Build documentation
uses: buildthedocs/btd@v0
@@ -49,7 +49,7 @@ jobs:
skip-deploy: true
- name: 📤 Upload 'documentation' artifacts
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.artifact != ''
with:
name: ${{ inputs.artifact }}
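The deprecation warning points callers at the SphinxDocumentation.yml job template. A possible migration sketch follows; the '@r6' ref, job name, and artifact names are assumptions, while the html_artifact and latex_artifact inputs appear in the SphinxDocumentation hunks later in this commit.

  # Migration sketch, not the authoritative call signature.
  jobs:
    Documentation:
      uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r6   # ref 'r6' is assumed
      with:
        html_artifact: documentation-html    # hypothetical artifact names
        latex_artifact: documentation-latex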

View File

@@ -71,7 +71,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -140,7 +140,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -180,7 +180,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true

View File

@@ -50,7 +50,7 @@ jobs:
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -188,7 +188,7 @@ jobs:
code_version: ${{ steps.extract.outputs.code_version }}
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -237,8 +237,6 @@ jobs:
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
# TODO: shouldn't this be configured by a parameter? Same as directories
-requirements: "-r tests/unit/requirements.txt"
-# pacboy: "msys/git python-lxml:p"
unittest_report_xml: ${{ needs.ConfigParams.outputs.unittest_report_xml }}
coverage_report_xml: ${{ needs.ConfigParams.outputs.coverage_report_xml }}
coverage_report_json: ${{ needs.ConfigParams.outputs.coverage_report_json }}

View File

@@ -75,7 +75,7 @@ jobs:
run: printf "::warning title=%s::%s\n" "Deprecated" "'CoverageCollection.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Testing/UnitTesting.html"
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -163,7 +163,7 @@ jobs:
- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}

View File

@@ -68,7 +68,7 @@ on:
jobs:
Extract:
-name: 📓 Extract configurations from pyproject.toml
+name: 🔬 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
@@ -82,7 +82,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6

View File

@@ -53,7 +53,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.wheel }}
path: install

View File

@@ -60,7 +60,7 @@ jobs:
continue-on-error: ${{ inputs.can-fail == 'true' }}
steps:
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.latex_artifact }}
path: latex
@@ -83,7 +83,7 @@ jobs:
latexmk -${{ inputs.processor }} "${{ inputs.document }}.tex"
- name: 📤 Upload 'PDF Documentation' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}

View File

@@ -104,7 +104,7 @@ jobs:
run: printf "::warning title=%s::%s\n" "NightlyRelease" "'NightlyRelease.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Release/PublishReleaseNotes.html"
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -53,7 +53,7 @@ jobs:
artifact: ${{ inputs.artifact }}
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -106,7 +106,7 @@ jobs:
run: python setup.py bdist_wheel
- name: 📤 Upload wheel artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.artifact }}
working-directory: dist

View File

@@ -103,7 +103,7 @@ on:
macos_intel_image:
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
required: false
-default: 'macos-13'
+default: 'macos-15-intel'
type: string
macos_arm_image:
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
@@ -154,7 +154,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
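Since the macos-13 runner image has been retired, the default Intel image moves to macos-15-intel. A caller that wants to pin the runner images explicitly could pass them as inputs, roughly as sketched below; the template path, the '@r6' ref, and the job name are assumptions, while macos_intel_image and macos_arm_image are the input names visible above.

  # Caller sketch with pinned runner images (path and ref are assumptions).
  jobs:
    Params:
      uses: pyTooling/Actions/.github/workflows/Parameters.yml@r6
      with:
        macos_intel_image: 'macos-15-intel'
        macos_arm_image:   'macos-15'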

View File

@@ -131,7 +131,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0

View File

@@ -109,13 +109,13 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
- name: 📥 Download Artifacts
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
tree -pash ${{ fromJson(inputs.coverage_report_html).directory }}
- name: 📤 Upload 'Coverage SQLite Database' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
with:
@@ -166,7 +166,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage XML Report' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
with:
@@ -177,7 +177,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage JSON Report' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.coverage_json_artifact != ''
continue-on-error: true
with:
@@ -188,7 +188,7 @@ jobs:
retention-days: 1
- name: 📤 Upload 'Coverage HTML Report' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.coverage_html_artifact != ''
continue-on-error: true
with:

View File

@@ -56,7 +56,7 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.artifact }}
path: dist

View File

@@ -132,7 +132,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
@@ -191,198 +191,6 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
if [[ $? -ne 0 || "{FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Create new Release Page
id: createReleasePage
if: inputs.mode == 'release'
@@ -397,6 +205,15 @@ jobs:
export GH_TOKEN=${{ github.token }}
+tee "__PRELIMINARY_NOTES__.md" <<EOF
+Release notes for ${{ inputs.tag }} are created right now ...
+1. download artifacts &rarr; (compression?) &rarr; upload as assets
+2. optional: create inventory.json
+3. assemble release notes &rarr; update this text
+4. optional: remove draft state
+EOF
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
fi
@@ -409,9 +226,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
-if [[ -s __NOTES__.md ]]; then
-addNotes=("--notes-file" "__NOTES__.md")
-fi
+addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -439,6 +254,14 @@ jobs:
export GH_TOKEN=${{ github.token }}
+tee "__PRELIMINARY_NOTES__.md" <<EOF
+Release notes for ${{ inputs.tag }} are updated right now ...
+1. download artifacts &rarr; (compression?) &rarr; upload as assets
+2. optional: create inventory.json
+3. assemble release notes &rarr; update this text
+EOF
addDraft="--draft"
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
addPreRelease="--prerelease"
@@ -452,9 +275,7 @@ jobs:
addTitle=("--title" "${{ inputs.title }}")
fi
-if [[ -s __NOTES__.md ]]; then
-addNotes=("--notes-file" "__NOTES__.md")
-fi
+addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
printf "Creating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
@@ -553,6 +374,10 @@ jobs:
)
fi
+# Write Markdown table header
+printf "| Asset Name | File Size | SHA256 |\n" > __ASSETS__.md
+printf "|------------|-----------|--------|\n" >> __ASSETS__.md
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
@@ -741,6 +566,13 @@ jobs:
sha256Checksums[$asset]="sha256:${sha256}"
printf "${ANSI_LIGHT_BLUE}${sha256}${ANSI_NOCOLOR}\n"
+# Add asset to Markdown table
+printf "| %s | %s | %s |\n" \
+"[${title}](${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}/${uploadFile#*/})" \
+"$(stat --printf="%s" "${uploadFile}" | numfmt --format "%.1f" --suffix=B --to=iec-i)" \
+"\`${sha256}\`" \
+>> __ASSETS__.md
# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
@@ -775,7 +607,7 @@ jobs:
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
-printf "  checking assets SHA256 checksum ... \n"
+printf "  checking assets SHA256 checksum ... "
ghSHA256=$(gh release view --json assets --jq ".assets[] | select(.name == \"${asset}\") | .digest" ${{ inputs.tag }})
if [[ "${ghSHA256}" == "${sha256Checksums[$asset]}" ]]; then
printf "${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n"
@@ -829,6 +661,245 @@ jobs:
exit 1
fi
- name: 📑 Assemble Release Notes
id: createReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
# Save release description (from parameter in a file)
head -c -1 <<'EOF' > __DESCRIPTION__.md
${{ inputs.description }}
EOF
# Save release footer (from parameter in a file)
head -c -1 <<'EOF' > __FOOTER__.md
${{ inputs.description_footer }}
EOF
# Download Markdown from PullRequest
# Readout second parent's SHA
# Search PR with that SHA
# Load description of that PR
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
exit 1
else
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
printf "Found Pull Request:\n"
printf " %s\n" "Title: ${PR_TITLE}"
printf " %s\n" "Number: ${PR_NUMBER}"
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
fi
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
fi
# Check if a release description file should be used and exists.
if [[ "${{ inputs.description_file }}" != "" ]]; then
if [[ ! -f "${{ inputs.description_file }}" ]]; then
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
exit 1
elif [[ -s "${{ inputs.description_file }}" ]]; then
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
cp -v "${{ inputs.description_file }}" __NOTES__.md
else
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
exit 1
fi
# Check if the main release description is provided by a template parameter
elif [[ -s __DESCRIPTION__.md ]]; then
printf "Use '__DESCRIPTION__.md' as main release description.\n"
mv -v __DESCRIPTION__.md __NOTES__.md
# Check if the pull request serves as the main release description text.
elif [[ -s __PULLREQUEST__.md ]]; then
printf "Use '__PULLREQUEST__.md' as main release description.\n"
mv -v __PULLREQUEST__.md __NOTES__.md
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
else
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
exit 1
fi
# Read release notes main file for placeholder substitution
NOTES=$(<__NOTES__.md)
# Inline description
if [[ -s __DESCRIPTION__.md ]]; then
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
else
NOTES="${NOTES//%%DESCRIPTION%%/}"
fi
# Inline PullRequest and increase headline levels
if [[ -s __PULLREQUEST__.md ]]; then
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
case "${BASH_REMATCH[1]}" in
"PULLREQUEST+0" | "PULLREQUEST")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
;;
"PULLREQUEST+1")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
;;
"PULLREQUEST+2")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
"PULLREQUEST+3")
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
;;
esac
done
else
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
done
fi
# Inline Files table
if [[ -s __ASSETS__.md ]]; then
NOTES="${NOTES//%%ASSETS%%/$(<__ASSETS__.md)}"
else
NOTES="${NOTES//%%ASSETS%%/}"
fi
# Inline Footer
if [[ -s __FOOTER__.md ]]; then
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
else
NOTES="${NOTES//%%FOOTER%%/}"
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="%${patternLine%%=*}%"
replacement="${patternLine#*=}"
NOTES="${NOTES//$pattern/$replacement}"
done <<<'${{ inputs.replacements }}'
# Workarounds for stupid GitHub variables
owner_repo="${{ github.repository }}"
repo=${owner_repo##*/}
# Replace special identifiers
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
NOTES="${NOTES//%%gh_repo%%/${repo}}"
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
# Write final release notes to file
printf "%s\n" "${NOTES}" > __NOTES__.md
# Display partial contents for debugging
if [[ -s __DESCRIPTION__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
cat __DESCRIPTION__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __PULLREQUEST__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
cat __PULLREQUEST__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __ASSETS__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__ASSETS__.md' ($(stat --printf="%s" "__ASSETS__.md") B) ...."
cat __ASSETS__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__ASSETS__.md' found.${ANSI_NOCOLOR}\n"
fi
if [[ -s __FOOTER__.md ]]; then
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
cat __FOOTER__.md
printf "::endgroup::\n"
else
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
fi
# Print final release notes
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
cat __NOTES__.md
printf "::endgroup::\n"
- name: 📑 Update release notes
id: updateReleaseNotes
run: |
set +e
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'
export GH_TOKEN=${{ github.token }}
if [[ -s __ASSETS__.md ]]; then
addNotes=("--notes-file" "__ASSETS__.md")
else
printf " ${ANSI_LIGHT_RED}File '%s' not found.${ANSI_NOCOLOR}\n" "__ASSETS__.md"
printf "::error title=%s::%s\n" "InternalError" "File '__ASSETS__.md' not found."
exit 1
fi
printf "Updating release '%s' ... " "${{ inputs.tag }}"
message="$(gh release edit "${addNotes[@]}" "${{ inputs.tag }}" 2>&1)"
if [[ $? -eq 0 ]]; then
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
printf " Release page: %s\n" "${message}"
else
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
printf " ${ANSI_LIGHT_RED}Couldn't update release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}"
printf "::error title=%s::%s\n" "InternalError" "Couldn't update release '${{ inputs.tag }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 Remove draft state from Release Page
id: removeDraft
if: ${{ ! inputs.draft }}
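The moved 'Assemble Release Notes' step builds the final notes by splicing Markdown fragments into %%…%% placeholders with bash parameter expansion. The following self-contained step sketches that mechanism with made-up file contents; only the placeholder names (%%ASSETS%%, %%FOOTER%%, %%gh_repo%%, %%date%%) are taken from the script above.

  # Placeholder-substitution sketch (illustrative values only).
  jobs:
    Demo:
      runs-on: ubuntu-24.04
      steps:
        - name: 📑 Substitute placeholders
          run: |
            # A tiny notes template using the same placeholder style as above.
            printf '%s\n' '# Release of %%gh_repo%%' '' '%%ASSETS%%' '' '%%FOOTER%%' > __NOTES__.md
            printf '%s\n' '| Asset Name | File Size | SHA256 |' '|---|---|---|' > __ASSETS__.md
            printf '%s\n' 'Generated on %%date%%' > __FOOTER__.md
            NOTES=$(<__NOTES__.md)
            # ${var//pattern/replacement} replaces every occurrence of a placeholder.
            NOTES="${NOTES//%%ASSETS%%/$(<__ASSETS__.md)}"
            NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
            NOTES="${NOTES//%%gh_repo%%/${GITHUB_REPOSITORY##*/}}"
            NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
            printf '%s\n' "${NOTES}" > __NOTES__.md
            cat __NOTES__.md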

View File

@@ -102,10 +102,10 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 📥 Download Artifacts
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}
path: artifacts
@@ -156,7 +156,7 @@ jobs:
fail_ci_if_error: true
- name: 📤 Upload merged 'JUnit Test Summary' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.merged_junit_artifact != ''
with:
name: ${{ inputs.merged_junit_artifact }}
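For reference, a caller of this merge template might look like the sketch below; the template path, the '@r6' ref, and the artifact names are assumptions, while unittest_artifacts_pattern and merged_junit_artifact are the inputs referenced in the hunks above.

  # Caller sketch (path, ref, and artifact names are assumptions).
  jobs:
    PublishTestResults:
      uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r6
      with:
        unittest_artifacts_pattern: 'UnitTestReportSummaryXML-*'
        merged_junit_artifact:      'UnitTestReportSummaryXML'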

View File

@@ -45,45 +45,51 @@ on:
default: ''
type: string
+outputs:
+github_pages_url:
+description: "URL to GitHub Pages."
+value: ${{ jobs.PrepareGitHubPages.outputs.github_pages_url }}
jobs:
-PublishToGitHubPages:
-name: 📚 Publish to GH-Pages
+PrepareGitHubPages:
+name: 📖 Merge multiple contents for publishing
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
+permissions:
+pages: write # to deploy to Pages
+id-token: write # to verify the deployment originates from an appropriate source
+environment:
+name: github-pages
+url: ${{ steps.deployment.outputs.page_url }}
+outputs:
+github_pages_url: ${{ steps.deployment.outputs.page_url }}
steps:
-- name: ⏬ Checkout repository
-uses: actions/checkout@v5
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
with:
name: ${{ inputs.doc }}
path: public
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: ${{ inputs.coverage != '' }}
with:
name: ${{ inputs.coverage }}
path: public/coverage
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: ${{ inputs.typing != '' }}
with:
name: ${{ inputs.typing }}
path: public/typing
-- name: '📓 Publish site to GitHub Pages'
+- name: 📑 Upload static files as artifact
if: github.event_name != 'pull_request'
-run: |
-cd public
-touch .nojekyll
-git init
-cp ../.git/config ./.git/config
-git add .
-git config --local user.email "BuildTheDocs@GitHubActions"
-git config --local user.name "GitHub Actions"
-git commit -a -m "update ${{ github.sha }}"
-git push -u origin +HEAD:gh-pages
+uses: actions/upload-pages-artifact@v4
+with:
+path: public/
+- name: 📖 Deploy to GitHub Pages
+id: deployment
+if: github.event_name != 'pull_request'
+uses: actions/deploy-pages@v4
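The replacement steps follow GitHub's artifact-based Pages deployment: the job needs pages: write and id-token: write permissions plus the github-pages environment, uploads the assembled 'public/' tree with actions/upload-pages-artifact, deploys it with actions/deploy-pages, and exposes the page URL through the new github_pages_url output. A downstream job consuming that output could look like this sketch; the template path, the '@r6' ref, and the job names are assumptions, while the 'doc' input and the output name come from the hunk above.

  # Consumer sketch for the new github_pages_url output (names are assumptions).
  jobs:
    Documentation:
      uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r6
      with:
        doc: documentation-html
    AnnounceURL:
      needs: Documentation
      runs-on: ubuntu-24.04
      steps:
        - name: 📢 Print the deployed Pages URL
          run: printf "Pages deployed to %s\n" "${{ needs.Documentation.outputs.github_pages_url }}"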

View File

@@ -86,7 +86,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -105,7 +105,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -113,7 +113,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -129,7 +129,7 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
- name: 📤 Upload 'HTML Documentation' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.html_artifact != ''
continue-on-error: true
with:
@@ -145,7 +145,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -164,7 +164,7 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
@@ -172,7 +172,7 @@ jobs:
investigate: true
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
-uses: pyTooling/download-artifact@v6
+uses: pyTooling/download-artifact@v7
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
@@ -272,7 +272,7 @@ jobs:
done
- name: 📤 Upload 'LaTeX Documentation' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.latex_artifact != ''
continue-on-error: true
with:

View File

@@ -38,7 +38,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
-default: '-r tests/requirements.txt'
+default: '-r tests/typing/requirements.txt'
type: string
mypy_options:
description: 'Additional mypy options.'
@@ -49,18 +49,18 @@ on:
description: 'Cobertura file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/cobertura.xml",
"directory": "report/typing",
"filename": "cobertura.xml"
}
type: string
junit_report:
description: 'JUnit file to upload as an artifact.'
required: false
default: >-
{ "fullpath": "report/typing/StaticTypingSummary.xml",
"directory": "report/typing",
"filename": "StaticTypingSummary.xml"
}
type: string
html_report:
@@ -94,7 +94,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v6
@@ -142,7 +142,7 @@ jobs:
fi
- name: 📤 Upload '${{ inputs.html_artifact }}' HTML artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
with:
@@ -153,7 +153,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.junit_artifact }}' JUnit artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
with:
@@ -164,7 +164,7 @@ jobs:
retention-days: 1
- name: 📤 Upload '${{ inputs.cobertura_artifact }}' Cobertura artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: ${{ inputs.cobertura_artifact != '' }}
continue-on-error: true
with:
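The report locations are passed as JSON objects encoded in a string input, so a caller overriding the new typing defaults provides all three keys. A hedged caller sketch follows; the template path and the '@r6' ref are assumptions, while the requirements and junit_report input names and the JSON shape mirror the defaults shown above.

  # Caller sketch (path and ref are assumptions; input shapes mirror the defaults above).
  jobs:
    StaticTypeCheck:
      uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r6
      with:
        requirements: '-r tests/typing/requirements.txt'
        junit_report: >-
          { "fullpath": "report/typing/StaticTypingSummary.xml",
            "directory": "report/typing",
            "filename": "StaticTypingSummary.xml"
          }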

View File

@@ -47,7 +47,7 @@ on:
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
-default: '-r tests/requirements.txt'
+default: '-r ./requirements.txt'
type: string
mingw_requirements:
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
@@ -82,7 +82,7 @@ on:
root_directory:
description: 'Working directory for running tests.'
required: false
-default: ''
+default: '.'
type: string
tests_directory:
description: 'Path to the directory containing tests (relative from root_directory).'
@@ -181,7 +181,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
with:
lfs: true
submodules: true
@@ -205,6 +205,38 @@ jobs:
run: |
py -3.9 -m pip install --disable-pip-version-check --break-system-packages -U tomli
- name: Compute path to requirements file
id: requirements
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
print(f"Python: {version}")
requirements = "${{ inputs.requirements }}"
if requirements.startswith("-r"):
requirements = requirements[2:].lstrip()
if requirements.startswith("./"):
requirementsFile = Path("${{ inputs.root_directory || '.' }}") / Path("${{ inputs.tests_directory || '.' }}") / Path("${{ inputs.unittest_directory || '.' }}") / Path(requirements[2:])
else:
requirementsFile = Path(requirements)
if not requirementsFile.exists():
print(f"::error title=FileNotFoundError::{ex}")
exit(1)
print(f"requirements file: {requirementsFile.as_posix()}")
# Write requirements path to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+") as f:
f.write(f"requirements=-r {requirementsFile.as_posix()}\n")
else:
print(f"requirements list: {requirements}")
- name: Compute pacman/pacboy packages
id: pacboy
if: matrix.system == 'msys2'
@@ -215,8 +247,6 @@ jobs:
from re import compile
from sys import version
-print(f"Python: {version}")
def loadRequirementsFile(requirementsFile: Path):
requirements = []
with requirementsFile.open("r") as file:
@@ -232,11 +262,10 @@ jobs:
return requirements
-requirements = "${{ inputs.requirements }}"
+requirements = "${{ steps.requirements.outputs.requirements }}"
if requirements.startswith("-r"):
-requirementsFile = Path(requirements[2:].lstrip())
try:
-dependencies = loadRequirementsFile(requirementsFile)
+dependencies = loadRequirementsFile(Path(requirements[2:].lstrip()))
except FileNotFoundError as ex:
print(f"::error title=FileNotFoundError::{ex}")
exit(1)
@@ -324,7 +353,7 @@ jobs:
if: matrix.system != 'msys2'
run: |
python -m pip install --disable-pip-version-check -U wheel tomli
-python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
+python -m pip install --disable-pip-version-check ${{ steps.requirements.outputs.requirements }}
- name: 🔧 Install pip dependencies (MSYS2)
if: matrix.system == 'msys2'
@@ -332,7 +361,7 @@ jobs:
if [ -n '${{ inputs.mingw_requirements }}' ]; then
python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.mingw_requirements }}
else
-python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.requirements }}
+python -m pip install --disable-pip-version-check --break-system-packages ${{ steps.requirements.outputs.requirements }}
fi
# Before scripts
@@ -421,7 +450,7 @@ jobs:
# Upload artifacts
- name: 📤 Upload '${{ fromJson(inputs.unittest_report_xml).filename }}' artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
with:
@@ -434,7 +463,7 @@ jobs:
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
# if: inputs.unittest_html_artifact != ''
# continue-on-error: true
-# uses: pyTooling/upload-artifact@v5
+# uses: pyTooling/upload-artifact@v6
# with:
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
# path: ${{ inputs.unittest_report_html_directory }}
@@ -444,7 +473,7 @@ jobs:
- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: .coverage
@@ -455,7 +484,7 @@ jobs:
- name: 📤 Upload 'Coverage XML Report' artifact
if: inputs.coverage_xml_artifact != '' && steps.convert_xml.outcome == 'success'
continue-on-error: true
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_xml).directory }}
@@ -466,7 +495,7 @@ jobs:
- name: 📤 Upload 'Coverage JSON Report' artifact
if: inputs.coverage_json_artifact != '' && steps.convert_json.outcome == 'success'
continue-on-error: true
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_json).directory }}
@@ -477,7 +506,7 @@ jobs:
- name: 📤 Upload 'Coverage HTML Report' artifact
if: inputs.coverage_html_artifact != '' && steps.convert_html.outcome == 'success'
continue-on-error: true
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ fromJson(inputs.coverage_report_html).directory }}
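The new '-r ./requirements.txt' default is no longer interpreted relative to the repository root: the added 'Compute path to requirements file' step joins it with root_directory, tests_directory, and unittest_directory, so every suite can ship its own requirements file. A hedged caller sketch follows; the template path, the '@r6' ref, and the directory names are placeholders, while the input names appear in the hunks above.

  # Caller sketch (path, ref, and directory layout are placeholders).
  jobs:
    UnitTesting:
      uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r6
      with:
        root_directory:     '.'
        tests_directory:    'tests'
        unittest_directory: 'unit'
        # With the default '-r ./requirements.txt', the added step resolves the
        # file to 'tests/unit/requirements.txt' in this layout; a value without
        # the leading './' is passed to pip unchanged.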

View File

@@ -44,7 +44,7 @@ jobs:
steps:
- name: ⏬ Checkout repository
-uses: actions/checkout@v5
+uses: actions/checkout@v6
- name: 🐍 Setup Python
uses: actions/setup-python@v6

View File

@@ -25,7 +25,7 @@ jobs:
run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
run: printf "%s\n" "Package" >> package.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
path: package.txt

View File

@@ -16,10 +16,9 @@ jobs:
include:
- {icon: '🐧', name: 'Ubuntu 22.04 (x86-64)', image: 'ubuntu-22.04', shell: 'bash', can-fail: false}
- {icon: '🐧', name: 'Ubuntu 24.04 (x86-64)', image: 'ubuntu-24.04', shell: 'bash', can-fail: false} # latest
-- {icon: '🍎', name: 'macOS-13 (x86-64)', image: 'macos-13', shell: 'bash', can-fail: false}
- {icon: '🍎', name: 'macOS-14 (x86-64)', image: 'macos-14-large', shell: 'bash', can-fail: true } # not in free plan
-- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # not in free plan
+### - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # same as -intel; not in free plan
-- {icon: '🍏', name: 'macOS-13 (aarch64)', image: 'macos-13-xlarge', shell: 'bash', can-fail: true } # not in free plan
+- {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-intel', shell: 'bash', can-fail: false}
- {icon: '🍏', name: 'macOS-14 (aarch64)', image: 'macos-14', shell: 'bash', can-fail: false} # latest
- {icon: '🍏', name: 'macOS-15 (aarch64)', image: 'macos-15', shell: 'bash', can-fail: false}
- {icon: '🪟', name: 'Windows Server 2022', image: 'windows-2022', shell: 'bash', can-fail: false}

View File

@@ -17,7 +17,7 @@ jobs:
printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
- name: 📤 Upload artifact
-uses: pyTooling/upload-artifact@v5
+uses: pyTooling/upload-artifact@v6
with:
name: document
path: |
@@ -29,10 +29,11 @@ jobs:
- name: 🖉 Program
run: |
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
+printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document2.txt
printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
- name: 📤 Upload artifact
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v6
with:
name: other
path: |
@@ -55,7 +56,7 @@ jobs:
version=4.2.0
tool=myTool
prog=program
-tag: 4.2.0
+tag: v4.2.0
title: "Nightly Test Release"
description: |
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
@@ -63,10 +64,14 @@ jobs:
# %tool% %version%
* %prog%
+# Attached files:
+%%ASSETS%%
assets: |
document: document1.txt: Documentation
document: build.log: Logfile - %tool% - %tool%
-other: document1.txt: SBOM - %version%
+other: document2.txt: SBOM - %version%
other: %prog%.py: Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz
@@ -108,7 +113,7 @@ jobs:
# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
-other: document1.txt: build,SBOM:SBOM - %version%
+other: document2.txt: build,SBOM:SBOM - %version%
other: %prog%.py: app,binary:Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz

View File

@@ -64,7 +64,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_Default'
        uses: ./.github/actions/CheckJobMatrix
@@ -92,7 +92,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_PythonVersions'
        uses: ./.github/actions/CheckJobMatrix
@@ -114,7 +114,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_Systems'
        uses: ./.github/actions/CheckJobMatrix
@@ -136,7 +136,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_Include'
        uses: ./.github/actions/CheckJobMatrix
@@ -158,7 +158,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_Exclude'
        uses: ./.github/actions/CheckJobMatrix
@@ -180,7 +180,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_Disable'
        uses: ./.github/actions/CheckJobMatrix
@@ -202,7 +202,7 @@ jobs:
        shell: python
    steps:
      - name: Checkout repository to access local Action
-       uses: actions/checkout@v5
+       uses: actions/checkout@v6
      - name: Checking job matrix from 'Params_All'
        uses: ./.github/actions/CheckJobMatrix

.idea/Actions.iml (generated)
View File

@@ -8,7 +8,7 @@
      <excludeFolder url="file://$MODULE_DIR$/doc/_build" />
      <excludeFolder url="file://$MODULE_DIR$/report" />
    </content>
-   <orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
+   <orderEntry type="jdk" jdkName="Python 3.14" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

View File

@@ -11,9 +11,9 @@ docutils_stubs ~= 0.0.22
sphinx_rtd_theme ~= 3.0
# Sphinx Extenstions
-sphinxcontrib-mermaid ~= 1.0
+sphinxcontrib-mermaid ~= 1.2
autoapi >= 2.0.1
sphinx_design ~= 0.6
sphinx-copybutton >= 0.5
-sphinx_autodoc_typehints ~= 3.5
+sphinx_autodoc_typehints ~= 3.5  # 3.6 is conflicting with old sphinx_design and rtd theme due to sphinx<9 and docutils<0.22
sphinx_reports ~= 0.9

View File

@@ -1,8 +0,0 @@
# Releaser Development
- [pyTooling/pyAttributes](https://github.com/pyTooling/pyAttributes) or
[willmcgugan/rich](https://github.com/willmcgugan/rich) might be used to enhance the UX.
- It might be desirable to have pyTooling.Version.SemVersion handle the regular expression from
[semver.org](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string), and use
proper Python classes in **Releaser**.

View File

@@ -1,12 +0,0 @@
FROM python:3.12-slim-bookworm
COPY releaser.py /releaser.py
RUN pip install PyGithub --progress-bar off \
&& apt update -qq \
&& apt install -y curl \
&& curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | \
dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | \
tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& apt update -qq \
&& apt install -y gh
CMD ["/releaser.py"]

View File

@@ -1,181 +0,0 @@
# Releaser
**Releaser** is a Docker GitHub Action written in Python.
**Releaser** keeps a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
Combined with a periodically executed workflow, **Releaser** provides a fixed release name for users who want to use
daily/nightly artifacts of a project.
Furthermore, when any [semver](https://semver.org)-compliant tagged commit is pushed, **Releaser** can create a release
and upload assets.
## Context
GitHub provides official clients for the GitHub API through [github.com/octokit](https://github.com/octokit):
- [octokit.js](https://github.com/octokit/octokit.js) ([octokit.github.io/rest.js](https://octokit.github.io/rest.js))
- [octokit.rb](https://github.com/octokit/octokit.rb) ([octokit.github.io/octokit.rb](http://octokit.github.io/octokit.rb))
- [octokit.net](https://github.com/octokit/octokit.net) ([octokitnet.rtfd.io](https://octokitnet.rtfd.io))
When GitHub Actions was released in 2019, two Actions were made available through
[github.com/actions](https://github.com/actions) for dealing with GitHub Releases:
- [actions/create-release](https://github.com/actions/create-release)
- [actions/upload-release-asset](https://github.com/actions/upload-release-asset)
However, those Actions were contributed by an employee in their spare time and were not officially supported by GitHub.
As a result, they were already unmaintained before GitHub Actions left its private beta
(see [actions/upload-release-asset#58](https://github.com/actions/upload-release-asset/issues/58))
and were archived a year later.
Those Actions are based on [actions/toolkit](https://github.com/actions/toolkit)'s hydrated version of octokit.js.
From a practical point of view, [actions/github-script](https://github.com/actions/github-script) is the natural replacement for those Actions, since it provides a pre-authenticated *octokit.js* client along with the workflow run context.
Still, it requires writing plain JavaScript.
Alternatively, there are non-official GitHub API libraries available in other languages (see [docs.github.com: rest/overview/libraries](https://docs.github.com/en/rest/overview/libraries)).
**Releaser** is based on [PyGithub/PyGithub](https://github.com/PyGithub/PyGithub), a Python client for the GitHub API.
**Releaser** was originally created in [eine/tip](https://github.com/eine/tip), as an enhanced alternative to using
`actions/create-release` and `actions/upload-release-asset`, in order to cover certain use cases that were being
migrated from Travis CI to GitHub Actions.
The main limitations of those Actions were (and are) their verbosity and the inability to dynamically define the list
of assets to be uploaded.
On the other hand, GitHub Actions artifacts require a login in order to download them.
Conversely, assets of GitHub Releases can be downloaded without login.
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
as assets.
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
Thus, the name of the Action.
GitHub provides an official CLI tool, written in golang: [cli/cli](https://github.com/cli/cli).
When the Python version of **Releaser** was written, `cli` was evaluated as an alternative to *PyGitHub*.
`gh release` was (and still is) not flexible enough to update the reference of a release without deleting and
recreating it (see [cli.github.com: manual/gh_release_create](https://cli.github.com/manual/gh_release_create)).
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
(see [eine/tip#111](https://github.com/eine/tip/issues/111)).
However, [cli.github.com: manual/gh_release_upload](https://cli.github.com/manual/gh_release_upload) handles uploading
artifacts as assets faster and with better stability for larger files than *PyGitHub*
(see [msys2/msys2-installer#36](https://github.com/msys2/msys2-installer/pull/36)).
Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
Although `gh` does not support login through SSH (see [cli/cli#3715](https://github.com/cli/cli/issues/3715)), a token
is available on GitHub Actions as `${{ github.token }}`.
Therefore, **Releaser** uses `gh release upload` internally.
## Usage
The following block shows a minimal YAML workflow file:
```yml
name: 'workflow'

on:
  schedule:
    - cron: '0 0 * * 5'

jobs:
  mwe:
    runs-on: ubuntu-24.04
    steps:
      # Clone repository
      - uses: actions/checkout@v5

      # Build your application, tool, artifacts, etc.
      - name: Build
        run: |
          echo "Build some tool and generate some artifacts" > artifact.txt

      # Update tag and pre-release
      # - Update (force-push) tag to the commit that is used in the workflow.
      # - Upload artifacts defined by the user.
      - uses: pyTooling/Actions/releaser@r0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          files: |
            artifact.txt
            README.md
```
### Composite Action
The default implementation of **Releaser** is a Container Action.
Therefore, a pre-built container image is pulled before starting the job.
Alternatively, a Composite Action version is available: `uses: pyTooling/Actions/releaser/composite@main`.
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action
allows users to tweak the Python version by running [actions/setup-python](https://github.com/actions/setup-python) beforehand, as sketched below.
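The following is a minimal sketch of such a setup; the chosen Python version, the build step, and the file name are placeholders, not requirements:

```yml
jobs:
  mwe-composite:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v5

      # Pick the Python interpreter the Composite Action's dependency (PyGithub) will run on.
      - uses: actions/setup-python@v6
        with:
          python-version: '3.13'

      - name: Build
        run: echo "Build some tool and generate some artifacts" > artifact.txt

      # Composite variant: installs PyGithub on the runner instead of pulling a container image.
      - uses: pyTooling/Actions/releaser/composite@main
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          files: artifact.txt
```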
## Options
All options can alternatively be provided as environment variables: `INPUT_TOKEN`, `INPUT_FILES`, `INPUT_TAG`, `INPUT_RM`
and/or `INPUT_SNAPSHOTS`.
### token (required)
Token to make authenticated API calls; can be passed in using `${{ secrets.GITHUB_TOKEN }}`.
### files (required)
Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
hierarchy.
For creating/updating a release without uploading assets, set `files: none`.
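For example, a hypothetical step that only updates the tag and (pre-)release, without uploading anything, could look like this:

```yml
- uses: pyTooling/Actions/releaser@r0
  with:
    token: ${{ secrets.GITHUB_TOKEN }}
    files: none   # update the tag/release only; upload no assets
```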
### tag
The default tag name for the tip/nightly pre-release is `tip`, but it can optionally be overridden through the `tag` option.
### rm
Set option `rm` to `true` to systematically remove previous artifacts (e.g. old versions).
Otherwise (the default), previous artifacts are preserved, and only those with the same name are overwritten.
Note:
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
users until the workflow is successfully run.
For instance, Action [setup-ghdl-ci](https://github.com/ghdl/setup-ghdl-ci) uses assets from [ghdl/ghdl: releases/tag/nightly](https://github.com/ghdl/ghdl/releases/tag/nightly).
Hence, it is recommended to remove only the conflicting assets, in order to maximise availability.
### snapshots
Whether to create releases from any tag or to treat some as snapshots.
By default, all tags with a non-empty `prerelease` field (see [semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string?](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string))
are considered snapshots: no release is created and no assets are uploaded.
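Purely as an illustration (the tag name and glob patterns below are invented), the options described above can be combined as follows:

```yml
- uses: pyTooling/Actions/releaser@r0
  with:
    token: ${{ secrets.GITHUB_TOKEN }}   # required
    files: |                             # required: one pattern or a multi-line list
      artifacts/**/*.tar.gz
      README.md
    tag: nightly                         # override the default 'tip' tag
    rm: true                             # delete all previous assets before uploading
    snapshots: false                     # also create releases for semver prerelease tags
```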
## Advanced/complex use cases
**Releaser** is essentially a very thin wrapper to use the GitHub Actions context data along with the classes
and methods of PyGithub.
Similarly to [actions/github-script](https://github.com/actions/github-script), users with advanced/complex requirements
might find it desirable to write their own Python script, instead of using **Releaser**.
In fact, since `shell: python` is supported in GitHub Actions, using Python does *not* require any Action.
For prototyping purposes, the following job might be useful:
```yml
  Release:
    name: '📦 Release'
    runs-on: ubuntu-24.04
    needs:
      - ...
    if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
    steps:
      - uses: actions/download-artifact@v3

      - shell: bash
        run: pip install PyGithub --progress-bar off

      - name: Set list of files for uploading
        id: files
        shell: python
        run: |
          from github import Github

          print("· Get GitHub API handler (authenticate)")
          gh = Github('${{ github.token }}')

          print("· Get Repository handler")
          gh_repo = gh.get_repo('${{ github.repository }}')
```
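For instance, a follow-up step in the same job could list the assets currently attached to the nightly pre-release. This is only a sketch: it assumes Releaser's default `tip` tag and reuses the PyGithub calls shown in `releaser.py` (`get_release`, `get_assets`).

```yml
      - name: List current assets of the 'tip' pre-release
        shell: python
        run: |
          from github import Github

          gh_repo = Github('${{ github.token }}').get_repo('${{ github.repository }}')
          # 'tip' is Releaser's default tag; adjust if a custom 'tag' option is used.
          gh_release = gh_repo.get_release('tip')
          for asset in gh_release.get_assets():
              print(f" - {asset.name}")
```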
Find a non-trivial use case at [msys2/msys2-autobuild](https://github.com/msys2/msys2-autobuild).

View File

@@ -1,45 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
token:
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
required: true
files:
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
required: true
tag:
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
required: false
default: tip
rm:
description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
required: false
default: false
snapshots:
description: 'Whether to create releases from any tag or to treat some as snapshots'
required: false
default: true
runs:
using: 'docker'
image: 'docker://ghcr.io/pytooling/releaser'

View File

@@ -1,59 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
token:
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
required: true
files:
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
required: true
tag:
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
required: false
default: tip
rm:
description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
required: false
default: false
snapshots:
description: 'Whether to create releases from any tag or to treat some as snapshots'
required: false
default: true
runs:
using: 'composite'
steps:
- shell: bash
run: |
[ "$(source /etc/os-release && echo $VERSION_ID)" == "24.04" ] && UBUNTU_2404_ARGS='--break-system-packages' || unset UBUNTU_2404_ARGS
pip install --disable-pip-version-check --progress-bar off $UBUNTU_2404_ARGS PyGithub
- shell: bash
run: '''${{ github.action_path }}/../releaser.py'''
env:
INPUT_TOKEN: ${{ inputs.token }}
INPUT_FILES: ${{ inputs.files }}
INPUT_TAG: ${{ inputs.tag }}
INPUT_RM: ${{ inputs.rm }}
INPUT_SNAPSHOTS: ${{ inputs.snapshots }}

View File

@@ -1,2 +0,0 @@
[tool.black]
line-length = 120

View File

@@ -1,193 +0,0 @@
#!/usr/bin/env python3
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
import re
from sys import argv as sys_argv, stdout, exit as sys_exit
from os import environ, getenv
from glob import glob
from pathlib import Path
from github import Github, GithubException
from subprocess import check_call
paramTag = getenv("INPUT_TAG", "tip")
paramFiles = getenv("INPUT_FILES", None).split()
paramRM = getenv("INPUT_RM", "false") == "true"
paramSnapshots = getenv("INPUT_SNAPSHOTS", "true").lower() == "true"
paramToken = (
environ["GITHUB_TOKEN"]
if "GITHUB_TOKEN" in environ
else environ["INPUT_TOKEN"]
if "INPUT_TOKEN" in environ
else None
)
paramRepo = getenv("GITHUB_REPOSITORY", None)
paramRef = getenv("GITHUB_REF", None)
paramSHA = getenv("GITHUB_SHA", None)
def GetListOfArtifacts(argv, files):
print("· Get list of artifacts to be uploaded")
args = files if files is not None else []
if len(argv) > 1:
args += argv[1:]
if len(args) == 1 and args[0].lower() == "none":
print("! Skipping 'files' because it's set to 'none'.")
return []
elif len(args) == 0:
stdout.flush()
raise (Exception("Glob patterns need to be provided as positional arguments or through envvar 'INPUT_FILES'!"))
else:
flist = []
for item in args:
print(f" glob({item!s}):")
for fname in [fname for fname in glob(item, recursive=True) if not Path(fname).is_dir()]:
if Path(fname).stat().st_size == 0:
print(f" - ! Skipping empty file {fname!s}.")
continue
print(f" - {fname!s}")
flist.append(fname)
if len(flist) < 1:
stdout.flush()
raise (Exception("Empty list of files to upload/update!"))
return sorted(flist)
def GetGitHubAPIHandler(token):
print("· Get GitHub API handler (authenticate)")
if token is not None:
return Github(token)
raise (Exception("Need credentials to authenticate! Please, provide 'GITHUB_TOKEN' or 'INPUT_TOKEN'"))
def CheckRefSemVer(gh_ref, tag, snapshots):
print("· Check SemVer compliance of the reference/tag")
env_tag = None
if gh_ref[0:10] == "refs/tags/":
env_tag = gh_ref[10:]
if env_tag != tag:
rexp = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
semver = re.search(rexp, env_tag)
if semver == None and env_tag[0] == "v":
semver = re.search(rexp, env_tag[1:])
tag = env_tag
if semver == None:
print(f"! Could not get semver from {gh_ref!s}")
print(f"! Treat tag '{tag!s}' as a release")
return (tag, env_tag, False)
else:
if semver.group("prerelease") is None:
# is a regular semver compilant tag
return (tag, env_tag, False)
elif snapshots:
# is semver compilant prerelease tag, thus a snapshot (we skip it)
print("! Skipping snapshot prerelease.")
sys_exit()
return (tag, env_tag, True)
def GetRepositoryHandler(gh, repo):
print("· Get Repository handler")
if repo is None:
stdout.flush()
raise (Exception("Repository name not defined! Please set 'GITHUB_REPOSITORY"))
return gh.get_repo(repo)
def GetOrCreateRelease(gh_repo, tag, sha, is_prerelease):
print("· Get Release handler")
gh_tag = None
try:
gh_tag = gh_repo.get_git_ref(f"tags/{tag!s}")
except Exception:
stdout.flush()
if gh_tag:
try:
return (gh_repo.get_release(tag), False)
except Exception:
return (gh_repo.create_git_release(tag, tag, "", draft=True, prerelease=is_prerelease), True)
else:
err_msg = f"Tag/release '{tag!s}' does not exist and could not create it!"
if sha is None:
raise (Exception(err_msg))
try:
return (
gh_repo.create_git_tag_and_release(
tag, "", tag, "", sha, "commit", draft=True, prerelease=is_prerelease
),
True,
)
except Exception:
raise (Exception(err_msg))
def UpdateReference(gh_release, tag, sha, is_prerelease, is_draft):
print("· Update Release reference (force-push tag)")
if is_draft:
# Unfortunately, it seems not possible to update fields 'created_at' or 'published_at'.
print(" > Update (pre-)release")
gh_release.update_release(
gh_release.title,
"" if gh_release.body is None else gh_release.body,
draft=False,
prerelease=is_prerelease,
tag_name=gh_release.tag_name,
target_commitish=gh_release.target_commitish,
)
if sha is not None:
print(f" > Force-push '{tag!s}' to {sha!s}")
gh_repo.get_git_ref(f"tags/{tag!s}").edit(sha)
files = GetListOfArtifacts(sys_argv, paramFiles)
stdout.flush()
[tag, env_tag, is_prerelease] = CheckRefSemVer(paramRef, paramTag, paramSnapshots)
stdout.flush()
gh_repo = GetRepositoryHandler(GetGitHubAPIHandler(paramToken), paramRepo)
stdout.flush()
[gh_release, is_draft] = GetOrCreateRelease(gh_repo, tag, paramSHA, is_prerelease)
stdout.flush()
if paramRM:
print("· RM set. All previous assets are being cleared...")
for asset in gh_release.get_assets():
print(f" - {asset.name}")
asset.delete_asset()
stdout.flush()
if len(files) > 0:
print("· Upload assets")
env = environ.copy()
env["GITHUB_TOKEN"] = paramToken
cmd = ["gh", "release", "upload", "--repo", paramRepo, "--clobber", tag] + files
print(f" > {' '.join(cmd)}")
check_call(cmd, env=env)
stdout.flush()
else:
print("! Skipping uploading assets because the file list is empty.")
UpdateReference(gh_release, tag, paramSHA if env_tag is None else None, is_prerelease, is_draft)

View File

@@ -1 +1 @@
--r ../requirements.txt
+-r ../unit/requirements.txt

View File

@@ -1,13 +1,4 @@
--r ../requirements.txt
-# Coverage collection
-Coverage ~= 7.11
-# Test Runner
-pytest ~= 9.0
-pytest-cov ~= 7.0
-# Static Type Checking
-mypy[reports] ~= 1.18
-typing_extensions ~= 4.15
-lxml >= 5.4, <7.0
+# Collect all testing requirements
+-r platform/requirements.txt
+-r typing/requirements.txt
+-r unit/requirements.txt

View File

@@ -0,0 +1,6 @@
-r ../../requirements.txt
# Static Type Checking
mypy[reports] ~= 1.19
typing_extensions ~= 4.15
lxml >= 5.4, <7.0

View File

@@ -1 +1,8 @@
--r ../requirements.txt
+-r ../../requirements.txt
# Coverage collection
Coverage ~= 7.13
# Test Runner
pytest ~= 9.0
pytest-cov ~= 7.0