Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-15 04:26:55 +08:00)

Compare commits: 62 commits
| SHA1 |
|---|
| 7b43b4aa58 |
| d0eae08e12 |
| aefbd1cbba |
| b145ed32bd |
| f7353134cb |
| 8f604de141 |
| 14db3bef61 |
| be972d3c0e |
| 0cf94920f2 |
| 878031f339 |
| ce8dc41774 |
| 2658aeb896 |
| 37a73ff495 |
| 7d9dc6d312 |
| 2e502e165a |
| e5894f4654 |
| f733694766 |
| b49cd82b47 |
| 69f7689c69 |
| 9459e295d1 |
| e5a874819f |
| 18379306db |
| 1bef5347ae |
| d3e7e4f6ed |
| 8354c4a084 |
| 323fa17773 |
| 92a168c8c8 |
| 388f55721c |
| 7c249a1ae0 |
| 8198b215a7 |
| 800976853f |
| 1593383254 |
| 780b6f466c |
| c2282e4d63 |
| 546bf3db8a |
| b04ceae7bb |
| 25c007b491 |
| 1e3e3011e4 |
| 68c8c8b7cc |
| fbf1108ec2 |
| a78656a0bb |
| ec73d6bc41 |
| 10c10d9566 |
| 29b1e2d8eb |
| 0edc7c4ca7 |
| 77ed5bb343 |
| 6432741888 |
| 7e6bb82ae8 |
| cf7a98730e |
| fb36154250 |
| fc08112235 |
| 953d0698c9 |
| 05e5d1f86c |
| 2eebeec719 |
| 5b97eaf241 |
| 1e694005ed |
| 46a2764e73 |
| 626d64ef6a |
| fe4c9139c1 |
| ae8a961e93 |
| e4b5ea3895 |
| b61f479180 |
.github/actions/CheckArtifactNames/action.yml (vendored, new file, 75 lines)
@@ -0,0 +1,75 @@

name: Check artifact names
branding:
  icon: check-square
  color: green
description: Check generated artifact names.
author: Patrick Lehmann (@Paebbels)

inputs:
  prefix:
    description:
    type: string
    required: true
  generated-names:
    description:
    type: string
    required: true

runs:
  using: composite
  steps:
    - name: Install dependencies
      shell: bash
      run: pip install --disable-pip-version-check --break-system-packages pyTooling

    - name: Check artifact names
      id: check
      shell: python
      run: |
        from json import loads as json_loads
        from sys import exit

        from pyTooling.Common import zipdicts

        actualArtifactNames = json_loads("""${{ inputs.generated-names }}""".replace("'", '"'))

        expectedName = "${{ inputs.prefix }}"
        expectedArtifacts = {
          "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
          "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
          "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
          "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
          "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
          "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
          "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
          "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
          "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
          "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
          "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
          "statictyping_html": f"{expectedName}-StaticTyping-HTML",
          "package_all": f"{expectedName}-Packages",
          "documentation_html": f"{expectedName}-Documentation-HTML",
          "documentation_latex": f"{expectedName}-Documentation-LaTeX",
          "documentation_pdf": f"{expectedName}-Documentation-PDF",
        }

        errors = 0
        if len(actualArtifactNames) != len(expectedArtifacts):
          print(f"❌ Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
          errors += 1
        else:
          print("✅ Number of 'artifact_names' as expected.")
        print("Checking artifact names ...")

        for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
          if actual != expected:
            print(f"  ❌ Artifact name '{key}' does not match: {actual} != {expected}.")
            errors += 1
          else:
            print(f"  ☑ Artifact name as expected: {key} ⇢ {actual}.")

        if errors == 0:
          print("✅ All checks PASSED.")
        else:
          print(f"❌ Counted {errors} errors.")
        exit(errors)
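
A minimal, hypothetical usage sketch of this new composite action follows. The job layout, the prefix value, and the idea of feeding `generated-names` from a parameter job's `artifact_names` output are assumptions for illustration only; they are not part of this diff.

    # Hypothetical caller (assumption, for illustration): verify the artifact names
    # computed by a preceding parameter job against the expected prefix.
    jobs:
      CheckArtifacts:
        runs-on: ubuntu-24.04
        needs:
          - UnitTestingParams          # assumed job exposing an 'artifact_names' output
        steps:
          - name: ⏬ Checkout repository
            uses: actions/checkout@v6
          - name: Check artifact names
            uses: ./.github/actions/CheckArtifactNames
            with:
              prefix: pyTooling        # expected artifact-name prefix (example value)
              generated-names: ${{ needs.UnitTestingParams.outputs.artifact_names }}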
.github/actions/CheckJobMatrix/action.yml (vendored, new file, 92 lines)
@@ -0,0 +1,92 @@

name: Check job matrix
branding:
  icon: check-square
  color: green
description: Check generated job matrix.
author: Patrick Lehmann (@Paebbels)

inputs:
  expected-default-version:
    description:
    type: string
    required: true
  expected-python-versions:
    description:
    type: string
    required: true
  expected-systems:
    description:
    type: string
    required: true
  expected-exclude-jobs:
    description:
    type: string
    required: true
  expected-include-jobs:
    description:
    type: string
    required: true
  generated-default-version:
    description:
    type: string
    required: true
  generated-jobmatrix:
    description:
    type: string
    required: true

runs:
  using: composite
  steps:
    - name: Check parameters
      id: check
      shell: python
      run: |
        from json import loads as json_loads
        from sys import exit

        actualPythonVersion = """${{ inputs.generated-default-version }}"""
        actualPythonJobs = json_loads("""${{ inputs.generated-jobmatrix }}""".replace("'", '"'))

        expectedPythonVersion = """${{ inputs.expected-default-version }}"""
        expectedPythons = json_loads("""${{ inputs.expected-python-versions }}""".replace("'", '"'))
        expectedSystems = json_loads("""${{ inputs.expected-systems }}""".replace("'", '"'))
        excludedJobs = json_loads("""${{ inputs.expected-exclude-jobs }}""".replace("'", '"'))
        includeJobs = json_loads("""${{ inputs.expected-include-jobs }}""".replace("'", '"'))
        expectedJobs = sorted([f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs)

        errors = 0
        if actualPythonVersion != expectedPythonVersion:
          print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
          errors += 1

        if len(actualPythonJobs) != len(expectedJobs):
          print(f"❌ Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
          print("Actual jobs:")
          for job in actualPythonJobs:
            if job['system'] == "msys2":
              print(f"  {job['runtime'].lower()}:{job['python']}")
            else:
              print(f"  {job['system']}:{job['python']}")

          print("Expected jobs:")
          for job in expectedJobs:
            print(f"  {job}")
          errors += 1
        else:
          print("✅ Number of 'python_jobs' as expected.")
          print("Checking job combinations ...")

          actualJobs = sorted([f"{job['system'] if job['system'] != 'msys2' else job['runtime'].lower()}:{job['python']}" for job in actualPythonJobs])
          for actual, expected in zip(actualJobs, expectedJobs):
            if actual != expected:
              print(f"  ❌ Job does not match: {actual} != {expected}.")
              errors += 1
            else:
              print(f"  ☑ Job as expected: {actual}.")

        if errors == 0:
          print("✅ All checks PASSED.")
        else:
          print(f"❌ Counted {errors} errors.")
        exit(errors)
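
Likewise, a minimal, hypothetical sketch of invoking the job-matrix check. The expected values are illustrative assumptions; the `python_version` and `python_jobs` outputs mirror how CompletePipeline.yml consumes the parameter job further below in this diff.

    # Hypothetical step fragment (assumption, for illustration): compare the matrix
    # emitted by a parameter job against the expected combinations.
      - name: Check job matrix
        uses: ./.github/actions/CheckJobMatrix
        with:
          expected-default-version: '3.14'
          expected-python-versions: "['3.12', '3.13', '3.14']"
          expected-systems: "['ubuntu', 'windows']"
          expected-exclude-jobs: "[]"
          expected-include-jobs: "[]"
          generated-default-version: ${{ needs.UnitTestingParams.outputs.python_version }}
          generated-jobmatrix: ${{ needs.UnitTestingParams.outputs.python_jobs }}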
.github/workflows/ApplicationTesting.yml (vendored, 25 lines changed)

@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -86,10 +86,10 @@ jobs:

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
-        uses: pyTooling/download-artifact@v5
+        uses: pyTooling/download-artifact@v7
         with:
           name: ${{ inputs.wheel }}
           path: install
@@ -134,6 +134,7 @@ jobs:
             dependencies = [req.strip() for req in requirements.split(" ")]

             packages = {
+              "aiohttp": "python-aiohttp:p",
               "coverage": "python-coverage:p",
               "docstr_coverage": "python-pyyaml:p python-types-pyyaml:p",
               "igraph": "igraph:p",
@@ -145,7 +146,7 @@ jobs:
               "pyyaml": "python-pyyaml:p python-types-pyyaml:p",
               "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
               "sphinx": "python-markupsafe:p",
-              "tomli": "python-tomli:p",
+              "tomli": "python-tomli:p", # outdated, now part of Python as tomllib
               "wheel": "python-wheel:p",
               "pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
               "pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
@@ -154,6 +155,7 @@ jobs:
             subPackages = {
               "pytooling": {
                 "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
+                "pypi": "python-aiohttp:p",
               }
             }

@@ -215,16 +217,21 @@ jobs:
         if: matrix.system == 'msys2'
         run: |
           if [ -n '${{ inputs.mingw_requirements }}' ]; then
-            python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
+            python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.mingw_requirements }}
           else
-            python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
+            python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.requirements }}
           fi

-      - name: 🔧 Install wheel from artifact
+      - name: 🔧 Install wheel from artifact (Ubuntu/macOS)
+        if: ( matrix.system != 'windows' && matrix.system != 'windows-arm' )
         run: |
-          ls -l install
           python -m pip install --disable-pip-version-check -U install/*.whl

+      - name: 🔧 Install wheel from artifact (Windows)
+        if: ( matrix.system == 'windows' || matrix.system == 'windows-arm' )
+        run: |
+          python -m pip install -v --disable-pip-version-check (Get-Item .\install\*.whl).FullName
+
       - name: ✅ Run application tests (Ubuntu/macOS)
         if: ( matrix.system != 'windows' && matrix.system != 'windows-arm' )
         run: |
@@ -257,7 +264,7 @@

       - name: 📤 Upload 'TestReportSummary.xml' artifact
         if: inputs.apptest_xml_artifact != ''
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
           working-directory: report/unit
.github/workflows/ArtifactCleanUp.yml (vendored, 2 lines changed)

@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
.github/workflows/BuildTheDocs.yml (vendored, file deleted, 72 lines)
@@ -1,72 +0,0 @@

# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Documentation

on:
  workflow_call:
    inputs:
      artifact:
        description: 'Name of the documentation artifact.'
        required: false
        default: ''
        type: string

jobs:
  BuildTheDocs:
    name: 📓 Run BuildTheDocs
    runs-on: ubuntu-24.04

    steps:
      - name: ⚠️ Deprecation Warning
        run: printf "::warning title=%s::%s\n" "Deprecated" "'BuildTheDocs.yml' template is deprecated. Please switch to 'SphinxDocumentation.yml'. See https://pytooling.github.io/Actions/JobTemplate/Documentation/SphinxDocumentation.html"

      - name: ⏬ Checkout repository
        uses: actions/checkout@v5

      - name: 🛳️ Build documentation
        uses: buildthedocs/btd@v0
        with:
          skip-deploy: true

      - name: 📤 Upload 'documentation' artifacts
        uses: pyTooling/upload-artifact@v4
        if: inputs.artifact != ''
        with:
          name: ${{ inputs.artifact }}
          working-directory: doc/_build/html
          path: '*'
          retention-days: 1

      - name: '📓 Publish site to GitHub Pages'
        if: inputs.artifact == '' && github.event_name != 'pull_request'
        run: |
          cp --recursive -T doc/_build/html public
          cd public
          touch .nojekyll
          git init
          cp ../.git/config ./.git/config
          git add .
          git config --local user.email "BuildTheDocs@GitHubActions"
          git config --local user.name "GitHub Actions"
          git commit -a -m "update ${{ github.sha }}"
          git push -u origin +HEAD:gh-pages
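
The deprecation warning in the deleted template points users at the 'SphinxDocumentation.yml' job template as the replacement. A minimal, hypothetical migration sketch is shown below; the template's inputs are not shown because they are not part of this diff, and the assumption that it lives alongside the other reusable workflows in this repository should be checked against the linked documentation page.

    # Hypothetical migration sketch (assumption, for illustration): call the
    # Sphinx-based template instead of the deprecated BuildTheDocs template.
    jobs:
      Documentation:
        uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main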
.github/workflows/CheckCodeQuality.yml (vendored, 10 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2025-2025 The pyTooling Authors #
+# Copyright 2025-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -32,7 +32,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       package_directory:
         description: 'The package''s directory'
@@ -71,7 +71,7 @@ jobs:

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           lfs: true
           submodules: true
@@ -140,7 +140,7 @@ jobs:

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           lfs: true
           submodules: true
@@ -180,7 +180,7 @@ jobs:

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           lfs: true
           submodules: true
.github/workflows/CheckDocumentation.yml (vendored, 6 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -32,7 +32,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       directory:
         description: 'Source code directory to check.'
@@ -50,7 +50,7 @@ jobs:
     runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v6
.github/workflows/CompletePipeline.yml (vendored, 80 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -36,12 +36,12 @@ on:
       unittest_python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       unittest_python_version_list:
         description: 'Space separated list of Python versions to run tests with.'
         required: false
-        default: '3.9 3.10 3.11 3.12 3.13'
+        default: '3.10 3.11 3.12 3.13 3.14'
         type: string
       unittest_system_list:
         description: 'Space separated list of systems to run tests on.'
@@ -66,7 +66,7 @@ on:
       apptest_python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       apptest_python_version_list:
         description: 'Space separated list of Python versions to run tests with.'
@@ -93,6 +93,16 @@ on:
         required: false
         default: 'windows-arm:pypy-3.10 windows-arm:pypy-3.11'
         type: string
+      bandit:
+        description: 'Run Static Application Security Testing (SAST) using Bandit.'
+        required: false
+        default: 'false'
+        type: string
+      pylint:
+        description: 'Run Python linting using pylint.'
+        required: false
+        default: 'false'
+        type: string
       codecov:
         description: 'Publish merged coverage and unittest reports to Codecov.'
         required: false
@@ -167,6 +177,58 @@ jobs:
       exclude_list: ${{ inputs.unittest_exclude_list }}
       disable_list: ${{ inputs.unittest_disable_list }}

+  VersionCheck:
+    name: ''
+    runs-on: 'ubuntu-24.04'
+    needs:
+      - Prepare
+      - UnitTestingParams
+    if: needs.Prepare.outputs.version != '' && needs.UnitTestingParams.outputs.package_version_file != ''
+    outputs:
+      code_version: ${{ steps.extract.outputs.code_version }}
+    steps:
+      - name: ⏬ Checkout repository
+        uses: actions/checkout@v6
+        with:
+          # The command 'git describe' (used for version) needs the history.
+          fetch-depth: 0
+
+      - name: 🔧 Install pyTooling dependencies (native)
+        run: |
+          python -m pip install --disable-pip-version-check -U pyTooling
+
+      - name: Extract version from Python source file
+        id: extract
+        if: endsWith(needs.UnitTestingParams.outputs.package_version_file, '.py')
+        shell: python
+        run: |
+          from pathlib import Path
+          from sys import exit
+          from pyTooling.Packaging import extractVersionInformation
+
+          expectedVersion = "${{ needs.Prepare.outputs.version }}".strip()
+
+          versionFile = Path("${{ needs.UnitTestingParams.outputs.package_version_file }}")
+          if not versionFile.exists():
+            print(f"::error title=CompletePipeline::Version file '{versionFile}' not found.")
+            exit(1)
+
+          versionInformation = extractVersionInformation(versionFile)
+          print(f"expected: {expectedVersion}")
+          print(f"from code: {versionInformation.Version}")
+
+          if expectedVersion != versionInformation.Version:
+            print(f"::error title=CompletePipeline::Expected version does not version in Python code.")
+            exit(2)
+
+          # Write jobs to special file
+          github_output = Path(getenv("GITHUB_OUTPUT"))
+          print(f"GITHUB_OUTPUT: {github_output}")
+          with github_output.open("a+", encoding="utf-8") as f:
+            f.write(dedent(f"""\
+              code_version={versionInformation.Version}
+            """))
+
   UnitTesting:
     uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
     needs:
@@ -175,8 +237,6 @@ jobs:
     with:
       jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
       # TODO: shouldn't this be configured by a parameter? Same as directories
-      requirements: "-r tests/unit/requirements.txt"
-      # pacboy: "msys/git python-lxml:p"
       unittest_report_xml: ${{ needs.ConfigParams.outputs.unittest_report_xml }}
       coverage_report_xml: ${{ needs.ConfigParams.outputs.coverage_report_xml }}
       coverage_report_json: ${{ needs.ConfigParams.outputs.coverage_report_json }}
@@ -205,6 +265,8 @@ jobs:
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       package_directory: ${{ needs.UnitTestingParams.outputs.package_directory }}
+      bandit: ${{ inputs.bandit }}
+      pylint: ${{ inputs.pylint }}
       artifact: CodeQuality

   DocCoverage:
@@ -219,7 +281,6 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Package.yml@main
     needs:
       - UnitTestingParams
-      # - UnitTesting
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
@@ -348,10 +409,10 @@ jobs:
     needs:
       - Prepare
       - UnitTesting
-      - Install
       # - AppTesting
       # - StaticTypeCheck
       - Package
+      - Install
       - PublishToGitHubPages
     if: needs.Prepare.outputs.is_release_commit == 'true' && github.event_name != 'schedule'
     permissions:
@@ -367,10 +428,10 @@ jobs:
     needs:
       - Prepare
       - UnitTesting
-      - Install
       # - AppTesting
       # - StaticTypeCheck
       - Package
+      - Install
       - PublishToGitHubPages
     if: needs.Prepare.outputs.is_release_tag == 'true'
     permissions:
@@ -407,6 +468,7 @@ jobs:
       - PublishCoverageResults
       - PublishToGitHubPages
       # - PublishOnPyPI
+      - Install
       - IntermediateCleanUp
     if: inputs.cleanup == 'true'
     with:
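
A minimal, hypothetical caller sketch for the two new pipeline inputs added above; the job name and the values are illustrative assumptions, while the input names and their string defaults come from the hunk itself.

    # Hypothetical caller sketch (assumption, for illustration): enable the new
    # SAST and linting switches when reusing the complete pipeline template.
    jobs:
      Pipeline:
        uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
        with:
          bandit: 'true'    # run Bandit-based SAST (default is 'false')
          pylint: 'true'    # run pylint (default is 'false')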
.github/workflows/CoverageCollection.yml (vendored, file deleted, 187 lines)
@@ -1,187 +0,0 @@

# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Coverage Collection

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
        default: '3.11'
        type: string
      requirements:
        description: 'Python dependencies to be installed through pip.'
        required: false
        default: '-r tests/requirements.txt'
        type: string
      tests_directory:
        description: 'Path to the directory containing tests (test working directory).'
        required: false
        default: 'tests'
        type: string
      unittest_directory:
        description: 'Path to the directory containing unit tests (relative to tests_directory).'
        required: false
        default: 'unit'
        type: string
      coverage_config:
        description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
        required: false
        default: 'pyproject.toml'
        type: string
      artifact:
        description: 'Name of the coverage artifact.'
        required: true
        type: string
    secrets:
      codacy_token:
        description: 'Token to push result to codacy.'
        required: true

jobs:
  Coverage:
    name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

    steps:
      - name: ⚠️ Deprecation Warning
        run: printf "::warning title=%s::%s\n" "Deprecated" "'CoverageCollection.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Testing/UnitTesting.html"

      - name: ⏬ Checkout repository
        uses: actions/checkout@v5
        with:
          lfs: true
          submodules: true

      - name: 🐍 Setup Python ${{ inputs.python_version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ inputs.python_version }}

      - name: 🗂 Install dependencies
        run: |
          python -m pip install --disable-pip-version-check tomli
          python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

      - name: 🔁 Extract configurations from pyproject.toml
        id: getVariables
        shell: python
        run: |
          from os import getenv
          from pathlib import Path
          from tomli import load as tomli_load
          from textwrap import dedent

          htmlDirectory = 'htmlcov'
          xmlFile = './coverage.xml'
          coverageRC = "${{ inputs.coverage_config }}".strip()

          # Read output paths from 'pyproject.toml' file
          if coverageRC == "pyproject.toml":
            pyProjectFile = Path("pyproject.toml")
            if pyProjectFile.exists():
              with pyProjectFile.open("rb") as file:
                pyProjectSettings = tomli_load(file)

              htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
              xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
            else:
              print(f"File '{pyProjectFile}' not found.")
              print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
              exit(1)

          # Read output paths from '.coveragerc' file
          elif len(coverageRC) > 0:
            coverageRCFile = Path(coverageRC)
            if coverageRCFile.exists():
              with coverageRCFile.open("rb") as file:
                coverageRCSettings = tomli_load(file)

              htmlDirectory = coverageRCSettings["html"]["directory"]
              xmlFile = coverageRCSettings["xml"]["output"]
            else:
              print(f"File '{coverageRCFile}' not found.")
              print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
              exit(1)

          # Write jobs to special file
          github_output = Path(getenv("GITHUB_OUTPUT"))
          print(f"GITHUB_OUTPUT: {github_output}")
          with github_output.open("a+", encoding="utf-8") as f:
            f.write(dedent(f"""\
              coverage_report_html_directory={htmlDirectory}
              coverage_report_xml={xmlFile}
            """))

          print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}")

      - name: Collect coverage
        continue-on-error: true
        run: |
          export ENVIRONMENT_NAME="Linux (x86-64)"
          export PYTHONPATH=$(pwd)
          ABSDIR=$(pwd)
          cd "${{ inputs.tests_directory || '.' }}"
          [ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
          printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
          python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes

      - name: Convert to cobertura format
        run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage

      - name: Convert to HTML format
        run: |
          coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
          rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore

      - name: 📤 Upload 'Coverage Report' artifact
        continue-on-error: true
        uses: pyTooling/upload-artifact@v4
        with:
          name: ${{ inputs.artifact }}
          working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
          path: '*'
          if-no-files-found: error
          retention-days: 1

      - name: 📊 Publish coverage at CodeCov
        continue-on-error: true
        uses: codecov/codecov-action@v5
        with:
          files: ${{ steps.getVariables.outputs.coverage_report_xml }}
          flags: unittests
          env_vars: PYTHON

      - name: 📉 Publish coverage at Codacy
        continue-on-error: true
        uses: codacy/codacy-coverage-reporter-action@v1
        with:
          project-token: ${{ secrets.codacy_token }}
          coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
.github/workflows/ExtractConfiguration.yml (vendored, 56 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -32,7 +32,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       coverage_config:
         description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
@@ -68,7 +68,7 @@ on:

 jobs:
   Extract:
-    name: 📓 Extract configurations from pyproject.toml
+    name: 🔬 Extract configurations from pyproject.toml
     runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
     outputs:
       unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
@@ -82,16 +82,16 @@ jobs:

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6

       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v6
         with:
           python-version: ${{ inputs.python_version }}

-      - name: 🔧 Install wheel,tomli and pip dependencies (native)
+      - name: 🔧 Install wheel and pip dependencies (native)
         run: |
-          python -m pip install --disable-pip-version-check -U wheel tomli
+          python -m pip install --disable-pip-version-check -U wheel

       - name: 🔁 Extract configurations from pyproject.toml
         id: getVariables
@@ -105,7 +105,7 @@ jobs:

           print(f"Python: {version} (of default installation)")

-          from tomli import load as tomli_load
+          from tomllib import load as toml_load

           unittestXMLFile = Path("./unittest.xml")
           coverageHTMLDirectory = Path("htmlcov")
@@ -114,23 +114,43 @@ jobs:
           coverageRC = "${{ inputs.coverage_config }}".strip()
           typingCoberturaFile = Path("report/typing/cobertura.xml")
           typingJUnitFile = Path("report/typing/StaticTypingSummary.xml")
-          typingHTMLDirectory = Path("htmlmypy")
+          typingHTMLDirectory = Path("report/typing/html")

           # Read output paths from 'pyproject.toml' file
           if coverageRC == "pyproject.toml":
             pyProjectFile = Path("pyproject.toml")
             if pyProjectFile.exists():
               with pyProjectFile.open("rb") as file:
-                pyProjectSettings = tomli_load(file)
+                pyProjectSettings = toml_load(file)

-              unittestXMLFile = Path(pyProjectSettings["tool"]["pytest"]["junit_xml"])
-              mergedUnittestXMLFile = Path(pyProjectSettings["tool"]["pyedaa-reports"]["junit_xml"])
-              coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
-              coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
-              coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
-              typingCoberturaFile = Path(pyProjectSettings["tool"]["mypy"]["cobertura_xml_report"]) / "cobertura.xml"
-              typingJUnitFile = Path(pyProjectSettings["tool"]["mypy"]["junit_xml"])
-              typingHTMLDirectory = Path(pyProjectSettings["tool"]["mypy"]["html_report"])
+              toolSection = pyProjectSettings["tool"]
+              if "pytest" in toolSection:
+                section = toolSection["pytest"]
+                if "junit_xml" in section:
+                  unittestXMLFile = Path(section["junit_xml"])
+
+              if "pyedaa-reports" in toolSection:
+                section = toolSection["pyedaa-reports"]
+                if "junit_xml" in section:
+                  mergedUnittestXMLFile = Path(section["junit_xml"])
+
+              if "coverage" in toolSection:
+                section = toolSection["coverage"]
+                if "html" in section:
+                  coverageHTMLDirectory = Path(section["html"]["directory"])
+                if "xml" in section:
+                  coverageXMLFile = Path(section["xml"]["output"])
+                if "json" in section:
+                  coverageJSONFile= Path(section["json"]["output"])
+
+              if "mypy" in toolSection:
+                section = toolSection["mypy"]
+                if "cobertura_xml_report" in section:
+                  typingCoberturaFile = Path(section["cobertura_xml_report"]) / "cobertura.xml"
+                if "junit_xml" in section:
+                  typingJUnitFile = Path(section["junit_xml"])
+                if "html_report" in section:
+                  typingHTMLDirectory = Path(section["html_report"])
             else:
               print(f"File '{pyProjectFile}' not found.")
               print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
@@ -143,7 +163,7 @@ jobs:
           coverageRCFile = Path(coverageRC)
           if coverageRCFile.exists():
             with coverageRCFile.open("rb") as file:
-              coverageRCSettings = tomli_load(file)
+              coverageRCSettings = toml_load(file)

           coverageHTMLDirectory = Path(coverageRCSettings["html"]["directory"])
           coverageXMLFile = Path(coverageRCSettings["xml"]["output"])
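
A minimal, hypothetical caller sketch for this extraction template with the bumped default Python version; the job name mirrors the `ConfigParams` job that CompletePipeline.yml reads outputs from, and the values are illustrative assumptions.

    # Hypothetical caller sketch (assumption, for illustration): reuse the
    # extraction template with an explicit coverage configuration file.
    jobs:
      ConfigParams:
        uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
        with:
          python_version: '3.14'           # new default introduced in this diff
          coverage_config: pyproject.toml  # read [tool.pytest], [tool.pyedaa-reports], [tool.coverage.*], [tool.mypy]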
.github/workflows/InstallPackage.yml (vendored, 5 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2025-2025 The pyTooling Authors #
+# Copyright 2025-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -53,7 +53,7 @@ jobs:

     steps:
       - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
-        uses: pyTooling/download-artifact@v5
+        uses: pyTooling/download-artifact@v7
         with:
           name: ${{ inputs.wheel }}
           path: install
@@ -70,7 +70,6 @@ jobs:
            python-markupsafe:p
            python-pyaml:p python-types-pyyaml:p
            python-ruamel-yaml:p python-ruamel.yaml.clib:p
-           python-tomli:p

       - name: 🐍 Setup Python ${{ matrix.python }}
         uses: actions/setup-python@v6
.github/workflows/IntermediateCleanUp.yml (vendored, 2 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
.github/workflows/LaTeXDocumentation.yml (vendored, 6 lines changed)

@@ -3,7 +3,7 @@
 # Patrick Lehmann #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -60,7 +60,7 @@ jobs:
     continue-on-error: ${{ inputs.can-fail == 'true' }}
     steps:
       - name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
-        uses: pyTooling/download-artifact@v5
+        uses: pyTooling/download-artifact@v7
         with:
           name: ${{ inputs.latex_artifact }}
           path: latex
@@ -83,7 +83,7 @@ jobs:
           latexmk -${{ inputs.processor }} "${{ inputs.document }}.tex"

       - name: 📤 Upload 'PDF Documentation' artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         if: inputs.pdf_artifact != ''
         with:
           name: ${{ inputs.pdf_artifact }}
533
.github/workflows/NightlyRelease.yml
vendored
533
.github/workflows/NightlyRelease.yml
vendored
@@ -1,533 +0,0 @@
|
|||||||
# ==================================================================================================================== #
|
|
||||||
# Authors: #
|
|
||||||
# Patrick Lehmann #
|
|
||||||
# #
|
|
||||||
# ==================================================================================================================== #
|
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
|
||||||
# #
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
|
||||||
# you may not use this file except in compliance with the License. #
|
|
||||||
# You may obtain a copy of the License at #
|
|
||||||
# #
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
|
||||||
# #
|
|
||||||
# Unless required by applicable law or agreed to in writing, software #
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
|
||||||
# See the License for the specific language governing permissions and #
|
|
||||||
# limitations under the License. #
|
|
||||||
# #
|
|
||||||
# SPDX-License-Identifier: Apache-2.0 #
|
|
||||||
# ==================================================================================================================== #
|
|
||||||
name: Nightly
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
ubuntu_image:
|
|
||||||
description: 'Name of the Ubuntu image.'
|
|
||||||
required: false
|
|
||||||
default: 'ubuntu-24.04'
|
|
||||||
type: string
|
|
||||||
nightly_name:
|
|
||||||
description: 'Name of the nightly release.'
|
|
||||||
required: false
|
|
||||||
default: 'nightly'
|
|
||||||
type: string
|
|
||||||
nightly_title:
|
|
||||||
description: 'Title of the nightly release.'
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
type: string
|
|
||||||
nightly_description:
|
|
||||||
description: 'Description of the nightly release.'
|
|
||||||
required: false
|
|
||||||
default: 'Release of artifacts from latest CI pipeline.'
|
|
||||||
type: string
|
|
||||||
draft:
|
|
||||||
description: 'Specify if this is a draft.'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
prerelease:
|
|
||||||
description: 'Specify if this is a pre-release.'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
latest:
|
|
||||||
description: 'Specify if this is the latest release.'
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
replacements:
|
|
||||||
description: 'Multi-line string containing search=replace patterns.'
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
type: string
|
|
||||||
assets:
|
|
||||||
description: 'Multi-line string containing artifact:file:title asset descriptions.'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
inventory-json:
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
inventory-version:
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
inventory-categories:
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
tarball-name:
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
default: '__pyTooling_upload_artifact__.tar'
|
|
||||||
can-fail:
|
|
||||||
type: boolean
|
|
||||||
required: false
|
|
||||||
default: false
|
|
||||||
|
|
||||||
jobs:
  Release:
    name: 📝 Update 'Nightly Page' on GitHub
    runs-on: ${{ inputs.ubuntu_image }}
    continue-on-error: ${{ inputs.can-fail }}
    permissions:
      contents: write
      actions: write
      # attestations: write

    steps:
      - name: ⚠️ Deprecation Warning
        run: printf "::warning title=%s::%s\n" "NightlyRelease" "'NightlyRelease.yml' template is deprecated. Please switch to 'PublishReleaseNotes.yml'. See https://pytooling.github.io/Actions/JobTemplate/Release/PublishReleaseNotes.html"

      - name: ⏬ Checkout repository
        uses: actions/checkout@v5
        with:
          # The command 'git describe' (used for version) needs the history.
          fetch-depth: 0

      - name: 🔧 Install zstd
        run: sudo apt-get install -y --no-install-recommends zstd

      - name: 📑 Delete (old) Release Page
        id: deleteReleasePage
        run: |
          set +e

          ANSI_LIGHT_RED=$'\x1b[91m'
          ANSI_LIGHT_GREEN=$'\x1b[92m'
          ANSI_LIGHT_YELLOW=$'\x1b[93m'
          ANSI_NOCOLOR=$'\x1b[0m'

          export GH_TOKEN=${{ github.token }}

          printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... "
          message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
          if [[ $? -eq 0 ]]; then
            printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          elif [[ "${message}" == "release not found" ]]; then
            printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
          else
            printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
            printf "::error title=%s::%s\n" "InternalError" "Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
            exit 1
          fi

      - name: 📑 (Re)create (new) Release Page
        id: createReleasePage
        run: |
          set +e

          ANSI_LIGHT_RED=$'\x1b[91m'
          ANSI_LIGHT_GREEN=$'\x1b[92m'
          ANSI_NOCOLOR=$'\x1b[0m'

          export GH_TOKEN=${{ github.token }}

          addDraft="--draft"

          if [[ "${{ inputs.prerelease }}" == "true" ]]; then
            addPreRelease="--prerelease"
          fi

          if [[ "${{ inputs.latest }}" == "false" ]]; then
            addLatest="--latest=false"
          fi

          if [[ "${{ inputs.nightly_title }}" != "" ]]; then
            addTitle=("--title" "${{ inputs.nightly_title }}")
          fi

          cat <<'EOF' > __NoTeS__.md
          ${{ inputs.nightly_description }}
          EOF
          if [[ -s __NoTeS__.md ]]; then
            addNotes=("--notes-file" "__NoTeS__.md")
          fi

          # Apply replacements
          while IFS=$'\r\n' read -r patternLine; do
            # skip empty lines
            [[ "$patternLine" == "" ]] && continue

            pattern="${patternLine%%=*}"
            replacement="${patternLine#*=}"
            sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
          done <<<'${{ inputs.replacements }}'

          # Add footer line
          # Note: the original footer linked to a hardcoded repository; the generic GitHub context variables are used here instead.
          cat <<EOF >> __NoTeS__.md

          --------
          Published from [${{ github.workflow }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
          EOF

          printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
          message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
          if [[ $? -eq 0 ]]; then
            printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          else
            printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
            printf "::error title=%s::%s\n" "InternalError" "Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
            exit 1
          fi

      - name: 📥 Download artifacts and upload as assets
        id: uploadAssets
        run: |
          set +e

          ANSI_LIGHT_RED=$'\x1b[91m'
          ANSI_LIGHT_GREEN=$'\x1b[92m'
          ANSI_LIGHT_YELLOW=$'\x1b[93m'
          ANSI_LIGHT_BLUE=$'\x1b[94m'
          ANSI_NOCOLOR=$'\x1b[0m'

          export GH_TOKEN=${{ github.token }}

          Replace() {
            line="$1"
            while IFS=$'\r\n' read -r patternLine; do
              # skip empty lines
              [[ "$patternLine" == "" ]] && continue

              pattern="${patternLine%%=*}"
              replacement="${patternLine#*=}"
              line="${line//"%$pattern%"/"$replacement"}"
            done <<<'${{ inputs.replacements }}'
            printf "%s\n" "$line"
          }

          # Create JSON inventory
          if [[ "${{ inputs.inventory-json }}" != "" ]]; then
            VERSION="1.0"

            # Split categories by ',' into a Bash array.
            # See https://stackoverflow.com/a/45201229/3719459
            if [[ "${{ inputs.inventory-categories }}" != "" ]]; then
              readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }},"
              unset 'inventoryCategories[-1]'
              declare -p inventoryCategories
            else
              inventoryCategories=""
            fi

            jsonInventory=$(jq -c -n \
              --arg version "${VERSION}" \
              --arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
              --argjson jsonMeta "$(jq -c -n \
                --arg tag "${{ inputs.nightly_name }}" \
                --arg version "${{ inputs.inventory-version }}" \
                --arg hash "${{ github.sha }}" \
                --arg repo "${{ github.server_url }}/${{ github.repository }}" \
                --arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
                --argjson categories "$(jq -c -n \
                  '$ARGS.positional' \
                  --args "${inventoryCategories[@]}" \
                )" \
                '{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \
              )" \
              '{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
            )
          fi

          ERRORS=0
          # A dictionary of 0/1 to avoid duplicate downloads
          declare -A downloadedArtifacts
          # A dictionary to check for duplicate asset files in release
          declare -A assetFilenames
          while IFS=$'\r\n' read -r assetLine; do
            if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
              continue
            fi

            # split assetLine colon separated triple: artifact:asset:title
            artifact="${assetLine%%:*}"
            assetLine="${assetLine#*:}"
            asset="${assetLine%%:*}"
            assetLine="${assetLine#*:}"
            if [[ "${{ inputs.inventory-json }}" == "" ]]; then
              categories=""
              title="${assetLine##*:}"
            else
              categories="${assetLine%%:*}"
              title="${assetLine##*:}"
            fi

            # remove leading whitespace
            asset="${asset#"${asset%%[![:space:]]*}"}"
            categories="${categories#"${categories%%[![:space:]]*}"}"
            title="${title#"${title%%[![:space:]]*}"}"

            # apply replacements
            asset="$(Replace "${asset}")"
            title="$(Replace "${title}")"

            printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
            printf " %s" "Checked asset for duplicates ... "
            if [[ -n "${assetFilenames[$asset]}" ]]; then
              printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              printf "::error title=%s::%s\n" "DuplicateAsset" "Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
              ERRORS=$((ERRORS + 1))
              continue
            else
              printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              assetFilenames[$asset]=1
            fi

            # Download artifact by artifact name
            if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
              printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
            else
              printf " downloading '${artifact}' ...\n"
              printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
              gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
              if [[ $? -eq 0 ]]; then
                printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              else
                printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
                printf "::error title=%s::%s\n" "ArtifactNotFound" "Couldn't download artifact '${artifact}'."
                ERRORS=$((ERRORS + 1))
                continue
              fi
              downloadedArtifacts[$artifact]=1

              printf " %s" "Checking for embedded tarball ... "
              if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
                printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"

                pushd "${artifact}" > /dev/null

                printf " %s" "Extracting embedded tarball ... "
                tar -xf "${{ inputs.tarball-name }}"
                if [[ $? -ne 0 ]]; then
                  printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
                else
                  printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                fi

                printf " %s" "Removing temporary tarball ... "
                rm -f "${{ inputs.tarball-name }}"
                if [[ $? -ne 0 ]]; then
                  printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
                else
                  printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                fi

                popd > /dev/null
              else
                printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
              fi
            fi

            # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
            printf " %s" "checking asset '${artifact}/${asset}' ... "
            if [[ "${asset}" == !*.zip ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
              asset="${asset##*!}"
              printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
              (
                cd "${artifact}" && \
                zip -r "../${asset}" *
              )
              retCode=$?
              printf "::endgroup::\n"
              if [[ $retCode -eq 0 ]]; then
                printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
                printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zip file '${asset}'."
                ERRORS=$((ERRORS + 1))
                continue
              fi
            elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"

              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
                printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
                printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
                )
                retCode=$?
              fi

              if [[ $retCode -eq 0 ]]; then
                printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
                printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to tgz file '${asset}'."
                ERRORS=$((ERRORS + 1))
                continue
              fi
            elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"

              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
                printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
                printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
                )
                retCode=$?
              fi

              if [[ $retCode -eq 0 ]]; then
                printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
                printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zst file '${asset}'."
                ERRORS=$((ERRORS + 1))
                continue
              fi
            elif [[ -e "${artifact}/${asset}" ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              uploadFile="${artifact}/${asset}"
            else
              printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
              printf "::error title=%s::%s\n" "FileNotFound" "Couldn't find asset '${asset}' in artifact '${artifact}'."
              ERRORS=$((ERRORS + 1))
              continue
            fi

            # Add asset to JSON inventory
            if [[ "${{ inputs.inventory-json }}" != "" ]]; then
              if [[ "${categories}" != "${title}" ]]; then
                printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
                category=""
                jsonEntry=$(jq -c -n \
                  --arg title "${title}" \
                  --arg file "${uploadFile#*/}" \
                  '{"file": $file, "title": $title}' \
                )

                while [[ "${categories}" != "${category}" ]]; do
                  category="${categories##*,}"
                  categories="${categories%,*}"
                  jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
                done

                jsonInventory=$(jq -c -n \
                  --argjson inventory "${jsonInventory}" \
                  --argjson file "${jsonEntry}" \
                  '$inventory * {"files": $file}' \
                )
              else
                printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
              fi
            fi

            # Upload asset to existing release page
            printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
            gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
            if [[ $? -eq 0 ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
            else
              printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
              printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
              ERRORS=$((ERRORS + 1))
              continue
            fi
          done <<<'${{ inputs.assets }}'

          if [[ "${{ inputs.inventory-json }}" != "" ]]; then
            inventoryTitle="Release Inventory (JSON)"

            printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
            printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
            printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
            cat "${{ inputs.inventory-json }}"
            printf "::endgroup::\n"

            # Upload inventory asset to existing release page
            printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
            gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
            if [[ $? -eq 0 ]]; then
              printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
            else
              printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
              printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
              ERRORS=$((ERRORS + 1))
              continue
            fi
          fi

          printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
          tree -pash -L 3 .
          printf "::endgroup::\n"

          if [[ $ERRORS -ne 0 ]]; then
            printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
            exit 1
          fi

      - name: 📑 Remove draft state from Release Page
        if: ${{ ! inputs.draft }}
        run: |
          set +e

          ANSI_LIGHT_RED=$'\x1b[91m'
          ANSI_LIGHT_GREEN=$'\x1b[92m'
          ANSI_NOCOLOR=$'\x1b[0m'

          export GH_TOKEN=${{ github.token }}

          # Remove draft-state from release page
          # Note: the original message referenced an undefined '${title}' variable; the release name input is used instead.
          printf "%s" "Remove draft-state from release '${{ inputs.nightly_name }}' ... "
          gh release edit --draft=false "${{ inputs.nightly_name }}"
          if [[ $? -eq 0 ]]; then
            printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          else
            printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
            printf " %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
            printf "::error title=%s::%s\n" "ReleasePage" "Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
          fi
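
For orientation, a minimal caller sketch of this (deprecated) reusable workflow; the `@main` ref, artifact names, and asset titles below are illustrative assumptions, not taken from this diff, and new pipelines should call 'PublishReleaseNotes.yml' instead:

jobs:
  NightlyPage:
    uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main  # illustrative ref
    secrets: inherit
    permissions:
      contents: write
      actions: write
    with:
      nightly_name: nightly                  # existing tag the release is attached to
      nightly_title: 'Nightly Release'
      replacements: |
        version=4.2.0
      assets: |
        # artifact : file : title  (a '!file.zip' asset is zipped from the whole artifact)
        MyProject-Documentation: !documentation.zip: Documentation (ZIP)
        MyProject-Wheel: myproject-%version%-py3-none-any.whl: Python wheel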
.github/workflows/Package.yml (vendored, 8 changes)

@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -33,7 +33,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       requirements:
         description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.'
@@ -53,7 +53,7 @@ jobs:
       artifact: ${{ inputs.artifact }}
     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
          lfs: true
          submodules: true
@@ -106,7 +106,7 @@ jobs:
        run: python setup.py bdist_wheel

      - name: 📤 Upload wheel artifact
-       uses: pyTooling/upload-artifact@v4
+       uses: pyTooling/upload-artifact@v6
        with:
          name: ${{ inputs.artifact }}
          working-directory: dist
.github/workflows/Parameters.yml (vendored, 106 changes)

@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -45,15 +45,20 @@ on:
         required: false
         default: ''
         type: string
+      version_file:
+        description: "Path to module containing the version ('__version__' variable)."
+        required: false
+        default: '__init__.py'
+        type: string
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       python_version_list:
         description: 'Space separated list of Python versions to run tests with.'
         required: false
-        default: '3.9 3.10 3.11 3.12 3.13'
+        default: '3.10 3.11 3.12 3.13 3.14'
         type: string
       system_list:
         description: 'Space separated list of systems to run tests on.'
@@ -98,7 +103,7 @@ on:
       macos_intel_image:
         description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
         required: false
-        default: 'macos-13'
+        default: 'macos-15-intel'
         type: string
       macos_arm_image:
         description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
@@ -121,6 +126,9 @@ on:
       package_directory:
         description: "The package's directory."
         value: ${{ jobs.Parameters.outputs.package_directory }}
+      package_version_file:
+        description: "Path to the package's module containing the version ('__version__' variable)."
+        value: ${{ jobs.Parameters.outputs.package_version_file }}
       artifact_basename:
         description: "Artifact base name."
         value: ${{ jobs.Parameters.outputs.artifact_basename }}
@@ -136,14 +144,21 @@ jobs:
     name: ✎ Generate pipeline parameters
     runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
     outputs:
       python_version: ${{ steps.variables.outputs.python_version }}
       package_fullname: ${{ steps.variables.outputs.package_fullname }}
       package_directory: ${{ steps.variables.outputs.package_directory }}
+      package_version_file: ${{ steps.variables.outputs.package_version_file }}
       artifact_basename: ${{ steps.variables.outputs.artifact_basename }}
       artifact_names: ${{ steps.artifacts.outputs.artifact_names }}
       python_jobs: ${{ steps.jobs.outputs.python_jobs }}

     steps:
+      - name: ⏬ Checkout repository
+        uses: actions/checkout@v6
+        with:
+          # The command 'git describe' (used for version) needs the history.
+          fetch-depth: 0
+
       - name: Generate a startup delay of ${{ inputs.pipeline-delay }} seconds
         id: delay
         if: inputs.pipeline-delay >= 0
@@ -162,9 +177,10 @@ jobs:
           python_version = "${{ inputs.python_version }}".strip()
           package_namespace = "${{ inputs.package_namespace }}".strip()
           package_name = "${{ inputs.package_name }}".strip()
+          version_file = "${{ inputs.version_file }}".strip()
           name = "${{ inputs.name }}".strip()

-          if package_namespace == "":  # or package_namespace == ".":
+          if package_namespace == "":
             package_fullname = package_name
             package_directory = package_name
           elif package_namespace[-2:] == ".*":
@@ -174,16 +190,28 @@ jobs:
             package_fullname = f"{package_namespace}.{package_name}"
             package_directory = f"{package_namespace}/{package_name}"

+          packageDirectory = Path(package_directory)
+          packageVersionFile = packageDirectory / version_file
+          print(f"Check if package version file '{packageVersionFile}' exists ... ", end="")
+          if packageVersionFile.exists():
+            print("✅")
+            package_version_file = packageVersionFile.as_posix()
+          else:
+            print("❌")
+            package_version_file = ""
+            print(f"::warning title=Parameters::Version file '{packageVersionFile}' not found.")
+
           artifact_basename = package_fullname if name == "" else name
           if artifact_basename == "" or artifact_basename == ".":
-            print("::error title=Parameter::artifact_basename is empty.")
+            print("::error title=Parameters::artifact_basename is empty.")
             exit(1)

           print("Variables:")
           print(f"  python_version: {python_version}")
           print(f"  package_fullname: {package_fullname}")
           print(f"  package_directory: {package_directory}")
+          print(f"  package_version_file: {package_directory}")
           print(f"  artifact_basename: {artifact_basename}")

           # Write jobs to special file
           github_output = Path(getenv("GITHUB_OUTPUT"))
@@ -193,6 +221,7 @@ jobs:
             python_version={python_version}
             package_fullname={package_fullname}
             package_directory={package_directory}
+            package_version_file={package_version_file}
             artifact_basename={artifact_basename}
           """))

@@ -258,12 +287,12 @@ jobs:
           exclude_list = "${{ inputs.exclude_list }}".strip()
           disable_list = "${{ inputs.disable_list }}".strip()

-          currentMSYS2Version = "3.12"
-          currentAlphaVersion = "3.14"
-          currentAlphaRelease = "3.14.0-rc.2"
+          currentMSYS2Version = "3.13"
+          currentAlphaVersion = "3.15"
+          currentAlphaRelease = "3.15.0-a.4"

           if systems == "":
-            print("::error title=Parameter::system_list is empty.")
+            print("::error title=Parameters::system_list is empty.")
           else:
             systems = [sys.strip() for sys in systems.split(" ")]

@@ -287,8 +316,8 @@ jobs:
           else:
             disabled = [disable.strip() for disable in disable_list.split(" ")]

-          if "3.8" in versions:
-            print("::warning title=Deprecated::Support for Python 3.8 ended in 2024.10.")
+          if "3.9" in versions:
+            print("::warning title=Deprecated::Support for Python 3.9 ended in 2025.10.")
           if "msys2" in systems:
             print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.")
           if currentAlphaVersion in versions:
@@ -300,15 +329,13 @@ jobs:
           data = {
             # Python and PyPy versions supported by "setup-python" action
             "python": {
-              "3.8": { "icon": "⚫", "until": "2024.10" },
-              "3.9": { "icon": "🔴", "until": "2025.10" },
-              "3.10": { "icon": "🟠", "until": "2026.10" },
-              "3.11": { "icon": "🟡", "until": "2027.10" },
-              "3.12": { "icon": "🟢", "until": "2028.10" },
+              "3.9": { "icon": "⚫", "until": "2025.10" },
+              "3.10": { "icon": "🔴", "until": "2026.10" },
+              "3.11": { "icon": "🟠", "until": "2027.10" },
+              "3.12": { "icon": "🟡", "until": "2028.10" },
               "3.13": { "icon": "🟢", "until": "2029.10" },
-              "3.14": { "icon": "🟣", "until": "2030.10" },
-              "pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
-              "pypy-3.8": { "icon": "⟲⚫", "until": "????.??" },
+              "3.14": { "icon": "🟢", "until": "2030.10" },
+              "3.15": { "icon": "🟣", "until": "2031.10" },
               "pypy-3.9": { "icon": "⟲🔴", "until": "????.??" },
               "pypy-3.10": { "icon": "⟲🟠", "until": "????.??" },
               "pypy-3.11": { "icon": "⟲🟡", "until": "????.??" },
@@ -430,12 +457,13 @@ jobs:
       - name: Verify out parameters
         id: verify
         run: |
           printf "python_version: %s\n" '${{ steps.variables.outputs.python_version }}'
           printf "package_fullname: %s\n" '${{ steps.variables.outputs.package_fullname }}'
           printf "package_directory: %s\n" '${{ steps.variables.outputs.package_directory }}'
+          printf "package_version_file: %s\n" '${{ steps.variables.outputs.package_version_file }}'
           printf "artifact_basename: %s\n" '${{ steps.variables.outputs.artifact_basename }}'
-          printf "====================\n"
+          printf "================================================================================\n"
           printf "artifact_names: %s\n" '${{ steps.artifacts.outputs.artifact_names }}'
-          printf "====================\n"
+          printf "================================================================================\n"
           printf "python_jobs: %s\n" '${{ steps.jobs.outputs.python_jobs }}'
-          printf "====================\n"
+          printf "================================================================================\n"
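
A short caller sketch for the new 'version_file' input added above; the ref and package names are illustrative assumptions, not taken from this diff:

jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main  # illustrative ref
    with:
      package_name: myPackage                  # illustrative
      version_file: '__init__.py'              # new input; resolved relative to the package directory
      python_version_list: '3.10 3.11 3.12 3.13 3.14'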
.github/workflows/PrepareJob.yml (vendored, 294 changes)

@@ -1,3 +1,24 @@
+# ==================================================================================================================== #
+# Authors: #
+#   Patrick Lehmann #
+# #
+# ==================================================================================================================== #
+# Copyright 2025-2026 The pyTooling Authors #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"); #
+# you may not use this file except in compliance with the License. #
+# You may obtain a copy of the License at #
+# #
+#   http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# Unless required by applicable law or agreed to in writing, software #
+# distributed under the License is distributed on an "AS IS" BASIS, #
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
+# See the License for the specific language governing permissions and #
+# limitations under the License. #
+# #
+# SPDX-License-Identifier: Apache-2.0 #
+# ==================================================================================================================== #
 name: Prepare Variables

 on:
@@ -35,15 +56,18 @@ on:
         type: string

     outputs:
+      on_default_branch:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.on_default_branch }}
       on_main_branch:
         description: ""
         value: ${{ jobs.Prepare.outputs.on_main_branch }}
-      on_dev_branch:
-        description: ""
-        value: ${{ jobs.Prepare.outputs.on_dev_branch }}
       on_release_branch:
         description: ""
         value: ${{ jobs.Prepare.outputs.on_release_branch }}
+      on_dev_branch:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.on_dev_branch }}
       is_regular_commit:
         description: ""
         value: ${{ jobs.Prepare.outputs.is_regular_commit }}
@@ -59,9 +83,15 @@ on:
       is_release_tag:
         description: ""
         value: ${{ jobs.Prepare.outputs.is_release_tag }}
+      has_submodules:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.has_submodules }}
       ref_kind:
         description: ""
         value: ${{ jobs.Prepare.outputs.ref_kind }}
+      default_branch:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.default_branch }}
       branch:
         description: ""
         value: ${{ jobs.Prepare.outputs.branch }}
@@ -83,31 +113,46 @@ on:
 #     pr_mergedat:
 #       description: ""
 #       value: ${{ jobs.Prepare.outputs.pr_mergedat }}
+      git_submodule_count:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.git_submodule_count }}
+      git_submodule_names:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.git_submodule_names }}
+      git_submodule_paths:
+        description: ""
+        value: ${{ jobs.Prepare.outputs.git_submodule_paths }}

 jobs:
   Prepare:
     name: Extract Information
     runs-on: ubuntu-24.04
     outputs:
+      on_default_branch: ${{ steps.Classify.outputs.on_default_branch }}
       on_main_branch: ${{ steps.Classify.outputs.on_main_branch }}
-      on_dev_branch: ${{ steps.Classify.outputs.on_dev_branch }}
       on_release_branch: ${{ steps.Classify.outputs.on_release_branch }}
+      on_dev_branch: ${{ steps.Classify.outputs.on_dev_branch }}
       is_regular_commit: ${{ steps.Classify.outputs.is_regular_commit }}
       is_merge_commit: ${{ steps.Classify.outputs.is_merge_commit }}
       is_release_commit: ${{ steps.Classify.outputs.is_release_commit }}
       is_nightly_tag: ${{ steps.Classify.outputs.is_nightly_tag }}
       is_release_tag: ${{ steps.Classify.outputs.is_release_tag }}
+      has_submodules: ${{ steps.Classify.outputs.has_submodules }}
       ref_kind: ${{ steps.Classify.outputs.ref_kind }}
+      default_branch: ${{ steps.Classify.outputs.default_branch }}
       branch: ${{ steps.Classify.outputs.branch }}
       tag: ${{ steps.Classify.outputs.tag }}
       version: ${{ steps.Classify.outputs.version || steps.FindPullRequest.outputs.pr_version }}
 #     release_version: ${{ steps.FindPullRequest.outputs.release_version }}
       pr_title: ${{ steps.FindPullRequest.outputs.pr_title }}
       pr_number: ${{ steps.Classify.outputs.pr_number || steps.FindPullRequest.outputs.pr_number }}
+      git_submodule_count: ${{ steps.Classify.outputs.git_submodule_count }}
+      git_submodule_names: ${{ steps.Classify.outputs.git_submodule_names }}
+      git_submodule_paths: ${{ steps.Classify.outputs.git_submodule_paths }}

     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           # The command 'git describe' (used for version) needs the history.
           fetch-depth: 0
@@ -132,89 +177,133 @@ jobs:
           ANSI_LIGHT_BLUE=$'\x1b[94m'
           ANSI_NOCOLOR=$'\x1b[0m'

+          export GH_TOKEN=${{ github.token }}
+
           ref="${{ github.ref }}"
+          on_default_branch="false"
           on_main_branch="false"
-          on_dev_branch="false"
           on_release_branch="false"
+          on_dev_branch="false"
           is_regular_commit="false"
           is_merge_commit="false"
           is_release_commit="false"
           is_nightly_tag="false"
           is_release_tag="false"
+          has_submodules="false"
           ref_kind="unknown"
+          default_branch=""
           branch=""
           tag=""
           pr_number=""
           version=""
+          git_submodule_count="0"
+          git_submodule_names=""
+          git_submodule_paths=""
+
+          printf "Classify Git reference '%s' " "${ref}"
           if [[ "${ref:0:11}" == "refs/heads/" ]]; then
+            printf "${ANSI_LIGHT_GREEN}[BRANCH]\n"
             ref_kind="branch"
             branch="${ref:11}"

-            printf "Commit check:\n"
+            printf "Get default branch name ... "
+            defaultBranch=$(gh repo view "${{ github.repository }}" --json defaultBranchRef --jq '.defaultBranchRef.name' 2>&1)
+            if [[ $? -eq 0 ]]; then
+              printf "${ANSI_LIGHT_GREEN} [OK]\n"
+
+              default_branch="${defaultBranch}"
+              printf " Default branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${default_branch}"
+            else
+              printf "${ANSI_LIGHT_RED} [FAILED]\n"
+              printf " ${ANSI_LIGHT_RED}%s${ANSI_NOCOLOR}\n" "${default_branch}"
+            fi
+
+            printf "Commit checks:\n"
+            printf " Commit: %s\n" "${{ github.sha }}"
+            printf " Commit kind "
+            if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then
+              is_regular_commit="true"
+              printf "${ANSI_LIGHT_YELLOW}[REGULAR]${ANSI_NOCOLOR}\n"
+            else
+              is_merge_commit="true"
+              printf "${ANSI_LIGHT_GREEN}[MERGE]${ANSI_NOCOLOR}\n"
+            fi
+
+            printf "Branch checks:\n"
+            printf " Branch: %s\n" "${branch}"
+            printf " Commit on default branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR} " "${defaultBranch}"
+            if [[ "${branch}" == "${defaultBranch}" ]]; then
+              on_default_branch="true"
+              printf "${ANSI_LIGHT_GREEN}[YES]${ANSI_NOCOLOR}\n"
+            else
+              printf "${ANSI_LIGHT_RED}[NO]${ANSI_NOCOLOR}\n"
+            fi
+
+            printf " Commit on main branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR} " "${{ inputs.main_branch }}"
             if [[ "${branch}" == "${{ inputs.main_branch }}" ]]; then
               on_main_branch="true"
-
-              if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then
-                is_regular_commit="true"
-                printf " ${ANSI_LIGHT_YELLOW}regular "
-              else
-                is_merge_commit="true"
-                printf " ${ANSI_LIGHT_GREEN}merge "
-              fi
-              printf "commit${ANSI_NOCOLOR} on main branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ inputs.main_branch }}"
-            fi
-
-            if [[ "${branch}" == "${{ inputs.development_branch }}" ]]; then
-              on_dev_branch="true"
-
-              if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then
-                is_regular_commit="true"
-                printf " ${ANSI_LIGHT_YELLOW}regular "
-              else
-                is_merge_commit="true"
-                printf " ${ANSI_LIGHT_GREEN}merge "
-              fi
-              printf "commit${ANSI_NOCOLOR} on development branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ inputs.development_branch }}"
+              printf "${ANSI_LIGHT_GREEN}[YES]${ANSI_NOCOLOR}\n"
+            else
+              printf "${ANSI_LIGHT_RED}[NO]${ANSI_NOCOLOR}\n"
             fi

+            printf " Commit on release branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR} " "${{ inputs.release_branch }}"
             if [[ "${branch}" == "${{ inputs.release_branch }}" ]]; then
               on_release_branch="true"
+              printf "${ANSI_LIGHT_GREEN}[YES]${ANSI_NOCOLOR}\n"
+            else
+              printf "${ANSI_LIGHT_RED}[NO]${ANSI_NOCOLOR}\n"
+            fi

-              if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then
-                is_regular_commit="true"
-                printf " ${ANSI_LIGHT_YELLOW}regular "
-              else
-                is_merge_commit="true"
-                printf " ${ANSI_LIGHT_GREEN}merge "
-              fi
-              printf "commit${ANSI_NOCOLOR} on release branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}"
+            printf " Commit on development branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR} " "${{ inputs.development_branch }}"
+            if [[ "${branch}" == "${{ inputs.development_branch }}" ]]; then
+              on_dev_branch="true"
+              printf "${ANSI_LIGHT_GREEN}[YES]${ANSI_NOCOLOR}\n"
+            else
+              printf "${ANSI_LIGHT_RED}[NO]${ANSI_NOCOLOR}\n"
+            fi
+
+            if [[ "${is_merge_commit}" == "true" ]]; then
+              printf "Release checks:\n"
+              printf " Release kind "
+              if [[ "${on_main_branch}" == "true" ]]; then
+                is_release_commit="true"
+                printf "${ANSI_LIGHT_GREEN}[RELEASE]${ANSI_NOCOLOR}\n"
+              elif [[ "${on_version_branch}" == "true" ]]; then
+                is_release_commit="true"
+                printf "${ANSI_LIGHT_GREEN}[RELEASE]${ANSI_NOCOLOR}\n"
+              elif [[ "${on_release_branch}" == "true" ]]; then
+                is_prerelease_commit="true"
+                printf "${ANSI_LIGHT_YELLOW}[PRERELEASE]${ANSI_NOCOLOR}\n"
+              fi
             fi
           elif [[ "${ref:0:10}" == "refs/tags/" ]]; then
+            printf "${ANSI_LIGHT_GREEN}[TAG]\n"
             ref_kind="tag"
             tag="${ref:10}"

-            printf "Tag check:\n"
-            printf " Check if tag is on release branch '%s' ... " "${{ inputs.release_branch }}"
-            git branch --remotes --contains $(git rev-parse --verify "tags/${tag}~0") | grep "origin/${{ inputs.release_branch }}" > /dev/null
+            printf "Tag checks:\n"
+            printf " Check if tag is on main branch '%s' ... " "${{ inputs.main_branch }}"
+            git branch --remotes --contains $(git rev-parse --verify "tags/${tag}~0") | grep "origin/${{ inputs.main_branch }}" > /dev/null
             if [[ $? -eq 0 ]]; then
               printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
             else
               printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
-              printf "${ANSI_LIGHT_RED}Tag '%s' isn't on branch '%s'.${ANSI_NOCOLOR}\n" "${tag}" "${{ inputs.release_branch }}"
-              printf "::error title=TagCheck::Tag '%s' isn't on branch '%s'.\n" "${tag}" "${{ inputs.release_branch }}"
+              printf "${ANSI_LIGHT_RED}Tag '%s' isn't on branch '%s'.${ANSI_NOCOLOR}\n" "${tag}" "${{ inputs.main_branch }}"
+              printf "::error title=TagCheck::Tag '%s' isn't on branch '%s'.\n" "${tag}" "${{ inputs.main_branch }}"
               exit 1
             fi

             NIGHTLY_TAG_PATTERN='^${{ inputs.nightly_tag_pattern }}$'
             RELEASE_TAG_PATTERN='^${{ inputs.release_tag_pattern }}$'
-            printf " Check tag name against regexp '%s' ... " "${RELEASE_TAG_PATTERN}"
-            if [[ "${tag}" =~ NIGHTLY_TAG_PATTERN ]]; then
+            printf "Tag checks:\n"
+            printf " Tag: %s\n" "${tag}"
+            printf " Check tag '%s' against regexp ... " "${tag}"
+            if [[ "${tag}" =~ ${NIGHTLY_TAG_PATTERN} ]]; then
               printf "${ANSI_LIGHT_GREEN}[NIGHTLY]${ANSI_NOCOLOR}\n"
               is_nightly_tag="true"
-            elif [[ "${tag}" =~ $RELEASE_TAG_PATTERN ]]; then
+            elif [[ "${tag}" =~ ${RELEASE_TAG_PATTERN} ]]; then
               printf "${ANSI_LIGHT_GREEN}[RELEASE]${ANSI_NOCOLOR}\n"
               version="${tag}"
               is_release_tag="true"
@@ -226,33 +315,75 @@ jobs:
               printf "::error title=RexExpCheck::Tag name '%s' doesn't conform to regexp '%s' nor '%s'.\n" "${tag}" "${NIGHTLY_TAG_PATTERN}" "${RELEASE_TAG_PATTERN}"
               exit 1
             fi
+
+            if [[ "${is_nightly_tag}" == "true" ]]; then
+              printf " Check if nightly tag is on main branch '%s' ... " "${{ inputs.main_branch }}"
+              git branch --remotes --contains $(git rev-parse --verify "tags/${tag}~0") | grep "origin/${{ inputs.main_branch }}" > /dev/null
+              if [[ $? -eq 0 ]]; then
+                printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+              else
+                printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+                printf " ${ANSI_LIGHT_RED}Tag '%s' isn't on branch '%s'.${ANSI_NOCOLOR}\n" "${tag}" "${{ inputs.main_branch }}"
+                printf "::error title=TagCheck::Tag '%s' isn't on branch '%s'.\n" "${tag}" "${{ inputs.main_branch }}"
+                exit 1
+              fi
+            elif [[ "${is_release_tag}" == "true" ]]; then
+              printf " Check if release tag is on main branch '%s' ... " "${{ inputs.main_branch }}"
+              git branch --remotes --contains $(git rev-parse --verify "tags/${tag}~0") | grep "origin/${{ inputs.main_branch }}" > /dev/null
+              if [[ $? -eq 0 ]]; then
+                printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+              else
+                printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+                printf " ${ANSI_LIGHT_RED}Tag '%s' isn't on branch '%s'.${ANSI_NOCOLOR}\n" "${tag}" "${{ inputs.main_branch }}"
+                printf "::error title=TagCheck::Tag '%s' isn't on branch '%s'.\n" "${tag}" "${{ inputs.main_branch }}"
+                exit 1
+              fi
+            fi
           elif [[ "${ref:0:10}" == "refs/pull/" ]]; then
+            printf "${ANSI_LIGHT_YELLOW}[PULL REQUEST]\n"
             ref_kind="pullrequest"
             pr_number=${ref:11}
             pr_number=${pr_number%%/*}

-            printf "Pull Request check:\n"
+            printf "Pull Request checks:\n"
             printf " Number: %s\n" "${pr_number}"
           else
+            printf "${ANSI_LIGHT_RED}[UNKNOWN]\n"
             printf "${ANSI_LIGHT_RED}Unknown Git reference '%s'.${ANSI_NOCOLOR}\n" "${{ github.ref }}"
             printf "::error title=Classify Commit::Unknown Git reference '%s'.\n" "${{ github.ref }}"
             exit 1
           fi

+          # Submodules
+          if [[ -f .gitsubmodules ]]; then
+            has_submodules="true"
+            git_modules_file=.gitmodules  # $(git rev-parse --show-toplevel)/.gitmodules
+            git_submodule_count="$(grep -Po '(?<=\[submodule \")(.*)(?=\"\])' "${git_modules_file}" | wc -l)"
+            git_submodule_names="$(grep -Po '(?<=\[submodule \")(.*)(?=\"\])' "${git_modules_file}" | paste -sd ':' -)"
+            git_submodule_paths="$(git config --file "${git_modules_file}" --null --name-only --get-regexp '\.path$' | xargs -0 -n1 git config --file "${git_modules_file}" --get | paste -sd ':' -)"
+          fi
+
+          printf "\nWriting output variables ...\n"
           tee --append "${GITHUB_OUTPUT}" <<EOF
+          on_default_branch=${on_default_branch}
           on_main_branch=${on_main_branch}
-          on_dev_branch=${on_dev_branch}
           on_release_branch=${on_release_branch}
+          on_dev_branch=${on_dev_branch}
           is_regular_commit=${is_regular_commit}
           is_merge_commit=${is_merge_commit}
           is_release_commit=${is_release_commit}
           is_nightly_tag=${is_nightly_tag}
           is_release_tag=${is_release_tag}
+          has_submodules=${has_submodules}
           ref_kind=${ref_kind}
+          default_branch=${default_branch}
           branch=${branch}
           tag=${tag}
           pr_number=${pr_number}
           version=${version}
+          git_submodule_count=${git_submodule_count}
+          git_submodule_names=${git_submodule_names}
+          git_submodule_paths=${git_submodule_paths}
           EOF

           # TODO: why not is_release_commit?
@@ -328,21 +459,28 @@ jobs:

       - name: Debug
         run: |
+          printf "on_default_branch: %s\n" "${{ steps.Classify.outputs.on_default_branch }}"
           printf "on_main_branch: %s\n" "${{ steps.Classify.outputs.on_main_branch }}"
-          printf "on_dev_branch: %s\n" "${{ steps.Classify.outputs.on_dev_branch }}"
           printf "on_release_branch: %s\n" "${{ steps.Classify.outputs.on_release_branch }}"
+          printf "on_dev_branch: %s\n" "${{ steps.Classify.outputs.on_dev_branch }}"
           printf "is_regular_commit: %s\n" "${{ steps.Classify.outputs.is_regular_commit }}"
           printf "is_merge_commit: %s\n" "${{ steps.Classify.outputs.is_merge_commit }}"
           printf "is_release_commit: %s\n" "${{ steps.Classify.outputs.is_release_commit }}"
           printf "is_nightly_tag: %s\n" "${{ steps.Classify.outputs.is_nightly_tag }}"
           printf "is_release_tag: %s\n" "${{ steps.Classify.outputs.is_release_tag }}"
+          printf "has_submodules: %s\n" "${{ steps.Classify.outputs.has_submodules }}"
           printf "ref_kind: %s\n" "${{ steps.Classify.outputs.ref_kind }}"
+          printf "default_branch: %s\n" "${{ steps.Classify.outputs.default_branch }}"
           printf "branch: %s\n" "${{ steps.Classify.outputs.branch }}"
           printf "tag: %s\n" "${{ steps.Classify.outputs.tag }}"
           printf "version: %s\n" "${{ steps.Classify.outputs.version || steps.FindPullRequest.outputs.pr_version }}"
           printf "  from tag: %s\n" "${{ steps.Classify.outputs.version }}"
           printf "  from pr: %s\n" "${{ steps.FindPullRequest.outputs.pr_version }}"
           printf "pr title: %s\n" "${{ steps.FindPullRequest.outputs.pr_title }}"
           printf "pr number: %s\n" "${{ steps.Classify.outputs.pr_number || steps.FindPullRequest.outputs.pr_number }}"
           printf "  from merge: %s\n" "${{ steps.Classify.outputs.pr_number }}"
           printf "  from pr: %s\n" "${{ steps.FindPullRequest.outputs.pr_number }}"
+          printf "git_submodule_*:\n"
+          printf "  *_count_: %s\n" "${{ steps.FindPullRequest.outputs.git_submodule_count }}"
+          printf "  *_names: %s\n" "${{ steps.FindPullRequest.outputs.git_submodule_names }}"
+          printf "  *_paths: %s\n" "${{ steps.FindPullRequest.outputs.git_submodule_paths }}"
19
.github/workflows/PublishCoverageResults.yml
vendored
19
.github/workflows/PublishCoverageResults.yml
vendored
@@ -3,7 +3,7 @@
|
|||||||
# Patrick Lehmann #
|
# Patrick Lehmann #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -109,13 +109,13 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
- name: ⏬ Checkout repository
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
lfs: true
|
lfs: true
|
||||||
submodules: true
|
submodules: true
|
||||||
|
|
||||||
- name: 📥 Download Artifacts
|
- name: 📥 Download Artifacts
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
pattern: ${{ inputs.coverage_artifacts_pattern }}
|
pattern: ${{ inputs.coverage_artifacts_pattern }}
|
||||||
path: artifacts
|
path: artifacts
|
||||||
@@ -124,9 +124,9 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
tree -pash artifacts
|
tree -pash artifacts
|
||||||
|
|
||||||
- name: 🔧 Install coverage and tomli
|
- name: 🔧 Install coverage
|
||||||
run: |
|
run: |
|
||||||
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
|
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml]
|
||||||
|
|
||||||
- name: Rename .coverage files and move them all into 'coverage/'
|
- name: Rename .coverage files and move them all into 'coverage/'
|
||||||
run: |
|
run: |
|
||||||
@@ -156,7 +156,7 @@ jobs:
|
|||||||
tree -pash ${{ fromJson(inputs.coverage_report_html).directory }}
|
tree -pash ${{ fromJson(inputs.coverage_report_html).directory }}
|
||||||
|
|
||||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.coverage_sqlite_artifact != ''
|
if: inputs.coverage_sqlite_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
@@ -166,7 +166,7 @@ jobs:
|
|||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.coverage_xml_artifact != ''
|
if: inputs.coverage_xml_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
@@ -177,7 +177,7 @@ jobs:
|
|||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.coverage_json_artifact != ''
|
if: inputs.coverage_json_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
@@ -188,7 +188,7 @@ jobs:
|
|||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.coverage_html_artifact != ''
|
if: inputs.coverage_html_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
@@ -205,6 +205,7 @@ jobs:
|
|||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
report_type: "coverage"
|
||||||
disable_search: true
|
disable_search: true
|
||||||
files: ${{ fromJson(inputs.coverage_report_xml).fullpath }}
|
files: ${{ fromJson(inputs.coverage_report_xml).fullpath }}
|
||||||
flags: unittests
|
flags: unittests
|
||||||
|
|||||||
6
.github/workflows/PublishOnPyPI.yml
vendored
6
.github/workflows/PublishOnPyPI.yml
vendored
@@ -4,7 +4,7 @@
|
|||||||
# Unai Martinez-Corral #
|
# Unai Martinez-Corral #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -33,7 +33,7 @@ on:
|
|||||||
python_version:
|
python_version:
|
||||||
description: 'Python version.'
|
description: 'Python version.'
|
||||||
required: false
|
required: false
|
||||||
default: '3.13'
|
default: '3.14'
|
||||||
type: string
|
type: string
|
||||||
requirements:
|
requirements:
|
||||||
description: 'Python dependencies to be installed through pip.'
|
description: 'Python dependencies to be installed through pip.'
|
||||||
@@ -56,7 +56,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
|
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.artifact }}
|
name: ${{ inputs.artifact }}
|
||||||
path: dist
|
path: dist
|
||||||
|
|||||||
488
.github/workflows/PublishReleaseNotes.yml
vendored
488
.github/workflows/PublishReleaseNotes.yml
vendored
@@ -3,7 +3,7 @@
|
|||||||
# Patrick Lehmann #
|
# Patrick Lehmann #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -132,7 +132,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
- name: ⏬ Checkout repository
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
# The command 'git describe' (used for version) needs the history.
|
# The command 'git describe' (used for version) needs the history.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -191,198 +191,6 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: 📑 Assemble Release Notes
|
|
||||||
id: createReleaseNotes
|
|
||||||
run: |
|
|
||||||
set +e
|
|
||||||
|
|
||||||
ANSI_LIGHT_RED=$'\x1b[91m'
|
|
||||||
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
|
||||||
ANSI_LIGHT_YELLOW=$'\x1b[93m'
|
|
||||||
ANSI_LIGHT_BLUE=$'\x1b[94m'
|
|
||||||
ANSI_NOCOLOR=$'\x1b[0m'
|
|
||||||
|
|
||||||
export GH_TOKEN=${{ github.token }}
|
|
||||||
|
|
||||||
# Save release description (from parameter in a file)
|
|
||||||
head -c -1 <<'EOF' > __DESCRIPTION__.md
|
|
||||||
${{ inputs.description }}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
# Save release footer (from parameter in a file)
|
|
||||||
head -c -1 <<'EOF' > __FOOTER__.md
|
|
||||||
${{ inputs.description_footer }}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
# Download Markdown from PullRequest
|
|
||||||
# Readout second parent's SHA
|
|
||||||
# Search PR with that SHA
|
|
||||||
# Load description of that PR
|
|
||||||
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
|
|
||||||
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
|
|
||||||
if [[ $? -ne 0 || "{FATHER_SHA}" == "" ]]; then
|
|
||||||
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
|
|
||||||
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
|
||||||
|
|
||||||
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
|
|
||||||
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
|
|
||||||
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
|
|
||||||
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
|
|
||||||
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
|
|
||||||
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
|
||||||
|
|
||||||
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
|
|
||||||
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
|
|
||||||
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
|
|
||||||
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
|
|
||||||
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
|
|
||||||
|
|
||||||
printf "Found Pull Request:\n"
|
|
||||||
printf " %s\n" "Title: ${PR_TITLE}"
|
|
||||||
printf " %s\n" "Number: ${PR_NUMBER}"
|
|
||||||
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
|
|
||||||
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
|
|
||||||
fi
|
|
||||||
|
|
||||||
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if a release description file should be used and exists.
|
|
||||||
if [[ "${{ inputs.description_file }}" != "" ]]; then
|
|
||||||
if [[ ! -f "${{ inputs.description_file }}" ]]; then
|
|
||||||
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
|
|
||||||
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
|
|
||||||
exit 1
|
|
||||||
elif [[ -s "${{ inputs.description_file }}" ]]; then
|
|
||||||
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
|
|
||||||
cp -v "${{ inputs.description_file }}" __NOTES__.md
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
|
|
||||||
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
# Check if the main release description is provided by a template parameter
|
|
||||||
elif [[ -s __DESCRIPTION__.md ]]; then
|
|
||||||
printf "Use '__DESCRIPTION__.md' as main release description.\n"
|
|
||||||
mv -v __DESCRIPTION__.md __NOTES__.md
|
|
||||||
# Check if the pull request serves as the main release description text.
|
|
||||||
elif [[ -s __PULLREQUEST__.md ]]; then
|
|
||||||
printf "Use '__PULLREQUEST__.md' as main release description.\n"
|
|
||||||
mv -v __PULLREQUEST__.md __NOTES__.md
|
|
||||||
|
|
||||||
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
|
|
||||||
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
|
|
||||||
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Read release notes main file for placeholder substitution
|
|
||||||
NOTES=$(<__NOTES__.md)
|
|
||||||
|
|
||||||
# Inline description
|
|
||||||
if [[ -s __DESCRIPTION__.md ]]; then
|
|
||||||
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
|
|
||||||
else
|
|
||||||
NOTES="${NOTES//%%DESCRIPTION%%/}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Inline PullRequest and increase headline levels
|
|
||||||
if [[ -s __PULLREQUEST__.md ]]; then
|
|
||||||
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
|
|
||||||
case "${BASH_REMATCH[1]}" in
|
|
||||||
"PULLREQUEST+0" | "PULLREQUEST")
|
|
||||||
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
|
|
||||||
;;
|
|
||||||
"PULLREQUEST+1")
|
|
||||||
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
|
|
||||||
;;
|
|
||||||
"PULLREQUEST+2")
|
|
||||||
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
|
|
||||||
;;
|
|
||||||
"PULLREQUEST+3")
|
|
||||||
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
else
|
|
||||||
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
|
|
||||||
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
# inline Footer
|
|
||||||
if [[ -s __FOOTER__.md ]]; then
|
|
||||||
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
|
|
||||||
else
|
|
||||||
NOTES="${NOTES//%%FOOTER%%/}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Apply replacements
|
|
||||||
while IFS=$'\r\n' read -r patternLine; do
|
|
||||||
# skip empty lines
|
|
||||||
[[ "$patternLine" == "" ]] && continue
|
|
||||||
|
|
||||||
pattern="%${patternLine%%=*}%"
|
|
||||||
replacement="${patternLine#*=}"
|
|
||||||
NOTES="${NOTES//$pattern/$replacement}"
|
|
||||||
done <<<'${{ inputs.replacements }}'
|
|
||||||
|
|
||||||
# Workarounds for stupid GitHub variables
|
|
||||||
owner_repo="${{ github.repository }}"
|
|
||||||
repo=${owner_repo##*/}
|
|
||||||
|
|
||||||
# Replace special identifiers
|
|
||||||
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
|
|
||||||
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
|
|
||||||
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
|
|
||||||
NOTES="${NOTES//%%gh_repo%%/${repo}}"
|
|
||||||
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
|
|
||||||
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
|
|
||||||
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
|
|
||||||
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
|
|
||||||
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
|
|
||||||
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
|
|
||||||
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
|
|
||||||
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
|
|
||||||
|
|
||||||
# Write final release notes to file
|
|
||||||
printf "%s\n" "${NOTES}" > __NOTES__.md
|
|
||||||
|
|
||||||
# Display partial contents for debugging
|
|
||||||
if [[ -s __DESCRIPTION__.md ]]; then
|
|
||||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
|
|
||||||
cat __DESCRIPTION__.md
|
|
||||||
printf "::endgroup::\n"
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
|
|
||||||
fi
|
|
||||||
if [[ -s __PULLREQUEST__.md ]]; then
|
|
||||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
|
|
||||||
cat __PULLREQUEST__.md
|
|
||||||
printf "::endgroup::\n"
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
|
|
||||||
fi
|
|
||||||
if [[ -s __FOOTER__.md ]]; then
|
|
||||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
|
|
||||||
cat __FOOTER__.md
|
|
||||||
printf "::endgroup::\n"
|
|
||||||
else
|
|
||||||
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Print final release notes
|
|
||||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
|
|
||||||
cat __NOTES__.md
|
|
||||||
printf "::endgroup::\n"
|
|
||||||
|
|
||||||
- name: 📑 Create new Release Page
|
- name: 📑 Create new Release Page
|
||||||
id: createReleasePage
|
id: createReleasePage
|
||||||
if: inputs.mode == 'release'
|
if: inputs.mode == 'release'
|
||||||
@@ -397,6 +205,15 @@ jobs:
|
|||||||
|
|
||||||
export GH_TOKEN=${{ github.token }}
|
export GH_TOKEN=${{ github.token }}
|
||||||
|
|
||||||
|
tee "__PRELIMINARY_NOTES__.md" <<EOF
|
||||||
|
Release notes for ${{ inputs.tag }} are created right now ...
|
||||||
|
|
||||||
|
1. download artifacts → (compression?) → upload as assets
|
||||||
|
2. optional: create inventory.json
|
||||||
|
3. assemble release notes → update this text
|
||||||
|
4. optional: remove draft state
|
||||||
|
EOF
|
||||||
|
|
||||||
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
|
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
|
||||||
addPreRelease="--prerelease"
|
addPreRelease="--prerelease"
|
||||||
fi
|
fi
|
||||||
@@ -409,9 +226,7 @@ jobs:
|
|||||||
addTitle=("--title" "${{ inputs.title }}")
|
addTitle=("--title" "${{ inputs.title }}")
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -s __NOTES__.md ]]; then
|
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
|
||||||
addNotes=("--notes-file" "__NOTES__.md")
|
|
||||||
fi
|
|
||||||
|
|
||||||
printf "Creating release '%s' ... " "${{ inputs.tag }}"
|
printf "Creating release '%s' ... " "${{ inputs.tag }}"
|
||||||
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
||||||
@@ -439,6 +254,14 @@ jobs:
|
|||||||
|
|
||||||
export GH_TOKEN=${{ github.token }}
|
export GH_TOKEN=${{ github.token }}
|
||||||
|
|
||||||
|
tee "__PRELIMINARY_NOTES__.md" <<EOF
|
||||||
|
Release notes for ${{ inputs.tag }} are updated right now ...
|
||||||
|
|
||||||
|
1. download artifacts → (compression?) → upload as assets
|
||||||
|
2. optional: create inventory.json
|
||||||
|
3. assemble release notes → update this text
|
||||||
|
EOF
|
||||||
|
|
||||||
addDraft="--draft"
|
addDraft="--draft"
|
||||||
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
|
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
|
||||||
addPreRelease="--prerelease"
|
addPreRelease="--prerelease"
|
||||||
@@ -452,9 +275,7 @@ jobs:
|
|||||||
addTitle=("--title" "${{ inputs.title }}")
|
addTitle=("--title" "${{ inputs.title }}")
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -s __NOTES__.md ]]; then
|
addNotes=("--notes-file" "__PRELIMINARY_NOTES__.md")
|
||||||
addNotes=("--notes-file" "__NOTES__.md")
|
|
||||||
fi
|
|
||||||
|
|
||||||
printf "Creating release '%s' ... " "${{ inputs.tag }}"
|
printf "Creating release '%s' ... " "${{ inputs.tag }}"
|
||||||
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
||||||
@@ -496,18 +317,20 @@ jobs:
|
|||||||
|
|
||||||
# Create JSON inventory
|
# Create JSON inventory
|
||||||
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
||||||
VERSION="1.0"
|
STRUCT_VERSION="1.1"
|
||||||
|
|
||||||
# Use GitHub API to ask for latest version
|
# Use GitHub API to ask for latest version
|
||||||
printf "Get latest released version via GitHub API ...\n"
|
printf "Get latest released version via GitHub API ...\n"
|
||||||
printf " gh release list --json tagName,isLatest --jq '.[] | select(.isLatest == true) | .tagName' "
|
printf " gh release list --json tagName,isLatest --jq '.[] | select(.isLatest == true) | .tagName' "
|
||||||
latestVersion=$(gh release list --json tagName,isLatest --jq '.[] | select(.isLatest == true) | .tagName')
|
latestVersion=$(gh release list --json tagName,isLatest --jq '.[] | select(.isLatest == true) | .tagName')
|
||||||
if [[ $? -eq 0 ]]; then
|
if [[ $? -eq 0 ]]; then
|
||||||
if [[ -n "${latestVersion}" ]]; then
|
if [[ -z "${latestVersion}" ]]; then
|
||||||
printf "${ANSI_LIGHT_RED}[UNKNOWN]${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_RED}[UNKNOWN]${ANSI_NOCOLOR}\n"
|
||||||
|
printf " latest=unknown\n"
|
||||||
latestVersion="unknown"
|
latestVersion="unknown"
|
||||||
else
|
else
|
||||||
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
||||||
|
printf " latest=%s\n" "${latestVersion}"
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n"
|
||||||
@@ -528,8 +351,8 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
jsonInventory=$(jq -c -n \
|
jsonInventory=$(jq -c -n \
|
||||||
--arg version "${VERSION}" \
|
--arg structVersion "${STRUCT_VERSION}" \
|
||||||
--arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
|
--arg date "$(date +"%Y-%m-%dT%H:%M:%S%:z")" \
|
||||||
--argjson jsonMeta "$(jq -c -n \
|
--argjson jsonMeta "$(jq -c -n \
|
||||||
--arg tag "${{ inputs.tag }}" \
|
--arg tag "${{ inputs.tag }}" \
|
||||||
--arg version "${{ inputs.inventory-version }}" \
|
--arg version "${{ inputs.inventory-version }}" \
|
||||||
@@ -538,7 +361,8 @@ jobs:
|
|||||||
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}" \
|
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}" \
|
||||||
--argjson jsonLatest "$(jq -c -n \
|
--argjson jsonLatest "$(jq -c -n \
|
||||||
--arg version "${latestVersion}" \
|
--arg version "${latestVersion}" \
|
||||||
'{"version": $version}' \
|
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${latestVersion}" \
|
||||||
|
'{"version": $version, "release-url": $release}' \
|
||||||
)" \
|
)" \
|
||||||
--argjson categories "$(jq -c -n \
|
--argjson categories "$(jq -c -n \
|
||||||
'$ARGS.positional' \
|
'$ARGS.positional' \
|
||||||
@@ -546,10 +370,14 @@ jobs:
|
|||||||
)" \
|
)" \
|
||||||
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories, "latest": $jsonLatest}' \
|
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories, "latest": $jsonLatest}' \
|
||||||
)" \
|
)" \
|
||||||
'{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
|
'{"version": $structVersion, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
|
||||||
)
|
)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Write Markdown table header
|
||||||
|
printf "| Asset Name | File Size | SHA256 |\n" > __ASSETS__.md
|
||||||
|
printf "|------------|-----------|--------|\n" >> __ASSETS__.md
|
||||||
|
|
||||||
ERRORS=0
|
ERRORS=0
|
||||||
# A dictionary of 0/1 to avoid duplicate downloads
|
# A dictionary of 0/1 to avoid duplicate downloads
|
||||||
declare -A downloadedArtifacts
|
declare -A downloadedArtifacts
|
||||||
@@ -738,6 +566,13 @@ jobs:
|
|||||||
sha256Checksums[$asset]="sha256:${sha256}"
|
sha256Checksums[$asset]="sha256:${sha256}"
|
||||||
printf "${ANSI_LIGHT_BLUE}${sha256}${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_BLUE}${sha256}${ANSI_NOCOLOR}\n"
|
||||||
|
|
||||||
|
# Add asset to Markdown table
|
||||||
|
printf "| %s | %s | %s |\n" \
|
||||||
|
"[${title}](${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}/${uploadFile#*/})" \
|
||||||
|
"$(stat --printf="%s" "${uploadFile}" | numfmt --format "%.1f" --suffix=B --to=iec-i)" \
|
||||||
|
"\`${sha256}\`" \
|
||||||
|
>> __ASSETS__.md
|
||||||
|
|
||||||
# Add asset to JSON inventory
|
# Add asset to JSON inventory
|
||||||
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
||||||
if [[ "${categories}" != "${title}" ]]; then
|
if [[ "${categories}" != "${title}" ]]; then
|
||||||
@@ -772,7 +607,7 @@ jobs:
|
|||||||
if [[ $? -eq 0 ]]; then
|
if [[ $? -eq 0 ]]; then
|
||||||
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
||||||
|
|
||||||
printf " checking assets SHA256 checksum ... \n"
|
printf " checking assets SHA256 checksum ... "
|
||||||
ghSHA256=$(gh release view --json assets --jq ".assets[] | select(.name == \"${asset}\") | .digest" ${{ inputs.tag }})
|
ghSHA256=$(gh release view --json assets --jq ".assets[] | select(.name == \"${asset}\") | .digest" ${{ inputs.tag }})
|
||||||
if [[ "${ghSHA256}" == "${sha256Checksums[$asset]}" ]]; then
|
if [[ "${ghSHA256}" == "${sha256Checksums[$asset]}" ]]; then
|
||||||
printf "${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n"
|
printf "${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n"
|
||||||
@@ -826,6 +661,245 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
- name: 📑 Assemble Release Notes
|
||||||
|
id: createReleaseNotes
|
||||||
|
run: |
|
||||||
|
set +e
|
||||||
|
|
||||||
|
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||||
|
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||||
|
ANSI_LIGHT_YELLOW=$'\x1b[93m'
|
||||||
|
ANSI_LIGHT_BLUE=$'\x1b[94m'
|
||||||
|
ANSI_NOCOLOR=$'\x1b[0m'
|
||||||
|
|
||||||
|
export GH_TOKEN=${{ github.token }}
|
||||||
|
|
||||||
|
# Save release description (from parameter in a file)
|
||||||
|
head -c -1 <<'EOF' > __DESCRIPTION__.md
|
||||||
|
${{ inputs.description }}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Save release footer (from parameter in a file)
|
||||||
|
head -c -1 <<'EOF' > __FOOTER__.md
|
||||||
|
${{ inputs.description_footer }}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Download Markdown from PullRequest
|
||||||
|
# Readout second parent's SHA
|
||||||
|
# Search PR with that SHA
|
||||||
|
# Load description of that PR
|
||||||
|
printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
|
||||||
|
FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
|
||||||
|
if [[ $? -ne 0 || "{FATHER_SHA}" == "" ]]; then
|
||||||
|
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
|
||||||
|
printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
||||||
|
|
||||||
|
printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
|
||||||
|
PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
|
||||||
|
if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
|
||||||
|
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
|
||||||
|
printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
|
||||||
|
printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
||||||
|
|
||||||
|
PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
|
||||||
|
PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
|
||||||
|
PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")"
|
||||||
|
PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
|
||||||
|
PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
|
||||||
|
|
||||||
|
printf "Found Pull Request:\n"
|
||||||
|
printf " %s\n" "Title: ${PR_TITLE}"
|
||||||
|
printf " %s\n" "Number: ${PR_NUMBER}"
|
||||||
|
printf " %s\n" "MergedBy: ${PR_MERGED_BY}"
|
||||||
|
printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "%s\n" "${PR_BODY}" > __PULLREQUEST__.md
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if a release description file should be used and exists.
|
||||||
|
if [[ "${{ inputs.description_file }}" != "" ]]; then
|
||||||
|
if [[ ! -f "${{ inputs.description_file }}" ]]; then
|
||||||
|
printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
|
||||||
|
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found."
|
||||||
|
exit 1
|
||||||
|
elif [[ -s "${{ inputs.description_file }}" ]]; then
|
||||||
|
printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}"
|
||||||
|
cp -v "${{ inputs.description_file }}" __NOTES__.md
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}"
|
||||||
|
printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
# Check if the main release description is provided by a template parameter
|
||||||
|
elif [[ -s __DESCRIPTION__.md ]]; then
|
||||||
|
printf "Use '__DESCRIPTION__.md' as main release description.\n"
|
||||||
|
mv -v __DESCRIPTION__.md __NOTES__.md
|
||||||
|
# Check if the pull request serves as the main release description text.
|
||||||
|
elif [[ -s __PULLREQUEST__.md ]]; then
|
||||||
|
printf "Use '__PULLREQUEST__.md' as main release description.\n"
|
||||||
|
mv -v __PULLREQUEST__.md __NOTES__.md
|
||||||
|
|
||||||
|
printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n"
|
||||||
|
printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n"
|
||||||
|
printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Read release notes main file for placeholder substitution
|
||||||
|
NOTES=$(<__NOTES__.md)
|
||||||
|
|
||||||
|
# Inline description
|
||||||
|
if [[ -s __DESCRIPTION__.md ]]; then
|
||||||
|
NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
|
||||||
|
else
|
||||||
|
NOTES="${NOTES//%%DESCRIPTION%%/}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Inline PullRequest and increase headline levels
|
||||||
|
if [[ -s __PULLREQUEST__.md ]]; then
|
||||||
|
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
|
||||||
|
case "${BASH_REMATCH[1]}" in
|
||||||
|
"PULLREQUEST+0" | "PULLREQUEST")
|
||||||
|
NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
|
||||||
|
;;
|
||||||
|
"PULLREQUEST+1")
|
||||||
|
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
|
||||||
|
;;
|
||||||
|
"PULLREQUEST+2")
|
||||||
|
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
|
||||||
|
;;
|
||||||
|
"PULLREQUEST+3")
|
||||||
|
NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
else
|
||||||
|
while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
|
||||||
|
NOTES="${NOTES//${BASH_REMATCH[0]}/}"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Inline Files table
|
||||||
|
if [[ -s __ASSETS__.md ]]; then
|
||||||
|
NOTES="${NOTES//%%ASSETS%%/$(<__ASSETS__.md)}"
|
||||||
|
else
|
||||||
|
NOTES="${NOTES//%%ASSETS%%/}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Inline Footer
|
||||||
|
if [[ -s __FOOTER__.md ]]; then
|
||||||
|
NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
|
||||||
|
else
|
||||||
|
NOTES="${NOTES//%%FOOTER%%/}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Apply replacements
|
||||||
|
while IFS=$'\r\n' read -r patternLine; do
|
||||||
|
# skip empty lines
|
||||||
|
[[ "$patternLine" == "" ]] && continue
|
||||||
|
|
||||||
|
pattern="%${patternLine%%=*}%"
|
||||||
|
replacement="${patternLine#*=}"
|
||||||
|
NOTES="${NOTES//$pattern/$replacement}"
|
||||||
|
done <<<'${{ inputs.replacements }}'
|
||||||
|
|
||||||
|
# Workarounds for stupid GitHub variables
|
||||||
|
owner_repo="${{ github.repository }}"
|
||||||
|
repo=${owner_repo##*/}
|
||||||
|
|
||||||
|
# Replace special identifiers
|
||||||
|
NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
|
||||||
|
NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
|
||||||
|
NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
|
||||||
|
NOTES="${NOTES//%%gh_repo%%/${repo}}"
|
||||||
|
NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
|
||||||
|
#NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
|
||||||
|
NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
|
||||||
|
NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
|
||||||
|
NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
|
||||||
|
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}"
|
||||||
|
NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}"
|
||||||
|
NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}"
|
||||||
|
|
||||||
|
# Write final release notes to file
|
||||||
|
printf "%s\n" "${NOTES}" > __NOTES__.md
|
||||||
|
|
||||||
|
# Display partial contents for debugging
|
||||||
|
if [[ -s __DESCRIPTION__.md ]]; then
|
||||||
|
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...."
|
||||||
|
cat __DESCRIPTION__.md
|
||||||
|
printf "::endgroup::\n"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n"
|
||||||
|
fi
|
||||||
|
if [[ -s __PULLREQUEST__.md ]]; then
|
||||||
|
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...."
|
||||||
|
cat __PULLREQUEST__.md
|
||||||
|
printf "::endgroup::\n"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n"
|
||||||
|
fi
|
||||||
|
if [[ -s __ASSETS__.md ]]; then
|
||||||
|
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__ASSETS__.md' ($(stat --printf="%s" "__ASSETS__.md") B) ...."
|
||||||
|
cat __ASSETS__.md
|
||||||
|
printf "::endgroup::\n"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_YELLOW}No '__ASSETS__.md' found.${ANSI_NOCOLOR}\n"
|
||||||
|
fi
|
||||||
|
if [[ -s __FOOTER__.md ]]; then
|
||||||
|
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...."
|
||||||
|
cat __FOOTER__.md
|
||||||
|
printf "::endgroup::\n"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Print final release notes
|
||||||
|
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...."
|
||||||
|
cat __NOTES__.md
|
||||||
|
printf "::endgroup::\n"
|
||||||
|
|
||||||
|
- name: 📑 Update release notes
|
||||||
|
id: updateReleaseNotes
|
||||||
|
run: |
|
||||||
|
set +e
|
||||||
|
|
||||||
|
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||||
|
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||||
|
ANSI_LIGHT_YELLOW=$'\x1b[93m'
|
||||||
|
ANSI_LIGHT_BLUE=$'\x1b[94m'
|
||||||
|
ANSI_NOCOLOR=$'\x1b[0m'
|
||||||
|
|
||||||
|
export GH_TOKEN=${{ github.token }}
|
||||||
|
|
||||||
|
if [[ -s __NOTES__.md ]]; then
|
||||||
|
addNotes=("--notes-file" "__NOTES__.md")
|
||||||
|
else
|
||||||
|
printf " ${ANSI_LIGHT_RED}File '%s' not found.${ANSI_NOCOLOR}\n" "__NOTES__.md"
|
||||||
|
printf "::error title=%s::%s\n" "InternalError" "File '__NOTES__.md' not found."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "Updating release '%s' ... " "${{ inputs.tag }}"
|
||||||
|
message="$(gh release edit "${addNotes[@]}" "${{ inputs.tag }}" 2>&1)"
|
||||||
|
if [[ $? -eq 0 ]]; then
|
||||||
|
printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
|
||||||
|
printf " Release page: %s\n" "${message}"
|
||||||
|
else
|
||||||
|
printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
|
||||||
|
printf " ${ANSI_LIGHT_RED}Couldn't update release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}"
|
||||||
|
printf "::error title=%s::%s\n" "InternalError" "Couldn't update release '${{ inputs.tag }}' -> Error: '${message}'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
- name: 📑 Remove draft state from Release Page
|
- name: 📑 Remove draft state from Release Page
|
||||||
id: removeDraft
|
id: removeDraft
|
||||||
if: ${{ ! inputs.draft }}
|
if: ${{ ! inputs.draft }}
|
||||||
|
|||||||
11
.github/workflows/PublishTestResults.yml
vendored
11
.github/workflows/PublishTestResults.yml
vendored
@@ -4,7 +4,7 @@
|
|||||||
# Unai Martinez-Corral #
|
# Unai Martinez-Corral #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -102,10 +102,10 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
- name: ⏬ Checkout repository
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: 📥 Download Artifacts
|
- name: 📥 Download Artifacts
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
pattern: ${{ inputs.unittest_artifacts_pattern }}
|
pattern: ${{ inputs.unittest_artifacts_pattern }}
|
||||||
path: artifacts
|
path: artifacts
|
||||||
@@ -144,19 +144,20 @@ jobs:
|
|||||||
reporter: java-junit
|
reporter: java-junit
|
||||||
|
|
||||||
- name: 📊 Publish unittest results at CodeCov
|
- name: 📊 Publish unittest results at CodeCov
|
||||||
uses: codecov/test-results-action@v1
|
uses: codecov/codecov-action@v5
|
||||||
id: codecov
|
id: codecov
|
||||||
if: inputs.codecov == 'true'
|
if: inputs.codecov == 'true'
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
report_type: "test_results"
|
||||||
disable_search: true
|
disable_search: true
|
||||||
files: ${{ inputs.merged_junit_filename }}
|
files: ${{ inputs.merged_junit_filename }}
|
||||||
flags: ${{ inputs.codecov_flags }}
|
flags: ${{ inputs.codecov_flags }}
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
|
|
||||||
- name: 📤 Upload merged 'JUnit Test Summary' artifact
|
- name: 📤 Upload merged 'JUnit Test Summary' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.merged_junit_artifact != ''
|
if: inputs.merged_junit_artifact != ''
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.merged_junit_artifact }}
|
name: ${{ inputs.merged_junit_artifact }}
|
||||||
|
|||||||
50
.github/workflows/PublishToGitHubPages.yml
vendored
50
.github/workflows/PublishToGitHubPages.yml
vendored
@@ -4,7 +4,7 @@
|
|||||||
# Unai Martinez-Corral #
|
# Unai Martinez-Corral #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -45,45 +45,51 @@ on:
|
|||||||
default: ''
|
default: ''
|
||||||
type: string
|
type: string
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
github_pages_url:
|
||||||
|
description: "URL to GitHub Pages."
|
||||||
|
value: ${{ jobs.PrepareGitHubPages.outputs.github_pages_url }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
PrepareGitHubPages:
|
||||||
PublishToGitHubPages:
|
name: 📖 Merge multiple contents for publishing
|
||||||
name: 📚 Publish to GH-Pages
|
|
||||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||||
|
permissions:
|
||||||
|
pages: write # to deploy to Pages
|
||||||
|
id-token: write # to verify the deployment originates from an appropriate source
|
||||||
|
environment:
|
||||||
|
name: github-pages
|
||||||
|
url: ${{ steps.deployment.outputs.page_url }}
|
||||||
|
outputs:
|
||||||
|
github_pages_url: ${{ steps.deployment.outputs.page_url }}
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
|
||||||
uses: actions/checkout@v5
|
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
|
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.doc }}
|
name: ${{ inputs.doc }}
|
||||||
path: public
|
path: public
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
|
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: ${{ inputs.coverage != '' }}
|
if: ${{ inputs.coverage != '' }}
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.coverage }}
|
name: ${{ inputs.coverage }}
|
||||||
path: public/coverage
|
path: public/coverage
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
|
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: ${{ inputs.typing != '' }}
|
if: ${{ inputs.typing != '' }}
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.typing }}
|
name: ${{ inputs.typing }}
|
||||||
path: public/typing
|
path: public/typing
|
||||||
|
|
||||||
- name: '📓 Publish site to GitHub Pages'
|
- name: 📑 Upload static files as artifact
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
run: |
|
uses: actions/upload-pages-artifact@v4
|
||||||
cd public
|
with:
|
||||||
touch .nojekyll
|
path: public/
|
||||||
git init
|
|
||||||
cp ../.git/config ./.git/config
|
- name: 📖 Deploy to GitHub Pages
|
||||||
git add .
|
id: deployment
|
||||||
git config --local user.email "BuildTheDocs@GitHubActions"
|
if: github.event_name != 'pull_request'
|
||||||
git config --local user.name "GitHub Actions"
|
uses: actions/deploy-pages@v4
|
||||||
git commit -a -m "update ${{ github.sha }}"
|
|
||||||
git push -u origin +HEAD:gh-pages
|
|
||||||
|
|||||||
24
.github/workflows/SphinxDocumentation.yml
vendored
24
.github/workflows/SphinxDocumentation.yml
vendored
@@ -3,7 +3,7 @@
|
|||||||
# Patrick Lehmann #
|
# Patrick Lehmann #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -32,7 +32,7 @@ on:
|
|||||||
python_version:
|
python_version:
|
||||||
description: 'Python version.'
|
description: 'Python version.'
|
||||||
required: false
|
required: false
|
||||||
default: '3.13'
|
default: '3.14'
|
||||||
type: string
|
type: string
|
||||||
requirements:
|
requirements:
|
||||||
description: 'Python dependencies to be installed through pip.'
|
description: 'Python dependencies to be installed through pip.'
|
||||||
@@ -86,7 +86,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
- name: ⏬ Checkout repository
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
lfs: true
|
lfs: true
|
||||||
submodules: true
|
submodules: true
|
||||||
@@ -99,13 +99,13 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
python-version: ${{ inputs.python_version }}
|
python-version: ${{ inputs.python_version }}
|
||||||
|
|
||||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
- name: 🔧 Install wheel and pip dependencies (native)
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --disable-pip-version-check -U wheel
|
python -m pip install --disable-pip-version-check -U wheel
|
||||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: inputs.unittest_xml_artifact != ''
|
if: inputs.unittest_xml_artifact != ''
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.unittest_xml_artifact }}
|
name: ${{ inputs.unittest_xml_artifact }}
|
||||||
@@ -113,7 +113,7 @@ jobs:
|
|||||||
investigate: true
|
investigate: true
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: inputs.coverage_json_artifact != ''
|
if: inputs.coverage_json_artifact != ''
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.coverage_json_artifact }}
|
name: ${{ inputs.coverage_json_artifact }}
|
||||||
@@ -129,7 +129,7 @@ jobs:
|
|||||||
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
|
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
|
||||||
|
|
||||||
- name: 📤 Upload 'HTML Documentation' artifact
|
- name: 📤 Upload 'HTML Documentation' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.html_artifact != ''
|
if: inputs.html_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
@@ -145,7 +145,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: ⏬ Checkout repository
|
- name: ⏬ Checkout repository
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
lfs: true
|
lfs: true
|
||||||
submodules: true
|
submodules: true
|
||||||
@@ -158,13 +158,13 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
python-version: ${{ inputs.python_version }}
|
python-version: ${{ inputs.python_version }}
|
||||||
|
|
||||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
- name: 🔧 Install wheel and pip dependencies (native)
|
||||||
run: |
|
run: |
|
||||||
python -m pip install --disable-pip-version-check -U wheel
|
python -m pip install --disable-pip-version-check -U wheel
|
||||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: inputs.unittest_xml_artifact != ''
|
if: inputs.unittest_xml_artifact != ''
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.unittest_xml_artifact }}
|
name: ${{ inputs.unittest_xml_artifact }}
|
||||||
@@ -172,7 +172,7 @@ jobs:
|
|||||||
investigate: true
|
investigate: true
|
||||||
|
|
||||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||||
uses: pyTooling/download-artifact@v5
|
uses: pyTooling/download-artifact@v7
|
||||||
if: inputs.coverage_json_artifact != ''
|
if: inputs.coverage_json_artifact != ''
|
||||||
with:
|
with:
|
||||||
name: ${{ inputs.coverage_json_artifact }}
|
name: ${{ inputs.coverage_json_artifact }}
|
||||||
@@ -272,7 +272,7 @@ jobs:
|
|||||||
done
|
done
|
||||||
|
|
||||||
- name: 📤 Upload 'LaTeX Documentation' artifact
|
- name: 📤 Upload 'LaTeX Documentation' artifact
|
||||||
uses: pyTooling/upload-artifact@v4
|
uses: pyTooling/upload-artifact@v6
|
||||||
if: inputs.latex_artifact != ''
|
if: inputs.latex_artifact != ''
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
|
|||||||
22
.github/workflows/StaticTypeCheck.yml
vendored
22
.github/workflows/StaticTypeCheck.yml
vendored
@@ -4,7 +4,7 @@
|
|||||||
# Unai Martinez-Corral #
|
# Unai Martinez-Corral #
|
||||||
# #
|
# #
|
||||||
# ==================================================================================================================== #
|
# ==================================================================================================================== #
|
||||||
# Copyright 2020-2025 The pyTooling Authors #
|
# Copyright 2020-2026 The pyTooling Authors #
|
||||||
# #
|
# #
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||||
# you may not use this file except in compliance with the License. #
|
# you may not use this file except in compliance with the License. #
|
||||||
@@ -33,12 +33,12 @@ on:
|
|||||||
python_version:
|
python_version:
|
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
       requirements:
         description: 'Python dependencies to be installed through pip.'
         required: false
-        default: '-r tests/requirements.txt'
+        default: '-r tests/typing/requirements.txt'
         type: string
       mypy_options:
         description: 'Additional mypy options.'
@@ -49,18 +49,18 @@ on:
         description: 'Cobertura file to upload as an artifact.'
         required: false
         default: >-
           { "fullpath": "report/typing/cobertura.xml",
             "directory": "report/typing",
             "filename": "cobertura.xml"
           }
         type: string
       junit_report:
         description: 'JUnit file to upload as an artifact.'
         required: false
         default: >-
           { "fullpath": "report/typing/StaticTypingSummary.xml",
             "directory": "report/typing",
             "filename": "StaticTypingSummary.xml"
           }
         type: string
       html_report:
@@ -94,7 +94,7 @@ jobs:
 
     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v6
@@ -142,7 +142,7 @@ jobs:
           fi
 
       - name: 📤 Upload '${{ inputs.html_artifact }}' HTML artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         if: ${{ inputs.html_artifact != '' }}
         continue-on-error: true
         with:
@@ -153,7 +153,7 @@ jobs:
           retention-days: 1
 
       - name: 📤 Upload '${{ inputs.junit_artifact }}' JUnit artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         if: ${{ inputs.junit_artifact != '' }}
         continue-on-error: true
         with:
@@ -164,7 +164,7 @@ jobs:
           retention-days: 1
 
       - name: 📤 Upload '${{ inputs.cobertura_artifact }}' Cobertura artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         if: ${{ inputs.cobertura_artifact != '' }}
         continue-on-error: true
         with:

2  .github/workflows/TagReleaseCommit.yml  vendored
@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #

81  .github/workflows/UnitTesting.yml  vendored
@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -47,7 +47,7 @@ on:
       requirements:
         description: 'Python dependencies to be installed through pip.'
         required: false
-        default: '-r tests/requirements.txt'
+        default: '-r ./requirements.txt'
         type: string
       mingw_requirements:
         description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
@@ -82,7 +82,7 @@ on:
       root_directory:
         description: 'Working directory for running tests.'
         required: false
-        default: ''
+        default: '.'
         type: string
       tests_directory:
         description: 'Path to the directory containing tests (relative from root_directory).'
@@ -181,7 +181,7 @@ jobs:
 
     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           lfs: true
           submodules: true
@@ -199,11 +199,43 @@ jobs:
 
       # Compute Dependencies for MSYS2 steps
 
-      - name: 🔧 Install dependencies (system Python for Python shell)
-        if: matrix.system == 'msys2'
-        shell: pwsh
+      # - name: 🔧 Install dependencies (system Python for Python shell)
+      #   if: matrix.system == 'msys2'
+      #   shell: pwsh
+      #   run: |
+      #     py -3.12 -m pip install --disable-pip-version-check --break-system-packages -U tomli
+
+      - name: Compute path to requirements file
+        id: requirements
+        shell: python
         run: |
-          py -3.9 -m pip install --disable-pip-version-check -U tomli
+          from os import getenv
+          from pathlib import Path
+          from sys import version
+
+          print(f"Python: {version}")
+
+          requirements = "${{ inputs.requirements }}"
+          if requirements.startswith("-r"):
+            requirements = requirements[2:].lstrip()
+            if requirements.startswith("./"):
+              requirementsFile = Path("${{ inputs.root_directory || '.' }}") / Path("${{ inputs.tests_directory || '.' }}") / Path("${{ inputs.unittest_directory || '.' }}") / Path(requirements[2:])
+            else:
+              requirementsFile = Path(requirements)
+
+            if not requirementsFile.exists():
+              print(f"::error title=FileNotFoundError::{requirementsFile}")
+              exit(1)
+
+            print(f"requirements file: {requirementsFile.as_posix()}")
+
+            # Write requirements path to special file
+            github_output = Path(getenv("GITHUB_OUTPUT"))
+            print(f"GITHUB_OUTPUT: {github_output}")
+            with github_output.open("a+") as f:
+              f.write(f"requirements=-r {requirementsFile.as_posix()}\n")
+          else:
+            print(f"requirements list: {requirements}")
+
       - name: Compute pacman/pacboy packages
         id: pacboy
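The "Compute path to requirements file" step added above resolves a `-r ./...` requirements specification relative to the configured root, tests and unittest directories before handing it to pip, and passes a plain space-separated package list through unchanged. As a rough standalone illustration of that resolution logic only (a minimal sketch reusing the input names shown above, not part of the workflow itself):

from pathlib import Path

def resolve_requirements(spec: str, root: str = ".", tests: str = ".", unittest: str = ".") -> str:
    """Sketch of the resolution performed by the 'Compute path to requirements file' step."""
    if not spec.startswith("-r"):
        # A space-separated package list is used as-is.
        return spec
    path = spec[2:].lstrip()
    if path.startswith("./"):
        # './' is interpreted relative to root_directory/tests_directory/unittest_directory.
        file = Path(root) / Path(tests) / Path(unittest) / Path(path[2:])
    else:
        file = Path(path)
    if not file.exists():
        raise FileNotFoundError(file)
    return f"-r {file.as_posix()}"

# Example: resolve_requirements("-r ./requirements.txt", root=".", tests="tests", unittest="unit")
# yields "-r tests/unit/requirements.txt" when that file exists.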
@@ -215,8 +247,6 @@ jobs:
           from re import compile
           from sys import version
 
-          print(f"Python: {version}")
-
           def loadRequirementsFile(requirementsFile: Path):
             requirements = []
             with requirementsFile.open("r") as file:
@@ -232,11 +262,10 @@ jobs:
 
             return requirements
 
-          requirements = "${{ inputs.requirements }}"
+          requirements = "${{ steps.requirements.outputs.requirements }}"
           if requirements.startswith("-r"):
-            requirementsFile = Path(requirements[2:].lstrip())
             try:
-              dependencies = loadRequirementsFile(requirementsFile)
+              dependencies = loadRequirementsFile(Path(requirements[2:].lstrip()))
             except FileNotFoundError as ex:
               print(f"::error title=FileNotFoundError::{ex}")
               exit(1)
@@ -244,6 +273,7 @@ jobs:
             dependencies = [req.strip() for req in requirements.split(" ")]
 
           packages = {
+            "aiohttp": "python-aiohttp:p",
             "coverage": "python-coverage:p",
             "docstr_coverage": "python-pyaml:p python-types-pyyaml:p",
             "igraph": "igraph:p",
@@ -256,7 +286,7 @@ jobs:
             "ruamel.yaml": "python-ruamel-yaml:p",
             # "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
             "sphinx": "python-markupsafe:p",
-            "tomli": "python-tomli:p",
+            "tomli": "python-tomli:p",  # outdated, now part of Python as tomllib
             "wheel": "python-wheel:p",
             "pyedaa.projectmodel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
             "pyedaa.reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
@@ -265,6 +295,7 @@ jobs:
           subPackages = {
             "pytooling": {
               "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
+              "pypi": "python-aiohttp:p",
             },
           }
 
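For MSYS2 jobs, the "Compute pacman/pacboy packages" step shown in these hunks maps pip requirement names onto pacboy package specifications via the packages and subPackages tables. A much simplified sketch of that lookup, using only the entries visible above; the extras handling and the base package set here are assumptions for illustration, not the step's actual code:

# Trimmed-down illustration of the pacboy mapping (hypothetical helper, not the workflow's real code).
packages = {
    "aiohttp": "python-aiohttp:p",
    "coverage": "python-coverage:p",
    "tomli": "python-tomli:p",  # outdated, now part of Python as tomllib
}
subPackages = {
    "pytooling": {
        "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
        "pypi": "python-aiohttp:p",
    },
}

def pacboy_packages(dependencies: list[str]) -> str:
    """Collect pacboy package specs for the given pip requirement names."""
    specs = {"python-pip:p"}  # assumed base set, not taken from the diff
    for dep in dependencies:
        name, _, extras = dep.partition("[")
        name = name.strip().lower()
        if name in packages:
            specs.add(packages[name])
        for extra in extras.rstrip("]").split(","):
            specs.update(subPackages.get(name, {}).get(extra.strip(), "").split())
    return " ".join(sorted(specs))

# Example: pacboy_packages(["pyTooling[yaml]", "coverage"])
# -> 'python-coverage:p python-pip:p python-ruamel-yaml:p python-ruamel.yaml.clib:p'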
@@ -320,19 +351,19 @@ jobs:
 
       # Python Dependency steps
 
-      - name: 🔧 Install wheel,tomli and pip dependencies (native)
+      - name: 🔧 Install wheel and pip dependencies (native)
         if: matrix.system != 'msys2'
         run: |
-          python -m pip install --disable-pip-version-check -U wheel tomli
-          python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
+          python -m pip install --disable-pip-version-check -U wheel
+          python -m pip install --disable-pip-version-check ${{ steps.requirements.outputs.requirements }}
 
       - name: 🔧 Install pip dependencies (MSYS2)
         if: matrix.system == 'msys2'
         run: |
           if [ -n '${{ inputs.mingw_requirements }}' ]; then
-            python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
+            python -m pip install --disable-pip-version-check --break-system-packages ${{ inputs.mingw_requirements }}
           else
-            python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
+            python -m pip install --disable-pip-version-check --break-system-packages ${{ steps.requirements.outputs.requirements }}
           fi
 
       # Before scripts
@@ -421,7 +452,7 @@ jobs:
       # Upload artifacts
 
       - name: 📤 Upload '${{ fromJson(inputs.unittest_report_xml).filename }}' artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         if: inputs.unittest_xml_artifact != ''
         continue-on-error: true
        with:
@@ -434,7 +465,7 @@ jobs:
       # - name: 📤 Upload 'Unit Tests HTML Report' artifact
       #   if: inputs.unittest_html_artifact != ''
       #   continue-on-error: true
-      #   uses: pyTooling/upload-artifact@v4
+      #   uses: pyTooling/upload-artifact@v6
       #   with:
       #     name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
       #     path: ${{ inputs.unittest_report_html_directory }}
@@ -444,7 +475,7 @@ jobs:
       - name: 📤 Upload 'Coverage SQLite Database' artifact
         if: inputs.coverage_sqlite_artifact != ''
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
           path: .coverage
@@ -455,7 +486,7 @@ jobs:
       - name: 📤 Upload 'Coverage XML Report' artifact
         if: inputs.coverage_xml_artifact != '' && steps.convert_xml.outcome == 'success'
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
           working-directory: ${{ fromJson(inputs.coverage_report_xml).directory }}
@@ -466,7 +497,7 @@ jobs:
       - name: 📤 Upload 'Coverage JSON Report' artifact
         if: inputs.coverage_json_artifact != '' && steps.convert_json.outcome == 'success'
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
           working-directory: ${{ fromJson(inputs.coverage_report_json).directory }}
@@ -477,7 +508,7 @@ jobs:
       - name: 📤 Upload 'Coverage HTML Report' artifact
         if: inputs.coverage_html_artifact != '' && steps.convert_html.outcome == 'success'
         continue-on-error: true
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
           working-directory: ${{ fromJson(inputs.coverage_report_html).directory }}

6  .github/workflows/VerifyDocs.yml  vendored
@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -33,7 +33,7 @@ on:
       python_version:
         description: 'Python version.'
         required: false
-        default: '3.13'
+        default: '3.14'
         type: string
 
 jobs:
@@ -44,7 +44,7 @@ jobs:
 
     steps:
       - name: ⏬ Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: 🐍 Setup Python
         uses: actions/setup-python@v6

@@ -9,7 +9,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.12 3.13"
+      python_version_list: "3.13 3.14" # py-1, py-0
       system_list: "ubuntu windows"
 
   Testing:
@@ -25,7 +25,7 @@ jobs:
         run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
 
       - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
           path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
         run: printf "%s\n" "Package" >> package.txt
 
       - name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
           path: package.txt

@@ -16,10 +16,9 @@ jobs:
       include:
         - {icon: '🐧', name: 'Ubuntu 22.04 (x86-64)', image: 'ubuntu-22.04', shell: 'bash', can-fail: false}
         - {icon: '🐧', name: 'Ubuntu 24.04 (x86-64)', image: 'ubuntu-24.04', shell: 'bash', can-fail: false} # latest
-        - {icon: '🍎', name: 'macOS-13 (x86-64)', image: 'macos-13', shell: 'bash', can-fail: false}
         - {icon: '🍎', name: 'macOS-14 (x86-64)', image: 'macos-14-large', shell: 'bash', can-fail: true } # not in free plan
-        - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # not in free plan
-        - {icon: '🍏', name: 'macOS-13 (aarch64)', image: 'macos-13-xlarge', shell: 'bash', can-fail: true } # not in free plan
+        ### - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-large', shell: 'bash', can-fail: true } # same as -intel; not in free plan
+        - {icon: '🍎', name: 'macOS-15 (x86-64)', image: 'macos-15-intel', shell: 'bash', can-fail: false}
         - {icon: '🍏', name: 'macOS-14 (aarch64)', image: 'macos-14', shell: 'bash', can-fail: false} # latest
         - {icon: '🍏', name: 'macOS-15 (aarch64)', image: 'macos-15', shell: 'bash', can-fail: false}
         - {icon: '🪟', name: 'Windows Server 2022', image: 'windows-2022', shell: 'bash', can-fail: false}

13  .github/workflows/_Checking_JobTemplates.yml  vendored
@@ -15,8 +15,8 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       package_name: 'myPackage'
-      python_version_list: '3.9 3.10 3.11 3.12 3.13 3.14 pypy-3.10 pypy-3.11'
-      disable_list: 'windows-arm:pypy-3.10 windows-arm:pypy-3.11'
+      python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
+      disable_list: 'windows-arm:pypy-3.11'
 
   PlatformTestingParams:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
@@ -88,7 +88,9 @@ jobs:
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
       package_directory: ${{ needs.UnitTestingParams.outputs.package_directory }}
-      artifact: CodeQuality
+      bandit: 'true'
+      pylint: 'true'
+      artifact: 'CodeQuality'
 
   DocCoverage:
     uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
@@ -218,9 +220,9 @@ jobs:
       - Prepare
       - UnitTesting
       - PlatformTesting
-      - Install
       # - StaticTypeCheck
       - Package
+      - Install
       - PublishToGitHubPages
     permissions:
       contents: write # required for create tag
@@ -236,9 +238,9 @@ jobs:
       - Prepare
       - UnitTesting
       - PlatformTesting
-      - Install
       # - StaticTypeCheck
       - Package
+      - Install
       - PublishToGitHubPages
     if: needs.Prepare.outputs.is_release_tag == 'true'
     permissions:
@@ -274,6 +276,7 @@ jobs:
       - PublishTestResults
       - PublishCoverageResults
       - PublishToGitHubPages
+      - Install
       - IntermediateCleanUp
     with:
       package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

@@ -8,11 +8,15 @@ jobs:
   NamespacePackage:
     uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
     with:
-      package_namespace: myFramework
-      package_name: Extension
-      codecov: true
-      codacy: true
-      dorny: true
+      package_namespace: 'myFramework'
+      package_name: 'Extension'
+      unittest_python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
+      bandit: 'true'
+      pylint: 'true'
+      codecov: 'true'
+      codacy: 'true'
+      dorny: 'true'
+      cleanup: 'false'
     secrets:
       PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

15  .github/workflows/_Checking_Nightly.yml  vendored
@@ -17,7 +17,7 @@ jobs:
           printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
 
       - name: 📤 Upload artifact
-        uses: pyTooling/upload-artifact@v4
+        uses: pyTooling/upload-artifact@v6
         with:
           name: document
           path: |
@@ -29,10 +29,11 @@ jobs:
       - name: 🖉 Program
         run: |
           printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
+          printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document2.txt
           printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
 
       - name: 📤 Upload artifact
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: other
           path: |
@@ -55,7 +56,7 @@ jobs:
             version=4.2.0
             tool=myTool
             prog=program
-          tag: 4.2.0
+          tag: v4.2.0
          title: "Nightly Test Release"
          description: |
            This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
@@ -63,10 +64,14 @@ jobs:
             # %tool% %version%
 
             * %prog%
 
+            # Attached files:
+
+            %%ASSETS%%
           assets: |
             document: document1.txt: Documentation
             document: build.log:     Logfile - %tool% - %tool%
-            other:    document1.txt: SBOM - %version%
+            other:    document2.txt: SBOM - %version%
             other:    %prog%.py:     Application - %tool% - %version%
             document:!archive1.zip:  Archive 1 - zip
             document:!archive2.tgz:  Archive 2 - tgz
@@ -108,7 +113,7 @@ jobs:
             # artifact: file:          labels:     asset title
             document:  document1.txt:  doc,html:   Documentation
             document:  build.log:      build,log:  Logfile - %tool% - %tool%
-            other:     document1.txt:  build,SBOM: SBOM - %version%
+            other:     document2.txt:  build,SBOM: SBOM - %version%
             other:     %prog%.py:      app,binary: Application - %tool% - %version%
             document:!archive1.zip:  Archive 1 - zip
             document:!archive2.tgz:  Archive 2 - tgz
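The asset lines above follow the "artifact: file: labels: title" pattern noted in the comment, apparently with a leading "!" marking files to be packed into an archive and %tool%/%version%/%prog% placeholders substituted from the variables block. A rough sketch of how one such line could be decomposed; the field handling here is an illustration under those assumptions, not the action's actual implementation:

import re

def parse_asset_line(line: str, variables: dict) -> dict:
    """Split an 'artifact: file: labels: title' asset specification (sketch only)."""
    # Substitute %name% placeholders such as %tool%, %version%, %prog%.
    line = re.sub(r"%(\w+)%", lambda m: variables.get(m.group(1), m.group(0)), line)
    artifact, _, rest = line.partition(":")
    file, _, rest = rest.lstrip().partition(":")
    labels, _, title = rest.lstrip().rpartition(":")  # the labels field is optional
    return {
        "artifact": artifact.strip(),
        "archive": file.startswith("!"),              # presumed meaning of the '!' prefix
        "file": file.lstrip("!").strip(),
        "labels": [l.strip() for l in labels.split(",") if l.strip()],
        "title": title.strip(),
    }

# Example: parse_asset_line("other: %prog%.py: app,binary:Application - %tool% - %version%",
#                           {"prog": "program", "tool": "myTool", "version": "4.2.0"})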
574  .github/workflows/_Checking_Parameters.yml  vendored
@@ -14,7 +14,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.12 3.13 pypy-3.10 pypy-3.11"
+      python_version_list: "3.12 3.13 pypy-3.10 pypy-3.11" # py-2, py-1, pypy-1, pypy-0
 
   Params_Systems:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
@@ -26,7 +26,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.12"
+      python_version_list: "3.12" # py-2
       system_list: "ubuntu windows macos macos-arm"
       include_list: "ubuntu:3.13 ubuntu:3.14 ubuntu-arm:3.12"
 
@@ -34,7 +34,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.13"
+      python_version_list: "3.13" # py-1
       system_list: "ubuntu windows macos macos-arm"
       exclude_list: "windows:3.13 windows:3.14"
 
@@ -42,7 +42,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.13"
+      python_version_list: "3.13" # py-1
       system_list: "ubuntu windows macos macos-arm"
       disable_list: "windows:3.13 windows:3.14"
 
@@ -50,7 +50,7 @@ jobs:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
     with:
       name: Example
-      python_version_list: "3.12 3.13"
+      python_version_list: "3.12 3.13" # py-2, py-1
       system_list: "ubuntu windows macos macos-arm"
       include_list: "windows:3.10 windows:3.11 windows:3.13"
       exclude_list: "macos:3.12 macos:3.13"
@@ -63,75 +63,25 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_Default'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
-          expectedSystems = ["ubuntu", "ubuntu-arm", "windows", "windows-arm", "macos", "macos-arm"]
-          excludedJobs = ["windows-arm:3.9", "windows-arm:3.10"]
-          includeJobs = ["mingw64:3.12", "ucrt64:3.12"]
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_Default.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_Default.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_Default.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_Default'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
+          expected-systems: '["ubuntu", "ubuntu-arm", "windows", "windows-arm", "macos", "macos-arm"]'
+          expected-exclude-jobs: '["windows-arm:3.10"]'
+          expected-include-jobs: '["mingw64:3.13", "ucrt64:3.13"]'
+          generated-default-version: ${{ needs.Params_Default.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_Default.outputs.python_jobs }}
+
+      - name: Checking artifact names from 'Params_Default'
+        uses: ./.github/actions/CheckArtifactNames
+        with:
+          prefix: 'Example'
+          generated-names: ${{ needs.Params_Default.outputs.artifact_names }}
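The inline check script removed above (and repeated below for the other parameter variants) derived its expected job list the same way in every block: the cross product of systems and Python versions, minus the excluded jobs, plus the explicitly included MSYS2 jobs. A standalone sketch of that computation, using the updated Params_Default expectations shown above:

# Expected-job computation as used by the removed inline checks (values from the Params_Default block).
expectedPythons = ["3.10", "3.11", "3.12", "3.13", "3.14"]
expectedSystems = ["ubuntu", "ubuntu-arm", "windows", "windows-arm", "macos", "macos-arm"]
excludedJobs = ["windows-arm:3.10"]
includeJobs = ["mingw64:3.13", "ucrt64:3.13"]

expectedJobs = [
    f"{system}:{python}"
    for system in expectedSystems
    for python in expectedPythons
    if f"{system}:{python}" not in excludedJobs
] + includeJobs

# 6 systems x 5 versions - 1 exclusion + 2 MSYS2 inclusions = 31 jobs
assert len(expectedJobs) == 31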
 
   Params_Check_PythonVersions:
     needs:
@@ -141,75 +91,19 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_PythonVersions'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.12", "3.13", "pypy-3.10", "pypy-3.11"]
-          expectedSystems = ["ubuntu", "ubuntu-arm", "windows", "windows-arm", "macos", "macos-arm"]
-          excludedJobs = ["windows-arm:pypy-3.10", "windows-arm:pypy-3.11"]
-          includeJobs = ["mingw64:3.12", "ucrt64:3.12"]
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_PythonVersions.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_PythonVersions.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_PythonVersions.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_PythonVersions'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.12", "3.13", "pypy-3.10", "pypy-3.11"]'
+          expected-systems: '["ubuntu", "ubuntu-arm", "windows", "windows-arm", "macos", "macos-arm"]'
+          expected-exclude-jobs: '["windows-arm:pypy-3.10", "windows-arm:pypy-3.11"]'
+          expected-include-jobs: '["mingw64:3.13", "ucrt64:3.13"]'
+          generated-default-version: ${{ needs.Params_PythonVersions.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_PythonVersions.outputs.python_jobs }}
 
   Params_Check_Systems:
     needs:
@@ -219,75 +113,19 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_Systems'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
-          expectedSystems = ["windows"]
-          excludedJobs = []
-          includeJobs = ["mingw64:3.12", "ucrt64:3.12"]
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_Systems.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_Systems.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_Systems.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_Systems'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.10", "3.11", "3.12", "3.13", "3.14"]'
+          expected-systems: '["windows"]'
+          expected-exclude-jobs: '[]'
+          expected-include-jobs: '["mingw32:3.13", "mingw64:3.13"]'
+          generated-default-version: ${{ needs.Params_Systems.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_Systems.outputs.python_jobs }}
 
   Params_Check_Include:
     needs:
@@ -297,75 +135,19 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_Include'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.12"]
-          expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
-          excludedJobs = []
-          includeJobs = ["ubuntu:3.13", "ubuntu:3.14", "ubuntu-arm:3.12"]
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_Include.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_Include.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_Include.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_Include'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.12"]'
+          expected-systems: '["ubuntu", "windows", "macos", "macos-arm"]'
+          expected-exclude-jobs: '[]'
+          expected-include-jobs: '["ubuntu:3.13", "ubuntu:3.14", "ubuntu-arm:3.12"]'
+          generated-default-version: ${{ needs.Params_Include.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_Include.outputs.python_jobs }}
 
   Params_Check_Exclude:
     needs:
@@ -375,75 +157,19 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_Exclude'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.13"]
-          expectedSystems = ["ubuntu", "macos", "macos-arm"]
-          excludedJobs = []
-          includeJobs = []
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_Exclude'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.13"]'
+          expected-systems: '["ubuntu", "macos", "macos-arm"]'
+          expected-exclude-jobs: '[]'
+          expected-include-jobs: '[]'
+          generated-default-version: ${{ needs.Params_Exclude.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_Exclude.outputs.python_jobs }}
 
   Params_Check_Disable:
     needs:
@@ -453,75 +179,19 @@ jobs:
       run:
         shell: python
     steps:
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_Disable'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.13"]
-          expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
-          excludedJobs = ["windows:3.13"]
-          includeJobs = []
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_Disable.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_Disable.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_Disable.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
-            print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
-            print("Actual jobs:")
-            for job in actualPythonJobs:
-              if job['system'] == "msys2":
-                print(f" {job['runtime'].lower()}:{job['python']}")
-              else:
-                print(f" {job['system']}:{job['python']}")
-            print("Expected jobs:")
-            for job in expectedJobs:
-              print(f" {job}")
-            errors += 1
-          if len(actualArtifactNames) != len(expectedArtifacts):
-            print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
-            errors += 1
-          else:
-            for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
-              if actual != expected:
-                print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
-                errors += 1
-
-          if errors == 0:
-            print(f"All checks PASSED.")
-          exit(errors)
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_Disable'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.13"]'
+          expected-systems: '["ubuntu", "windows", "macos", "macos-arm"]'
+          expected-exclude-jobs: '["windows:3.13"]'
+          expected-include-jobs: '[]'
+          generated-default-version: ${{ needs.Params_Disable.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_Disable.outputs.python_jobs }}
 
   Params_Check_All:
     needs:
@@ -531,72 +201,16 @@ jobs:
       run:
         shell: python
     steps:
+      - name: Checkout repository to access local Action
+        uses: actions/checkout@v6
+
+      - name: Checking job matrix from 'Params_All'
+        uses: ./.github/actions/CheckJobMatrix
+        with:
+          expected-default-version: '3.14'
+          expected-python-versions: '["3.12", "3.13"]'
+          expected-systems: '["ubuntu", "windows", "macos-arm"]'
+          expected-exclude-jobs: '[]'
+          expected-include-jobs: '["windows:3.10", "windows:3.11", "windows:3.13"]'
+          generated-default-version: ${{ needs.Params_All.outputs.python_version }}
+          generated-jobmatrix: ${{ needs.Params_All.outputs.python_jobs }}
-      - name: Install dependencies
-        shell: bash
-        run: pip install --disable-pip-version-check --break-system-packages pyTooling
-
-      - name: Checking results from 'Params_All'
-        run: |
-          from json import loads as json_loads
-          from sys import exit
-
-          from pyTooling.Common import zipdicts
-
-          expectedPythonVersion = "3.13"
-          expectedPythons = ["3.12", "3.13"]
-          expectedSystems = ["ubuntu", "macos-arm", "windows"]
-          excludedJobs = []
-          includeJobs = ["windows:3.10", "windows:3.11", "windows:3.13"]
-          expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons if f"{system}:{python}" not in excludedJobs] + includeJobs
-          expectedName = "Example"
-          expectedArtifacts = {
-            "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
-            "unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
-            "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
-            "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
-            "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
-            "codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
-            "codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
-            "codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
-            "codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
-            "statictyping_cobertura": f"{expectedName}-StaticTyping-Cobertura-XML",
-            "statictyping_junit": f"{expectedName}-StaticTyping-JUnit-XML",
-            "statictyping_html": f"{expectedName}-StaticTyping-HTML",
-            "package_all": f"{expectedName}-Packages",
-            "documentation_html": f"{expectedName}-Documentation-HTML",
-            "documentation_latex": f"{expectedName}-Documentation-LaTeX",
-            "documentation_pdf": f"{expectedName}-Documentation-PDF",
-          }
-
-          actualPythonVersion = """${{ needs.Params_All.outputs.python_version }}"""
-          actualPythonJobs = json_loads("""${{ needs.Params_All.outputs.python_jobs }}""".replace("'", '"'))
-          actualArtifactNames = json_loads("""${{ needs.Params_All.outputs.artifact_names }}""".replace("'", '"'))
-          errors = 0
-
-          if actualPythonVersion != expectedPythonVersion:
-            print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
-            errors += 1
-          if len(actualPythonJobs) != len(expectedJobs):
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
|
||||||
print("Actual jobs:")
|
|
||||||
for job in actualPythonJobs:
|
|
||||||
if job['system'] == "msys2":
|
|
||||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
|
||||||
else:
|
|
||||||
print(f" {job['system']}:{job['python']}")
|
|
||||||
print("Expected jobs:")
|
|
||||||
for job in expectedJobs:
|
|
||||||
print(f" {job}")
|
|
||||||
errors += 1
|
|
||||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
|
||||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
|
||||||
errors += 1
|
|
||||||
else:
|
|
||||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
|
||||||
if actual != expected:
|
|
||||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
|
||||||
errors += 1
|
|
||||||
|
|
||||||
if errors == 0:
|
|
||||||
print(f"All checks PASSED.")
|
|
||||||
exit(errors)
|
|
||||||
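The hunks above replace the two copies of the inline Python verification script with calls to a repository-local composite action. A minimal sketch of such a verification job is shown below; it assumes the `CheckJobMatrix` action accepts exactly the inputs visible in the diff, while the job name, the runner image and the `expected-default-version` value for the `Params_Disable` case are assumptions for illustration only:

```yml
  Params_Check_Disable:
    runs-on: ubuntu-24.04          # illustrative runner; not taken from the diff
    needs:
      - Params_Disable
    steps:
      # The repository must be checked out first, otherwise the local composite action is not available.
      - name: Checkout repository to access local Action
        uses: actions/checkout@v6

      - name: Checking job matrix from 'Params_Disable'
        uses: ./.github/actions/CheckJobMatrix
        with:
          expected-default-version: '3.13'   # assumed; the diff only shows this input for Params_All
          expected-python-versions:  '["3.13"]'
          expected-systems:          '["ubuntu", "windows", "macos", "macos-arm"]'
          expected-exclude-jobs:     '["windows:3.13"]'
          expected-include-jobs:     '[]'
          generated-default-version: ${{ needs.Params_Disable.outputs.python_version }}
          generated-jobmatrix:       ${{ needs.Params_Disable.outputs.python_jobs }}
```

Moving the comparison logic into one composite action removes the duplicated inline script from every `Params_Check_*` job and keeps only the expected values in the workflow.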
@@ -8,11 +8,14 @@ jobs:
  SimplePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
-      package_name: myPackage
-      codecov: true
-      codacy: true
-      dorny: true
-      cleanup: false
+      package_name: 'myPackage'
+      unittest_python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
+      bandit:  'true'
+      pylint:  'true'
+      codecov: 'true'
+      codacy:  'true'
+      dorny:   'true'
+      cleanup: 'false'
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
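Consolidated, the updated example passes all switch-like inputs as quoted strings and adds an explicit unit-test Python version list. A sketch of the complete caller workflow, where the `name:` and `on:` blocks are assumptions for illustration (they are not part of the diff):

```yml
name: Pipeline

on:
  push:             # assumed trigger
  workflow_dispatch:

jobs:
  SimplePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_name: 'myPackage'
      unittest_python_version_list: '3.11 3.12 3.13 3.14 pypy-3.11'
      bandit:  'true'
      pylint:  'true'
      codecov: 'true'
      codacy:  'true'
      dorny:   'true'
      cleanup: 'false'
    secrets:
      PYPI_TOKEN:    ${{ secrets.PYPI_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
```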
2
.idea/Actions.iml
generated
@@ -8,7 +8,7 @@
      <excludeFolder url="file://$MODULE_DIR$/doc/_build" />
      <excludeFolder url="file://$MODULE_DIR$/report" />
    </content>
-    <orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="Python 3.14" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
15
README.md
@@ -75,8 +75,6 @@ As shown in the screenshots above, the expected order is:

  [**PublishCoverageResults**](.github/workflows/PublishCoverageResults.yml): publish code coverage results.

-  [**NightlyRelease**](.github/workflows/NightlyRelease.yml): publish GitHub Release.
-
  [**PublishReleaseNotes**](.github/workflows/PublishReleaseNotes.yml): publish GitHub Release.
- **Documentation:**
  [**SphinxDocumentation**](.github/workflows/PublishCoverageResults.yml): create HTML and LaTeX documentation using
@@ -90,12 +88,11 @@ As shown in the screenshots above, the expected order is:
  [**IntermediateCleanUp**](.github/workflows/IntermediateCleanUp.yml): delete intermediate artifacts.

  [**ArtifactCleanUp**](.github/workflows/ArtifactCleanUp.yml): delete artifacts.
-- **⚠ Deprecated ⚠:**
-  [**CoverageCollection**](.github/workflows/CoverageCollection.yml): Use `UnitTesting`, because it can collect code
-  coverage too. This avoids code duplication in job templates.
+- **Removed:**
+  ❌ **NightlyRelease**: Use `PublishReleaseNotes`, because it's more advanced and not limited to nightly releases.
+  ❌ **CoverageCollection**: Use `UnitTesting`, because it can collect code coverage too.

-  [**BuildTheDocs**](.github/workflows/BuildTheDocs.yml): Use `SphinxDocumentation`, `LaTeXDocumentation` and
-  `PublishToGitHubPages`. BuildTheDocs isn't maintained anymore.
+  ❌ **BuildTheDocs**: Use `SphinxDocumentation`, `LaTeXDocumentation` and `PublishToGitHubPages`.


 ### Example pipeline
@@ -116,8 +113,8 @@ Find further usage cases in the following list of projects:

 ## Contributors

-* [Patrick Lehmann](https://GitHub.com/Paebbels)
-* [Unai Martinez-Corral](https://GitHub.com/umarcor) (Maintainer)
+* [Patrick Lehmann](https://GitHub.com/Paebbels) (Maintainer)
+* [Unai Martinez-Corral](https://GitHub.com/umarcor)
 * [and more...](https://GitHub.com/pyTooling/Actions/graphs/contributors)
4
dist/requirements.txt
vendored
@@ -1,2 +1,2 @@
-wheel ~= 0.45
-twine ~= 6.1
+wheel ~= 0.45.0
+twine ~= 6.2
@@ -124,7 +124,6 @@ It can be used for simple Python packages as well as namespace packages.
 * :gh:`actions/setup-python`

 * :pypi:`wheel`
-* :pypi:`tomli`

 * :ref:`pyTooling/Actions/.github/workflows/UnitTesting.yml <JOBTMPL/UnitTesting>`

@@ -145,7 +144,6 @@ It can be used for simple Python packages as well as namespace packages.
 * pip

 * :pypi:`wheel`
-* :pypi:`tomli`
 * Python packages specified via :ref:`JOBTMPL/UnitTesting/Input/requirements` or
   :ref:`JOBTMPL/UnitTesting/Input/mingw_requirements` parameter.

@@ -203,7 +201,6 @@ It can be used for simple Python packages as well as namespace packages.
 * pip

 * :pypi:`coverage`
-* :pypi:`tomli`

 * :gh:`pyTooling/upload-artifact`

@@ -375,9 +372,9 @@ Parameter Summary
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
 | :ref:`JOBTMPL/CompletePipeline/Input/package_name` | yes | string | — — — — |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
-| :ref:`JOBTMPL/CompletePipeline/Input/unittest_python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/CompletePipeline/Input/unittest_python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
-| :ref:`JOBTMPL/CompletePipeline/Input/unittest_python_version_list` | no | string | ``'3.9 3.10 3.11 3.12 3.13'`` |
+| :ref:`JOBTMPL/CompletePipeline/Input/unittest_python_version_list` | no | string | ``'3.10 3.11 3.12 3.13 3.14'`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
 | :ref:`JOBTMPL/CompletePipeline/Input/unittest_system_list` | no | string | ``'ubuntu windows macos macos-arm ucrt64'`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
@@ -387,7 +384,7 @@ Parameter Summary
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
 | :ref:`JOBTMPL/CompletePipeline/Input/unittest_disable_list` | no | string | ``'windows-arm:pypy-3.10 windows-arm:pypy-3.11'`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
-| :ref:`JOBTMPL/CompletePipeline/Input/apptest_python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/CompletePipeline/Input/apptest_python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
 | :ref:`JOBTMPL/CompletePipeline/Input/apptest_python_version_list` | no | string | ``''`` |
 +---------------------------------------------------------------------+----------+----------+---------------------------------------------------+
@@ -532,7 +529,7 @@ unittest_python_version

 :Type: string
 :Required: no
-:Default Value: ``'3.13'``
+:Default Value: ``'3.14'``
 :Possible Values: Any valid Python version conforming to the pattern ``<major>.<minor>`` or ``pypy-<major>.<minor>``. |br|
   See `actions/python-versions - available Python versions <https://github.com/actions/python-versions>`__
   and `actions/setup-python - configurable Python versions <https://github.com/actions/setup-python>`__.
@@ -550,7 +547,7 @@ unittest_python_version_list

 :Type: string
 :Required: no
-:Default Value: ``'3.9 3.10 3.11 3.12 3.13'``
+:Default Value: ``'3.10 3.11 3.12 3.13 3.14'``
 :Possible Values: A space separated list of valid Python versions conforming to the pattern ``<major>.<minor>`` or
   ``pypy-<major>.<minor>``.
 :Description: The list of space-separated Python versions used for unit testing.
@@ -625,7 +622,7 @@ apptest_python_version

 :Type: string
 :Required: no
-:Default Value: ``'3.13'``
+:Default Value: ``'3.14'``
 :Possible Values: Any valid Python version conforming to the pattern ``<major>.<minor>`` or ``pypy-<major>.<minor>``. |br|
   See `actions/python-versions - available Python versions <https://github.com/actions/python-versions>`__
   and `actions/setup-python - configurable Python versions <https://github.com/actions/setup-python>`__.
@@ -1,10 +0,0 @@
.. _JOBTMPL/BuildTheDocs:

BuildTheDocs
############

.. attention::

   The ``BuildTheDocs`` job template is deprecated.

   See :ref:`JOBTMPL/SphinxDocumentation` and :ref:`JOBTMPL/LaTeXDocumentation`.
@@ -1,10 +0,0 @@
.. _JOBTMPL/CoverageCollection:

CoverageCollection
##################

.. attention::

   The ``CoverageCollection`` job template is deprecated.

   See :ref:`JOBTMPL/UnitTesting` and :ref:`JOBTMPL/PublishCoverageResults`.
@@ -1,10 +0,0 @@
.. _JOBTMPL/NightlyRelease:

NightlyRelease
##############

.. attention::

   The ``NightlyRelease`` job template is deprecated.

   See :ref:`JOBTMPL/PublishReleaseNotes`.
@@ -5,16 +5,14 @@ Deprecated

 The category *deprecated* collects outdated job templates:

-:ref:`JOBTMPL/CoverageCollection`
+CoverageCollection
    replaced by :ref:`JOBTMPL/UnitTesting`
-:ref:`JOBTMPL/NightlyRelease`
+NightlyRelease
    replaced by :ref:`JOBTMPL/PublishReleaseNotes`
-:ref:`JOBTMPL/BuildTheDocs`
+BuildTheDocs
    replaced by :ref:`JOBTMPL/SphinxDocumentation` and :ref:`JOBTMPL/LaTeXDocumentation`

-.. toctree::
+.. #toctree::
    :hidden:

-   CoverageCollection
    NightlyRelease
-   BuildTheDocs
@@ -98,7 +98,7 @@ Parameter Summary
 +=========================================================================+==========+================+===================================================================+
 | :ref:`JOBTMPL/SphinxDocumentation/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +-------------------------------------------------------------------------+----------+----------------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/SphinxDocumentation/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/SphinxDocumentation/Input/python_version` | no | string | ``'3.14'`` |
 +-------------------------------------------------------------------------+----------+----------------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/SphinxDocumentation/Input/requirements` | no | string | ``'-r doc/requirements.txt'`` |
 +-------------------------------------------------------------------------+----------+----------------+-------------------------------------------------------------------+
@@ -91,7 +91,7 @@ Parameter Summary
 +=====================================================================+==========+==========+===================================================================+
 | :ref:`JOBTMPL/Package/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/Package/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/Package/Input/python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/Package/Input/requirements` | no | string | ``''`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
@@ -116,7 +116,7 @@ Parameter Summary
 +=====================================================================+==========+==========+===================================================================+
 | :ref:`JOBTMPL/PublishOnPyPI/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/PublishOnPyPI/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/PublishOnPyPI/Input/python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/PublishOnPyPI/Input/requirements` | no | string | ``'wheel twine'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
@@ -56,7 +56,6 @@ cloud services like :term:`CodeCov` or :term:`Codacy`.
 * pip

 * :pypi:`coverage`
-* :pypi:`tomli`

 * :gh:`pyTooling/upload-artifact`

@@ -78,7 +78,7 @@ Parameter Summary
 +=========================================================================+==========+==========+===================================================================+
 | :ref:`JOBTMPL/CheckDocumentation/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +-------------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/CheckDocumentation/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/CheckDocumentation/Input/python_version` | no | string | ``'3.14'`` |
 +-------------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/CheckDocumentation/Input/directory` | yes | string | — — — — |
 +-------------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
@@ -123,7 +123,7 @@ Parameter Summary
 +=====================================================================+==========+================+==========================================================================================================================================+
 | :ref:`JOBTMPL/StaticTypeCheck/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +---------------------------------------------------------------------+----------+----------------+------------------------------------------------------------------------------------------------------------------------------------------+
-| :ref:`JOBTMPL/StaticTypeCheck/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/StaticTypeCheck/Input/python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------------+------------------------------------------------------------------------------------------------------------------------------------------+
 | :ref:`JOBTMPL/StaticTypeCheck/Input/requirements` | no | string | ``'-r tests/requirements.txt'`` |
 +---------------------------------------------------------------------+----------+----------------+------------------------------------------------------------------------------------------------------------------------------------------+
@@ -49,7 +49,6 @@ duplications within jobs.
 * :gh:`actions/setup-python`

 * :pypi:`wheel`
-* :pypi:`tomli`


 .. _JOBTMPL/ExtractConfiguration/Instantiation:
@@ -107,7 +106,7 @@ Parameter Summary
 +=====================================================================+==========+==========+===================================================================+
 | :ref:`JOBTMPL/ExtractConfiguration/Input/ubuntu_image_version` | no | string | ``'24.04'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/ExtractConfiguration/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/ExtractConfiguration/Input/python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/ExtractConfiguration/Input/coverage_config` | no | string | ``'pyproject.toml'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
@@ -169,9 +169,9 @@ Parameter Summary
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/Parameters/Input/package_name` | no | string | ``''`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/Parameters/Input/python_version` | no | string | ``'3.13'`` |
+| :ref:`JOBTMPL/Parameters/Input/python_version` | no | string | ``'3.14'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
-| :ref:`JOBTMPL/Parameters/Input/python_version_list` | no | string | ``'3.9 3.10 3.11 3.12 3.13'`` |
+| :ref:`JOBTMPL/Parameters/Input/python_version_list` | no | string | ``'3.10 3.11 3.12 3.13 3.14'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
 | :ref:`JOBTMPL/Parameters/Input/system_list` | no | string | ``'ubuntu windows macos macos-arm mingw64 ucrt64'`` |
 +---------------------------------------------------------------------+----------+----------+-------------------------------------------------------------------+
@@ -348,7 +348,7 @@ python_version

 :Type: string
 :Required: no
-:Default Value: ``'3.13'``
+:Default Value: ``'3.14'``
 :Possible Values: Any valid Python version conforming to the pattern ``<major>.<minor>`` or ``pypy-<major>.<minor>``. |br|
   See `actions/python-versions - available Python versions <https://github.com/actions/python-versions>`__
   and `actions/setup-python - configurable Python versions <https://github.com/actions/setup-python>`__.
@@ -364,7 +364,7 @@ python_version_list

 :Type: string
 :Required: no
-:Default Value: ``'3.9 3.10 3.11 3.12 3.13'``
+:Default Value: ``'3.10 3.11 3.12 3.13 3.14'``
 :Possible Values: A space separated list of valid Python versions conforming to the pattern ``<major>.<minor>`` or
   ``pypy-<major>.<minor>``. |br|
   See `actions/python-versions - available Python versions <https://github.com/actions/python-versions>`__
@@ -563,7 +563,7 @@ python_version
 ==============

 :Type: string
-:Default Value: ``'3.13'``
+:Default Value: ``'3.14'``
 :Possible Values: Any valid Python version conforming to the pattern ``<major>.<minor>`` or ``pypy-<major>.<minor>``.
 :Description: Returns

@@ -67,7 +67,7 @@
 * :ref:`JOBTMPL/IntermediateCleanup`
 * :ref:`JOBTMPL/ArtifactCleanup`

-   .. grid-item::
+   .. #grid-item::
       :columns: 2

       .. rubric:: :ref:`JOBTMPL/Deprecated`
@@ -60,7 +60,6 @@ Configuration options to :term:`pytest` should be given via section ``[tool.pyte
 * pip

 * :pypi:`wheel`
-* :pypi:`tomli`
 * Python packages specified via :ref:`JOBTMPL/UnitTesting/Input/requirements` or
   :ref:`JOBTMPL/UnitTesting/Input/mingw_requirements` parameter.

@@ -3,7 +3,7 @@ python_version

 :Type: string
 :Required: no
-:Default Value: ``'3.13'``
+:Default Value: ``'3.14'``
 :Possible Values: Any valid Python version conforming to the pattern ``<major>.<minor>`` or ``pypy-<major>.<minor>``. |br|
   See `actions/python-versions - available Python versions <https://github.com/actions/python-versions>`__
   and `actions/setup-python - configurable Python versions <https://github.com/actions/setup-python>`__.
@@ -164,7 +164,7 @@ Example Pipelines
 .. code-block:: toml

    [build-system]
-   requires = ["setuptools >= 80.0", "wheel ~= 0.45", "pyTooling ~= 8.5"]
+   requires = ["setuptools >= 80.0", "wheel ~= 0.45.0", "pyTooling ~= 8.10"]
    build-backend = "setuptools.build_meta"

    [tool.mypy]
@@ -232,8 +232,8 @@ References
 Contributors
 ************

-* `Patrick Lehmann <https://GitHub.com/Paebbels>`__
-* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__ (Maintainer)
+* `Patrick Lehmann <https://GitHub.com/Paebbels>`__ (Maintainer)
+* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__
 * `and more... <https://GitHub.com/pyTooling/Actions/graphs/contributors>`__

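For orientation, the parameter tables above translate directly into a `with:` block when a job template is instantiated. The sketch below shows this for the Parameters template; the file name `Parameters.yml` and the exact input names are assumptions inferred from the documented parameter references, not confirmed by the diff:

```yml
jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main   # assumed template path
    with:
      package_name:        'myPackage'
      python_version:      '3.14'                                   # new documented default
      python_version_list: '3.10 3.11 3.12 3.13 3.14'
      system_list:         'ubuntu windows macos macos-arm mingw64 ucrt64'
```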
@@ -1,19 +1,19 @@
 -r ../requirements.txt

-pyTooling ~= 8.5
+pyTooling ~= 8.10

 # Enforce latest version on ReadTheDocs
 sphinx ~= 8.2
-docutils ~= 0.21
+docutils ~= 0.21.0
 docutils_stubs ~= 0.0.22

 # ReadTheDocs Theme
 sphinx_rtd_theme ~= 3.0

 # Sphinx Extensions
-sphinxcontrib-mermaid ~= 1.0
-sphinx_design ~= 0.6.1
-sphinx-copybutton >= 0.5.2
-sphinx_autodoc_typehints ~= 3.2
-sphinx_reports ~= 0.9
+sphinxcontrib-mermaid ~= 1.2
+sphinx_design ~= 0.6.0
+sphinx-copybutton >= 0.5.0
+sphinx_autodoc_typehints ~= 3.5   # 3.6 is conflicting with old sphinx_design and rtd theme due to sphinx<9 and docutils<0.22
+sphinx_reports ~= 0.9.0
 autoapi >= 2.0.1
@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -34,7 +34,7 @@ A module for a set of dummy classes.

 __author__ = "Patrick Lehmann"
 __email__ = "Paebbels@gmail.com"
-__copyright__ = "2017-2025, Patrick Lehmann"
+__copyright__ = "2017-2026, Patrick Lehmann"
 __license__ = "Apache License, Version 2.0"
 __version__ = "0.14.8"
 __keywords__ = ["GitHub Actions"]
@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -34,12 +34,13 @@ A module for a set of dummy classes.

 __author__ = "Patrick Lehmann"
 __email__ = "Paebbels@gmail.com"
-__copyright__ = "2017-2025, Patrick Lehmann"
+__copyright__ = "2017-2026, Patrick Lehmann"
 __license__ = "Apache License, Version 2.0"
-__version__ = "0.4.5"
+__version__ = "7.3.0"
 __keywords__ = ["GitHub Actions"]
 __issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"

+from pickle import dumps
 from subprocess import check_call

 from pyTooling.Decorators import export, readonly
@@ -1,14 +1,15 @@
 [build-system]
 requires = [
 	"setuptools >= 80.0",
-	"wheel ~= 0.45",
-	"pyTooling ~= 8.5"
+	"wheel ~= 0.45.0",
+	"pyTooling ~= 8.10"
 ]
 build-backend = "setuptools.build_meta"

 [tool.pylint.format]
 indent-string="\t"
 max-line-length = 120
+ignore-long-lines = "^.{0,110}#: .*"

 [tool.pylint.basic]
 argument-naming-style = "camelCase"
@@ -37,23 +38,21 @@ junit_xml = "report/typing/StaticTypingSummary.xml"
 cobertura_xml_report = "report/typing"

 [tool.pytest]
-junit_xml = "report/unit/UnittestReportSummary.xml"
-
-[tool.pyedaa-reports]
-junit_xml = "report/unit/unittest.xml"
-
-[tool.pytest.ini_options]
-addopts = "--tb=native"
+addopts = ["--tb=native"]

 # Don't set 'python_classes = *', otherwise pytest doesn't search for classes
 # derived from unittest.TestCase
-python_files = "*"
-python_functions = "test_*"
+python_files = ["*"]
+python_functions = ["test_*"]
 filterwarnings = [
 	"error::DeprecationWarning",
 	"error::PendingDeprecationWarning"
 ]
+junit_xml = "report/unit/UnittestReportSummary.xml"
 junit_logging = "all"

+[tool.pyedaa-reports]
+junit_xml = "report/unit/unittest.xml"

 [tool.interrogate]
 color = true
 verbose = 1   # possible values: 0 (minimal output), 1 (-v), 2 (-vv)
@@ -1,8 +0,0 @@
# Releaser Development

- [pyTooling/pyAttributes](https://github.com/pyTooling/pyAttributes) or
  [willmcgugan/rich](https://github.com/willmcgugan/rich) might be used to enhance the UX.

- It might be desirable to have pyTooling.Version.SemVersion handle the regular expression from
  [semver.org](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string), and use
  proper Python classes in **Releaser**.
@@ -1,12 +0,0 @@
FROM python:3.12-slim-bookworm
COPY releaser.py /releaser.py
RUN pip install PyGithub --progress-bar off \
 && apt update -qq \
 && apt install -y curl \
 && curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | \
    dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
 && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | \
    tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
 && apt update -qq \
 && apt install -y gh
CMD ["/releaser.py"]
@@ -1,181 +0,0 @@
# Releaser

**Releaser** is a Docker GitHub Action written in Python.

**Releaser** keeps a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
Combined with a workflow that is executed periodically, **Releaser** provides a fixed release name for users willing
to use daily/nightly artifacts of a project.

Furthermore, when any [semver](https://semver.org) compliant tagged commit is pushed, **Releaser** can create a release
and upload assets.

## Context

GitHub provides official clients for the GitHub API through [github.com/octokit](https://github.com/octokit):

- [octokit.js](https://github.com/octokit/octokit.js) ([octokit.github.io/rest.js](https://octokit.github.io/rest.js))
- [octokit.rb](https://github.com/octokit/octokit.rb) ([octokit.github.io/octokit.rb](http://octokit.github.io/octokit.rb))
- [octokit.net](https://github.com/octokit/octokit.net) ([octokitnet.rtfd.io](https://octokitnet.rtfd.io))

When GitHub Actions was released in 2019, two Actions were made available through
[github.com/actions](https://github.com/actions) for dealing with GitHub Releases:

- [actions/create-release](https://github.com/actions/create-release)
- [actions/upload-release-asset](https://github.com/actions/upload-release-asset)

However, those Actions were contributed by an employee in spare time and were not officially supported by GitHub.
Therefore, they were unmaintained before GitHub Actions was out of the private beta
(see [actions/upload-release-asset#58](https://github.com/actions/upload-release-asset/issues/58))
and, a year later, archived.
Those Actions are based on [actions/toolkit](https://github.com/actions/toolkit)'s hydrated version of octokit.js.

From a practical point of view, [actions/github-script](https://github.com/actions/github-script) is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
Still, it requires writing plain JavaScript.

Alternatively, there are non-official GitHub API libraries available in other languages (see [docs.github.com: rest/overview/libraries](https://docs.github.com/en/rest/overview/libraries)).
**Releaser** is based on [PyGithub/PyGithub](https://github.com/PyGithub/PyGithub), a Python client for the GitHub API.

**Releaser** was originally created in [eine/tip](https://github.com/eine/tip), as an enhanced alternative to using
`actions/create-release` and `actions/upload-release-asset`, in order to cover certain use cases that were being
migrated from Travis CI to GitHub Actions.
The main limitations of GitHub's Actions were/are verbosity and the inability to dynamically define the list of assets
to be uploaded.

On the other hand, GitHub Actions artifacts do require login in order to download them.
Conversely, assets of GitHub Releases can be downloaded without login.
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
as assets.
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
Thus, the name of the Action.

GitHub provides an official CLI tool, written in golang: [cli/cli](https://github.com/cli/cli).
When the Python version of **Releaser** was written, `cli` was evaluated as an alternative to *PyGitHub*.
`gh release` was (and still is) not flexible enough to update the reference of a release without deleting and
recreating it (see [cli.github.com: manual/gh_release_create](https://cli.github.com/manual/gh_release_create)).
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
(see [eine/tip#111](https://github.com/eine/tip/issues/111)).
However, [cli.github.com: manual/gh_release_upload](https://cli.github.com/manual/gh_release_upload) handles uploading
artifacts as assets faster and with better stability for larger files than *PyGitHub*
(see [msys2/msys2-installer#36](https://github.com/msys2/msys2-installer/pull/36)).
Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
Although `gh` does not support login through SSH (see [cli/cli#3715](https://github.com/cli/cli/issues/3715)), on GitHub
Actions a token is available as `${{ github.token }}`.
Therefore, **Releaser** uses `gh release upload` internally.

## Usage

The following block shows a minimal YAML workflow file:

```yml
name: 'workflow'

on:
  schedule:
    - cron: '0 0 * * 5'

jobs:
  mwe:
    runs-on: ubuntu-24.04
    steps:

      # Clone repository
      - uses: actions/checkout@v5

      # Build your application, tool, artifacts, etc.
      - name: Build
        run: |
          echo "Build some tool and generate some artifacts" > artifact.txt

      # Update tag and pre-release
      # - Update (force-push) tag to the commit that is used in the workflow.
      # - Upload artifacts defined by the user.
      - uses: pyTooling/Actions/releaser@r0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          files: |
            artifact.txt
            README.md
```

### Composite Action

The default implementation of **Releaser** is a Container Action.
Therefore, a pre-built container image is pulled before starting the job.
Alternatively, a Composite Action version is available: `uses: pyTooling/Actions/releaser/composite@main`.
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
users to tweak the version of Python by using [actions/setup-python](https://github.com/actions/setup-python) before.
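A minimal sketch of combining the Composite variant with `actions/setup-python`; the Python version and file name chosen here are only examples:

```yml
      - uses: actions/setup-python@v5
        with:
          python-version: '3.13'   # example version; pick whatever the project needs

      - uses: pyTooling/Actions/releaser/composite@main
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          files: artifact.txt
```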
## Options

All options can optionally be provided as environment variables: `INPUT_TOKEN`, `INPUT_FILES`, `INPUT_TAG`, `INPUT_RM`
and/or `INPUT_SNAPSHOTS`.

### token (required)

Token to make authenticated API calls; can be passed in using `{{ secrets.GITHUB_TOKEN }}`.

### files (required)

Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
hierarchy.

For creating/updating a release without uploading assets, set `files: none`.

### tag

The default tag name for the tip/nightly pre-release is `tip`, but it can optionally be overridden through option `tag`.

### rm

Set option `rm` to `true` to systematically remove previous artifacts (e.g. old versions).
Otherwise (by default), all previous artifacts are preserved or overwritten.

Note:
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
users until the workflow is successfully run.
For instance, Action [setup-ghdl-ci](https://github.com/ghdl/setup-ghdl-ci) uses assets from [ghdl/ghdl: releases/tag/nightly](https://github.com/ghdl/ghdl/releases/tag/nightly).
Hence, it is recommended to try removing only the conflicting assets, in order to maximise availability.

### snapshots

Whether to create releases from any tag or to treat some as snapshots.
By default, all the tags with a non-empty `prerelease` field (see [semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string?](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string))
are considered snapshots; neither a release is created nor assets are uploaded.
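Putting the options together, a step that pins a custom tag, always removes previous assets, and creates releases from any tag might look as follows; all values are illustrative:

```yml
      - uses: pyTooling/Actions/releaser@r0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tag: nightly        # override the default 'tip' tag
          rm: true            # remove previous artifacts instead of only overwriting same-named ones
          snapshots: false    # create releases even for tags with a pre-release field
          files: |
            artifact.txt
            README.md
```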
## Advanced/complex use cases

**Releaser** is essentially a very thin wrapper that combines the GitHub Actions context data with the classes and
methods of PyGithub.

Similarly to [actions/github-script](https://github.com/actions/github-script), users with advanced/complex requirements
might find it desirable to write their own Python script instead of using **Releaser**.
In fact, since `shell: python` is supported in GitHub Actions, using Python does *not* require any Action.
For prototyping purposes, the following job might be useful:

```yml
Release:
  name: '📦 Release'
  runs-on: ubuntu-24.04
  needs:
    - ...
  if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
  steps:

  - uses: actions/download-artifact@v3

  - shell: bash
    run: pip install PyGithub --progress-bar off

  - name: Set list of files for uploading
    id: files
    shell: python
    run: |
      from github import Github
      print("· Get GitHub API handler (authenticate)")
      gh = Github('${{ github.token }}')
      print("· Get Repository handler")
      gh_repo = gh.get_repo('${{ github.repository }}')
```
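
Building on that prototype, the same `shell: python` step could go on to fetch the release for a given tag and upload
assets through PyGithub. A minimal sketch, where the tag name and the glob pattern are placeholders (note that
`upload_asset` fails if an asset with the same name already exists):

```python
from pathlib import Path
from github import Github

gh = Github('${{ github.token }}')
gh_repo = gh.get_repo('${{ github.repository }}')

print("· Get Release handler")
gh_release = gh_repo.get_release("tip")        # placeholder tag; raises if the release does not exist

print("· Upload assets")
for file in Path(".").glob("artifact*.txt"):   # placeholder pattern
    print(f" - {file}")
    gh_release.upload_asset(str(file))         # fails if an asset with the same name already exists
```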

Find a non-trivial use case at [msys2/msys2-autobuild](https://github.com/msys2/msys2-autobuild).

@@ -1,45 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
  token:
    description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
    required: true
  files:
    description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
    required: true
  tag:
    description: 'Name of the tag that corresponds to the tip/nightly pre-release'
    required: false
    default: tip
  rm:
    description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
    required: false
    default: false
  snapshots:
    description: 'Whether to create releases from any tag or to treat some as snapshots'
    required: false
    default: true
runs:
  using: 'docker'
  image: 'docker://ghcr.io/pytooling/releaser'

@@ -1,59 +0,0 @@
# ==================================================================================================================== #
# Authors: #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: 'Releaser'
description: 'Publish releases, upload assets and update tip/nightly tags'
inputs:
  token:
    description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
    required: true
  files:
    description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
    required: true
  tag:
    description: 'Name of the tag that corresponds to the tip/nightly pre-release'
    required: false
    default: tip
  rm:
    description: 'Whether to delete all the previous artifacts, or only replacing the ones with the same name'
    required: false
    default: false
  snapshots:
    description: 'Whether to create releases from any tag or to treat some as snapshots'
    required: false
    default: true
runs:
  using: 'composite'
  steps:

    - shell: bash
      run: |
        [ "$(source /etc/os-release && echo $VERSION_ID)" == "24.04" ] && UBUNTU_2404_ARGS='--break-system-packages' || unset UBUNTU_2404_ARGS
        pip install --disable-pip-version-check --progress-bar off $UBUNTU_2404_ARGS PyGithub

    - shell: bash
      run: '''${{ github.action_path }}/../releaser.py'''
      env:
        INPUT_TOKEN: ${{ inputs.token }}
        INPUT_FILES: ${{ inputs.files }}
        INPUT_TAG: ${{ inputs.tag }}
        INPUT_RM: ${{ inputs.rm }}
        INPUT_SNAPSHOTS: ${{ inputs.snapshots }}

@@ -1,2 +0,0 @@
[tool.black]
line-length = 120

@@ -1,193 +0,0 @@
#!/usr/bin/env python3
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2025 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
import re
from sys import argv as sys_argv, stdout, exit as sys_exit
from os import environ, getenv
from glob import glob
from pathlib import Path
from github import Github, GithubException
from subprocess import check_call


paramTag = getenv("INPUT_TAG", "tip")
paramFiles = getenv("INPUT_FILES", None).split()
paramRM = getenv("INPUT_RM", "false") == "true"
paramSnapshots = getenv("INPUT_SNAPSHOTS", "true").lower() == "true"
paramToken = (
    environ["GITHUB_TOKEN"]
    if "GITHUB_TOKEN" in environ
    else environ["INPUT_TOKEN"]
    if "INPUT_TOKEN" in environ
    else None
)
paramRepo = getenv("GITHUB_REPOSITORY", None)
paramRef = getenv("GITHUB_REF", None)
paramSHA = getenv("GITHUB_SHA", None)


def GetListOfArtifacts(argv, files):
    print("· Get list of artifacts to be uploaded")
    args = files if files is not None else []
    if len(argv) > 1:
        args += argv[1:]
    if len(args) == 1 and args[0].lower() == "none":
        print("! Skipping 'files' because it's set to 'none'.")
        return []
    elif len(args) == 0:
        stdout.flush()
        raise (Exception("Glob patterns need to be provided as positional arguments or through envvar 'INPUT_FILES'!"))
    else:
        flist = []
        for item in args:
            print(f" glob({item!s}):")
            for fname in [fname for fname in glob(item, recursive=True) if not Path(fname).is_dir()]:
                if Path(fname).stat().st_size == 0:
                    print(f" - ! Skipping empty file {fname!s}.")
                    continue
                print(f" - {fname!s}")
                flist.append(fname)
        if len(flist) < 1:
            stdout.flush()
            raise (Exception("Empty list of files to upload/update!"))
        return sorted(flist)


def GetGitHubAPIHandler(token):
    print("· Get GitHub API handler (authenticate)")
    if token is not None:
        return Github(token)
    raise (Exception("Need credentials to authenticate! Please, provide 'GITHUB_TOKEN' or 'INPUT_TOKEN'"))


def CheckRefSemVer(gh_ref, tag, snapshots):
    print("· Check SemVer compliance of the reference/tag")
    env_tag = None
    if gh_ref[0:10] == "refs/tags/":
        env_tag = gh_ref[10:]
        if env_tag != tag:
            rexp = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
            semver = re.search(rexp, env_tag)
            if semver == None and env_tag[0] == "v":
                semver = re.search(rexp, env_tag[1:])
            tag = env_tag
            if semver == None:
                print(f"! Could not get semver from {gh_ref!s}")
                print(f"! Treat tag '{tag!s}' as a release")
                return (tag, env_tag, False)
            else:
                if semver.group("prerelease") is None:
                    # is a regular semver compilant tag
                    return (tag, env_tag, False)
                elif snapshots:
                    # is semver compilant prerelease tag, thus a snapshot (we skip it)
                    print("! Skipping snapshot prerelease.")
                    sys_exit()

    return (tag, env_tag, True)


def GetRepositoryHandler(gh, repo):
    print("· Get Repository handler")
    if repo is None:
        stdout.flush()
        raise (Exception("Repository name not defined! Please set 'GITHUB_REPOSITORY"))
    return gh.get_repo(repo)


def GetOrCreateRelease(gh_repo, tag, sha, is_prerelease):
    print("· Get Release handler")
    gh_tag = None
    try:
        gh_tag = gh_repo.get_git_ref(f"tags/{tag!s}")
    except Exception:
        stdout.flush()

    if gh_tag:
        try:
            return (gh_repo.get_release(tag), False)
        except Exception:
            return (gh_repo.create_git_release(tag, tag, "", draft=True, prerelease=is_prerelease), True)
    else:
        err_msg = f"Tag/release '{tag!s}' does not exist and could not create it!"
        if sha is None:
            raise (Exception(err_msg))
        try:
            return (
                gh_repo.create_git_tag_and_release(
                    tag, "", tag, "", sha, "commit", draft=True, prerelease=is_prerelease
                ),
                True,
            )
        except Exception:
            raise (Exception(err_msg))


def UpdateReference(gh_release, tag, sha, is_prerelease, is_draft):
    print("· Update Release reference (force-push tag)")

    if is_draft:
        # Unfortunately, it seems not possible to update fields 'created_at' or 'published_at'.
        print(" > Update (pre-)release")
        gh_release.update_release(
            gh_release.title,
            "" if gh_release.body is None else gh_release.body,
            draft=False,
            prerelease=is_prerelease,
            tag_name=gh_release.tag_name,
            target_commitish=gh_release.target_commitish,
        )

    if sha is not None:
        print(f" > Force-push '{tag!s}' to {sha!s}")
        gh_repo.get_git_ref(f"tags/{tag!s}").edit(sha)


files = GetListOfArtifacts(sys_argv, paramFiles)
stdout.flush()
[tag, env_tag, is_prerelease] = CheckRefSemVer(paramRef, paramTag, paramSnapshots)
stdout.flush()
gh_repo = GetRepositoryHandler(GetGitHubAPIHandler(paramToken), paramRepo)
stdout.flush()
[gh_release, is_draft] = GetOrCreateRelease(gh_repo, tag, paramSHA, is_prerelease)
stdout.flush()

if paramRM:
    print("· RM set. All previous assets are being cleared...")
    for asset in gh_release.get_assets():
        print(f" - {asset.name}")
        asset.delete_asset()
    stdout.flush()

if len(files) > 0:
    print("· Upload assets")
    env = environ.copy()
    env["GITHUB_TOKEN"] = paramToken
    cmd = ["gh", "release", "upload", "--repo", paramRepo, "--clobber", tag] + files
    print(f" > {' '.join(cmd)}")
    check_call(cmd, env=env)
    stdout.flush()
else:
    print("! Skipping uploading assets because the file list is empty.")

UpdateReference(gh_release, tag, paramSHA if env_tag is None else None, is_prerelease, is_draft)

@@ -1 +1 @@
-pyTooling ~= 8.5
+pyTooling ~= 8.10

run.ps1
@@ -116,7 +116,9 @@ $jobs = @()
 if ($livedoc)
 { Write-Host -ForegroundColor DarkYellow "[live][DOC] Building documentation using Sphinx ..."

-  .\doc\make.bat html --verbose
+  cd doc
+  py -3.14 -m sphinx.cmd.build -b html . _build/html --doctree-dir _build/doctrees --jobs auto --warning-file _build/sphinx-warnings.log --verbose
+  cd ..

   Write-Host -ForegroundColor DarkYellow "[live][DOC] Documentation finished"
 }
@@ -126,7 +128,8 @@ elseif ($doc)

   # Compile documentation
   $compileDocFunc = {
-    .\doc\make.bat html --verbose
+    cd doc
+    py -3.14 -m sphinx.cmd.build -b html . _build/html --doctree-dir _build/doctrees --jobs auto --warning-file _build/sphinx-warnings.log --verbose
   }
   $docJob = Start-Job -Name "Documentation" -ScriptBlock $compileDocFunc
   # $jobs += $docJob

setup.py
@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #

@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #
@@ -44,55 +44,55 @@ if __name__ == "__main__": # pragma: no cover

 class PlatformTesting(TestCase):
     @mark.skipif(not CurrentPlatform.IsNativeLinux, reason="Skipped, if current platform isn't native Linux.")
-    def test_ApplicationOnNativeLinux(self):
+    def test_ApplicationOnNativeLinux(self) -> None:
         app = Application()

         self.assertEqual(1, app.Value)

     @mark.skipif(not CurrentPlatform.IsNativeMacOS, reason="Skipped, if current platform isn't native macOS.")
-    def test_ApplicationOnNativeMacOS(self):
+    def test_ApplicationOnNativeMacOS(self) -> None:
         app = Application()

         self.assertEqual(2, app.Value)

     @mark.skipif(not CurrentPlatform.IsNativeWindows, reason="Skipped, if current platform isn't native Windows.")
-    def test_ApplicationOnNativeWindows(self):
+    def test_ApplicationOnNativeWindows(self) -> None:
         app = Application()

         self.assertEqual(3, app.Value)

     @mark.skipif(not CurrentPlatform.IsMSYSOnWindows, reason="Skipped, if current platform isn't MSYS on Windows.")
-    def test_ApplicationOnMSYS2OnWindows(self):
+    def test_ApplicationOnMSYS2OnWindows(self) -> None:
         app = Application()

         self.assertEqual(11, app.Value)

     @mark.skipif(not CurrentPlatform.IsMinGW32OnWindows, reason="Skipped, if current platform isn't MinGW32 on Windows.")
-    def test_ApplicationOnMinGW32OnWindows(self):
+    def test_ApplicationOnMinGW32OnWindows(self) -> None:
         app = Application()

         self.assertEqual(12, app.Value)

     @mark.skipif(not CurrentPlatform.IsMinGW64OnWindows, reason="Skipped, if current platform isn't MinGW64 on Windows.")
-    def test_ApplicationOnMinGW64OnWindows(self):
+    def test_ApplicationOnMinGW64OnWindows(self) -> None:
         app = Application()

         self.assertEqual(13, app.Value)

     @mark.skipif(not CurrentPlatform.IsUCRT64OnWindows, reason="Skipped, if current platform isn't UCRT64 on Windows.")
-    def test_ApplicationOnURTC64OnWindows(self):
+    def test_ApplicationOnURTC64OnWindows(self) -> None:
         app = Application()

         self.assertEqual(14, app.Value)

     @mark.skipif(not CurrentPlatform.IsClang32OnWindows, reason="Skipped, if current platform isn't Clang32 on Windows.")
-    def test_ApplicationOnClang32OnWindows(self):
+    def test_ApplicationOnClang32OnWindows(self) -> None:
         app = Application()

         self.assertEqual(15, app.Value)

     @mark.skipif(not CurrentPlatform.IsClang64OnWindows, reason="Skipped, if current platform isn't Clang64 on Windows.")
-    def test_ApplicationOnClang64OnWindows(self):
+    def test_ApplicationOnClang64OnWindows(self) -> None:
         app = Application()

         self.assertEqual(16, app.Value)

@@ -1 +1 @@
--r ../requirements.txt
+-r ../unit/requirements.txt

@@ -1,13 +1,4 @@
--r ../requirements.txt
-
-# Coverage collection
-Coverage ~= 7.10
-
-# Test Runner
-pytest ~= 8.4
-pytest-cov ~= 7.0
-
-# Static Type Checking
-mypy[reports] ~= 1.18
-typing_extensions ~= 4.15
-lxml ~= 6.0
+# Collect all testing requirements
+-r platform/requirements.txt
+-r typing/requirements.txt
+-r unit/requirements.txt

tests/typing/requirements.txt (new file)
@@ -0,0 +1,6 @@
+-r ../../requirements.txt
+
+# Static Type Checking
+mypy[reports] ~= 1.19
+typing_extensions ~= 4.15
+lxml >= 5.4, <7.0

@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #

@@ -34,7 +34,7 @@ from myPackage import Application


 class Instantiation(TestCase):
-    def test_Application(self):
+    def test_Application(self) -> None:
         app = Application()

         self.assertGreater(app.Value, 0)

@@ -11,7 +11,7 @@
 # #
 # License: #
 # ==================================================================================================================== #
-# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
+# Copyright 2017-2026 Patrick Lehmann - Bötzingen, Germany #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #

@@ -1 +1,8 @@
--r ../requirements.txt
+-r ../../requirements.txt
+
+# Coverage collection
+Coverage ~= 7.13
+
+# Test Runner
+pytest ~= 9.0
+pytest-cov ~= 7.0

@@ -4,7 +4,7 @@
 # Unai Martinez-Corral #
 # #
 # ==================================================================================================================== #
-# Copyright 2020-2025 The pyTooling Authors #
+# Copyright 2020-2026 The pyTooling Authors #
 # #
 # Licensed under the Apache License, Version 2.0 (the "License"); #
 # you may not use this file except in compliance with the License. #