Compare commits


17 Commits

Author SHA1 Message Date
Patrick Lehmann
9e0b1c69f1 v3.0.0 2024-12-08 09:43:32 +01:00
Patrick Lehmann
f084e02f01 Use pattern to reduce the number of downloaded artifacts. 2024-12-07 23:41:16 +01:00
Patrick Lehmann
c34d4e240e [Dependabot]: Bump codecov/codecov-action from 3 to 5 2024-12-07 15:03:03 +01:00
Patrick Lehmann
6d04009bd6 Use new pyTooling/upload-artifact pyTooling/download-artifact composite actions. 2024-12-07 10:20:00 +01:00
Patrick Lehmann
50d32d1950 Replace also in description text. 2024-12-02 08:27:34 +01:00
Patrick Lehmann
7733e8998f Supporting replacements in NightlyReleases. 2024-12-01 22:06:18 +01:00
Patrick Lehmann
4c28b9d003 Added support for ZStandard. [skip ci] 2024-11-29 01:51:56 +01:00
Patrick Lehmann
bafea7d082 Trim leading whitespace. 2024-11-27 19:45:24 +01:00
Patrick Lehmann
9ca7b04f37 Added tar/gz compression as tgz files. 2024-11-27 19:34:44 +01:00
Patrick Lehmann
7a0ee75fd5 Added nightly release job template. 2024-11-27 08:03:57 +01:00
Patrick Lehmann
bc876f7171 Allow disabling the publishing of test reports via Dorny Test Reporter. 2024-11-22 22:23:18 +01:00
Patrick Lehmann
edca070047 Also checkout submodules. 2024-11-17 01:59:51 +01:00
dependabot[bot]
21c2f48dad [Dependabot]: Bump codecov/codecov-action from 3 to 5
Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 5.
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/codecov/codecov-action/compare/v3...v5)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-15 03:35:36 +00:00
Patrick Lehmann
9338fbd106 v2.0.1 2024-11-10 21:15:14 +01:00
Patrick Lehmann
6869d0f666 Forward secrets to pipeline template. 2024-11-10 21:11:14 +01:00
Patrick Lehmann
bef77effcb v2.1.0 2024-11-10 19:03:51 +01:00
Patrick Lehmann
9808b6c7f9 Handle simple packages and namespace packages by pipeline template.
Added pipeline to check pipeline templates.
2024-11-10 18:49:33 +01:00
28 changed files with 902 additions and 147 deletions

View File

@@ -89,7 +89,7 @@ jobs:
uses: actions/checkout@v4
- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.wheel }}
path: install
@@ -255,9 +255,10 @@ jobs:
- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: report/unit/TestReportSummary.xml
working-directory: report/unit
path: TestReportSummary.xml
if-no-files-found: error
retention-days: 1

View File

@@ -50,10 +50,11 @@ jobs:
- name: 📤 Upload 'documentation' artifacts
if: inputs.artifact != ''
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: doc/_build/html
working-directory: doc/_build/html
path: '*'
retention-days: 1
- name: '📓 Publish site to GitHub Pages'

View File

@@ -1,14 +1,36 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Namespace Package
on:
workflow_call:
inputs:
package_namespace:
description: 'Name of the tool.'
required: true
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool.'
description: 'Name of the tool''s package.'
required: true
type: string
unittest_python_version:
@@ -71,32 +93,47 @@ on:
required: false
default: ''
type: string
secrets:
PYPI_TOKEN:
description: "Token for pushing releases to PyPI."
required: false
CODACY_PROJECT_TOKEN:
description: "Token for pushing coverage results to Codacy."
required: false
jobs:
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
with:
name: "${{ inputs.package_namespace }}.${{ inputs.package_name }}"
python_version: ${{ inputs.unittest_python_version }}
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.unittest_python_version }}
python_version_list: ${{ inputs.unittest_python_version_list }}
system_list: ${{ inputs.unittest_system_list }}
system_list: ${{ inputs.unittest_system_list }}
include_list: ${{ inputs.unittest_include_list }}
exclude_list: ${{ inputs.unittest_exclude_list }}
disable_list: ${{ inputs.unittest_disable_list }}
AppTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: "${{ inputs.package_namespace }}.${{ inputs.package_name }}"
python_version: ${{ inputs.apptest_python_version }}
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.apptest_python_version }}
python_version_list: ${{ inputs.apptest_python_version_list }}
system_list: ${{ inputs.apptest_system_list }}
system_list: ${{ inputs.apptest_system_list }}
include_list: ${{ inputs.apptest_include_list }}
exclude_list: ${{ inputs.apptest_exclude_list }}
disable_list: ${{ inputs.apptest_disable_list }}
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- UnitTestingParams
with:
@@ -107,33 +144,30 @@ jobs:
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
commands: |
touch ${{ inputs.package_namespace }}/__init__.py
mypy --html-report htmlmypy -p ${{ inputs.package_namespace }}.${{ inputs.name }}
html_report: 'htmlmypy'
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'report/typing'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
# fail_below: 70
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2
needs:
- DocCoverage
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r2
uses: pyTooling/Actions/.github/workflows/Package.yml@main
needs:
- UnitTestingParams
- UnitTesting
@@ -142,7 +176,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r2
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
# needs:
# - AppTestingParams
# - UnitTestingParams
@@ -153,7 +187,7 @@ jobs:
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
@@ -166,7 +200,7 @@ jobs:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
@@ -174,17 +208,17 @@ jobs:
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
needs:
- UnitTestingParams
- ConfigParams
- UnitTestingParams
- PublishTestResults
- PublishCoverageResults
# - VerifyDocs
@@ -197,7 +231,7 @@ jobs:
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
needs:
- UnitTestingParams
- PublishCoverageResults
@@ -208,7 +242,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
# PDFDocumentation:
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
# needs:
# - UnitTestingParams
# - Documentation
@@ -218,7 +252,7 @@ jobs:
# pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
needs:
- UnitTestingParams
- Documentation
@@ -231,7 +265,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@r2
uses: pyTooling/Actions/.github/workflows/Release.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- Package
@@ -239,7 +273,7 @@ jobs:
- PublishToGitHubPages
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams
@@ -252,7 +286,7 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- UnitTestingParams
- UnitTesting

View File

@@ -76,6 +76,9 @@ jobs:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
@@ -160,16 +163,17 @@ jobs:
- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1
- name: 📊 Publish coverage at CodeCov
continue-on-error: true
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests

View File

@@ -34,6 +34,15 @@ on:
required: false
default: '3.12'
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: true
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
@@ -41,6 +50,15 @@ on:
type: string
outputs:
package_fullname:
description: ""
value: ${{ jobs.Extract.outputs.package_fullname }}
package_directory:
description: ""
value: ${{ jobs.Extract.outputs.package_directory }}
mypy_prepare_command:
description: ""
value: ${{ jobs.Extract.outputs.mypy_prepare_command }}
coverage_report_html_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
@@ -62,6 +80,9 @@ jobs:
name: 📓 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
@@ -81,6 +102,35 @@ jobs:
run: |
python -m pip install --disable-pip-version-check -U wheel tomli
- name: 🔁 Full package name and directory
id: getPackageName
shell: python
run: |
from os import getenv
from pathlib import Path
from textwrap import dedent
namespace = "${{ inputs.package_namespace }}".strip()
name = "${{ inputs.package_name }}".strip()
if namespace == "" or namespace == ".":
fullname = f"{name}"
directory = f"{name}"
mypy_prepare_command = ""
else:
fullname = f"{namespace}.{name}"
directory = f"{namespace}/{name}"
mypy_prepare_command = f"touch {namespace}/__init__.py"
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
package_fullname={fullname}
package_directory={directory}
mypy_prepare_command={mypy_prepare_command}
"""))
- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python

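For reference, the 'Full package name and directory' step above can be reproduced standalone. A minimal Python sketch of the same derivation (the helper name `derive_package_info` is hypothetical, not part of the workflow):

```python
# Mirrors the getPackageName step: derive the full package name, the package
# directory, and the mypy preparation command from the two workflow inputs.
def derive_package_info(namespace: str, name: str) -> tuple[str, str, str]:
    namespace = namespace.strip()
    name = name.strip()
    if namespace in ("", "."):                       # simple package
        return name, name, ""
    # Namespace package: mypy needs a stub __init__.py in the namespace root.
    return f"{namespace}.{name}", f"{namespace}/{name}", f"touch {namespace}/__init__.py"


assert derive_package_info("", "pyDummy") == ("pyDummy", "pyDummy", "")
assert derive_package_info("pyExamples", "Extensions") == (
    "pyExamples.Extensions", "pyExamples/Extensions", "touch pyExamples/__init__.py"
)
```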
View File

@@ -50,7 +50,7 @@ jobs:
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
path: latex
@@ -62,7 +62,7 @@ jobs:
root_file: ${{ inputs.document }}.tex
- name: 📤 Upload 'PDF Documentation' artifact
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
if: inputs.pdf_artifact != ''
with:
name: ${{ inputs.pdf_artifact }}

.github/workflows/NightlyRelease.yml (vendored, new file, 387 lines added)
View File

@@ -0,0 +1,387 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Nightly
on:
workflow_call:
inputs:
ubuntu_image:
description: 'Name of the Ubuntu image.'
required: false
default: 'ubuntu-24.04'
type: string
nightly_name:
description: 'Name of the nightly release.'
required: false
default: 'nightly'
type: string
nightly_title:
description: 'Title of the nightly release.'
required: false
default: ''
type: string
nightly_description:
description: 'Description of the nightly release.'
required: false
default: 'Release of artifacts from latest CI pipeline.'
type: string
draft:
description: 'Specify if this is a draft.'
required: false
default: false
type: boolean
prerelease:
description: 'Specify if this is a pre-release.'
required: false
default: false
type: boolean
latest:
description: 'Specify if this is the latest release.'
required: false
default: false
type: boolean
replacements:
description: 'Multi-line string containing search=replace patterns.'
required: false
default: ''
type: string
assets:
description: 'Multi-line string containing artifact:file:title asset descriptions.'
required: true
type: string
jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
permissions:
contents: write
actions: write
# attestations: write
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: 🔧 Install zstd
run: sudo apt-get install -y --no-install-recommends zstd
- name: 📑 Delete (old) Release Page
id: deleteReleasePage
run: |
set +e
ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"
export GH_TOKEN=${{ github.token }}
echo -n "Deleting release '${{ inputs.nightly_name }}' ... "
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
elif [[ "${message}" == "release not found" ]]; then
echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📑 (Re)create (new) Release Page
id: createReleasePage
run: |
set +e
ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"
export GH_TOKEN=${{ github.token }}
addDraft="--draft"
if ${{ inputs.prerelease }}; then
addPreRelease="--prerelease"
fi
if ! ${{ inputs.latest }}; then
addLatest="--latest=false"
fi
if [[ "${{ inputs.nightly_title }}" != "" ]]; then
addTitle=("--title" "${{ inputs.nightly_title }}")
fi
cat <<'EOF' > __NoTeS__.md
${{ inputs.nightly_description }}
EOF
if [[ -s __NoTeS__.md ]]; then
addNotes=("--notes-file" "__NoTeS__.md")
fi
# Apply replacements
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
done <<<'${{ inputs.replacements }}'
# Add footer line
cat <<EOF >> __NoTeS__.md
--------
Published from [${{ github.workflow }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
EOF
echo "Creating release '${{ inputs.nightly_name }}' ... "
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
exit 1
fi
- name: 📥 Download artifacts and upload as assets
id: uploadAssets
run: |
set +e
ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_LIGHT_YELLOW="\e[93m"
ANSI_NOCOLOR="\e[0m"
export GH_TOKEN=${{ github.token }}
Replace() {
line="$1"
while IFS=$'\r\n' read -r patternLine; do
# skip empty lines
[[ "$patternLine" == "" ]] && continue
pattern="${patternLine%%=*}"
replacement="${patternLine#*=}"
line="${line//"%$pattern%"/"$replacement"}"
done <<<'${{ inputs.replacements }}'
echo "$line"
}
ERRORS=0
# A dictionary of 0/1 to avoid duplicate downloads
declare -A downloadedArtifacts
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" ]]; then
continue
fi
# split assetLine colon separated triple: artifact:asset:title
artifact="${assetLine%%:*}"
remaining="${assetLine#*:}"
asset="${remaining%%:*}"
title="${remaining##*:}"
# remove leading whitespace
asset="${asset#"${asset%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"
# apply replacements
asset="$(Replace "${asset}")"
title="$(Replace "${title}")"
echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
echo -n " Checked asset for duplicates ... "
if [[ -n "${assetFilenames[$asset]}" ]]; then
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
else
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
assetFilenames[$asset]=1
fi
# Download artifact by artifact name
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
else
echo " downloading '${artifact}' ... "
echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
ERRORS=1
continue
fi
downloadedArtifacts[$artifact]=1
fi
# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
echo -n " checking asset '${artifact}/${asset}' ... "
if [[ "${asset}" == !*.zip ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
if [[ $? -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --gzip --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=1
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
if [[ "${asset:0:1}" == "\$" ]]; then
asset="${asset##*$}"
dirName="${asset%.*}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
retCode=$?
else
asset="${asset##*!}"
echo " Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
tar -c --zstd --file="../${asset}" *
)
retCode=$?
fi
if [[ $retCode -eq 0 ]]; then
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=1
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${artifact}/${asset}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=1
continue
fi
# Upload asset to existing release page
echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
continue
fi
done <<<'${{ inputs.assets }}'
echo "Inspecting downloaded artifacts ..."
tree -L 3 .
if [[ $ERRORS -ne 0 ]]; then
echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi
- name: 📑 Remove draft state from Release Page
if: ${{ ! inputs.draft }}
run: |
set +e
ANSI_LIGHT_RED="\e[91m"
ANSI_LIGHT_GREEN="\e[92m"
ANSI_NOCOLOR="\e[0m"
export GH_TOKEN=${{ github.token }}
# Remove draft-state from release page
echo -n "Remove draft-state from release '${title}' ... "
gh release edit --draft=false "${{ inputs.nightly_name }}"
if [[ $? -eq 0 ]]; then
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
else
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
fi

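The asset handling in the 'Download artifacts and upload as assets' step above is implemented in Bash; the following Python sketch mirrors its parsing rules only (the helper names are hypothetical and not part of the workflow). Each non-empty line of `assets` is split into an `artifact:asset:title` triple, leading whitespace is trimmed from asset and title, `%search%` tokens from the `replacements` input are substituted, and a leading `!` or `$` on the asset selects whether the whole artifact is compressed as-is or wrapped in a top-level directory first.

```python
# Minimal sketch, assuming the same input formats as the workflow above.
from typing import Dict, List, Tuple


def parse_replacements(replacements: str) -> Dict[str, str]:
    """Parse 'search=replace' lines into a dictionary."""
    patterns = {}
    for line in replacements.splitlines():
        if line.strip():
            search, _, replace = line.partition("=")
            patterns[search] = replace
    return patterns


def substitute(text: str, patterns: Dict[str, str]) -> str:
    """Expand every %search% token, like the Bash Replace() function."""
    for search, replace in patterns.items():
        text = text.replace(f"%{search}%", replace)
    return text


def parse_assets(assets: str, patterns: Dict[str, str]) -> List[Tuple[str, str, str, str]]:
    """Return (artifact, asset, title, mode) tuples; mode is 'file', 'compress' or 'compress+dir'."""
    entries = []
    for line in assets.splitlines():
        if not line.strip():
            continue
        artifact, _, remaining = line.partition(":")
        asset = substitute(remaining.split(":", 1)[0].lstrip(), patterns)
        title = substitute(remaining.rsplit(":", 1)[-1].lstrip(), patterns)
        if asset.startswith("!"):
            mode, asset = "compress", asset[1:]          # zip/tgz/tzst of the artifact content
        elif asset.startswith("$"):
            mode, asset = "compress+dir", asset[1:]      # same, wrapped in a top-level directory
        else:
            mode = "file"                                # plain file taken from the artifact
        entries.append((artifact, asset, title, mode))
    return entries


patterns = parse_replacements("version=4.2.0\ntool=myTool\nprog=program")
lines = "other: %prog%.py: Application - %tool% - %version%\ndocument:$archive6.tgz: Archive 6 - tgz + dir"
for entry in parse_assets(lines, patterns):
    print(entry)
# ('other', 'program.py', 'Application - myTool - 4.2.0', 'file')
# ('document', 'archive6.tgz', 'Archive 6 - tgz + dir', 'compress+dir')
```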
View File

@@ -54,6 +54,9 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
@@ -103,9 +106,10 @@ jobs:
run: python setup.py bdist_wheel
- name: 📤 Upload wheel artifact
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: dist/
working-directory: dist
path: '*'
if-no-files-found: error
retention-days: 1

View File

@@ -32,7 +32,18 @@ on:
type: string
name:
description: 'Name of the tool.'
required: true
required: false
default: ''
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: false
default: ''
type: string
python_version:
description: 'Python version.'
@@ -72,7 +83,7 @@ on:
windows_image:
description: 'The used GitHub Action image for Windows based jobs.'
required: false
default: 'windows-latest'
default: 'windows-2022'
type: string
macos_intel_image:
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
@@ -82,7 +93,7 @@ on:
macos_arm_image:
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
required: false
default: 'macos-latest'
default: 'macos-14'
type: string
outputs:
@@ -120,7 +131,9 @@ jobs:
from textwrap import dedent
from typing import Iterable
name = "${{ inputs.name }}".strip()
package_namespace = "${{ inputs.package_namespace }}".strip()
package_name = "${{ inputs.package_name }}".strip()
name = "${{ inputs.name }}".strip()
python_version = "${{ inputs.python_version }}".strip()
systems = "${{ inputs.system_list }}".strip()
versions = "${{ inputs.python_version_list }}".strip()
@@ -128,6 +141,12 @@ jobs:
exclude_list = "${{ inputs.exclude_list }}".strip()
disable_list = "${{ inputs.disable_list }}".strip()
if name == "":
if package_namespace == "" or package_namespace == ".":
name = f"{package_name}"
else:
name = f"{package_namespace}.{package_name}"
currentMSYS2Version = "3.11"
currentAlphaVersion = "3.14"
currentAlphaRelease = "3.14.0-alpha.1"
@@ -296,18 +315,6 @@ jobs:
"documentation_pdf": f"{name}-Documentation-PDF",
}
# Deprecated structure
params = {
"python_version": python_version,
"artifacts": {
"unittesting": f"{artifact_names['unittesting_xml']}",
"coverage": f"{artifact_names['codecoverage_html']}",
"typing": f"{artifact_names['statictyping_html']}",
"package": f"{artifact_names['package_all']}",
"doc": f"{artifact_names['documentation_html']}",
}
}
print("Parameters:")
print(f" python_version: {python_version}")
print(f" python_jobs ({len(jobs)}):\n" +
@@ -325,7 +332,6 @@ jobs:
python_version={python_version}
python_jobs={json_dumps(jobs)}
artifact_names={json_dumps(artifact_names)}
params={json_dumps(params)}
"""))
- name: Verify out parameters

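When `name` is left empty, the parameters job above now derives it from `package_namespace` and `package_name` before building the artifact-name dictionary that callers unpack with `fromJson(...)`. A small Python sketch of that fallback (the helper name and the truncated dictionary are illustrative only):

```python
# Sketch of the name fallback introduced above.
import json


def effective_name(name: str, package_namespace: str, package_name: str) -> str:
    name = name.strip()
    if name:
        return name
    if package_namespace.strip() in ("", "."):
        return package_name.strip()
    return f"{package_namespace.strip()}.{package_name.strip()}"


name = effective_name("", "pyExamples", "Extensions")   # -> "pyExamples.Extensions"

# The job serializes all artifact names as one JSON output; only the key shown
# in the diff above is reproduced here, the remaining keys are elided.
artifact_names = {
    "documentation_pdf": f"{name}-Documentation-PDF",
}
print(f"artifact_names={json.dumps(artifact_names)}")
```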
View File

@@ -29,6 +29,10 @@ on:
required: false
default: '24.04'
type: string
coverage_artifacts_pattern:
required: false
default: '*-CodeCoverage-*'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
@@ -68,12 +72,20 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
- name: Download Artifacts
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}
path: artifacts
- name: 🔎 Inspect extracted artifact (tarball)
run: |
tree -psh artifacts
- name: 🔧 Install coverage and tomli
run: |
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
@@ -170,7 +182,7 @@ jobs:
- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_sqlite_artifact }}
path: .coverage
@@ -180,7 +192,7 @@ jobs:
- name: 📤 Upload 'Coverage XML Report' artifact
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_xml_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
@@ -190,7 +202,7 @@ jobs:
- name: 📤 Upload 'Coverage JSON Report' artifact
if: inputs.coverage_json_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_json }}
@@ -200,17 +212,18 @@ jobs:
- name: 📤 Upload 'Coverage HTML Report' artifact
if: inputs.coverage_html_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_html_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1
- name: 📊 Publish code coverage at CodeCov
if: inputs.CodeCov == true
continue-on-error: true
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests

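The excerpt above shows that the publishing job now downloads every per-job coverage artifact matching `coverage_artifacts_pattern` into `artifacts/`; the merge step itself is not part of the excerpt. Conceptually it corresponds to something like the following sketch using the coverage.py API (paths and report locations are assumptions, not taken from the workflow):

```python
# Rough sketch: combine per-job .coverage SQLite databases into one data file
# and render XML/JSON/HTML reports from the merged result.
from pathlib import Path

from coverage import Coverage

databases = [str(p) for p in Path("artifacts").glob("*/.coverage")]

cov = Coverage()                  # picks up [tool.coverage.*] from pyproject.toml, if configured
cov.combine(databases)            # merge all downloaded coverage databases
cov.save()                        # write the combined .coverage file
cov.xml_report(outfile="report/coverage.xml")
cov.json_report(outfile="report/coverage.json")
cov.html_report(directory="report/htmlcov")
```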
View File

@@ -57,10 +57,10 @@ jobs:
steps:
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.artifact }}
path: dist/
path: dist
- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5

View File

@@ -30,6 +30,10 @@ on:
required: false
default: '24.04'
type: string
unittest_artifacts_pattern:
required: false
default: '*-UnitTestReportSummary-*'
type: string
merged_junit_artifact:
description: 'Name of the merged JUnit Test Summary artifact.'
required: false
@@ -40,6 +44,11 @@ on:
required: false
default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
type: string
publish:
description: 'Publish test report summary via Dorny Test-Reporter'
required: false
default: true
type: boolean
report_title:
description: 'Title of the summary report in the pipeline''s sidebar'
required: false
@@ -57,10 +66,15 @@ jobs:
uses: actions/checkout@v4
- name: Download Artifacts
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}
path: artifacts
- name: 🔎 Inspect extracted artifact (tarball)
run: |
tree -psh artifacts
- name: 🔧 Install pyEDAA.Reports (JUnit Parser and Merger)
run: |
python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports
@@ -80,6 +94,7 @@ jobs:
- name: 📊 Publish Unit Test Results
uses: dorny/test-reporter@v1
if: inputs.publish && inputs.report_title != ''
with:
name: ${{ inputs.report_title }}
path: Unittesting.xml
@@ -87,7 +102,7 @@ jobs:
- name: 📤 Upload merged 'JUnit Test Summary' artifact
if: inputs.merged_junit_artifact != ''
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.merged_junit_artifact }}
path: Unittesting.xml

View File

@@ -56,21 +56,21 @@ jobs:
uses: actions/checkout@v4
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.doc }}
path: public
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
if: ${{ inputs.coverage != '' }}
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.coverage }}
path: public/coverage
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
if: ${{ inputs.typing != '' }}
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.typing }}
path: public/typing

View File

@@ -82,6 +82,9 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
- name: 🔧 Install graphviz
run: sudo apt-get install -y --no-install-recommends graphviz
@@ -98,14 +101,14 @@ jobs:
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
@@ -121,10 +124,11 @@ jobs:
- name: 📤 Upload 'HTML Documentation' artifact
if: inputs.html_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
path: ${{ inputs.doc_directory }}/_build/html
working-directory: ${{ inputs.doc_directory }}/_build/html
path: '*'
if-no-files-found: error
retention-days: 1
@@ -135,6 +139,9 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
- name: 🔧 Install graphviz
run: sudo apt-get install -y --no-install-recommends graphviz
@@ -151,14 +158,14 @@ jobs:
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: actions/download-artifact@v4
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}
@@ -176,9 +183,10 @@ jobs:
- name: 📤 Upload 'LaTeX Documentation' artifact
if: inputs.latex_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
path: ${{ inputs.doc_directory }}/_build/latex
working-directory: ${{ inputs.doc_directory }}/_build/latex
path: '*'
if-no-files-found: error
retention-days: 1

View File

@@ -89,17 +89,18 @@ jobs:
- name: 📤 Upload 'Static Typing Report' HTML artifact
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
path: ${{ inputs.html_report }}
working-directory: ${{ inputs.html_report }}
path: '*'
if-no-files-found: error
retention-days: 1
- name: 📤 Upload 'Static Typing Report' JUnit artifact
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.junit_artifact }}
path: ${{ inputs.junit_report }}

View File

@@ -147,6 +147,9 @@ jobs:
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true
# Package Manager steps
- name: 🔧 Install homebrew dependencies on macOS
@@ -439,17 +442,18 @@ jobs:
- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: report/unit/TestReportSummary.xml
working-directory: report/unit
path: TestReportSummary.xml
if-no-files-found: error
retention-days: 1
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
# if: inputs.unittest_html_artifact != ''
# continue-on-error: true
# uses: actions/upload-artifact@v4
# uses: pyTooling/upload-artifact@v4
# with:
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
# path: ${{ steps.getVariables.outputs.unittest_report_html_directory }}
@@ -459,7 +463,7 @@ jobs:
- name: 📤 Upload 'Coverage SQLite Database' artifact
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: .coverage
@@ -470,7 +474,7 @@ jobs:
- name: 📤 Upload 'Coverage XML Report' artifact
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
@@ -480,7 +484,7 @@ jobs:
- name: 📤 Upload 'Coverage JSON Report' artifact
if: inputs.coverage_json_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: ${{ steps.getVariables.outputs.coverage_report_json }}
@@ -490,9 +494,10 @@ jobs:
- name: 📤 Upload 'Coverage HTML Report' artifact
if: inputs.coverage_html_artifact != ''
continue-on-error: true
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1

View File

@@ -6,7 +6,7 @@ on:
jobs:
Params:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.12 3.13"
@@ -25,7 +25,7 @@ jobs:
run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
path: artifact.txt
@@ -42,7 +42,7 @@ jobs:
run: echo "Package" >> package.txt
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
uses: actions/upload-artifact@v4
uses: pyTooling/upload-artifact@v4
with:
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
path: package.txt
@@ -50,7 +50,7 @@ jobs:
retention-days: 1
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- Params
- Testing

View File

@@ -1,26 +1,31 @@
name: Verification of Complete Pipeline
name: Verification of Job Templates
on:
push:
workflow_dispatch:
jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
with:
package_name: pyDummy
UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: pyDummy
python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
# disable_list: "windows:pypy-3.10"
PlatformTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Platform
python_version_list: ""
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- UnitTestingParams
with:
@@ -33,7 +38,7 @@ jobs:
# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
PlatformTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- PlatformTestingParams
with:
@@ -48,7 +53,7 @@ jobs:
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
# Coverage:
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r2
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
# needs:
# - UnitTestingParams
# with:
@@ -58,32 +63,30 @@ jobs:
# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
commands: |
mypy --html-report htmlmypy -p pyDummy
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'htmlmypy'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: sphinx_reports
directory: ${{ needs.ConfigParams.outputs.package_directory }}
# fail_below: 70
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2
needs:
- DocCoverage
Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@r2
uses: pyTooling/Actions/.github/workflows/Package.yml@main
needs:
- UnitTestingParams
- UnitTesting
@@ -94,7 +97,7 @@ jobs:
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
@@ -109,7 +112,7 @@ jobs:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- UnitTesting
- PlatformTesting
@@ -117,17 +120,17 @@ jobs:
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
needs:
- UnitTestingParams
- ConfigParams
- UnitTestingParams
- PublishTestResults
- PublishCoverageResults
# - VerifyDocs
@@ -151,7 +154,7 @@ jobs:
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
PDFDocumentation:
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
needs:
- UnitTestingParams
- Documentation
@@ -161,7 +164,7 @@ jobs:
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
needs:
- UnitTestingParams
- Documentation
@@ -175,7 +178,7 @@ jobs:
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@r2
uses: pyTooling/Actions/.github/workflows/Release.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTesting
@@ -186,7 +189,7 @@ jobs:
- PublishToGitHubPages
PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams
@@ -200,7 +203,7 @@ jobs:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- UnitTestingParams
- PlatformTestingParams

View File

@@ -0,0 +1,15 @@
name: Verification of Pipeline Templates (Namespace Package)
on:
push:
workflow_dispatch:
jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
with:
package_namespace: pyExamples
package_name: Extensions
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

.github/workflows/_Checking_Nightly.yml (vendored, new file, 101 lines added)
View File

@@ -0,0 +1,101 @@
name: Verification of Nightly Releases
on:
push:
workflow_dispatch:
jobs:
Build:
name: Build something
runs-on: ubuntu-24.04
steps:
- name: 🖉 Build 1
run: |
echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
- name: 📤 Upload artifact
uses: pyTooling/upload-artifact@v4
with:
name: document
path: |
document1.txt
*.log
if-no-files-found: error
retention-days: 1
- name: 🖉 Program
run: |
echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
- name: 📤 Upload artifact
uses: pyTooling/upload-artifact@v4
with:
name: other
path: |
*.txt
*.py
if-no-files-found: error
retention-days: 1
NightlyPage:
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
needs:
- Build
secrets: inherit
permissions:
contents: write
actions: write
# attestations: write
with:
prerelease: true
replacements: |
version=4.2.0
tool=myTool
prog=program
nightly_title: "Nightly Release"
nightly_description: |
This *nightly* release contains the latest and most important artifacts created by GHDL's CI pipeline.
# GHDL %version%
GHDL offers the simulator and synthesis tool for VHDL. GHDL can be built for various backends:
* `gcc` - using the GCC compiler framework
* `mcode` - in memory code generation
* `llvm` - using the LLVM compiler framework
* `llvm-jit` - using the LLVM compiler framework, but in memory
The following asset categories are provided for GHDL:
* macOS x64-64 builds as TAR/GZ file
* macOS aarch64 builds as TAR/GZ file
* Ubuntu 24.04 LTS builds as TAR/GZ file
* Windows builds for standalone usage (without MSYS2) as ZIP file
* MSYS2 packages as TAR/ZST file
# pyGHDL %version%
The Python package `pyGHDL` offers a Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
In addition to the low-level binding layer, pyGHDL offers:
* a Language Server Protocol (LSP) instance for e.g. live code checking by editors
* a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)
The following asset categories are provided for pyGHDL:
* Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so`
* Platform specific Python wheel package for Windows incl. `pyGHDL...dll`
assets: |
document: document1.txt: Documentation
document: build.log: Logfile - %tool% - %tool%
other: document1.txt: SBOM - %version%
other: %prog%.py: Application - %tool% - %version%
document:!archive1.zip: Archive 1 - zip
document:!archive2.tgz: Archive 2 - tgz
document:!archive3.tar.gz: Archive 3 - tar.gz
document:!archive4.tzst: Archive 4 - tzst
document:!archive5.tar.zst:Archive 5 - tar.zst
document:$archive6.tgz: Archive 6 - tgz + dir
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
document:$archive8.tzst: Archive 8 - tzst + dir
document:$archive9.tar.zst:Archive 9 - tar.zst + dir

View File

@@ -6,24 +6,24 @@ on:
jobs:
Params_Default:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
Params_PythonVersions:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"
Params_Systems:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
system_list: "windows mingw32 mingw64"
Params_Include:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.11"
@@ -31,7 +31,7 @@ jobs:
include_list: "ubuntu:3.12 ubuntu:3.13"
Params_Exclude:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.12"
@@ -39,7 +39,7 @@ jobs:
exclude_list: "windows:3.12 windows:3.13"
Params_Disable:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.12"
@@ -47,7 +47,7 @@ jobs:
disable_list: "windows:3.12 windows:3.13"
Params_All:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
name: Example
python_version_list: "3.12 3.13"
@@ -460,7 +460,7 @@ jobs:
expectedPythonVersion = "3.13"
expectedPythons = ["3.12", "3.13"]
expectedSystems = ["ubuntu", "windows"]
expectedSystems = ["ubuntu", "macos-arm", "windows"]
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.10", "windows:3.11", "windows:3.13"]
expectedName = "Example"
expectedArtifacts = {

View File

@@ -0,0 +1,14 @@
name: Verification of Pipeline Templates (Simple Package)
on:
push:
workflow_dispatch:
jobs:
SimplePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
with:
package_name: pyDummy
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}

.gitignore (vendored, 2 lines changed)
View File

@@ -31,7 +31,7 @@ doc/pyDummy/**/*.*
# BuildTheDocs
doc/_theme/**/*.*
# IntelliJ project files
# PyCharm project files
/.idea/workspace.xml
# Git files

View File

@@ -11,17 +11,9 @@ docutils_stubs ~= 0.0.22
sphinx_rtd_theme ~= 3.0
# Sphinx Extensions
#sphinx.ext.coverage
#sphinxcontrib-actdiag>=0.8.5
sphinxcontrib-mermaid>=0.9.2
#sphinxcontrib-seqdiag>=0.8.5
#sphinxcontrib-textstyle>=0.2.1
#sphinxcontrib-spelling>=2.2.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
sphinx_autodoc_typehints ~= 2.5
# changelog>=0.3.5
sphinx_reports ~= 0.7
# BuildTheDocs Extensions (mostly patched Sphinx extensions)

View File

@@ -0,0 +1,101 @@
# ==================================================================================================================== #
# _____ _ _ _ _ _ #
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
# |_| |___/ |___/ #
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
#
"""
A module for a set of dummy classes.
"""
__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.14.8"
__keywords__ = ["GitHub Actions"]
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"
from pyTooling.Decorators import export, readonly
from pyTooling.Platform import Platform
@export
class Base:
"""
A base-class for dummy applications.
"""
_value: int #: An internal value.
def __init__(self) -> None:
"""
Initializes the base-class.
"""
self._value = 0
@readonly
def Value(self) -> int:
"""
Read-only property to return the internal value.
:return: Internal value.
"""
return self._value
@export
class Application(Base):
"""
A dummy application for demonstration purposes.
"""
def __init__(self) -> None:
"""
Initializes the dummy application.
"""
super().__init__()
platform = Platform()
if platform.IsNativeLinux:
self._value += 1
elif platform.IsNativeMacOS:
self._value += 2
elif platform.IsNativeWindows:
self._value += 3
elif platform.IsMSYSOnWindows:
self._value += 11
elif platform.IsMinGW32OnWindows:
self._value += 12
elif platform.IsMinGW64OnWindows:
self._value += 13
elif platform.IsUCRT64OnWindows:
self._value += 14
elif platform.IsClang32OnWindows:
self._value += 15
elif platform.IsClang64OnWindows:
self._value += 16

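The dummy package above encodes the detected platform into `Value`; a quick usage sketch, assuming the module is importable as `pyDummy`:

```python
# Assumed import path: the module shown above as pyDummy's __init__.py.
from pyDummy import Application

app = Application()
print(f"Platform code: {app.Value}")   # e.g. 1 on native Linux, 3 on native Windows
```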
View File

@@ -1,6 +1,6 @@
[build-system]
requires = [
"setuptools ~= 75.3",
"setuptools ~= 75.5",
"wheel ~= 0.45",
"pyTooling ~= 8.0"
]

View File

@@ -8,7 +8,7 @@ print(f"Python: {version}")
def loadRequirementsFile(requirementsFile: Path):
requirements = []
with requirementsFile.open("r") as file:
with requirementsFile.open("r", encoding="utf-8") as file:
for line in file.readlines():
line = line.strip()
if line.startswith("#") or line.startswith("https") or line == "":
@@ -84,7 +84,7 @@ for dependency in dependencies:
# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+") as f:
with github_output.open("a+", encoding="utf-8") as f:
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")
print(f"GITHUB_OUTPUT:")