Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-15 04:26:55 +08:00)

Compare commits (32 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | e1e3da83f1 |  |
|  | af582df38d |  |
|  | 84625b8790 |  |
|  | 7cfd7f9567 |  |
|  | d6e835cbd4 |  |
|  | 8661720172 |  |
|  | a0c016bf79 |  |
|  | 40217006fd |  |
|  | 99f30dab53 |  |
|  | 1c42072471 |  |
|  | 74afc5a42a |  |
|  | 2e5a79e0c2 |  |
|  | db99e35dec |  |
|  | 6cfc6e0f8f |  |
|  | 5adddda1a1 |  |
|  | 91289c4257 |  |
|  | 527e94b245 |  |
|  | f11c335674 |  |
|  | 5bed864443 |  |
|  | 37ec436eb4 |  |
|  | 6a7a4212c3 |  |
|  | f5b6f17d4e |  |
|  | 883238547a |  |
|  | 7cd852db58 |  |
|  | ce0d30fe3f |  |
|  | 34dacf7bcf |  |
|  | 48090e113d |  |
|  | e082d77e7a |  |
|  | 181035b0ba |  |
|  | 643f95bbb6 |  |
|  | 424b75ca96 |  |
|  | f0610331b9 |  |
.github/workflows/ApplicationTesting.yml (vendored): 2 changes

@@ -89,7 +89,7 @@ jobs:
         uses: actions/checkout@v4

       - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.wheel }}
           path: install
.github/workflows/CompletePipeline.yml (vendored): 60 changes

@@ -1,3 +1,24 @@
+# ==================================================================================================================== #
+# Authors: #
+# Patrick Lehmann #
+# #
+# ==================================================================================================================== #
+# Copyright 2020-2024 The pyTooling Authors #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"); #
+# you may not use this file except in compliance with the License. #
+# You may obtain a copy of the License at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# Unless required by applicable law or agreed to in writing, software #
+# distributed under the License is distributed on an "AS IS" BASIS, #
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
+# See the License for the specific language governing permissions and #
+# limitations under the License. #
+# #
+# SPDX-License-Identifier: Apache-2.0 #
+# ==================================================================================================================== #
 name: Namespace Package

 on:

@@ -82,13 +103,13 @@ on:

 jobs:
   ConfigParams:
-    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
+    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2
     with:
       package_namespace: ${{ inputs.package_namespace }}
       package_name: ${{ inputs.package_name }}

   UnitTestingParams:
-    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
+    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
     with:
       package_namespace: ${{ inputs.package_namespace }}
       package_name: ${{ inputs.package_name }}

@@ -100,7 +121,7 @@ jobs:
       disable_list: ${{ inputs.unittest_disable_list }}

   AppTestingParams:
-    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
+    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
     with:
       package_namespace: ${{ inputs.package_namespace }}
       package_name: ${{ inputs.package_name }}

@@ -112,7 +133,7 @@ jobs:
       disable_list: ${{ inputs.apptest_disable_list }}

   UnitTesting:
-    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
+    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
     needs:
       - UnitTestingParams
     with:

@@ -123,7 +144,7 @@ jobs:
       coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}

   StaticTypeCheck:
-    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
+    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2
     needs:
       - ConfigParams
       - UnitTestingParams

@@ -136,8 +157,9 @@ jobs:
       html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

   DocCoverage:
-    uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
+    uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r2
     needs:
+      - ConfigParams
       - UnitTestingParams
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

@@ -145,7 +167,7 @@ jobs:
       # fail_below: 70

   Package:
-    uses: pyTooling/Actions/.github/workflows/Package.yml@main
+    uses: pyTooling/Actions/.github/workflows/Package.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting

@@ -154,7 +176,7 @@ jobs:
       artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

   # AppTesting:
-  #   uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
+  #   uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@r2
   #   needs:
   #     - AppTestingParams
   #     - UnitTestingParams

@@ -165,7 +187,7 @@ jobs:
   #     apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}

   PublishCoverageResults:
-    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting

@@ -178,7 +200,7 @@ jobs:
       codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

   PublishTestResults:
-    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting

@@ -186,17 +208,17 @@ jobs:
       merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

   # VerifyDocs:
-  #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
+  #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2
   #   needs:
   #     - UnitTestingParams
   #   with:
   #     python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

   Documentation:
-    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
+    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2
     needs:
-      - UnitTestingParams
       - ConfigParams
+      - UnitTestingParams
       - PublishTestResults
       - PublishCoverageResults
       # - VerifyDocs

@@ -209,7 +231,7 @@ jobs:
       latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

   IntermediateCleanUp:
-    uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
+    uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r2
     needs:
       - UnitTestingParams
       - PublishCoverageResults

@@ -220,7 +242,7 @@ jobs:
       xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

   # PDFDocumentation:
-  #   uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
+  #   uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2
   #   needs:
   #     - UnitTestingParams
   #     - Documentation

@@ -230,7 +252,7 @@ jobs:
   #     pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

   PublishToGitHubPages:
-    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2
     needs:
       - UnitTestingParams
       - Documentation

@@ -243,7 +265,7 @@ jobs:
       typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

   ReleasePage:
-    uses: pyTooling/Actions/.github/workflows/Release.yml@main
+    uses: pyTooling/Actions/.github/workflows/Release.yml@r2
     if: startsWith(github.ref, 'refs/tags')
     needs:
       - Package

@@ -251,7 +273,7 @@ jobs:
       - PublishToGitHubPages

   PublishOnPyPI:
-    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2
     if: startsWith(github.ref, 'refs/tags')
     needs:
       - UnitTestingParams

@@ -264,7 +286,7 @@ jobs:
       PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

   ArtifactCleanUp:
-    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
+    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting
.github/workflows/CoverageCollection.yml (vendored): 5 changes

@@ -76,6 +76,9 @@ jobs:

       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v5

@@ -169,7 +172,7 @@ jobs:

       - name: 📊 Publish coverage at CodeCov
         continue-on-error: true
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v5
         with:
           files: ${{ steps.getVariables.outputs.coverage_report_xml }}
           flags: unittests
.github/workflows/LaTeXDocumentation.yml (vendored): 2 changes

@@ -50,7 +50,7 @@ jobs:
     runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
     steps:
       - name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.latex_artifact }}
           path: latex
.github/workflows/NightlyRelease.yml (vendored, new file): 418 additions

@@ -0,0 +1,418 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Nightly

on:
  workflow_call:
    inputs:
      ubuntu_image:
        description: 'Name of the Ubuntu image.'
        required: false
        default: 'ubuntu-24.04'
        type: string
      nightly_name:
        description: 'Name of the nightly release.'
        required: false
        default: 'nightly'
        type: string
      nightly_title:
        description: 'Title of the nightly release.'
        required: false
        default: ''
        type: string
      nightly_description:
        description: 'Description of the nightly release.'
        required: false
        default: 'Release of artifacts from latest CI pipeline.'
        type: string
      draft:
        description: 'Specify if this is a draft.'
        required: false
        default: false
        type: boolean
      prerelease:
        description: 'Specify if this is a pre-release.'
        required: false
        default: false
        type: boolean
      latest:
        description: 'Specify if this is the latest release.'
        required: false
        default: false
        type: boolean
      replacements:
        description: 'Multi-line string containing search=replace patterns.'
        required: false
        default: ''
        type: string
      assets:
        description: 'Multi-line string containing artifact:file:title asset descriptions.'
        required: true
        type: string
      tarball-name:
        type: string
        required: false
        default: '__pyTooling_upload_artifact__.tar'

jobs:
  Release:
    name: 📝 Update 'Nightly Page' on GitHub
    runs-on: ${{ inputs.ubuntu_image }}
    permissions:
      contents: write
      actions: write
      # attestations: write

    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4
        with:
          # The command 'git describe' (used for version) needs the history.
          fetch-depth: 0

      - name: 🔧 Install zstd
        run: sudo apt-get install -y --no-install-recommends zstd

      - name: 📑 Delete (old) Release Page
        id: deleteReleasePage
        run: |
          set +e

          ANSI_LIGHT_RED="\e[91m"
          ANSI_LIGHT_GREEN="\e[92m"
          ANSI_LIGHT_YELLOW="\e[93m"
          ANSI_NOCOLOR="\e[0m"

          export GH_TOKEN=${{ github.token }}

          echo -n "Deleting release '${{ inputs.nightly_name }}' ... "
          message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
          if [[ $? -eq 0 ]]; then
            echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          elif [[ "${message}" == "release not found" ]]; then
            echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
          else
            echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
            echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
            echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
            exit 1
          fi

      - name: 📑 (Re)create (new) Release Page
        id: createReleasePage
        run: |
          set +e

          ANSI_LIGHT_RED="\e[91m"
          ANSI_LIGHT_GREEN="\e[92m"
          ANSI_NOCOLOR="\e[0m"

          export GH_TOKEN=${{ github.token }}

          addDraft="--draft"

          if ${{ inputs.prerelease }}; then
            addPreRelease="--prerelease"
          fi

          if ! ${{ inputs.latest }}; then
            addLatest="--latest=false"
          fi

          if [[ "${{ inputs.nightly_title }}" != "" ]]; then
            addTitle=("--title" "${{ inputs.nightly_title }}")
          fi

          cat <<'EOF' > __NoTeS__.md
          ${{ inputs.nightly_description }}
          EOF
          if [[ -s __NoTeS__.md ]]; then
            addNotes=("--notes-file" "__NoTeS__.md")
          fi

          # Apply replacements
          while IFS=$'\r\n' read -r patternLine; do
            # skip empty lines
            [[ "$patternLine" == "" ]] && continue

            pattern="${patternLine%%=*}"
            replacement="${patternLine#*=}"
            sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
          done <<<'${{ inputs.replacements }}'

          # Add footer line
          cat <<EOF >> __NoTeS__.md

          --------
          Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
          EOF

          echo "Creating release '${{ inputs.nightly_name }}' ... "
          message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
          if [[ $? -eq 0 ]]; then
            echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          else
            echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
            echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
            echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
            exit 1
          fi

      - name: 📥 Download artifacts and upload as assets
        id: uploadAssets
        run: |
          set +e

          ANSI_LIGHT_RED="\e[91m"
          ANSI_LIGHT_GREEN="\e[92m"
          ANSI_LIGHT_YELLOW="\e[93m"
          ANSI_NOCOLOR="\e[0m"

          export GH_TOKEN=${{ github.token }}

          Replace() {
            line="$1"
            while IFS=$'\r\n' read -r patternLine; do
              # skip empty lines
              [[ "$patternLine" == "" ]] && continue

              pattern="${patternLine%%=*}"
              replacement="${patternLine#*=}"
              line="${line//"%$pattern%"/"$replacement"}"
            done <<<'${{ inputs.replacements }}'
            echo "$line"
          }

          ERRORS=0
          # A dictionary of 0/1 to avoid duplicate downloads
          declare -A downloadedArtifacts
          # A dictionary to check for duplicate asset files in release
          declare -A assetFilenames
          while IFS=$'\r\n' read -r assetLine; do
            if [[ "${assetLine}" == "" ]]; then
              continue
            fi

            # split assetLine colon separated triple: artifact:asset:title
            artifact="${assetLine%%:*}"
            remaining="${assetLine#*:}"
            asset="${remaining%%:*}"
            title="${remaining##*:}"

            # remove leading whitespace
            asset="${asset#"${asset%%[![:space:]]*}"}"
            title="${title#"${title%%[![:space:]]*}"}"

            # apply replacements
            asset="$(Replace "${asset}")"
            title="$(Replace "${title}")"

            echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
            echo -n "  Checked asset for duplicates ... "
            if [[ -n "${assetFilenames[$asset]}" ]]; then
              echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
              ERRORS=1
              continue
            else
              echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              assetFilenames[$asset]=1
            fi

            # Download artifact by artifact name
            if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
              echo -e "  downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
            else
              echo "  downloading '${artifact}' ... "
              echo -n "    gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
              gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
              if [[ $? -eq 0 ]]; then
                echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              else
                echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
                echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
                ERRORS=1
                continue
              fi
              downloadedArtifacts[$artifact]=1

              echo -n "  Checking for embedded tarball ... "
              if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
                echo -e "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"

                pushd "${artifact}" > /dev/null

                echo -n "  Extracting embedded tarball ... "
                tar -xf "${{ inputs.tarball-name }}"
                if [[ $? -ne 0 ]]; then
                  echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
                else
                  echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                fi

                echo -n "  Removing temporary tarball ... "
                rm -f "${{ inputs.tarball-name }}"
                if [[ $? -ne 0 ]]; then
                  echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
                else
                  echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                fi

                popd > /dev/null
              else
                echo -e "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
              fi
            fi

            # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
            echo -n "  checking asset '${artifact}/${asset}' ... "
            if [[ "${asset}" == !*.zip ]]; then
              echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
              asset="${asset##*!}"
              echo "  Compressing artifact '${artifact}' to '${asset}' ..."
              (
                cd "${artifact}" && \
                zip -r "../${asset}" *
              )
              if [[ $? -eq 0 ]]; then
                echo -e "  Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                echo -e "  Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
                echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
                ERRORS=1
                continue
              fi
            elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
              echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"

              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
                echo "  Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
                echo "  Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --gzip --file="../${asset}" *
                )
                retCode=$?
              fi

              if [[ $retCode -eq 0 ]]; then
                echo -e "  Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                echo -e "  Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
                echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
                ERRORS=1
                continue
              fi
            elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
              echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"

              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
                echo "  Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
                echo "  Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --zstd --file="../${asset}" *
                )
                retCode=$?
              fi

              if [[ $retCode -eq 0 ]]; then
                echo -e "  Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
                echo -e "  Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
                echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
                ERRORS=1
                continue
              fi
            elif [[ -e "${artifact}/${asset}" ]]; then
              echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
              uploadFile="${artifact}/${asset}"
            else
              echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
              echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
              ERRORS=1
              continue
            fi

            # Upload asset to existing release page
            echo -n "  uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
            gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
            if [[ $? -eq 0 ]]; then
              echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
            else
              echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
              echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
              echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
              ERRORS=1
              continue
            fi
          done <<<'${{ inputs.assets }}'

          echo "Inspecting downloaded artifacts ..."
          tree -L 3 .

          if [[ $ERROR -ne 0 ]]; then
            echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
            exit 1
          fi

      - name: 📑 Remove draft state from Release Page
        if: ${{ ! inputs.draft }}
        run: |
          set +e

          ANSI_LIGHT_RED="\e[91m"
          ANSI_LIGHT_GREEN="\e[92m"
          ANSI_NOCOLOR="\e[0m"

          export GH_TOKEN=${{ github.token }}

          # Remove draft-state from release page
          echo -n "Remove draft-state from release '${title}' ... "
          gh release edit --draft=false "${{ inputs.nightly_name }}"
          if [[ $? -eq 0 ]]; then
            echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
          else
            echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
            echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
            echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
          fi
.github/workflows/Package.yml (vendored): 3 changes

@@ -54,6 +54,9 @@ jobs:
     steps:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v5
.github/workflows/Parameters.yml (vendored): 4 changes

@@ -83,7 +83,7 @@ on:
       windows_image:
         description: 'The used GitHub Action image for Windows based jobs.'
         required: false
-        default: 'windows-latest'
+        default: 'windows-2022'
         type: string
       macos_intel_image:
         description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'

@@ -93,7 +93,7 @@ on:
       macos_arm_image:
         description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
         required: false
-        default: 'macos-latest'
+        default: 'macos-14'
         type: string

     outputs:
.github/workflows/PublishCoverageResults.yml (vendored): 16 changes

@@ -29,6 +29,10 @@ on:
         required: false
         default: '24.04'
         type: string
+      coverage_artifacts_pattern:
+        required: false
+        default: '*-CodeCoverage-*'
+        type: string
       coverage_config:
         description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
         required: false

@@ -68,12 +72,20 @@ jobs:
     steps:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       - name: Download Artifacts
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
+          pattern: ${{ inputs.coverage_artifacts_pattern }}
           path: artifacts

+      - name: 🔎 Inspect extracted artifact (tarball)
+        run: |
+          tree -psh artifacts
+
       - name: 🔧 Install coverage and tomli
         run: |
           python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli

@@ -210,7 +222,7 @@ jobs:
       - name: 📊 Publish code coverage at CodeCov
         if: inputs.CodeCov == true
         continue-on-error: true
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           files: ${{ steps.getVariables.outputs.coverage_report_xml }}
           flags: unittests
.github/workflows/PublishOnPyPI.yml (vendored): 4 changes

@@ -57,10 +57,10 @@ jobs:

     steps:
       - name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.artifact }}
-          path: dist/
+          path: dist

       - name: 🐍 Setup Python ${{ inputs.python_version }}
         uses: actions/setup-python@v5
.github/workflows/PublishTestResults.yml (vendored): 17 changes

@@ -30,6 +30,10 @@ on:
         required: false
         default: '24.04'
         type: string
+      unittest_artifacts_pattern:
+        required: false
+        default: '*-UnitTestReportSummary-*'
+        type: string
       merged_junit_artifact:
         description: 'Name of the merged JUnit Test Summary artifact.'
         required: false

@@ -40,6 +44,11 @@ on:
         required: false
         default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
         type: string
+      publish:
+        description: 'Publish test report summary via Dorny Test-Reporter'
+        required: false
+        default: true
+        type: boolean
       report_title:
         description: 'Title of the summary report in the pipeline''s sidebar'
         required: false

@@ -57,10 +66,15 @@ jobs:
         uses: actions/checkout@v4

       - name: Download Artifacts
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
+          pattern: ${{ inputs.unittest_artifacts_pattern }}
           path: artifacts

+      - name: 🔎 Inspect extracted artifact (tarball)
+        run: |
+          tree -psh artifacts
+
       - name: 🔧 Install pyEDAA.Reports (JUunit Parser and Merger)
         run: |
           python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports

@@ -80,6 +94,7 @@ jobs:

       - name: 📊 Publish Unit Test Results
         uses: dorny/test-reporter@v1
+        if: inputs.publish && inputs.report_title != ''
         with:
           name: ${{ inputs.report_title }}
           path: Unittesting.xml
.github/workflows/PublishToGitHubPages.yml (vendored): 6 changes

@@ -56,21 +56,21 @@ jobs:
         uses: actions/checkout@v4

       - name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.doc }}
           path: public

       - name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
         if: ${{ inputs.coverage != '' }}
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.coverage }}
           path: public/coverage

       - name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
         if: ${{ inputs.typing != '' }}
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.typing }}
           path: public/typing
.github/workflows/SphinxDocumentation.yml (vendored): 14 changes

@@ -82,6 +82,9 @@ jobs:
     steps:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       - name: 🔧 Install graphviz
         run: sudo apt-get install -y --no-install-recommends graphviz

@@ -98,14 +101,14 @@ jobs:

       - name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
         if: inputs.unittest_xml_artifact != ''
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.unittest_xml_artifact }}
           path: ${{ inputs.unittest_xml_directory }}

       - name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
         if: inputs.coverage_json_artifact != ''
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.coverage_json_artifact }}
           path: ${{ inputs.coverage_report_json_directory }}

@@ -135,6 +138,9 @@ jobs:
     steps:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       - name: 🔧 Install graphviz
         run: sudo apt-get install -y --no-install-recommends graphviz

@@ -151,14 +157,14 @@ jobs:

       - name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
         if: inputs.unittest_xml_artifact != ''
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.unittest_xml_artifact }}
           path: ${{ inputs.unittest_xml_directory }}

       - name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
         if: inputs.coverage_json_artifact != ''
-        uses: actions/download-artifact@v4
+        uses: pyTooling/download-artifact@v4
         with:
           name: ${{ inputs.coverage_json_artifact }}
           path: ${{ inputs.coverage_report_json_directory }}
.github/workflows/UnitTesting.yml (vendored): 3 changes

@@ -147,6 +147,9 @@ jobs:
     steps:
       - name: ⏬ Checkout repository
         uses: actions/checkout@v4
+        with:
+          lfs: true
+          submodules: true

       # Package Manager steps
       - name: 🔧 Install homebrew dependencies on macOS
@@ -6,7 +6,7 @@ on:

 jobs:
   Params:
-    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
+    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
     with:
       name: Example
       python_version_list: "3.12 3.13"

@@ -50,7 +50,7 @@ jobs:
           retention-days: 1

   ArtifactCleanUp:
-    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
+    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
     needs:
       - Params
       - Testing
.github/workflows/_Checking_JobTemplates.yml (vendored): 41 changes

@@ -6,28 +6,26 @@ on:

 jobs:
   ConfigParams:
-    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
+    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@r2
-    needs:
-      - DocCoverage
     with:
       package_name: pyDummy

   UnitTestingParams:
-    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
+    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
     with:
       name: pyDummy
       python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
       # disable_list: "windows:pypy-3.10"

   PlatformTestingParams:
-    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
+    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
     with:
       name: Platform
       python_version_list: ""
       system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"

   UnitTesting:
-    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
+    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
     needs:
       - UnitTestingParams
     with:

@@ -40,7 +38,7 @@ jobs:
       # coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}

   PlatformTesting:
-    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
+    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r2
     needs:
       - PlatformTestingParams
     with:

@@ -55,7 +53,7 @@ jobs:
       coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}

   # Coverage:
-  #   uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
+  #   uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r2
   #   needs:
   #     - UnitTestingParams
   #   with:

@@ -65,7 +63,7 @@ jobs:
   #     codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

   StaticTypeCheck:
-    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
+    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r2
     needs:
       - ConfigParams
       - UnitTestingParams

@@ -80,14 +78,15 @@ jobs:
   DocCoverage:
     uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1
     needs:
+      - ConfigParams
       - UnitTestingParams
     with:
       python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
-      directory: sphinx_reports
+      directory: ${{ needs.ConfigParams.outputs.package_directors }}
       # fail_below: 70

   Package:
-    uses: pyTooling/Actions/.github/workflows/Package.yml@main
+    uses: pyTooling/Actions/.github/workflows/Package.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting

@@ -98,7 +97,7 @@ jobs:
       artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

   PublishCoverageResults:
-    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r2
     needs:
       - UnitTestingParams
       - UnitTesting

@@ -113,7 +112,7 @@ jobs:
       codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

   PublishTestResults:
-    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r2
     needs:
       - UnitTesting
       - PlatformTesting

@@ -121,17 +120,17 @@ jobs:
       additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'

   # VerifyDocs:
-  #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
+  #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r2
   #   needs:
   #     - UnitTestingParams
   #   with:
   #     python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

   Documentation:
-    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
+    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r2
     needs:
-      - UnitTestingParams
       - ConfigParams
+      - UnitTestingParams
       - PublishTestResults
       - PublishCoverageResults
       # - VerifyDocs

@@ -155,7 +154,7 @@ jobs:
       xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

   PDFDocumentation:
-    uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
+    uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@r2
     needs:
       - UnitTestingParams
       - Documentation

@@ -165,7 +164,7 @@ jobs:
       pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

   PublishToGitHubPages:
-    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r2
     needs:
       - UnitTestingParams
       - Documentation

@@ -179,7 +178,7 @@ jobs:
       typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

   ReleasePage:
-    uses: pyTooling/Actions/.github/workflows/Release.yml@main
+    uses: pyTooling/Actions/.github/workflows/Release.yml@r2
     if: startsWith(github.ref, 'refs/tags')
     needs:
       - UnitTesting

@@ -190,7 +189,7 @@ jobs:
       - PublishToGitHubPages

   PublishOnPyPI:
-    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
+    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r2
     if: startsWith(github.ref, 'refs/tags')
     needs:
       - UnitTestingParams

@@ -204,7 +203,7 @@ jobs:
       PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

   ArtifactCleanUp:
-    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
+    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r2
     needs:
       - UnitTestingParams
       - PlatformTestingParams
@@ -1,4 +1,4 @@
-name: Verification of Pipeline Templates
+name: Verification of Pipeline Templates (Namespace Package)

 on:
   push:

@@ -6,7 +6,7 @@ on:

 jobs:
   NamespacePackage:
-    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
+    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2
     with:
       package_namespace: pyExamples
       package_name: Extensions
101
.github/workflows/_Checking_Nightly.yml
vendored
Normal file
101
.github/workflows/_Checking_Nightly.yml
vendored
Normal file
@@ -0,0 +1,101 @@
name: Verification of Nightly Releases

on:
  push:
  workflow_dispatch:

jobs:
  Build:
    name: Build something
    runs-on: ubuntu-24.04

    steps:
      - name: 🖉 Build 1
        run: |
          echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
          echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
          echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log

      - name: 📤 Upload artifact
        uses: pyTooling/upload-artifact@v4
        with:
          name: document
          path: |
            document1.txt
            *.log
          if-no-files-found: error
          retention-days: 1

      - name: 🖉 Program
        run: |
          echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
          echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py

      - name: 📤 Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: other
          path: |
            *.txt
            *.py
          if-no-files-found: error
          retention-days: 1

  NightlyPage:
    uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
    needs:
      - Build
    secrets: inherit
    permissions:
      contents: write
      actions: write
      # attestations: write
    with:
      prerelease: true
      replacements: |
        version=4.2.0
        tool=myTool
        prog=program
      nightly_title: "Nightly Release"
      nightly_description: |
        This *nightly* release contains all the latest and important artifacts created by GHDL's CI pipeline.

        # GHDL %version%

        GHDL offers the simulator and synthesis tool for VHDL. GHDL can be built for various backends:
        * `gcc` - using the GCC compiler framework
        * `mcode` - in-memory code generation
        * `llvm` - using the LLVM compiler framework
        * `llvm-jit` - using the LLVM compiler framework, but in memory

        The following asset categories are provided for GHDL:
        * macOS x86-64 builds as TAR/GZ file
        * macOS aarch64 builds as TAR/GZ file
        * Ubuntu 24.04 LTS builds as TAR/GZ file
        * Windows builds for standalone usage (without MSYS2) as ZIP file
        * MSYS2 packages as TAR/ZST file

        # pyGHDL %version%

        The Python package `pyGHDL` offers a Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
        In addition to the low-level binding layer, pyGHDL offers:
        * a Language Server Protocol (LSP) instance for e.g. live code checking by editors
        * a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)

        The following asset categories are provided for pyGHDL:
        * Platform-specific Python wheel package for Ubuntu incl. `pyGHDL...so`
        * Platform-specific Python wheel package for Windows incl. `pyGHDL...dll`
      assets: |
        document: document1.txt: Documentation
        document: build.log: Logfile - %tool% - %tool%
        other: document1.txt: SBOM - %version%
        other: %prog%.py: Application - %tool% - %version%
        document:!archive1.zip: Archive 1 - zip
        document:!archive2.tgz: Archive 2 - tgz
        document:!archive3.tar.gz: Archive 3 - tar.gz
        document:!archive4.tzst: Archive 4 - tzst
        document:!archive5.tar.zst:Archive 5 - tar.zst
        document:$archive6.tgz: Archive 6 - tgz + dir
        document:$archive7.tar.gz: Archive 7 - tar.gz + dir
        document:$archive8.tzst: Archive 8 - tzst + dir
        document:$archive9.tar.zst:Archive 9 - tar.zst + dir

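As exercised by this checking workflow, each `assets:` line appears to follow an `artifact : file-or-pattern : title` form, with `%name%` placeholders filled in from the `replacements` list, and with a `!` or `$` prefix on the file name apparently requesting that the artifact be packed into an archive (the `$` variants keeping a top-level directory, judging by the "+ dir" titles). A purely illustrative parser for that assumed format, not the NightlyRelease.yml implementation, could look like this:

# Purely illustrative parser for the assumed "artifact:file:title" asset lines
# above; names, return shape, and the '!'/'$' interpretation are assumptions,
# not the NightlyRelease.yml implementation.
from typing import Dict, Tuple

def applyReplacements(text: str, replacements: Dict[str, str]) -> str:
    for key, value in replacements.items():
        text = text.replace(f"%{key}%", value)
    return text

def parseAssetLine(line: str, replacements: Dict[str, str]) -> Tuple[str, str, str, bool, bool]:
    artifact, file, title = (part.strip() for part in line.split(":", 2))
    asArchive = file.startswith(("!", "$"))   # assumed: pack the artifact into this archive
    withDirectory = file.startswith("$")      # assumed: keep a top-level directory ("+ dir")
    file = file.lstrip("!$")
    return artifact, applyReplacements(file, replacements), applyReplacements(title, replacements), asArchive, withDirectory

replacements = dict(item.split("=", 1) for item in ["version=4.2.0", "tool=myTool", "prog=program"])
print(parseAssetLine("other: %prog%.py: Application - %tool% - %version%", replacements))
# ('other', 'program.py', 'Application - myTool - 4.2.0', False, False)
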
14 .github/workflows/_Checking_Parameters.yml vendored
@@ -6,24 +6,24 @@ on:

jobs:
  Params_Default:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example

  Params_PythonVersions:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"

  Params_Systems:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      system_list: "windows mingw32 mingw64"

  Params_Include:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      python_version_list: "3.11"
@@ -31,7 +31,7 @@ jobs:
      include_list: "ubuntu:3.12 ubuntu:3.13"

  Params_Exclude:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      python_version_list: "3.12"
@@ -39,7 +39,7 @@ jobs:
      exclude_list: "windows:3.12 windows:3.13"

  Params_Disable:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      python_version_list: "3.12"
@@ -47,7 +47,7 @@ jobs:
      disable_list: "windows:3.12 windows:3.13"

  Params_All:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r2
    with:
      name: Example
      python_version_list: "3.12 3.13"

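The include/exclude/disable lists above are space-separated `system:python` pairs. Purely as an illustration of that assumed format (not the Parameters.yml implementation), such a list could be expanded into job-matrix entries like this:

# Illustration of the assumed "system:python" pair format used by
# include_list / exclude_list / disable_list above; names and the resulting
# dictionary layout are assumptions, not the Parameters.yml implementation.
def parsePairList(pairs: str):
    entries = []
    for item in pairs.split():
        system, python = item.split(":", 1)
        entries.append({"system": system, "python": python})
    return entries

print(parsePairList("ubuntu:3.12 ubuntu:3.13"))
# [{'system': 'ubuntu', 'python': '3.12'}, {'system': 'ubuntu', 'python': '3.13'}]
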
@@ -1,4 +1,4 @@
name: Verification of Pipeline Templates
name: Verification of Pipeline Templates (Simple Package)

on:
  push:
@@ -6,7 +6,7 @@ on:

jobs:
  SimplePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@r2
    with:
      package_name: pyDummy
    secrets:

2 .gitignore vendored
@@ -31,7 +31,7 @@ doc/pyDummy/**/*.*
# BuildTheDocs
doc/_theme/**/*.*

# IntelliJ project files
# PyCharm project files
/.idea/workspace.xml

# Git files

@@ -11,17 +11,9 @@ docutils_stubs ~= 0.0.22
sphinx_rtd_theme ~= 3.0

# Sphinx Extensions
#sphinx.ext.coverage
#sphinxcontrib-actdiag>=0.8.5
sphinxcontrib-mermaid>=0.9.2
#sphinxcontrib-seqdiag>=0.8.5
#sphinxcontrib-textstyle>=0.2.1
#sphinxcontrib-spelling>=2.2.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
sphinx_autodoc_typehints ~= 2.5
# changelog>=0.3.5
sphinx_reports ~= 0.7

# BuildTheDocs Extensions (mostly patched Sphinx extensions)

@@ -1,6 +1,6 @@
[build-system]
requires = [
  "setuptools ~= 75.3",
  "setuptools ~= 75.5",
  "wheel ~= 0.45",
  "pyTooling ~= 8.0"
]

@@ -8,7 +8,7 @@ print(f"Python: {version}")

def loadRequirementsFile(requirementsFile: Path):
    requirements = []
    with requirementsFile.open("r") as file:
    with requirementsFile.open("r", encoding="utf-8") as file:
        for line in file.readlines():
            line = line.strip()
            if line.startswith("#") or line.startswith("https") or line == "":
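The hunk above only touches the file-open call; for readability, here is a sketch of how the rest of the helper presumably continues (the skip, append, and return lines are assumptions, not part of the diff):

# Sketch of how the helper presumably continues; the skip/append/return lines
# are assumptions for readability, not part of the diff above.
from pathlib import Path

def loadRequirementsFile(requirementsFile: Path):
    requirements = []
    with requirementsFile.open("r", encoding="utf-8") as file:
        for line in file.readlines():
            line = line.strip()
            if line.startswith("#") or line.startswith("https") or line == "":
                continue                  # assumed: skip comments, URLs and blank lines
            requirements.append(line)     # assumed: collect the requirement specifier
    return requirements
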
@@ -84,7 +84,7 @@ for dependency in dependencies:
# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+") as f:
with github_output.open("a+", encoding="utf-8") as f:
    f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

print(f"GITHUB_OUTPUT:")

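This second hunk appends a `name=value` line to the file referenced by the `GITHUB_OUTPUT` environment variable, which is the standard GitHub Actions mechanism for exposing step outputs. A self-contained sketch of that pattern, with a hypothetical fallback for running the script outside of a workflow (the package values are made up):

# Sketch of the GITHUB_OUTPUT "name=value" convention used above; the package
# list and the local fallback are illustrative assumptions, not pipeline code.
from os import getenv
from pathlib import Path

pacboyPackages = ["python-pip:p", "python-wheel:p"]   # hypothetical example values

outputFile = getenv("GITHUB_OUTPUT")
if outputFile is None:
    # Outside of GitHub Actions: just show what would be written.
    print(f"pacboy_packages={' '.join(pacboyPackages)}")
else:
    # Inside GitHub Actions: append one "name=value" line per output.
    with Path(outputFile).open("a+", encoding="utf-8") as f:
        f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

In a workflow, a later step would then read the value as ${{ steps.<id>.outputs.pacboy_packages }}, assuming the emitting step has an id.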