Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-18 05:56:57 +08:00)
Compare commits
17 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c81d139080 | |
| | c64e054bcd | |
| | 78fdb584aa | |
| | a456635686 | |
| | befc59f22d | |
| | d6fc0efd47 | |
| | c018acc3c1 | |
| | d74c610bb4 | |
| | edc4ab3e86 | |
| | 0a338ae8b7 | |
| | 4069da0a74 | |
| | 679ec24c80 | |
| | 3a13486ea6 | |
| | 34fb9c9869 | |
| | 7523c4adca | |
| | 530ad7a4a1 | |
| | bd3f2afaf3 | |
4 .github/workflows/ApplicationTesting.yml vendored

@@ -188,8 +188,8 @@ jobs:
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
if: matrix.system == 'msys2'
with:
msystem: ${{ matrix.runtime }}
update: true

@@ -198,8 +198,8 @@ jobs:
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
if: matrix.system != 'msys2'
uses: actions/setup-python@v5
if: matrix.system != 'msys2'
with:
python-version: ${{ matrix.python }}
4 .github/workflows/ArtifactCleanUp.yml vendored

@@ -47,13 +47,13 @@ jobs:

steps:
- name: 🗑️ Delete package Artifacts
if: ${{ ! startsWith(github.ref, 'refs/tags') }}
uses: geekyeggo/delete-artifact@v5
if: ${{ ! startsWith(github.ref, 'refs/tags') }}
with:
name: ${{ inputs.package }}

- name: 🗑️ Delete remaining Artifacts
if: ${{ inputs.remaining != '' }}
uses: geekyeggo/delete-artifact@v5
if: ${{ inputs.remaining != '' }}
with:
name: ${{ inputs.remaining }}
2 .github/workflows/BuildTheDocs.yml vendored

@@ -49,8 +49,8 @@ jobs:
skip-deploy: true

- name: 📤 Upload 'documentation' artifacts
if: inputs.artifact != ''
uses: pyTooling/upload-artifact@v4
if: inputs.artifact != ''
with:
name: ${{ inputs.artifact }}
working-directory: doc/_build/html
43 .github/workflows/NightlyRelease.yml vendored

@@ -84,11 +84,16 @@ on:
type: string
required: false
default: '__pyTooling_upload_artifact__.tar'
can-fail:
type: boolean
required: false
default: false

jobs:
Release:
name: 📝 Update 'Nightly Page' on GitHub
runs-on: ${{ inputs.ubuntu_image }}
continue-on-error: ${{ inputs.can-fail }}
permissions:
contents: write
actions: write

@@ -175,7 +180,7 @@ jobs:
cat <<EOF >> __NoTeS__.md

--------
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
Published from [${{ github.workflow }}](https://github.com/Paebbels/ghdl/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
EOF

printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "

@@ -197,7 +202,7 @@ jobs:
ANSI_LIGHT_RED=$'\x1b[91m'
ANSI_LIGHT_GREEN=$'\x1b[92m'
ANSI_LIGHT_YELLOW=$'\x1b[93m'
ANSI_LIGHT_BLUE="\e[94m"
ANSI_LIGHT_BLUE=$'\x1b[94m'
ANSI_NOCOLOR=$'\x1b[0m'

export GH_TOKEN=${{ github.token }}

@@ -254,7 +259,7 @@ jobs:
# A dictionary to check for duplicate asset files in release
declare -A assetFilenames
while IFS=$'\r\n' read -r assetLine; do
if [[ "${assetLine}" == "" ]]; then
if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then
continue
fi

@@ -285,7 +290,7 @@ jobs:
if [[ -n "${assetFilenames[$asset]}" ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
else
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"

@@ -305,7 +310,7 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
downloadedArtifacts[$artifact]=1

@@ -343,19 +348,21 @@ jobs:
if [[ "${asset}" == !*.zip ]]; then
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
asset="${asset##*!}"
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
(
cd "${artifact}" && \
zip -r "../${asset}" *
)
if [[ $? -eq 0 ]]; then
retCode=$?
printf "::endgroup::\n"
if [[ $retCode -eq 0 ]]; then
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
uploadFile="${asset}"
else
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then

@@ -384,7 +391,7 @@ jobs:
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then

@@ -413,7 +420,7 @@ jobs:
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
elif [[ -e "${artifact}/${asset}" ]]; then

@@ -423,18 +430,18 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi

# Add asset to JSON inventory
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
if [[ "${categories}" != "${title}" ]]; then
printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..."
printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
category=""
jsonEntry=$(jq -c -n \
--arg title "${title}" \
--arg file "${uploadFile}" \
--arg file "${uploadFile#*/}" \
'{"file": $file, "title": $title}' \
)

@@ -450,7 +457,7 @@ jobs:
'$inventory * {"files": $file}' \
)
else
printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
fi
fi
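The inventory hunks above replace `${uploadFile}` with `${uploadFile#*/}`, which drops everything up to and including the first `/` (the artifact directory) before the path is written to the JSON inventory. A minimal sketch with illustrative values (the file name and title below are not taken from the workflow):

```bash
#!/usr/bin/env bash
# '#*/' removes the shortest prefix matching '*/', i.e. the leading artifact directory.
uploadFile="myArtifact/subdir/report.html"   # illustrative path
title="Documentation"                        # illustrative title

printf '%s\n' "${uploadFile#*/}"             # -> subdir/report.html

# Build one inventory entry the same way the workflow's jq call does.
jsonEntry=$(jq -c -n \
  --arg title "${title}" \
  --arg file  "${uploadFile#*/}" \
  '{"file": $file, "title": $title}' \
)
printf '%s\n' "${jsonEntry}"                 # -> {"file":"subdir/report.html","title":"Documentation"}
```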
@@ -463,7 +470,7 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
done <<<'${{ inputs.assets }}'

@@ -486,7 +493,7 @@ jobs:
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
ERRORS=1
ERRORS=$((ERRORS + 1))
continue
fi
fi

@@ -495,8 +502,8 @@ jobs:
tree -pash -L 3 .
printf "::endgroup::\n"

if [[ $ERROR -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
if [[ $ERRORS -ne 0 ]]; then
printf "%s\n" "${ANSI_LIGHT_RED}${ERRORS} errors detected in previous steps.${ANSI_NOCOLOR}"
exit 1
fi
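Two patterns recur across the NightlyRelease hunks: the exit status of the compression subshell is saved into `retCode` immediately, because the `printf "::endgroup::"` that follows would overwrite `$?`, and the single-shot `ERRORS=1` flag becomes a counter that is checked once at the end (also fixing the earlier `$ERROR`/`$ERRORS` mismatch). A minimal bash sketch of that pattern with illustrative names, not the workflow script itself:

```bash
#!/usr/bin/env bash
# Sketch only: capture $? before the next command, count errors, fail once at the end.
ERRORS=0

artifact="myArtifact"        # illustrative artifact directory
asset="myArtifact.zip"       # illustrative asset name

printf "::group:: Compressing artifact '%s' to '%s' ...\n" "${artifact}" "${asset}"
(
  cd "${artifact}" && \
  zip -r "../${asset}" *
)
retCode=$?                   # must be saved here; the following printf resets $?
printf "::endgroup::\n"

if [[ ${retCode} -ne 0 ]]; then
  printf "::error title=CompressionError::Couldn't compress '%s' to zip file '%s'.\n" "${artifact}" "${asset}"
  ERRORS=$((ERRORS + 1))     # accumulate instead of overwriting a flag
fi

# ... further assets would be processed the same way ...

if [[ ${ERRORS} -ne 0 ]]; then
  printf "%s errors detected in previous steps.\n" "${ERRORS}"
  exit 1
fi
```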
7 .github/workflows/Parameters.yml vendored

@@ -197,9 +197,10 @@ jobs:
"3.13": { "icon": "🟢", "until": "2029.10" },
"3.14": { "icon": "🟣", "until": "2030.10" },
"pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
"pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
"pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
"pypy-3.10": { "icon": "⟲🟡", "until": "????.??" },
"pypy-3.8": { "icon": "⟲⚫", "until": "????.??" },
"pypy-3.9": { "icon": "⟲🔴", "until": "????.??" },
"pypy-3.10": { "icon": "⟲🟠", "until": "????.??" },
"pypy-3.11": { "icon": "⟲🟡", "until": "????.??" },
},
# Runner systems (runner images) supported by GitHub Actions
"sys": {
14 .github/workflows/PublishCoverageResults.yml vendored

@@ -76,7 +76,7 @@ jobs:
lfs: true
submodules: true

- name: Download Artifacts
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.coverage_artifacts_pattern }}

@@ -178,9 +178,9 @@ jobs:
tree -pash report/coverage/html

- name: 📤 Upload 'Coverage SQLite Database' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_sqlite_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_sqlite_artifact }}
path: .coverage

@@ -188,9 +188,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Coverage XML Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_xml_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_xml_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_xml }}

@@ -198,9 +198,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Coverage JSON Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_json_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_json }}

@@ -208,9 +208,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Coverage HTML Report' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.coverage_html_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.coverage_html_artifact }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}

@@ -219,18 +219,18 @@ jobs:
retention-days: 1

- name: 📊 Publish code coverage at CodeCov
uses: codecov/codecov-action@v5
if: inputs.CodeCov == true
continue-on-error: true
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests
env_vars: PYTHON

- name: 📉 Publish code coverage at Codacy
uses: codacy/codacy-coverage-reporter-action@v1
if: inputs.Codacy == true
continue-on-error: true
uses: codacy/codacy-coverage-reporter-action@v1
with:
project-token: ${{ secrets.codacy_token }}
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
6 .github/workflows/PublishTestResults.yml vendored

@@ -65,7 +65,7 @@ jobs:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: Download Artifacts
- name: 📥 Download Artifacts
uses: pyTooling/download-artifact@v4
with:
pattern: ${{ inputs.unittest_artifacts_pattern }}

@@ -82,7 +82,7 @@ jobs:
- name: Rename JUnit files and move them all into 'junit/'
run: |
mkdir -p junit
find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
find artifacts/ -type f -path "*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
tree -pash junit

- name: 🔁 Merge JUnit Unit Test Summaries

@@ -100,8 +100,8 @@ jobs:
reporter: java-junit

- name: 📤 Upload merged 'JUnit Test Summary' artifact
if: inputs.merged_junit_artifact != ''
uses: pyTooling/upload-artifact@v4
if: inputs.merged_junit_artifact != ''
with:
name: ${{ inputs.merged_junit_artifact }}
path: Unittesting.xml
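The renaming step above broadens the `find` pattern from `*TestReportSummary*.xml` to `*.xml`, so every XML report in every downloaded artifact is copied into `junit/` with its artifact directory name as a prefix, keeping reports from different matrix jobs from colliding. A standalone sketch of that step, with quoting added and an illustrative directory name:

```bash
#!/usr/bin/env bash
# Sketch: artifacts/unit-ubuntu-3.13/results.xml -> junit/unit-ubuntu-3.13.results.xml
# (directory name is illustrative; quoting added for robustness against spaces)
mkdir -p junit
find artifacts/ -type f -path "*.xml" -exec sh -c \
  'cp -v "$0" "junit/$(basename "$(dirname "$0")").$(basename "$0")"' {} ';'
tree -pash junit
```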
4 .github/workflows/PublishToGitHubPages.yml vendored

@@ -62,15 +62,15 @@ jobs:
path: public

- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
if: ${{ inputs.coverage != '' }}
uses: pyTooling/download-artifact@v4
if: ${{ inputs.coverage != '' }}
with:
name: ${{ inputs.coverage }}
path: public/coverage

- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
if: ${{ inputs.typing != '' }}
uses: pyTooling/download-artifact@v4
if: ${{ inputs.typing != '' }}
with:
name: ${{ inputs.typing }}
path: public/typing
2 .github/workflows/Release.yml vendored

@@ -49,8 +49,8 @@ jobs:
echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT

- name: 📑 Create Release Page
id: createReleasePage
uses: actions/create-release@v1
id: createReleasePage
env:
GITHUB_TOKEN: ${{ github.token }}
with:
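For context on the `$GITHUB_OUTPUT` line in this hunk: key=value pairs appended to the file named by `GITHUB_OUTPUT` become available to later steps as `steps.<step-id>.outputs.<key>`. A small sketch of how `RELEASE_DATETIME` might be produced and written inside such a step; only the write itself appears in the hunk, so the date format here is an assumption:

```bash
#!/usr/bin/env bash
# Sketch: runs inside a GitHub Actions step, where GITHUB_OUTPUT is set by the runner.
RELEASE_DATETIME="$(date -u '+%Y-%m-%dT%H:%M:%S')"   # assumed format, not from the workflow
echo "datetime=${RELEASE_DATETIME}" >> "$GITHUB_OUTPUT"
# A later step can then reference: ${{ steps.<step-id>.outputs.datetime }}
```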
16 .github/workflows/SphinxDocumentation.yml vendored

@@ -100,15 +100,15 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}

@@ -122,9 +122,9 @@ jobs:
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html

- name: 📤 Upload 'HTML Documentation' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.html_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/html

@@ -157,15 +157,15 @@ jobs:
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.unittest_xml_artifact != ''
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
if: inputs.coverage_json_artifact != ''
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}

@@ -227,7 +227,7 @@ jobs:
if [[ $found -eq 0 ]]; then
printf "[SKIPPED]\n"
fi
done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')
done < <(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')

- name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189
if: inputs.latex_artifact != ''

@@ -259,13 +259,13 @@ jobs:
printf "[FAILED]\n"
fi
fi
done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::')
done < <(find . -type f -iname "*.$imageExt" | sed 's:./::')
done

- name: 📤 Upload 'LaTeX Documentation' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.latex_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/latex
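The two `done` changes above swap a here-string over a command substitution for a process substitution. Both forms keep the loop in the current shell, but `<<<$(find ...)` buffers the complete `find` output before the loop starts and still produces one empty iteration when nothing matches, whereas `< <(find ...)` streams the output and iterates zero times on an empty result. A small sketch; the file pattern is chosen for illustration only:

```bash
#!/usr/bin/env bash
# Sketch of the '< <(...)' form: streams find's output and skips the loop
# entirely when no file matches, while variables set inside the loop persist.
count=0
while IFS= read -r file; do
  printf "processing '%s'\n" "${file}"
  count=$((count + 1))
done < <(find . -type f -iname "*.svg" | sed 's:./::')

printf "%d file(s) processed\n" "${count}"
```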
4 .github/workflows/StaticTypeCheck.yml vendored

@@ -87,9 +87,9 @@ jobs:
run: ${{ inputs.commands }}

- name: 📤 Upload 'Static Typing Report' HTML artifact
uses: pyTooling/upload-artifact@v4
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.html_report }}

@@ -98,9 +98,9 @@ jobs:
retention-days: 1

- name: 📤 Upload 'Static Typing Report' JUnit artifact
uses: pyTooling/upload-artifact@v4
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.junit_artifact }}
path: ${{ inputs.junit_report }}
6 .github/workflows/UnitTesting.yml vendored

@@ -282,8 +282,8 @@ jobs:
# Python setup

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
if: matrix.system == 'msys2'
with:
msystem: ${{ matrix.runtime }}
update: true

@@ -292,8 +292,8 @@ jobs:
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
if: matrix.system != 'msys2'
uses: actions/setup-python@v5
if: matrix.system != 'msys2'
with:
python-version: ${{ matrix.python }}

@@ -392,9 +392,9 @@ jobs:
# Upload artifacts

- name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact
uses: pyTooling/upload-artifact@v4
if: inputs.unittest_xml_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: ${{ inputs.unittest_report_xml_directory }}
3 .github/workflows/_Checking_Nightly.yml vendored

@@ -51,6 +51,7 @@ jobs:
actions: write
# attestations: write
with:
can-fail: true
prerelease: true
replacements: |
version=4.2.0

@@ -88,6 +89,7 @@ jobs:
actions: write
# attestations: write
with:
can-fail: true
replacements: |
version=4.2.0
tool=myTool

@@ -105,6 +107,7 @@ jobs:
inventory-version: 4.2.5
inventory-categories: "kind1,kind2"
assets: |
# artifact: file: labels: asset title
document: document1.txt: doc,html: Documentation
document: build.log: build,log: Logfile - %tool% - %tool%
other: document1.txt: build,SBOM:SBOM - %version%
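The `replacements` and `assets` inputs shown above pair `name=value` lines with `%name%` placeholders in the asset list. Purely as an illustration of that relationship, and not as the action's actual implementation, a placeholder expansion could look like this:

```bash
#!/usr/bin/env bash
# Hypothetical sketch only: expand %name% placeholders using name=value pairs.
replacements=$'version=4.2.0\ntool=myTool'
assets='document: build.log: build,log: Logfile - %tool% - %tool%'

while IFS= read -r replacement; do
  [[ -z "${replacement}" ]] && continue
  name="${replacement%%=*}"            # text before the first '='
  value="${replacement#*=}"            # text after the first '='
  assets="${assets//%${name}%/${value}}"
done <<< "${replacements}"

printf '%s\n' "${assets}"              # -> document: build.log: build,log: Logfile - myTool - myTool
```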
2 dist/requirements.txt vendored

@@ -1,2 +1,2 @@
wheel ~= 0.45
twine ~= 6.0
twine ~= 6.1

@@ -1,9 +1,9 @@
-r ../requirements.txt

pyTooling ~= 8.0
pyTooling ~= 8.2

# Enforce latest version on ReadTheDocs
sphinx ~= 8.1
sphinx ~= 8.2
docutils ~= 0.21
docutils_stubs ~= 0.0.22

@@ -15,5 +15,5 @@ sphinxcontrib-mermaid ~= 1.0
autoapi >= 2.0.1
sphinx_design ~= 0.6.1
sphinx-copybutton >= 0.5.2
sphinx_autodoc_typehints ~= 2.5
sphinx_autodoc_typehints ~= 3.1
sphinx_reports ~= 0.7
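All of the requirement bumps above use PEP 440 compatible-release pins: `twine ~= 6.1` allows any 6.x release from 6.1 upward, and `wheel ~= 0.45` any 0.x release from 0.45 upward. Installing the bumped pins directly, as a sketch:

```bash
#!/usr/bin/env bash
# '~=' is a compatible-release specifier: ~=6.1 means >=6.1,<7.0; ~=0.45 means >=0.45,<1.0.
python -m pip install --disable-pip-version-check "wheel~=0.45" "twine~=6.1"
python -m pip install --disable-pip-version-check "pyTooling~=8.2" "sphinx~=8.2" "sphinx_autodoc_typehints~=3.1"
```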
@@ -64,11 +64,11 @@
:target: https://pyTooling.github.io/pyTooling/

.. # Gitter
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
:alt: Documentation License
:height: 22
:target: https://gitter.im/hdl/community
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.4.4"
__keywords__ = ["GitHub Actions"]

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -34,7 +34,7 @@ A module for a set of dummy classes.

__author__ = "Patrick Lehmann"
__email__ = "Paebbels@gmail.com"
__copyright__ = "2017-2024, Patrick Lehmann"
__copyright__ = "2017-2025, Patrick Lehmann"
__license__ = "Apache License, Version 2.0"
__version__ = "0.14.8"
__keywords__ = ["GitHub Actions"]
@@ -1,8 +1,8 @@
[build-system]
requires = [
"setuptools ~= 75.5",
"setuptools ~= 75.8",
"wheel ~= 0.45",
"pyTooling ~= 8.0"
"pyTooling ~= 8.2"
]
build-backend = "setuptools.build_meta"

@@ -1 +1 @@
pyTooling ~= 8.0
pyTooling ~= 8.2
2 setup.py

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -8,6 +8,6 @@ pytest ~= 8.3
pytest-cov ~= 6.0

# Static Type Checking
mypy ~= 1.13
mypy ~= 1.15
typing_extensions ~= 4.12
lxml ~= 5.3
@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #

@@ -11,7 +11,7 @@
# #
# License: #
# ==================================================================================================================== #
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
# Copyright 2017-2025 Patrick Lehmann - Bötzingen, Germany #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #