Mirror of https://github.com/pyTooling/Actions.git, synced 2026-02-14 20:16:56 +08:00
Compare commits
246 Commits
9ceefdbf5d b1bc6e50a3 fdee9e011f 3e50c2ed5b 9e0b1c69f1 f084e02f01 c34d4e240e 6d04009bd6
50d32d1950 7733e8998f 4c28b9d003 bafea7d082 9ca7b04f37 7a0ee75fd5 bc876f7171 edca070047
21c2f48dad 9338fbd106 6869d0f666 bef77effcb 9808b6c7f9 e7e95b446d df0889b86b 87978fd1f6
3e95c89362 f737b07992 77a6b4c00a ef5c852097 bec076bd66 c924651632 b9b9b0b1d4 1cef082753
c3a999c754 9760023567 fbbb39046a 1d0c8b36e8 c9d0e8e9c6 00269cf507 a15499a807 13076012dd
b2ac6bc0d9 e88aa7b973 855d432978 bf6ba9ba19 93cdeb9cba 72a8705e6c ea96cce0d1 59ce0fa84a
c8362d99cc 0e9d878f0e 5d67896606 4b058faf3e 474a8024d1 5dc19a5d65 188feb556b d58db55086
ee9a3fbdcd 8dfc484c42 960b7089e7 706ef39595 04881fc4ca e444e57112 cea83bc2ae 440553e7fb
26461822b5 7a341dbe8f 33b99a3b4e 5e0aa52e5d 2862238ee5 ebd20f5aea 2004711d48 02d386a9e1
e0af5055a8 cc1dade947 b87d11502b fa96ee9197 0495bfb18c f62d5d93ea 13c1a56f92 da3cdbe96a
5fe793e3fa c38ff2af3c 98f0fffaf6 0fef6f8a4d 92ce834303 607637b278 dfc9221529 d4afc820ab
ae13aa2dff 7879c05ab7 df4815f666 8b7a8009a6 6b4af68fa4 0db1821658 6d84311338 4406abe788
e9d0dc3dba f9a74102d9 b33e0f2782 ae32d20719 c3c6a09a9b 87fa2b693a be27e58d8c 6d039bba90
0753edca95 461931099a 0802f6d02f 3b95a36955 583eed8c84 0e567aebc4 60281e01e2 fc9ddee4e2
66572dca45 5663891b89 e4881c0956 9ae9a199bb cfdff6a993 c8c793dd86 907c0e2239 209f10675c
b2c4408f73 981141f194 4eb6a73d77 e2aa830a51 3920096e31 8ad5a861b4 370e7ac1d9 df7cf39d3e
b044ad96ca a4a54df1da 19b2b7c6b6 6844d48a6f 67d7ec2c73 41e1e109c3 8a801bd851 0efec87463
cf6fbd4d8e e678c1f377 94a0c91f69 79ed372079 ce36b28f42 dfeee1fafe a43485a5f1 367819ac38
ec038f96e8 3ed3cc82f0 582c5620b7 5237a1c53c a6a92e9c02 56726cf929 60967bdde1 d754745237
996c1b6f94 3cb8ca83a4 316eaa115e 26586b21cc 1c90019ed0 f5511dc0bd e0bd24de74 29fea10f2c
6757b3e5f3 bd9357990c f567f4bf0d f488d4367b 1306a815b3 cebd214123 3b00121ca5 aba4e3d496
6a192321b6 fc8bb4241b 9171343062 e526218346 6c0e90b968 c2769bde2a 49ff1bdab8 24aa375ab6
d15059eccb 70e8f32351 025cc4ff4e d3889a00ab 674b4ed239 e9e62c5ef6 8bc6ca673c 2e15b32bad
e5af317346 7aae4b2aaa 8764150071 5cc87cf754 301584a670 3c61bc24ea 69d566d369 439290c700
96bccfbd18 7ef3bc0a4a e71e5ee302 cdc60e317c 22ce7c64e2 4605e5a374 0a9177eaff cf2c3f19cf
26768a3855 91ea5db1ce 81c1f12836 5f6d1b5254 a64e575bdd e40d45fcaa 9a54fc4002 8fa62eeab1
1748911f70 4aa82d16d3 be1eaa4de4 6285d65fd0 28d2560506 d3eb6e611d b0edc772b0 968e0f4ef9
364a2667ed d5820666d4 dc5eb76f58 f776c3cc5a 9992109467 7ace9f065b 4a9ba5ad6f d7ad8f1387
8d21ca154d 1ab3b1a1e9 ebdc386c6e f6c7c4c275 c78c1c8503 2d10c74d2f 88175d0d21 0a1d11d24f
222eb31ddc c997afb2c2 6c73825f18 03827ea0b6 35660ac998 221696c46a 477e89aba2 b5f5716522
f37ab8dcb1 52b0f2398e 798c2cb9db e8b0902eb2 d9ff527a75 4afadf2861
.editorconfig (new file, 30 lines)
@@ -0,0 +1,30 @@
root = true

[*]
charset = utf-8
# end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = tab
indent_size = 2
tab_width = 2


[*.py]
indent_style = tab
indent_size = 2

[*.{yml,yaml}]
indent_style = space
indent_size = 2

[*.{json,ini}]
indent_style = tab
indent_size = 2

[*.md]
trim_trailing_whitespace = false

[*.rst]
indent_style = space
indent_size = 3
.github/dependabot.yml (vendored, 4 changed lines)
@@ -10,9 +10,9 @@ updates:
- Dependencies
assignees:
- Paebbels
- Umarcor
- umarcor
reviewers:
- Paebbels
- Umarcor
- umarcor
schedule:
interval: "daily" # Checks on Monday trough Friday.
.github/pull_request_template.md (vendored, 20 changed lines)
@@ -1,16 +1,30 @@
# New Features

* tbd
* tbd

# Changes

* tbd
* tbd

# Bug Fixes

* tbd
* tbd

----------
# Related PRs:
# Documentation

* tbd
* tbd

# Unit Tests

* tbd
* tbd

----------
# Related Issues and Pull-Requests

* tbd
* tbd
.github/workflows/ApplicationTesting.yml (vendored, new file, 264 lines)
@@ -0,0 +1,264 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Application Testing

on:
workflow_call:
inputs:
jobs:
description: 'JSON list with environment fields, telling the system and Python versions to run tests with.'
required: true
type: string
wheel:
description: "Wheel package as input artifact."
required: false
default: ''
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
type: string
pacboy:
description: 'MSYS2 dependencies to be installed through pacboy (pacman).'
required: false
default: ""
type: string
mingw_requirements:
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
required: false
default: ''
type: string
root_directory:
description: 'Working directory for running tests.'
required: false
default: ''
type: string
tests_directory:
description: 'Path to the directory containing tests (relative to root_directory).'
required: false
default: 'tests'
type: string
apptest_directory:
description: 'Path to the directory containing application tests (relative to tests_directory).'
required: false
default: 'app'
type: string
apptest_xml_artifact:
description: "Generate application test report with junitxml and upload results as an artifact."
required: false
default: ''
type: string

jobs:
ApplicationTesting:
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Application Tests using Python ${{ matrix.python }}
runs-on: ${{ matrix.runs-on }}

strategy:
fail-fast: false
matrix:
include: ${{ fromJson(inputs.jobs) }}

defaults:
run:
shell: ${{ matrix.shell }}

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.wheel }}
path: install

- name: Compute pacman/pacboy packages
id: pacboy
if: matrix.system == 'msys2'
shell: python
run: |
from os import getenv
from pathlib import Path
from re import compile
from sys import version

print(f"Python: {version}")

def loadRequirementsFile(requirementsFile: Path):
requirements = []
with requirementsFile.open("r") as file:
for line in file.readlines():
line = line.strip()
if line.startswith("#") or line.startswith("https") or line == "":
continue
elif line.startswith("-r"):
# Remove the first word/argument (-r)
requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
else:
requirements.append(line)

return requirements

requirements = "${{ inputs.requirements }}"
if requirements.startswith("-r"):
requirementsFile = Path(requirements[2:].lstrip())
try:
dependencies = loadRequirementsFile(requirementsFile)
except FileNotFoundError as ex:
print(f"::error title=FileNotFoundError::{ex}")
exit(1)
else:
dependencies = [req.strip() for req in requirements.split(" ")]

packages = {
"coverage": "python-coverage:p",
"docstr_coverage": "python-pyyaml:p",
"igraph": "igraph:p",
"jinja2": "python-markupsafe:p",
"lxml": "python-lxml:p",
"numpy": "python-numpy:p",
"markupsafe": "python-markupsafe:p",
"pip": "python-pip:p",
"pyyaml": "python-pyyaml:p",
"ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
"sphinx": "python-markupsafe:p",
"tomli": "python-tomli:p",
"wheel": "python-wheel:p",
"pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
"pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
}
subPackages = {
"pytooling": {
"yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
}
}

regExp = compile(r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")

pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
print(f"Processing dependencies ({len(dependencies)}):")
for dependency in dependencies:
print(f"  {dependency}")

match = regExp.match(dependency.lower())
if not match:
print(f"  Wrong format: {dependency}")
print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
continue

package = match["PackageName"]
if package in packages:
rewrite = packages[package]
print(f"  Found rewrite rule for '{package}': {rewrite}")
pacboyPackages.add(rewrite)

if match["SubPackages"] and package in subPackages:
for subPackage in match["SubPackages"].split(","):
if subPackage in subPackages[package]:
rewrite = subPackages[package][subPackage]
print(f"  Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
pacboyPackages.add(rewrite)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+") as f:
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
if: matrix.system == 'msys2'
uses: msys2/setup-msys2@v2
with:
msystem: ${{ matrix.runtime }}
update: true
pacboy: >-
${{ steps.pacboy.outputs.pacboy_packages }}
${{ inputs.pacboy }}

- name: 🐍 Setup Python ${{ matrix.python }}
if: matrix.system != 'msys2'
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}

- name: 🔧 Install wheel and pip dependencies (native)
if: matrix.system != 'msys2'
run: |
python -m pip install --disable-pip-version-check -U wheel
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 🔧 Install pip dependencies (MSYS2)
if: matrix.system == 'msys2'
run: |
if [ -n '${{ inputs.mingw_requirements }}' ]; then
python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
else
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
fi

- name: 🔧 Install wheel from artifact
run: |
ls -l install
python -m pip install --disable-pip-version-check -U install/*.whl

- name: ✅ Run application tests (Ubuntu/macOS)
if: matrix.system != 'windows'
run: |
export ENVIRONMENT_NAME="${{ matrix.envname }}"

cd "${{ inputs.root_directory || '.' }}"
[ -n '${{ inputs.apptest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS
if [ -n '${{ inputs.coverage_config }}' ]; then
echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
else
echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
fi

- name: ✅ Run application tests (Windows)
if: matrix.system == 'windows'
run: |
$env:ENVIRONMENT_NAME = "${{ matrix.envname }}"

cd "${{ inputs.root_directory || '.' }}"
$PYTEST_ARGS = if ("${{ inputs.apptest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" }
if ("${{ inputs.coverage_config }}") {
Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
} else {
Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
}

- name: 📤 Upload 'TestReportSummary.xml' artifact
if: inputs.apptest_xml_artifact != ''
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
working-directory: report/unit
path: TestReportSummary.xml
if-no-files-found: error
retention-days: 1
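ApplicationTesting.yml is a reusable workflow (`workflow_call`), so a consuming repository invokes it with `uses:` and feeds it the `jobs` matrix JSON plus the wheel artifact produced by its packaging job. The following is a minimal caller sketch, not the documented usage: the job names, the `package-all` and `apptest-xml` artifact names, and the use of Parameters.yml to build the matrix are illustrative assumptions (CompletePipeline.yml later in this diff wires it the same way, but commented out).

```yaml
# Illustrative caller; job names and artifact names are assumptions.
jobs:
  AppTestingParams:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_name: myPackage              # hypothetical package name

  AppTesting:
    uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
    needs:
      - AppTestingParams
      - Package                            # assumed packaging job that uploads the wheel
    with:
      jobs: ${{ needs.AppTestingParams.outputs.python_jobs }}
      wheel: package-all                   # hypothetical wheel artifact name
      requirements: '-r tests/requirements.txt'
      apptest_xml_artifact: apptest-xml    # hypothetical report artifact name
```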
.github/workflows/ArtifactCleanUp.yml (vendored, 15 changed lines)
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -25,6 +25,11 @@ name: ArtifactCleanUp
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
package:
description: 'Artifacts to be removed on not tagged runs.'
required: true
@@ -36,21 +41,19 @@ on:
type: string

jobs:

ArtifactCleanUp:
name: 🗑️ Artifact Cleanup
runs-on: ubuntu-latest
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:

- name: 🗑️ Delete package Artifacts
if: ${{ ! startsWith(github.ref, 'refs/tags') }}
uses: geekyeggo/delete-artifact@v2
uses: geekyeggo/delete-artifact@v5
with:
name: ${{ inputs.package }}

- name: 🗑️ Delete remaining Artifacts
if: ${{ inputs.remaining != '' }}
uses: geekyeggo/delete-artifact@v2
uses: geekyeggo/delete-artifact@v5
with:
name: ${{ inputs.remaining }}
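This cleanup workflow is typically the last job in the chain, as the CompletePipeline.yml file later in this diff shows. A trimmed caller sketch, with placeholder artifact names and an assumed dependency list:

```yaml
# Illustrative caller; artifact names and the 'needs' list are assumptions.
ArtifactCleanUp:
  uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
  needs:
    - Package
    - PublishToGitHubPages
  with:
    package: package-all        # deleted only on runs that are not tag builds
    remaining: |
      unittesting-xml-*
      codecoverage-html
```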
.github/workflows/BuildTheDocs.yml (vendored, 33 changed lines)
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -27,18 +27,21 @@ on:
inputs:
artifact:
description: 'Name of the documentation artifact.'
required: true
required: false
default: ''
type: string

jobs:

BuildTheDocs:
name: 📓 Run BuildTheDocs
runs-on: ubuntu-latest
runs-on: ubuntu-24.04

steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: 🛳️ Build documentation
uses: buildthedocs/btd@v0
@@ -46,8 +49,24 @@ jobs:
skip-deploy: true

- name: 📤 Upload 'documentation' artifacts
uses: actions/upload-artifact@v3
if: inputs.artifact != ''
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: doc/_build/html
working-directory: doc/_build/html
path: '*'
retention-days: 1

- name: '📓 Publish site to GitHub Pages'
if: inputs.artifact == '' && github.event_name != 'pull_request'
run: |
cp --recursive -T doc/_build/html public
cd public
touch .nojekyll
git init
cp ../.git/config ./.git/config
git add .
git config --local user.email "BuildTheDocs@GitHubActions"
git config --local user.name "GitHub Actions"
git commit -a -m "update ${{ github.sha }}"
git push -u origin +HEAD:gh-pages
.github/workflows/CheckDocumentation.yml (vendored, new file, 72 lines)
@@ -0,0 +1,72 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Check Documentation

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.12'
type: string
directory:
description: 'Source code directory to check.'
required: true
type: string
fail_under:
description: 'Minimum required documentation coverage level'
required: false
default: 80
type: string

jobs:
DocCoverage:
name: 👀 Check documentation coverage
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U docstr_coverage interrogate

- name: Run 'interrogate' Documentation Coverage Check
continue-on-error: true
run: |
interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && echo "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"

- name: Run 'docstr_coverage' Documentation Coverage Check
continue-on-error: true
run: |
docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && echo "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"
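A short caller sketch for this documentation-coverage check; the package directory and the raised threshold are placeholder values chosen for illustration:

```yaml
# Illustrative caller; directory and threshold are placeholders.
DocCoverage:
  uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
  with:
    python_version: '3.12'
    directory: myNamespace/myPackage   # hypothetical source directory
    fail_under: 85                     # default is 80
```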
.github/workflows/CompletePipeline.yml (vendored, new file, 318 lines)
@@ -0,0 +1,318 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Namespace Package

on:
workflow_call:
inputs:
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: true
type: string
unittest_python_version:
description: 'Python version.'
required: false
default: '3.13'
type: string
unittest_python_version_list:
description: 'Space separated list of Python versions to run tests with.'
required: false
default: '3.9 3.10 3.11 3.12 3.13'
type: string
unittest_system_list:
description: 'Space separated list of systems to run tests on.'
required: false
default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
type: string
unittest_include_list:
description: 'Space separated list of system:python items to be included into the list of test.'
required: false
default: ''
type: string
unittest_exclude_list:
description: 'Space separated list of system:python items to be excluded from the list of test.'
required: false
default: ''
type: string
unittest_disable_list:
description: 'Space separated list of system:python items to be disabled from the list of test.'
required: false
default: ''
type: string
apptest_python_version:
description: 'Python version.'
required: false
default: '3.13'
type: string
apptest_python_version_list:
description: 'Space separated list of Python versions to run tests with.'
required: false
default: ""
type: string
apptest_system_list:
description: 'Space separated list of systems to run tests on.'
required: false
default: 'ubuntu windows macos macos-arm ucrt64'
type: string
apptest_include_list:
description: 'Space separated list of system:python items to be included into the list of test.'
required: false
default: ''
type: string
apptest_exclude_list:
description: 'Space separated list of system:python items to be excluded from the list of test.'
required: false
default: ''
type: string
apptest_disable_list:
description: 'Space separated list of system:python items to be disabled from the list of test.'
required: false
default: ''
type: string
secrets:
PYPI_TOKEN:
description: "Token for pushing releases to PyPI."
required: false
CODACY_PROJECT_TOKEN:
description: "Token for pushing coverage results to Codacy."
required: false

jobs:
ConfigParams:
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}

UnitTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.unittest_python_version }}
python_version_list: ${{ inputs.unittest_python_version_list }}
system_list: ${{ inputs.unittest_system_list }}
include_list: ${{ inputs.unittest_include_list }}
exclude_list: ${{ inputs.unittest_exclude_list }}
disable_list: ${{ inputs.unittest_disable_list }}

AppTestingParams:
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
with:
package_namespace: ${{ inputs.package_namespace }}
package_name: ${{ inputs.package_name }}
python_version: ${{ inputs.apptest_python_version }}
python_version_list: ${{ inputs.apptest_python_version_list }}
system_list: ${{ inputs.apptest_system_list }}
include_list: ${{ inputs.apptest_include_list }}
exclude_list: ${{ inputs.apptest_exclude_list }}
disable_list: ${{ inputs.apptest_disable_list }}

UnitTesting:
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
needs:
- UnitTestingParams
with:
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
requirements: "-r tests/unit/requirements.txt"
# pacboy: "msys/git python-lxml:p"
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}

StaticTypeCheck:
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
commands: |
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
html_report: 'report/typing'
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

DocCoverage:
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
# fail_below: 70

Package:
uses: pyTooling/Actions/.github/workflows/Package.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

# AppTesting:
# uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
# needs:
# - AppTestingParams
# - UnitTestingParams
# - Package
# with:
# jobs: ${{ needs.AppTestingParams.outputs.python_jobs }}
# wheel: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
# apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}

PublishCoverageResults:
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
secrets:
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

PublishTestResults:
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
needs:
- UnitTestingParams
- UnitTesting
with:
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

# VerifyDocs:
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
# needs:
# - UnitTestingParams
# with:
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

Documentation:
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
needs:
- ConfigParams
- UnitTestingParams
- PublishTestResults
- PublishCoverageResults
# - VerifyDocs
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-ubuntu-native-3.12
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

IntermediateCleanUp:
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
needs:
- UnitTestingParams
- PublishCoverageResults
- PublishTestResults
- Documentation
with:
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

# PDFDocumentation:
# uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
# needs:
# - UnitTestingParams
# - Documentation
# with:
# document: pyEDAA.ProjectModel
# latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
# pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

PublishToGitHubPages:
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
needs:
- UnitTestingParams
- Documentation
# - PDFDocumentation
- PublishCoverageResults
- StaticTypeCheck
with:
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

ReleasePage:
uses: pyTooling/Actions/.github/workflows/Release.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- Package
# - AppTesting
- PublishToGitHubPages

PublishOnPyPI:
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
if: startsWith(github.ref, 'refs/tags')
needs:
- UnitTestingParams
- ReleasePage
with:
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
requirements: -r dist/requirements.txt
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

ArtifactCleanUp:
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
needs:
- UnitTestingParams
- UnitTesting
- StaticTypeCheck
- Documentation
# - PDFDocumentation
- PublishTestResults
- PublishCoverageResults
- PublishToGitHubPages
# - PublishOnPyPI
- IntermediateCleanUp
with:
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
remaining: |
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
# ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}-*
# ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
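CompletePipeline.yml bundles the reusable workflows above (parameter extraction, unit testing, type checking, packaging, publishing, documentation, cleanup) into one namespace-package pipeline, so a downstream repository only has to call it with its package coordinates and the two optional secrets. A minimal sketch of such a top-level workflow; the trigger events, namespace, and package name are assumptions, not part of this repository:

```yaml
# Illustrative top-level workflow in a consuming repository; triggers and names are assumptions.
name: Pipeline

on:
  push:
  workflow_dispatch:

jobs:
  Pipeline:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_namespace: myNamespace   # hypothetical namespace
      package_name: myPackage          # hypothetical package
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```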
.github/workflows/CoverageCollection.yml (vendored, 116 changed lines)
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -25,20 +25,30 @@ name: Coverage Collection
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.10'
default: '3.11'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
type: string
unittest_directory:
description: 'Path to the directory containing unit tests.'
tests_directory:
description: 'Path to the directory containing tests (test working directory).'
required: false
default: 'tests/unit'
default: 'tests'
type: string
unittest_directory:
description: 'Path to the directory containing unit tests (relative to tests_directory).'
required: false
default: 'unit'
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
@@ -58,30 +68,36 @@ jobs:

Coverage:
name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
runs-on: ubuntu-latest
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: '❗ Deprecation message'
run: echo "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'."

- name: ⏬ Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
lfs: true
submodules: true

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🗂 Install dependencies
run: |
python -m pip install -U pip
python -m pip install tomli
python -m pip install ${{ inputs.requirements }}
python -m pip install --disable-pip-version-check tomli
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import environ
from pathlib import Path
from tomli import load as tomli_load
from os import getenv
from pathlib import Path
from tomli import load as tomli_load
from textwrap import dedent

htmlDirectory = 'htmlcov'
xmlFile = './coverage.xml'
@@ -89,61 +105,75 @@ jobs:

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
else:
print(f"File '{pyProjectFile}' not found and no ' .coveragerc' file specified.")
htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)

# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = coverageRCSettings["html"]["directory"]
xmlFile = coverageRCSettings["xml"]["output"]
else:
print(f"File '{coverageRCFile}' not found.")
htmlDirectory = coverageRCSettings["html"]["directory"]
xmlFile = coverageRCSettings["xml"]["output"]
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory}
coverage_report_xml={xmlFile}
"""))

with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho:
gho.write(f"""\
coverage_report_html_directory={htmlDirectory}
coverage_report_xml={xmlFile}
""")
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}")

- name: Collect coverage
continue-on-error: true
run: |
[ 'x${{ inputs.coverage_config }}' != 'x' ] && PYCOV_ARGS='--cov-config=${{ inputs.coverage_config }}' || unset PYCOV_ARGS
python -m pytest -rA --cov=. $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes
export ENVIRONMENT_NAME="Linux (x86-64)"
export PYTHONPATH=$(pwd)
ABSDIR=$(pwd)
cd "${{ inputs.tests_directory || '.' }}"
[ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
echo "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes

- name: Convert to cobertura format
run: coverage xml
run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage

- name: Convert to HTML format
run: |
coverage html -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore

- name: 📤 Upload 'Coverage Report' artifact
continue-on-error: true
uses: actions/upload-artifact@v3
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
path: '*'
if-no-files-found: error
retention-days: 1

- name: 📊 Publish coverage at CodeCov
continue-on-error: true
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v5
with:
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
flags: unittests
@@ -151,7 +181,7 @@ jobs:

- name: 📉 Publish coverage at Codacy
continue-on-error: true
uses: codacy/codacy-coverage-reporter-action@master
uses: codacy/codacy-coverage-reporter-action@v1
with:
project-token: ${{ secrets.codacy_token }}
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
.github/workflows/ExtractConfiguration.yml (vendored, new file, 194 lines)
@@ -0,0 +1,194 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Extract Configuration

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.12'
type: string
package_namespace:
description: 'Name of the tool''s namespace.'
required: false
default: ''
type: string
package_name:
description: 'Name of the tool''s package.'
required: true
type: string
coverage_config:
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
required: false
default: 'pyproject.toml'
type: string

outputs:
package_fullname:
description: ""
value: ${{ jobs.Extract.outputs.package_fullname }}
package_directory:
description: ""
value: ${{ jobs.Extract.outputs.package_directory }}
mypy_prepare_command:
description: ""
value: ${{ jobs.Extract.outputs.mypy_prepare_command }}
coverage_report_html_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
coverage_report_xml_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml_directory }}
coverage_report_xml:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_xml }}
coverage_report_json_directory:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json_directory }}
coverage_report_json:
description: ""
value: ${{ jobs.Extract.outputs.coverage_report_json }}

jobs:
Extract:
name: 📓 Extract configurations from pyproject.toml
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
outputs:
package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
package_directory: ${{ steps.getPackageName.outputs.package_directory }}
mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U wheel tomli

- name: 🔁 Full package name and directory
id: getPackageName
shell: python
run: |
from os import getenv
from pathlib import Path
from textwrap import dedent

namespace = "${{ inputs.package_namespace }}".strip()
name = "${{ inputs.package_name }}".strip()

if namespace == "" or namespace == ".":
fullname = f"{name}"
directory = f"{name}"
mypy_prepare_command = ""
else:
fullname = f"{namespace}.{name}"
directory = f"{namespace}/{name}"
mypy_prepare_command = f"touch {namespace}/__init__.py"

github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
package_fullname={fullname}
package_directory={directory}
mypy_prepare_command={mypy_prepare_command}
"""))

- name: 🔁 Extract configurations from pyproject.toml
id: getVariables
shell: python
run: |
from os import getenv
from pathlib import Path
from sys import version
from textwrap import dedent

print(f"Python: {version}")

from tomli import load as tomli_load

htmlDirectory = Path("htmlcov")
xmlFile = Path("./coverage.xml")
jsonFile = Path("./coverage.json")
coverageRC = "${{ inputs.coverage_config }}".strip()

# Read output paths from 'pyproject.toml' file
if coverageRC == "pyproject.toml":
pyProjectFile = Path("pyproject.toml")
if pyProjectFile.exists():
with pyProjectFile.open("rb") as file:
pyProjectSettings = tomli_load(file)

htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
else:
print(f"File '{pyProjectFile}' not found.")
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
exit(1)

# Read output paths from '.coveragerc' file
elif len(coverageRC) > 0:
coverageRCFile = Path(coverageRC)
if coverageRCFile.exists():
with coverageRCFile.open("rb") as file:
coverageRCSettings = tomli_load(file)

htmlDirectory = Path(coverageRCSettings["html"]["directory"])
xmlFile = Path(coverageRCSettings["xml"]["output"])
jsonFile = Path(coverageRCSettings["json"]["output"])
else:
print(f"File '{coverageRCFile}' not found.")
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
exit(1)

# Write jobs to special file
github_output = Path(getenv("GITHUB_OUTPUT"))
print(f"GITHUB_OUTPUT: {github_output}")
with github_output.open("a+", encoding="utf-8") as f:
f.write(dedent(f"""\
coverage_report_html_directory={htmlDirectory.as_posix()}
coverage_report_xml_directory={xmlFile.parent.as_posix()}
coverage_report_xml={xmlFile.as_posix()}
coverage_report_json_directory={jsonFile.parent.as_posix()}
coverage_report_json={jsonFile.as_posix()}
"""))

print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
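The outputs extracted here (package_fullname, mypy_prepare_command, and the coverage report paths) are meant to be consumed by later jobs, as CompletePipeline.yml wires them into StaticTypeCheck and SphinxDocumentation. A reduced sketch of such a consumer; the namespace and package names are placeholders:

```yaml
# Illustrative consumer of the extracted outputs; names are placeholders.
ConfigParams:
  uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
  with:
    package_namespace: myNamespace   # hypothetical
    package_name: myPackage          # hypothetical

StaticTypeCheck:
  uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
  needs:
    - ConfigParams
  with:
    commands: |
      ${{ needs.ConfigParams.outputs.mypy_prepare_command }}
      mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
    html_report: 'report/typing'
```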
.github/workflows/IntermediateCleanUp.yml (vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Intermediate Cleanup

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
sqlite_coverage_artifacts_prefix:
description: 'Prefix for SQLite coverage artifacts'
required: false
type: string
xml_unittest_artifacts_prefix:
description: 'Prefix for XML unittest artifacts'
required: false
type: string

jobs:
IntermediateCleanUp:
name: 🗑️ Intermediate Artifact Cleanup
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
steps:
- name: 🗑️ Delete SQLite coverage artifacts from matrix jobs
uses: geekyeggo/delete-artifact@v5
if: inputs.sqlite_coverage_artifacts_prefix != ''
continue-on-error: true
with:
name: ${{ inputs.sqlite_coverage_artifacts_prefix }}*

- name: 🗑️ Delete JUnit XML artifacts from matrix jobs
uses: geekyeggo/delete-artifact@v5
if: inputs.xml_unittest_artifacts_prefix != ''
continue-on-error: true
with:
name: ${{ inputs.xml_unittest_artifacts_prefix }}*
71
.github/workflows/LaTeXDocumentation.yml
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: LaTeX Documentation
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
document:
|
||||
description: 'LaTeX root document without *.tex extension.'
|
||||
required: true
|
||||
type: string
|
||||
latex_artifact:
|
||||
description: 'Name of the LaTeX documentation artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
pdf_artifact:
|
||||
description: 'Name of the PDF documentation artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
PDFDocumentation:
|
||||
name: 📓 Converting LaTeX Documentation to PDF
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
steps:
|
||||
- name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.latex_artifact }}
|
||||
path: latex
|
||||
|
||||
- name: Compile LaTeX document
|
||||
uses: xu-cheng/latex-action@master
|
||||
with:
|
||||
working_directory: latex
|
||||
root_file: ${{ inputs.document }}.tex
|
||||
|
||||
- name: 📤 Upload 'PDF Documentation' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.pdf_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.pdf_artifact }}
|
||||
path: ${{ inputs.document }}.pdf
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
418
.github/workflows/NightlyRelease.yml
vendored
Normal file
@@ -0,0 +1,418 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Nightly
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image:
|
||||
description: 'Name of the Ubuntu image.'
|
||||
required: false
|
||||
default: 'ubuntu-24.04'
|
||||
type: string
|
||||
nightly_name:
|
||||
description: 'Name of the nightly release.'
|
||||
required: false
|
||||
default: 'nightly'
|
||||
type: string
|
||||
nightly_title:
|
||||
description: 'Title of the nightly release.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
nightly_description:
|
||||
description: 'Description of the nightly release.'
|
||||
required: false
|
||||
default: 'Release of artifacts from latest CI pipeline.'
|
||||
type: string
|
||||
draft:
|
||||
description: 'Specify if this is a draft.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
prerelease:
|
||||
description: 'Specify if this is a pre-release.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
latest:
|
||||
description: 'Specify if this is the latest release.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
replacements:
|
||||
description: 'Multi-line string containing search=replace patterns.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
assets:
|
||||
description: 'Multi-line string containing artifact:file:title asset descriptions.'
|
||||
required: true
|
||||
type: string
|
||||
tarball-name:
|
||||
type: string
|
||||
required: false
|
||||
default: '__pyTooling_upload_artifact__.tar'
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
name: 📝 Update 'Nightly Page' on GitHub
|
||||
runs-on: ${{ inputs.ubuntu_image }}
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# The command 'git describe' (used for version) needs the history.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: 🔧 Install zstd
|
||||
run: sudo apt-get install -y --no-install-recommends zstd
|
||||
|
||||
- name: 📑 Delete (old) Release Page
|
||||
id: deleteReleasePage
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED="\e[91m"
|
||||
ANSI_LIGHT_GREEN="\e[92m"
|
||||
ANSI_LIGHT_YELLOW="\e[93m"
|
||||
ANSI_NOCOLOR="\e[0m"
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
echo -n "Deleting release '${{ inputs.nightly_name }}' ... "
|
||||
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
elif [[ "${message}" == "release not found" ]]; then
|
||||
echo -e "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📑 (Re)create (new) Release Page
|
||||
id: createReleasePage
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED="\e[91m"
|
||||
ANSI_LIGHT_GREEN="\e[92m"
|
||||
ANSI_NOCOLOR="\e[0m"
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
addDraft="--draft"
|
||||
|
||||
if ${{ inputs.prerelease }}; then
|
||||
addPreRelease="--prerelease"
|
||||
fi
|
||||
|
||||
if ! ${{ inputs.latest }}; then
|
||||
addLatest="--latest=false"
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.nightly_title }}" != "" ]]; then
|
||||
addTitle=("--title" "${{ inputs.nightly_title }}")
|
||||
fi
|
||||
|
||||
cat <<'EOF' > __NoTeS__.md
|
||||
${{ inputs.nightly_description }}
|
||||
EOF
|
||||
if [[ -s __NoTeS__.md ]]; then
|
||||
addNotes=("--notes-file" "__NoTeS__.md")
|
||||
fi
|
||||
|
||||
# Apply replacements
|
||||
while IFS=$'\r\n' read -r patternLine; do
|
||||
# skip empty lines
|
||||
[[ "$patternLine" == "" ]] && continue
|
||||
|
||||
pattern="${patternLine%%=*}"
|
||||
replacement="${patternLine#*=}"
|
||||
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
|
||||
done <<<'${{ inputs.replacements }}'
|
||||
|
||||
# Add footer line
|
||||
cat <<EOF >> __NoTeS__.md
|
||||
|
||||
--------
|
||||
Published from [${{ github.workflow }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S').
|
||||
EOF
|
||||
|
||||
echo "Creating release '${{ inputs.nightly_name }}' ... "
|
||||
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📥 Download artifacts and upload as assets
|
||||
id: uploadAssets
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED="\e[91m"
|
||||
ANSI_LIGHT_GREEN="\e[92m"
|
||||
ANSI_LIGHT_YELLOW="\e[93m"
|
||||
ANSI_NOCOLOR="\e[0m"
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
Replace() {
|
||||
line="$1"
|
||||
while IFS=$'\r\n' read -r patternLine; do
|
||||
# skip empty lines
|
||||
[[ "$patternLine" == "" ]] && continue
|
||||
|
||||
pattern="${patternLine%%=*}"
|
||||
replacement="${patternLine#*=}"
|
||||
line="${line//"%$pattern%"/"$replacement"}"
|
||||
done <<<'${{ inputs.replacements }}'
|
||||
echo "$line"
|
||||
}
|
||||
|
||||
ERRORS=0
|
||||
# A dictionary of 0/1 to avoid duplicate downloads
|
||||
declare -A downloadedArtifacts
|
||||
# A dictionary to check for duplicate asset files in release
|
||||
declare -A assetFilenames
|
||||
while IFS=$'\r\n' read -r assetLine; do
|
||||
if [[ "${assetLine}" == "" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# split assetLine colon separated triple: artifact:asset:title
|
||||
artifact="${assetLine%%:*}"
|
||||
remaining="${assetLine#*:}"
|
||||
asset="${remaining%%:*}"
|
||||
title="${remaining##*:}"
|
||||
|
||||
# remove leading whitespace
|
||||
asset="${asset#"${asset%%[![:space:]]*}"}"
|
||||
title="${title#"${title%%[![:space:]]*}"}"
|
||||
|
||||
# apply replacements
|
||||
asset="$(Replace "${asset}")"
|
||||
title="$(Replace "${title}")"
|
||||
|
||||
echo "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
|
||||
echo -n " Checked asset for duplicates ... "
|
||||
if [[ -n "${assetFilenames[$asset]}" ]]; then
|
||||
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
assetFilenames[$asset]=1
|
||||
fi
|
||||
|
||||
# Download artifact by artifact name
|
||||
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
|
||||
echo -e " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo " downloading '${artifact}' ... "
|
||||
echo -n " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
|
||||
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
downloadedArtifacts[$artifact]=1
|
||||
|
||||
echo -n " Checking for embedded tarball ... "
|
||||
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"
|
||||
|
||||
pushd "${artifact}" > /dev/null
|
||||
|
||||
echo -n " Extracting embedded tarball ... "
|
||||
tar -xf "${{ inputs.tarball-name }}"
|
||||
if [[ $? -ne 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
|
||||
echo -n " Removing temporary tarball ... "
|
||||
rm -f "${{ inputs.tarball-name }}"
|
||||
if [[ $? -ne 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
|
||||
popd > /dev/null
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
|
||||
echo -n " checking asset '${artifact}/${asset}' ... "
|
||||
if [[ "${asset}" == !*.zip ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
|
||||
asset="${asset##*!}"
|
||||
echo " Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
zip -r "../${asset}" *
|
||||
)
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
|
||||
|
||||
if [[ "${asset:0:1}" == "\$" ]]; then
|
||||
asset="${asset##*$}"
|
||||
dirName="${asset%.*}"
|
||||
echo " Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
tar -c --gzip --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
|
||||
retCode=$?
|
||||
else
|
||||
asset="${asset##*!}"
|
||||
echo " Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
tar -c --gzip --file="../${asset}" *
|
||||
)
|
||||
retCode=$?
|
||||
fi
|
||||
|
||||
if [[ $retCode -eq 0 ]]; then
|
||||
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
|
||||
|
||||
if [[ "${asset:0:1}" == "\$" ]]; then
|
||||
asset="${asset##*$}"
|
||||
dirName="${asset%.*}"
|
||||
echo " Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
tar -c --zstd --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
|
||||
retCode=$?
|
||||
else
|
||||
asset="${asset##*!}"
|
||||
echo " Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
tar -c --zstd --file="../${asset}" *
|
||||
)
|
||||
retCode=$?
|
||||
fi
|
||||
|
||||
if [[ $retCode -eq 0 ]]; then
|
||||
echo -e " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
echo -e " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ -e "${artifact}/${asset}" ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${artifact}/${asset}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
|
||||
# Upload asset to existing release page
|
||||
echo -n " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
|
||||
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
done <<<'${{ inputs.assets }}'
|
||||
|
||||
echo "Inspecting downloaded artifacts ..."
|
||||
tree -L 3 .
|
||||
|
||||
if [[ $ERRORS -ne 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📑 Remove draft state from Release Page
|
||||
if: ${{ ! inputs.draft }}
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED="\e[91m"
|
||||
ANSI_LIGHT_GREEN="\e[92m"
|
||||
ANSI_NOCOLOR="\e[0m"
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
# Remove draft-state from release page
|
||||
echo -n "Remove draft-state from release '${title}' ... "
|
||||
gh release edit --draft=false "${{ inputs.nightly_name }}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
echo -e "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo -e "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
echo -e "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
|
||||
echo "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
|
||||
fi
|
||||
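For orientation, a minimal caller sketch of this reusable workflow (the job id, artifact names, version value, and the `@r5` ref are placeholders): each `assets` line is an `artifact:file:title` triple; a leading `!` compresses the downloaded artifact into the named archive, a leading `$` additionally wraps the tarball contents in a top-level directory derived from the file name, and `%version%` is substituted via `replacements`.

jobs:
  NightlyRelease:
    # Placeholder job id and ref; point 'uses' at the tag/branch of this repository actually consumed.
    uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@r5
    permissions:
      contents: write
      actions: write
    with:
      nightly_name: nightly
      nightly_title: "Nightly Release"
      replacements: |
        version=4.2.0
      # Each line below is an artifact:file:title triple; artifact and file names are placeholders.
      assets: |
        myPackage-Documentation-HTML:    !myPackage-Documentation-%version%.zip:    Documentation (HTML, zipped)
        myPackage-Documentation-HTML:    $myPackage-Documentation-%version%.tar.gz: Documentation (HTML, tarball)
        myPackage-TestReportSummary-XML: Unittesting.xml:                           Unit test summary (XML)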
33
.github/workflows/Package.yml
vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -25,10 +25,15 @@ name: Package
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.'
|
||||
@@ -44,25 +49,25 @@ jobs:
|
||||
|
||||
Package:
|
||||
name: 📦 Package in Source and Wheel Format
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🐍 Update pip
|
||||
run: python -m pip install -U pip
|
||||
|
||||
# build
|
||||
|
||||
- name: 🔧 [build] Install dependencies for packaging and release
|
||||
if: inputs.requirements == ''
|
||||
run: python -m pip install build
|
||||
run: python -m pip install --disable-pip-version-check build
|
||||
|
||||
- name: 🔨 [build] Build Python package (source distribution)
|
||||
if: inputs.requirements == ''
|
||||
@@ -76,7 +81,7 @@ jobs:
|
||||
|
||||
- name: 🔧 [build] Install dependencies for packaging and release
|
||||
if: inputs.requirements == 'no-isolation'
|
||||
run: python -m pip install build
|
||||
run: python -m pip install --disable-pip-version-check build
|
||||
|
||||
- name: 🔨 [build] Build Python package (source distribution)
|
||||
if: inputs.requirements == 'no-isolation'
|
||||
@@ -90,7 +95,7 @@ jobs:
|
||||
|
||||
- name: 🔧 [setuptools] Install dependencies for packaging and release
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
run: python -m pip install ${{ inputs.requirements }}
|
||||
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 🔨 [setuptools] Build Python package (source distribution)
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
@@ -100,11 +105,11 @@ jobs:
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
run: python setup.py bdist_wheel
|
||||
|
||||
|
||||
- name: 📤 Upload wheel artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
path: dist/
|
||||
working-directory: dist
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
351
.github/workflows/Parameters.yml
vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -25,100 +25,319 @@ name: Parameters
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
name:
|
||||
description: 'Name of the tool.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
package_namespace:
|
||||
description: 'Name of the tool''s namespace.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
package_name:
|
||||
description: 'Name of the tool''s package.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.13'
|
||||
type: string
|
||||
python_version_list:
|
||||
description: 'Space separated list of Python versions to run tests with.'
|
||||
required: false
|
||||
default: '3.7 3.8 3.9 3.10'
|
||||
default: '3.9 3.10 3.11 3.12 3.13'
|
||||
type: string
|
||||
system_list:
|
||||
description: 'Space separated list of systems to run tests on.'
|
||||
required: false
|
||||
default: 'ubuntu windows msys2 macos'
|
||||
default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
|
||||
type: string
|
||||
name:
|
||||
description: 'Name of the tool.'
|
||||
required: true
|
||||
include_list:
|
||||
description: 'Space separated list of system:python items to be included into the list of test.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
exclude_list:
|
||||
description: 'Space separated list of system:python items to be excluded from the list of test.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
disable_list:
|
||||
description: 'Space separated list of system:python items to be disabled from the list of test.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ubuntu_image:
|
||||
description: 'The used GitHub Action image for Ubuntu based jobs.'
|
||||
required: false
|
||||
default: 'ubuntu-24.04'
|
||||
type: string
|
||||
windows_image:
|
||||
description: 'The used GitHub Action image for Windows based jobs.'
|
||||
required: false
|
||||
default: 'windows-2022'
|
||||
type: string
|
||||
macos_intel_image:
|
||||
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
|
||||
required: false
|
||||
default: 'macos-13'
|
||||
type: string
|
||||
macos_arm_image:
|
||||
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
|
||||
required: false
|
||||
default: 'macos-14'
|
||||
type: string
|
||||
|
||||
outputs:
|
||||
python_version:
|
||||
description: "Default Python version for other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.python_version }}
|
||||
python_jobs:
|
||||
description: "List of Python versions (and system combinations) to be used in the matrix of other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.python_jobs }}
|
||||
artifact_names:
|
||||
description: "Pre-defined artifact names for other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.artifact_names }}
|
||||
params:
|
||||
description: "Parameters to be used in other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.params }}
|
||||
python_jobs:
|
||||
description: "List of Python versions to be used in the matrix of other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.python_jobs }}
|
||||
|
||||
jobs:
|
||||
|
||||
Parameters:
|
||||
runs-on: ubuntu-latest
|
||||
name: ✎ Generate pipeline parameters
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
outputs:
|
||||
params: ${{ steps.params.outputs.params }}
|
||||
python_jobs: ${{ steps.params.outputs.python_jobs }}
|
||||
steps:
|
||||
python_version: ${{ steps.params.outputs.python_version }}
|
||||
python_jobs: ${{ steps.params.outputs.python_jobs }}
|
||||
artifact_names: ${{ steps.params.outputs.artifact_names }}
|
||||
params: ${{ steps.params.outputs.params }}
|
||||
|
||||
steps:
|
||||
- name: Generate 'params' and 'python_jobs'
|
||||
id: params
|
||||
shell: python
|
||||
run: |
|
||||
from os import environ
|
||||
from json import dumps as json_dumps
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from typing import Iterable
|
||||
|
||||
name = '${{ inputs.name }}'
|
||||
params = {
|
||||
'python_version': '${{ inputs.python_version }}',
|
||||
'artifacts': {
|
||||
'unittesting': f'{name}-TestReport',
|
||||
'coverage': f'{name}-coverage',
|
||||
'typing': f'{name}-typing',
|
||||
'package': f'{name}-package',
|
||||
'doc': f'{name}-doc',
|
||||
package_namespace = "${{ inputs.package_namespace }}".strip()
|
||||
package_name = "${{ inputs.package_name }}".strip()
|
||||
name = "${{ inputs.name }}".strip()
|
||||
python_version = "${{ inputs.python_version }}".strip()
|
||||
systems = "${{ inputs.system_list }}".strip()
|
||||
versions = "${{ inputs.python_version_list }}".strip()
|
||||
include_list = "${{ inputs.include_list }}".strip()
|
||||
exclude_list = "${{ inputs.exclude_list }}".strip()
|
||||
disable_list = "${{ inputs.disable_list }}".strip()
|
||||
|
||||
if name == "":
|
||||
if package_namespace == "" or package_namespace == ".":
|
||||
name = f"{package_name}"
|
||||
else:
|
||||
name = f"{package_namespace}.{package_name}"
|
||||
|
||||
currentMSYS2Version = "3.11"
|
||||
currentAlphaVersion = "3.14"
|
||||
currentAlphaRelease = "3.14.0-alpha.1"
|
||||
|
||||
if systems == "":
|
||||
print("::error title=Parameter::system_list is empty.")
|
||||
else:
|
||||
systems = [sys.strip() for sys in systems.split(" ")]
|
||||
|
||||
if versions == "":
|
||||
versions = [ python_version ]
|
||||
else:
|
||||
versions = [ver.strip() for ver in versions.split(" ")]
|
||||
|
||||
if include_list == "":
|
||||
includes = []
|
||||
else:
|
||||
includes = [tuple(include.strip().split(":")) for include in include_list.split(" ")]
|
||||
|
||||
if exclude_list == "":
|
||||
excludes = []
|
||||
else:
|
||||
excludes = [exclude.strip() for exclude in exclude_list.split(" ")]
|
||||
|
||||
if disable_list == "":
|
||||
disabled = []
|
||||
else:
|
||||
disabled = [disable.strip() for disable in disable_list.split(" ")]
|
||||
|
||||
if "3.8" in versions:
|
||||
print("::warning title=Deprecated::Support for Python 3.8 ended in 2024.10.")
|
||||
if "msys2" in systems:
|
||||
print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.")
|
||||
if currentAlphaVersion in versions:
|
||||
print(f"::notice title=Experimental::Python {currentAlphaVersion} ({currentAlphaRelease}) is a pre-release.")
|
||||
for disable in disabled:
|
||||
print(f"::warning title=Disabled Python Job::System '{disable}' temporarily disabled.")
|
||||
|
||||
# see https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
|
||||
data = {
|
||||
# Python and PyPy versions supported by "setup-python" action
|
||||
"python": {
|
||||
"3.8": { "icon": "⚫", "until": "2024.10" },
|
||||
"3.9": { "icon": "🔴", "until": "2025.10" },
|
||||
"3.10": { "icon": "🟠", "until": "2026.10" },
|
||||
"3.11": { "icon": "🟡", "until": "2027.10" },
|
||||
"3.12": { "icon": "🟢", "until": "2028.10" },
|
||||
"3.13": { "icon": "🟢", "until": "2029.10" },
|
||||
"3.14": { "icon": "🟣", "until": "2030.10" },
|
||||
"pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
|
||||
"pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
|
||||
"pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
|
||||
"pypy-3.10": { "icon": "⟲🟡", "until": "????.??" },
|
||||
},
|
||||
# Runner systems (runner images) supported by GitHub Actions
|
||||
"sys": {
|
||||
"ubuntu": { "icon": "🐧", "runs-on": "${{ inputs.ubuntu_image }}", "shell": "bash", "name": "Linux (x86-64)" },
|
||||
"windows": { "icon": "🪟", "runs-on": "${{ inputs.windows_image }}", "shell": "pwsh", "name": "Windows (x86-64)" },
|
||||
"macos": { "icon": "🍎", "runs-on": "${{ inputs.macos_intel_image }}", "shell": "bash", "name": "macOS (x86-64)" },
|
||||
"macos-arm": { "icon": "🍏", "runs-on": "${{ inputs.macos_arm_image }}", "shell": "bash", "name": "macOS (aarch64)" },
|
||||
},
|
||||
# Runtimes provided by MSYS2
|
||||
"runtime": {
|
||||
"msys": { "icon": "🪟🟪", "name": "Windows+MSYS2 (x86-64) - MSYS" },
|
||||
"mingw32": { "icon": "🪟⬛", "name": "Windows+MSYS2 (x86-64) - MinGW32" },
|
||||
"mingw64": { "icon": "🪟🟦", "name": "Windows+MSYS2 (x86-64) - MinGW64" },
|
||||
"clang32": { "icon": "🪟🟫", "name": "Windows+MSYS2 (x86-64) - Clang32" },
|
||||
"clang64": { "icon": "🪟🟧", "name": "Windows+MSYS2 (x86-64) - Clang64" },
|
||||
"ucrt64": { "icon": "🪟🟨", "name": "Windows+MSYS2 (x86-64) - UCRT64" },
|
||||
}
|
||||
}
|
||||
|
||||
with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho:
|
||||
gho.write(f"params={params!s}\n")
|
||||
print("Parameters:")
|
||||
print(params)
|
||||
print(f"includes ({len(includes)}):")
|
||||
for system,version in includes:
|
||||
print(f"- {system}:{version}")
|
||||
print(f"excludes ({len(excludes)}):")
|
||||
for exclude in excludes:
|
||||
print(f"- {exclude}")
|
||||
print(f"disabled ({len(disabled)}):")
|
||||
for disable in disabled:
|
||||
print(f"- {disable}")
|
||||
|
||||
systems = '${{ inputs.system_list }}'.split(' ')
|
||||
versions = '${{ inputs.python_version_list }}'.split(' ')
|
||||
if '3.6' in versions:
|
||||
print("::warning title=Deprecated::Support for Python 3.6 ended in 2021.12.23.")
|
||||
if '3.11' in versions:
|
||||
print(f"::notice title=Experimental::Python 3.11 (3.11.0-alpha3) is a pre-release.")
|
||||
data = {
|
||||
'python': {
|
||||
'3.6': { 'icon': '⚫', 'until': '2021.12.23' },
|
||||
'3.7': { 'icon': '🔴', 'until': '2023.06.27' },
|
||||
'3.8': { 'icon': '🟠', 'until': '2024.10' },
|
||||
'3.9': { 'icon': '🟡', 'until': '2025.10' },
|
||||
'3.10': { 'icon': '🟢', 'until': '2026.10' },
|
||||
'3.11': { 'icon': '🟣', 'until': '2027.10' },
|
||||
},
|
||||
'sys': {
|
||||
'ubuntu': { 'icon': '🐧', 'runs-on': 'ubuntu-latest', 'shell': 'bash' },
|
||||
'windows': { 'icon': '🧊', 'runs-on': 'windows-latest', 'shell': 'pwsh' },
|
||||
'msys2': { 'icon': '🟦', 'runs-on': 'windows-latest', 'shell': 'msys2 {0}' },
|
||||
'macos': { 'icon': '🍎', 'runs-on': 'macos-latest', 'shell': 'bash' }
|
||||
}
|
||||
}
|
||||
jobs = [
|
||||
{
|
||||
'sysicon': data['sys'][system]['icon'],
|
||||
'system': system,
|
||||
'runs-on': data['sys'][system]['runs-on'],
|
||||
'shell': data['sys'][system]['shell'],
|
||||
'pyicon': data['python'][version]['icon'],
|
||||
'python': '3.11.0-alpha.3' if version == '3.11' else version
|
||||
}
|
||||
for system in systems
|
||||
for version in (versions if system != 'msys2' else ['3.10'])
|
||||
def match(combination: str, pattern: str) -> bool:
|
||||
system, version = combination.split(":")
|
||||
sys, ver = pattern.split(":")
|
||||
|
||||
if sys == "*":
|
||||
return (ver == "*") or (version == ver)
|
||||
elif system == sys:
|
||||
return (ver == "*") or (version == ver)
|
||||
else:
|
||||
return False
|
||||
|
||||
def notIn(combination: str, patterns: Iterable[str]) -> bool:
|
||||
for pattern in patterns:
|
||||
if match(combination, pattern):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
combinations = [
|
||||
(system, version)
|
||||
for system in systems
|
||||
if system in data["sys"]
|
||||
for version in versions
|
||||
if version in data["python"]
|
||||
and notIn(f"{system}:{version}", excludes)
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
] + [
|
||||
(system, currentMSYS2Version)
|
||||
for system in systems
|
||||
if system in data["runtime"]
|
||||
and notIn(f"{system}:{currentMSYS2Version}", excludes)
|
||||
and notIn(f"{system}:{currentMSYS2Version}", disabled)
|
||||
] + [
|
||||
(system, version)
|
||||
for system, version in includes
|
||||
if system in data["sys"]
|
||||
and version in data["python"]
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
]
|
||||
with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho:
|
||||
gho.write(f"python_jobs={jobs!s}\n")
|
||||
print("Python jobs:")
|
||||
print(jobs)
|
||||
print(f"Combinations ({len(combinations)}):")
|
||||
for system, version in combinations:
|
||||
print(f"- {system}:{version}")
|
||||
|
||||
jobs = [
|
||||
{
|
||||
"sysicon": data["sys"][system]["icon"],
|
||||
"system": system,
|
||||
"runs-on": data["sys"][system]["runs-on"],
|
||||
"runtime": "native",
|
||||
"shell": data["sys"][system]["shell"],
|
||||
"pyicon": data["python"][version]["icon"],
|
||||
"python": currentAlphaRelease if version == currentAlphaVersion else version,
|
||||
"envname": data["sys"][system]["name"],
|
||||
}
|
||||
for system, version in combinations if system in data["sys"]
|
||||
] + [
|
||||
{
|
||||
"sysicon": data["runtime"][runtime]["icon"],
|
||||
"system": "msys2",
|
||||
"runs-on": "windows-latest",
|
||||
"runtime": runtime.upper(),
|
||||
"shell": "msys2 {0}",
|
||||
"pyicon": data["python"][currentMSYS2Version]["icon"],
|
||||
"python": version,
|
||||
"envname": data["runtime"][runtime]["name"],
|
||||
}
|
||||
for runtime, version in combinations if runtime not in data["sys"]
|
||||
]
|
||||
|
||||
artifact_names = {
|
||||
"unittesting_xml": f"{name}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{name}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{name}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{name}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{name}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{name}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{name}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{name}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{name}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{name}-StaticTyping-HTML",
|
||||
"package_all": f"{name}-Packages",
|
||||
"documentation_html": f"{name}-Documentation-HTML",
|
||||
"documentation_latex": f"{name}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{name}-Documentation-PDF",
|
||||
}
|
||||
|
||||
print("Parameters:")
|
||||
print(f" python_version: {python_version}")
|
||||
print(f" python_jobs ({len(jobs)}):\n" +
|
||||
"".join([f" {{ " + ", ".join([f"\"{key}\": \"{value}\"" for key, value in job.items()]) + f" }},\n" for job in jobs])
|
||||
)
|
||||
print(f" artifact_names ({len(artifact_names)}):")
|
||||
for id, name in artifact_names.items():
|
||||
print(f" {id:>20}: {name}")
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
python_version={python_version}
|
||||
python_jobs={json_dumps(jobs)}
|
||||
artifact_names={json_dumps(artifact_names)}
|
||||
"""))
|
||||
|
||||
- name: Verify out parameters
|
||||
id: verify
|
||||
run: |
|
||||
echo 'python_version: ${{ steps.params.outputs.python_version }}'
|
||||
echo 'python_jobs: ${{ steps.params.outputs.python_jobs }}'
|
||||
echo 'artifact_names: ${{ steps.params.outputs.artifact_names }}'
|
||||
echo 'params: ${{ steps.params.outputs.params }}'
|
||||
|
||||
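For orientation, a minimal sketch of a caller consuming these outputs (the `@r5` ref, job ids, and the package name are placeholders); each generated matrix entry also carries `shell`, `sysicon`, `pyicon`, and `envname` keys that a full pipeline would use:

jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@r5
    with:
      package_name: myPackage    # placeholder package name

  UnitTesting:
    name: Unit tests
    needs: Params
    strategy:
      fail-fast: false
      matrix:
        # 'python_jobs' is a JSON list of {system, runs-on, shell, python, ...} entries
        include: ${{ fromJSON(needs.Params.outputs.python_jobs) }}
    runs-on: ${{ matrix.runs-on }}
    steps:
      - name: 🐍 Setup Python ${{ matrix.python }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}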
238
.github/workflows/PublishCoverageResults.yml
vendored
Normal file
@@ -0,0 +1,238 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Publish Code Coverage Results
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
coverage_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-CodeCoverage-*'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Uses pyproject.toml by default.'
|
||||
required: false
|
||||
default: 'pyproject.toml'
|
||||
type: string
|
||||
coverage_sqlite_artifact:
|
||||
description: 'Name of the SQLite coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_xml_artifact:
|
||||
description: 'Name of the XML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the JSON coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_html_artifact:
|
||||
description: 'Name of the HTML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
secrets:
|
||||
codacy_token:
|
||||
description: 'Token to push result to codacy.'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
PublishCoverageResults:
|
||||
name: 📊 Publish Code Coverage Results
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: Download Artifacts
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.coverage_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -psh artifacts
|
||||
|
||||
- name: 🔧 Install coverage and tomli
|
||||
run: |
|
||||
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
|
||||
|
||||
- name: 🔁 Extract configurations from pyproject.toml
|
||||
id: getVariables
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from sys import version
|
||||
from textwrap import dedent
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
from tomli import load as tomli_load
|
||||
|
||||
htmlDirectory = Path("htmlcov")
|
||||
xmlFile = Path("./coverage.xml")
|
||||
jsonFile = Path("./coverage.json")
|
||||
coverageRC = "${{ inputs.coverage_config }}".strip()
|
||||
|
||||
# Read output paths from 'pyproject.toml' file
|
||||
if coverageRC == "pyproject.toml":
|
||||
pyProjectFile = Path("pyproject.toml")
|
||||
if pyProjectFile.exists():
|
||||
with pyProjectFile.open("rb") as file:
|
||||
pyProjectSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
|
||||
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
|
||||
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
|
||||
else:
|
||||
print(f"File '{pyProjectFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Read output paths from '.coveragerc' file
|
||||
elif len(coverageRC) > 0:
|
||||
coverageRCFile = Path(coverageRC)
|
||||
if coverageRCFile.exists():
|
||||
with coverageRCFile.open("rb") as file:
|
||||
coverageRCSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(coverageRCSettings["html"]["directory"])
|
||||
xmlFile = Path(coverageRCSettings["xml"]["output"])
|
||||
jsonFile = Path(coverageRCSettings["json"]["output"])
|
||||
else:
|
||||
print(f"File '{coverageRCFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
coverage_report_html_directory={htmlDirectory.as_posix()}
|
||||
coverage_report_xml={xmlFile}
|
||||
coverage_report_json={jsonFile}
|
||||
"""))
|
||||
|
||||
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
|
||||
|
||||
- name: Rename .coverage files and collect them all to coverage/
|
||||
run: |
|
||||
ls -lAh artifacts/
|
||||
ls -lAh artifacts/*/.coverage
|
||||
mkdir -p coverage
|
||||
find artifacts/ -type f -path "*SQLite*.coverage" -exec sh -c 'cp -v $0 "coverage/$(basename $0).$(basename $(dirname $0))"' {} ';'
|
||||
tree -a coverage
|
||||
|
||||
- name: Combine SQLite files (using Coverage.py)
|
||||
run: coverage combine --data-file=.coverage coverage/
|
||||
|
||||
- name: Report code coverage
|
||||
run: coverage report --rcfile=pyproject.toml --data-file=.coverage
|
||||
|
||||
- name: Convert to XML format (Cobertura)
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
run: coverage xml --data-file=.coverage
|
||||
|
||||
- name: Convert to JSON format
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
run: coverage json --data-file=.coverage
|
||||
|
||||
- name: Convert to HTML format
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
run: |
|
||||
coverage html --data-file=.coverage -d report/coverage/html
|
||||
rm report/coverage/html/.gitignore
|
||||
tree -a report/coverage/html
|
||||
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}
|
||||
path: .coverage
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📊 Publish code coverage at CodeCov
|
||||
if: inputs.CodeCov == true
|
||||
continue-on-error: true
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
flags: unittests
|
||||
env_vars: PYTHON
|
||||
|
||||
- name: 📉 Publish code coverage at Codacy
|
||||
if: inputs.Codacy == true
|
||||
continue-on-error: true
|
||||
uses: codacy/codacy-coverage-reporter-action@v1
|
||||
with:
|
||||
project-token: ${{ secrets.codacy_token }}
|
||||
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
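A minimal caller sketch for this workflow, assuming placeholder artifact names matching the default `*-CodeCoverage-*` pattern and a `CODACY_PROJECT_TOKEN` secret defined in the calling repository:

jobs:
  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r5   # placeholder ref
    needs:
      - UnitTesting    # placeholder job producing the coverage artifacts
    with:
      coverage_config: pyproject.toml
      coverage_sqlite_artifact: myPackage-CodeCoverage-SQLite
      coverage_xml_artifact: myPackage-CodeCoverage-XML
      coverage_json_artifact: myPackage-CodeCoverage-JSON
      coverage_html_artifact: myPackage-CodeCoverage-HTML
    secrets:
      codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}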
23
.github/workflows/PublishOnPyPI.yml
vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -25,10 +25,15 @@ name: Publish on PyPI
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
@@ -48,24 +53,22 @@ jobs:
|
||||
|
||||
PublishOnPyPI:
|
||||
name: 🚀 Publish to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
|
||||
uses: actions/download-artifact@v3
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
path: dist/
|
||||
path: dist
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: ⚙ Install dependencies for packaging and release
|
||||
run: |
|
||||
python -m pip install -U pip
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: ⤴ Release Python source package to PyPI
|
||||
env:
|
||||
@@ -80,6 +83,6 @@ jobs:
|
||||
run: twine upload dist/*.whl
|
||||
|
||||
- name: 🗑️ Delete packaging Artifacts
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
|
||||
74
.github/workflows/PublishTestResults.yml
vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -25,30 +25,86 @@ name: Publish Unit Test Results
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
report_files:
|
||||
description: 'Pattern of report files to upload. Can be a comma separated list.'
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: 'artifacts/**/*.xml'
|
||||
default: '24.04'
|
||||
type: string
|
||||
unittest_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-UnitTestReportSummary-*'
|
||||
type: string
|
||||
merged_junit_artifact:
|
||||
description: 'Name of the merged JUnit Test Summary artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
additional_merge_args:
|
||||
description: 'Additional merging arguments.'
|
||||
required: false
|
||||
default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
|
||||
type: string
|
||||
publish:
|
||||
description: 'Publish test report summary via Dorny Test-Reporter'
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
report_title:
|
||||
description: 'Title of the summary report in the pipeline''s sidebar'
|
||||
required: false
|
||||
default: 'Unit Test Results'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
PublishTestResults:
|
||||
name: 📊 Publish Test Results
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.unittest_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -psh artifacts
|
||||
|
||||
- name: 🔧 Install pyEDAA.Reports (JUnit Parser and Merger)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports
|
||||
|
||||
- name: Move JUnit files and collect them all to junit/
|
||||
run: |
|
||||
mkdir -p junit
|
||||
ls -lAh artifacts/*/*.xml
|
||||
find artifacts/ -type f -path "*TestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
|
||||
tree -a junit
|
||||
|
||||
- name: 🔁 Merge JUnit Unit Test Summaries
|
||||
run: |
|
||||
pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
|
||||
echo "cat Unittesting.xml"
|
||||
cat Unittesting.xml
|
||||
|
||||
- name: 📊 Publish Unit Test Results
|
||||
uses: dorny/test-reporter@v1
|
||||
if: inputs.publish && inputs.report_title != ''
|
||||
with:
|
||||
name: Unit Test Results
|
||||
path: ${{ inputs.report_files }}
|
||||
name: ${{ inputs.report_title }}
|
||||
path: Unittesting.xml
|
||||
reporter: java-junit
|
||||
|
||||
- name: 📤 Upload merged 'JUnit Test Summary' artifact
|
||||
if: inputs.merged_junit_artifact != ''
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.merged_junit_artifact }}
|
||||
path: Unittesting.xml
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
17
.github/workflows/PublishToGitHubPages.yml
vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -25,6 +25,11 @@ name: Publish to GitHub Pages
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
doc:
|
||||
description: 'Name of the documentation artifact.'
|
||||
required: true
|
||||
@@ -44,28 +49,28 @@ jobs:
|
||||
|
||||
PublishToGitHubPages:
|
||||
name: 📚 Publish to GH-Pages
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
|
||||
uses: actions/download-artifact@v3
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.doc }}
|
||||
path: public
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
|
||||
if: ${{ inputs.coverage != '' }}
|
||||
uses: actions/download-artifact@v3
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage }}
|
||||
path: public/coverage
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
|
||||
if: ${{ inputs.typing != '' }}
|
||||
uses: actions/download-artifact@v3
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.typing }}
|
||||
path: public/typing
|
||||
|
||||
43
.github/workflows/Release.yml
vendored
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -24,12 +24,17 @@ name: Release

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string

jobs:

Release:
name: 📝 Create 'Release Page' on GitHub
runs-on: ubuntu-latest
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: 🔁 Extract Git tag from GITHUB_REF
@@ -39,11 +44,9 @@ jobs:
RELEASE_VERSION=${GIT_TAG#v}
RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')"
# write to step outputs
cat >> "$GITHUB_OUTPUT" << EOF
gitTag=${GIT_TAG}
version=${RELEASE_VERSION}
datetime=${RELEASE_DATETIME}
EOF
echo "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
echo "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT

- name: 📑 Create Release Page
id: createReleasePage
@@ -57,12 +60,34 @@ jobs:
**Automated Release created on: ${{ steps.getVariables.outputs.datetime }}**

# New Features

* tbd
* tbd

# Changes

* tbd
* tbd

# Bug Fixes

* tbd
draft: false
* tbd

# Documentation

* tbd
* tbd

# Unit Tests

* tbd
* tbd

----------
# Related Issues and Pull-Requests

* tbd
* tbd
draft: true
prerelease: false

192 .github/workflows/SphinxDocumentation.yml vendored Normal file
@@ -0,0 +1,192 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Documentation

on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.12'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r doc/requirements.txt'
type: string
doc_directory:
description: 'Path to the directory containing documentation (Sphinx working directory).'
required: false
default: 'doc'
type: string
coverage_report_json_directory:
description: ''
required: true
type: string
coverage_json_artifact:
description: 'Name of the coverage JSON artifact.'
required: false
default: ''
type: string
unittest_xml_artifact:
description: 'Name of the unittest XML artifact.'
required: false
default: ''
type: string
unittest_xml_directory:
description: 'Directory where unittest XML artifact is extracted.'
required: false
default: 'report/unit'
type: string
html_artifact:
description: 'Name of the HTML documentation artifact.'
required: false
default: ''
type: string
latex_artifact:
description: 'Name of the LaTeX documentation artifact.'
required: false
default: ''
type: string

jobs:
Sphinx-HTML:
name: 📓 HTML Documentation using Sphinx and Python ${{ inputs.python_version }}
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true

- name: 🔧 Install graphviz
run: sudo apt-get install -y --no-install-recommends graphviz

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U wheel
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}

- name: ☑ Generate HTML documentation
if: inputs.html_artifact != ''
run: |
export PYTHONPATH=$(pwd)

cd "${{ inputs.doc_directory || '.' }}"
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html

- name: 📤 Upload 'HTML Documentation' artifact
if: inputs.html_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/html
path: '*'
if-no-files-found: error
retention-days: 1

Sphinx-LaTeX:
name: 📓 LaTeX Documentation using Sphinx and Python ${{ inputs.python_version }}
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v4
with:
lfs: true
submodules: true

- name: 🔧 Install graphviz
run: sudo apt-get install -y --no-install-recommends graphviz

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🔧 Install wheel,tomli and pip dependencies (native)
run: |
python -m pip install --disable-pip-version-check -U wheel
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
if: inputs.unittest_xml_artifact != ''
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.unittest_xml_artifact }}
path: ${{ inputs.unittest_xml_directory }}

- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
if: inputs.coverage_json_artifact != ''
uses: pyTooling/download-artifact@v4
with:
name: ${{ inputs.coverage_json_artifact }}
path: ${{ inputs.coverage_report_json_directory }}

- name: ☑ Generate LaTeX documentation
if: inputs.latex_artifact != ''
# continue-on-error: true
run: |
export PYTHONPATH=$(pwd)

cd "${{ inputs.doc_directory || '.' }}"
sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex
# --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . _build/html

- name: 📤 Upload 'LaTeX Documentation' artifact
if: inputs.latex_artifact != ''
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.latex_artifact }}
working-directory: ${{ inputs.doc_directory }}/_build/latex
path: '*'
if-no-files-found: error
retention-days: 1
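For orientation, a minimal sketch of how a project pipeline might call this new reusable workflow. The job name, artifact names, and the coverage JSON directory are illustrative assumptions, not values prescribed by the workflow itself:

Documentation:
  uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
  with:
    python_version: '3.12'
    coverage_report_json_directory: report/coverage   # assumption; must match the project's coverage configuration
    html_artifact: MyProject-Documentation-HTML        # illustrative artifact name
    latex_artifact: MyProject-Documentation-LaTeX      # illustrative artifact name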
64 .github/workflows/StaticTypeCheck.yml vendored
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -25,60 +25,84 @@ name: Static Type Check
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.10'
default: '3.12'
type: string
requirements:
description: 'Python dependencies to be installed through pip.'
required: false
default: '-r tests/requirements.txt'
type: string
report:
description: 'Directory to upload as an artifact.'
required: false
default: 'htmlmypy'
type: string
commands:
description: 'Commands to run the static type checks.'
required: true
type: string
artifact:
description: 'Name of the typing artifact.'
html_report:
description: 'Directory to upload as an artifact.'
required: false
default: 'htmlmypy'
type: string
junit_report:
description: 'junit file to upload as an artifact.'
required: false
default: 'StaticTypingSummary.xml'
type: string
html_artifact:
description: 'Name of the typing artifact (HTML report).'
required: true
type: string
junit_artifact:
description: 'Name of the typing junit artifact (junit XML).'
required: false
default: ''
type: string

jobs:

StaticTypeCheck:
name: 👀 Check Static Typing using Python ${{ inputs.python_version }}
runs-on: ubuntu-latest
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: 🐍 Setup Python ${{ inputs.python_version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

- name: 🗂 Install dependencies
run: |
python -m pip install -U pip
python -m pip install ${{ inputs.requirements }}
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

- name: Check Static Typing
continue-on-error: true
run: ${{ inputs.commands }}

- name: 📤 Upload 'Static Typing Report' artifact
if: ${{ inputs.artifact != '' }}
- name: 📤 Upload 'Static Typing Report' HTML artifact
if: ${{ inputs.html_artifact != '' }}
continue-on-error: true
uses: actions/upload-artifact@v3
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.artifact }}
path: ${{ inputs.report }}
name: ${{ inputs.html_artifact }}
working-directory: ${{ inputs.html_report }}
path: '*'
if-no-files-found: error
retention-days: 1

- name: 📤 Upload 'Static Typing Report' JUnit artifact
if: ${{ inputs.junit_artifact != '' }}
continue-on-error: true
uses: pyTooling/upload-artifact@v4
with:
name: ${{ inputs.junit_artifact }}
path: ${{ inputs.junit_report }}
if-no-files-found: error
retention-days: 1
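Because the former 'artifact'/'report' inputs are renamed to 'html_artifact'/'html_report' (with an optional 'junit_artifact'/'junit_report' pair), callers of this reusable workflow need to pass the new input names. A minimal sketch, with illustrative artifact and package names:

StaticTypeCheck:
  uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
  with:
    python_version: '3.12'
    commands: mypy --html-report htmlmypy -p myPackage   # 'myPackage' is a placeholder
    html_report: 'htmlmypy'
    html_artifact: MyProject-StaticTyping-HTML           # illustrative artifact name
    junit_artifact: ''                                    # leave empty to skip the JUnit upload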
14 .github/workflows/TestReleaser.yml vendored
@@ -3,7 +3,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -41,11 +41,11 @@ jobs:


Image:
runs-on: ubuntu-latest
runs-on: ubuntu-24.04
env:
DOCKER_BUILDKIT: 1
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Build container image
run: docker build -t ghcr.io/pytooling/releaser -f releaser/Dockerfile releaser
@@ -60,9 +60,9 @@ jobs:


Composite:
runs-on: ubuntu-latest
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt

@@ -120,9 +120,9 @@ jobs:
needs:
- Image
- Composite
runs-on: ubuntu-latest
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt


457 .github/workflows/UnitTesting.yml vendored
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -20,7 +20,7 @@
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Unit Testing
|
||||
name: Unit Testing (Matrix)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
@@ -29,40 +29,110 @@ on:
|
||||
description: 'JSON list with environment fields, telling the system and Python versions to run tests with.'
|
||||
required: true
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
apt:
|
||||
description: 'Ubuntu dependencies to be installed through apt.'
|
||||
required: false
|
||||
default: '-r tests/requirements.txt'
|
||||
default: ''
|
||||
type: string
|
||||
brew:
|
||||
description: 'macOS dependencies to be installed through brew.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
pacboy:
|
||||
description: 'MSYS2 dependencies to be installed through pacboy (pacman).'
|
||||
required: false
|
||||
default: >-
|
||||
python-pip:p
|
||||
python-wheel:p
|
||||
python-coverage:p
|
||||
python-lxml:p
|
||||
default: ''
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
required: false
|
||||
default: '-r tests/requirements.txt'
|
||||
type: string
|
||||
mingw_requirements:
|
||||
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
unittest_directory:
|
||||
description: 'Path to the directory containing unit tests.'
|
||||
macos_before_script:
|
||||
description: 'Scripts to execute before pytest on macOS (Intel).'
|
||||
required: false
|
||||
default: 'tests/unit'
|
||||
default: ''
|
||||
type: string
|
||||
artifact:
|
||||
macos_arm_before_script:
|
||||
description: 'Scripts to execute before pytest on macOS (ARM).'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ubuntu_before_script:
|
||||
description: 'Scripts to execute before pytest on Ubuntu.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
mingw64_before_script:
|
||||
description: 'Scripts to execute before pytest on Windows within MSYS2 MinGW64.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ucrt64_before_script:
|
||||
description: 'Scripts to execute before pytest on Windows within MSYS2 UCRT64.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
root_directory:
|
||||
description: 'Working directory for running tests.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
tests_directory:
|
||||
description: 'Path to the directory containing tests (relative to root_directory).'
|
||||
required: false
|
||||
default: 'tests'
|
||||
type: string
|
||||
unittest_directory:
|
||||
description: 'Path to the directory containing unit tests (relative to tests_directory).'
|
||||
required: false
|
||||
default: 'unit'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
|
||||
required: false
|
||||
default: 'pyproject.toml'
|
||||
type: string
|
||||
unittest_xml_artifact:
|
||||
description: "Generate unit test report with junitxml and upload results as an artifact."
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
unittest_html_artifact:
|
||||
description: "Generate unit test report with junitxml and upload results as an artifact."
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_sqlite_artifact:
|
||||
description: 'Name of the SQLite coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_xml_artifact:
|
||||
description: 'Name of the XML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the JSON coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_html_artifact:
|
||||
description: 'Name of the HTML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
|
||||
UnitTesting:
|
||||
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests using Python ${{ matrix.python }}
|
||||
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests - Python ${{ matrix.python }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
|
||||
strategy:
|
||||
@@ -76,57 +146,358 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: '🟦 Setup MSYS2'
|
||||
# Package Manager steps
|
||||
- name: 🔧 Install homebrew dependencies on macOS
|
||||
if: ( matrix.system == 'macos' || matrix.system == 'macos-arm' ) && inputs.brew != ''
|
||||
run: brew install ${{ inputs.brew }}
|
||||
|
||||
- name: 🔧 Install apt dependencies on Ubuntu
|
||||
if: matrix.system == 'ubuntu' && inputs.apt != ''
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends ${{ inputs.apt }}
|
||||
|
||||
# Compute Dependencies for MSYS2 steps
|
||||
|
||||
- name: 🔧 Install dependencies (system Python for Python shell)
|
||||
if: matrix.system == 'msys2'
|
||||
shell: pwsh
|
||||
run: |
|
||||
py -3.9 -m pip install --disable-pip-version-check -U tomli
|
||||
|
||||
- name: Compute pacman/pacboy packages
|
||||
id: pacboy
|
||||
if: matrix.system == 'msys2'
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from re import compile
|
||||
from sys import version
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
def loadRequirementsFile(requirementsFile: Path):
|
||||
requirements = []
|
||||
with requirementsFile.open("r") as file:
|
||||
for line in file.readlines():
|
||||
line = line.strip()
|
||||
if line.startswith("#") or line.startswith("https") or line == "":
|
||||
continue
|
||||
elif line.startswith("-r"):
|
||||
# Remove the first word/argument (-r)
|
||||
requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
|
||||
else:
|
||||
requirements.append(line)
|
||||
|
||||
return requirements
|
||||
|
||||
requirements = "${{ inputs.requirements }}"
|
||||
if requirements.startswith("-r"):
|
||||
requirementsFile = Path(requirements[2:].lstrip())
|
||||
try:
|
||||
dependencies = loadRequirementsFile(requirementsFile)
|
||||
except FileNotFoundError as ex:
|
||||
print(f"::error title=FileNotFoundError::{ex}")
|
||||
exit(1)
|
||||
else:
|
||||
dependencies = [req.strip() for req in requirements.split(" ")]
|
||||
|
||||
packages = {
|
||||
"coverage": "python-coverage:p",
|
||||
"docstr_coverage": "python-pyaml:p",
|
||||
"igraph": "igraph:p",
|
||||
"jinja2": "python-markupsafe:p",
|
||||
"lxml": "python-lxml:p",
|
||||
"numpy": "python-numpy:p",
|
||||
"markupsafe": "python-markupsafe:p",
|
||||
"pip": "python-pip:p",
|
||||
"pyyaml": "python-pyyaml:p",
|
||||
"ruamel.yaml": "python-ruamel-yaml:p",
|
||||
# "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
"sphinx": "python-markupsafe:p",
|
||||
"tomli": "python-tomli:p",
|
||||
"wheel": "python-wheel:p",
|
||||
"pyedaa.projectmodel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyedaa.reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
}
|
||||
subPackages = {
|
||||
"pytooling": {
|
||||
"yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
},
|
||||
}
|
||||
|
||||
regExp = compile(r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")
|
||||
|
||||
pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
|
||||
print(f"Processing dependencies ({len(dependencies)}):")
|
||||
for dependency in dependencies:
|
||||
print(f" {dependency}")
|
||||
|
||||
match = regExp.match(dependency.lower())
|
||||
if not match:
|
||||
print(f" Wrong format: {dependency}")
|
||||
print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
|
||||
continue
|
||||
|
||||
package = match["PackageName"]
|
||||
if package in packages:
|
||||
rewrite = packages[package]
|
||||
print(f" Found rewrite rule for '{package}': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
if match["SubPackages"] and package in subPackages:
|
||||
for subPackage in match["SubPackages"].split(","):
|
||||
if subPackage in subPackages[package]:
|
||||
rewrite = subPackages[package][subPackage]
|
||||
print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+") as f:
|
||||
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")
|
||||
|
||||
# Python setup
|
||||
|
||||
- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
|
||||
if: matrix.system == 'msys2'
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
msystem: MINGW64
|
||||
msystem: ${{ matrix.runtime }}
|
||||
update: true
|
||||
pacboy: ${{ inputs.pacboy }}
|
||||
pacboy: >-
|
||||
${{ steps.pacboy.outputs.pacboy_packages }}
|
||||
${{ inputs.pacboy }}
|
||||
|
||||
- name: 🐍 Setup Python ${{ matrix.python }}
|
||||
if: matrix.system != 'msys2'
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
- name: ⚙️ Update pip
|
||||
run: python -m pip install -U pip
|
||||
# Python Dependency steps
|
||||
|
||||
- name: 🔧 Install wheel and pip dependencies
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
if: matrix.system != 'msys2'
|
||||
run: |
|
||||
python -m pip install -U wheel
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
python -m pip install --disable-pip-version-check -U wheel tomli
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 🔧 Install pip dependencies
|
||||
- name: 🔧 Install pip dependencies (MSYS2)
|
||||
if: matrix.system == 'msys2'
|
||||
run: |
|
||||
if [ 'x${{ inputs.mingw_requirements }}' != 'x' ]; then
|
||||
python -m pip install ${{ inputs.mingw_requirements }}
|
||||
if [ -n '${{ inputs.mingw_requirements }}' ]; then
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
|
||||
else
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
fi
|
||||
|
||||
- name: ☑ Run unit tests
|
||||
if: matrix.system == 'windows'
|
||||
run: |
|
||||
$PYTEST_ARGS = if ("${{ inputs.artifact }}".length -gt 0) { "--junitxml=TestReport.xml" } else { "" }
|
||||
python -m pytest -rA ${{ inputs.unittest_directory }} $PYTEST_ARGS --color=yes
|
||||
# Before scripts
|
||||
|
||||
- name: ☑ Run unit tests
|
||||
- name: 🍎 macOS (Intel) before scripts
|
||||
if: matrix.system == 'macos' && inputs.macos_before_script != ''
|
||||
run: ${{ inputs.macos_before_script }}
|
||||
|
||||
- name: 🍏 macOS (ARM) before scripts
|
||||
if: matrix.system == 'macos-arm' && inputs.macos_arm_before_script != ''
|
||||
run: ${{ inputs.macos_arm_before_script }}
|
||||
|
||||
- name: 🐧 Ubuntu before scripts
|
||||
if: matrix.system == 'ubuntu' && inputs.ubuntu_before_script != ''
|
||||
run: ${{ inputs.ubuntu_before_script }}
|
||||
|
||||
# Windows before script
|
||||
|
||||
- name: 🪟🟦 MinGW64 before scripts
|
||||
if: matrix.system == 'msys2' && matrix.runtime == 'MINGW64' && inputs.mingw64_before_script != ''
|
||||
run: ${{ inputs.mingw64_before_script }}
|
||||
|
||||
- name: 🪟🟨 UCRT64 before scripts
|
||||
if: matrix.system == 'msys2' && matrix.runtime == 'UCRT64' && inputs.ucrt64_before_script != ''
|
||||
run: ${{ inputs.ucrt64_before_script }}
|
||||
|
||||
# Read pyproject.toml
|
||||
|
||||
- name: 🔁 Extract configurations from pyproject.toml
|
||||
id: getVariables
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from sys import version
|
||||
from textwrap import dedent
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
from tomli import load as tomli_load
|
||||
|
||||
htmlDirectory = Path("htmlcov")
|
||||
xmlFile = Path("./coverage.xml")
|
||||
jsonFile = Path("./coverage.json")
|
||||
coverageRC = "${{ inputs.coverage_config }}".strip()
|
||||
|
||||
# Read output paths from 'pyproject.toml' file
|
||||
if coverageRC == "pyproject.toml":
|
||||
pyProjectFile = Path("pyproject.toml")
|
||||
if pyProjectFile.exists():
|
||||
with pyProjectFile.open("rb") as file:
|
||||
pyProjectSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
|
||||
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
|
||||
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
|
||||
else:
|
||||
print(f"File '{pyProjectFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Read output paths from '.coveragerc' file
|
||||
elif len(coverageRC) > 0:
|
||||
coverageRCFile = Path(coverageRC)
|
||||
if coverageRCFile.exists():
|
||||
with coverageRCFile.open("rb") as file:
|
||||
coverageRCSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(coverageRCSettings["html"]["directory"])
|
||||
xmlFile = Path(coverageRCSettings["xml"]["output"])
|
||||
jsonFile = Path(coverageRCSettings["json"]["output"])
|
||||
else:
|
||||
print(f"File '{coverageRCFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
unittest_report_html_directory={htmlDirectory}
|
||||
coverage_report_html_directory={htmlDirectory.as_posix()}
|
||||
coverage_report_xml={xmlFile}
|
||||
coverage_report_json={jsonFile}
|
||||
"""))
|
||||
|
||||
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
|
||||
|
||||
# Run pytests
|
||||
|
||||
- name: ✅ Run unit tests (Ubuntu/macOS)
|
||||
if: matrix.system != 'windows'
|
||||
run: |
|
||||
[ 'x${{ inputs.artifact }}' != 'x' ] && PYTEST_ARGS='--junitxml=TestReport.xml' || unset PYTEST_ARGS
|
||||
python -m pytest -rA ${{ inputs.unittest_directory }} $PYTEST_ARGS --color=yes
|
||||
export ENVIRONMENT_NAME="${{ matrix.envname }}"
|
||||
export PYTHONPATH=$(pwd)
|
||||
|
||||
- name: 📤 Upload 'TestReport.xml' artifact
|
||||
if: inputs.artifact != ''
|
||||
uses: actions/upload-artifact@v3
|
||||
cd "${{ inputs.root_directory || '.' }}"
|
||||
[ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS
|
||||
if [ -n '${{ inputs.coverage_config }}' ]; then
|
||||
echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
else
|
||||
echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
fi
|
||||
|
||||
- name: ✅ Run unit tests (Windows)
|
||||
if: matrix.system == 'windows'
|
||||
run: |
|
||||
$env:ENVIRONMENT_NAME = "${{ matrix.envname }}"
|
||||
$env:PYTHONPATH = (Get-Location).ToString()
|
||||
|
||||
cd "${{ inputs.root_directory || '.' }}"
|
||||
$PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" }
|
||||
if ("${{ inputs.coverage_config }}") {
|
||||
Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
} else {
|
||||
Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
}
|
||||
|
||||
- name: Convert coverage to XML format (Cobertura)
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
run: coverage xml --data-file=.coverage
|
||||
|
||||
- name: Convert coverage to JSON format
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
run: coverage json --data-file=.coverage
|
||||
|
||||
- name: Convert coverage to HTML format
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
run: |
|
||||
coverage html --data-file=.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore
|
||||
|
||||
# Upload artifacts
|
||||
|
||||
- name: 📤 Upload 'TestReportSummary.xml' artifact
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.artifact }}-${{ matrix.system }}-${{ matrix.python }}
|
||||
path: TestReport.xml
|
||||
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: report/unit
|
||||
path: TestReportSummary.xml
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
|
||||
# if: inputs.unittest_html_artifact != ''
|
||||
# continue-on-error: true
|
||||
# uses: pyTooling/upload-artifact@v4
|
||||
# with:
|
||||
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
# path: ${{ steps.getVariables.outputs.unittest_report_html_directory }}
|
||||
# if-no-files-found: error
|
||||
# retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: .coverage
|
||||
include-hidden-files: true
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
17 .github/workflows/VerifyDocs.yml vendored
@@ -4,7 +4,7 @@
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2022 The pyTooling Authors #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
@@ -25,24 +25,29 @@ name: Verify examples
on:
workflow_call:
inputs:
ubuntu_image_version:
description: 'Ubuntu image version.'
required: false
default: '24.04'
type: string
python_version:
description: 'Python version.'
required: false
default: '3.10'
default: '3.12'
type: string

jobs:

VerifyDocs:
name: 👍 Verify example snippets using Python ${{ inputs.python_version }}
runs-on: ubuntu-latest
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

steps:
- name: ⏬ Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: 🐍 Setup Python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ inputs.python_version }}

@@ -72,7 +77,7 @@ jobs:
- name: Print example.py
run: cat tests/docs/example.py

- name: ☑ Run example snippet
- name: ✅ Run example snippet
working-directory: tests/docs
run: |
python3 example.py

61 .github/workflows/_Checking_ArtifactCleanup.yml vendored Normal file
@@ -0,0 +1,61 @@
|
||||
name: Verification Pipeline for ArtifactCleanup
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12 3.13"
|
||||
system_list: "ubuntu windows"
|
||||
|
||||
Testing:
|
||||
name: Artifact generation ${{ matrix.system }}-${{ matrix.python }}
|
||||
needs:
|
||||
- Params
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
strategy:
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.Params.outputs.python_jobs) }}
|
||||
steps:
|
||||
- name: Content creation for ${{ matrix.system }}-${{ matrix.python }}
|
||||
run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
|
||||
|
||||
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
|
||||
path: artifact.txt
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
Package:
|
||||
name: Package generation
|
||||
needs:
|
||||
- Params
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Package creation
|
||||
run: echo "Package" >> package.txt
|
||||
|
||||
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
path: package.txt
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
needs:
|
||||
- Params
|
||||
- Testing
|
||||
- Package
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
|
||||
242 .github/workflows/_Checking_JobTemplates.yml vendored Normal file
@@ -0,0 +1,242 @@
|
||||
name: Verification of Job Templates
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ConfigParams:
|
||||
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
|
||||
with:
|
||||
package_name: pyDummy
|
||||
|
||||
UnitTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: pyDummy
|
||||
python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
|
||||
# disable_list: "windows:pypy-3.10"
|
||||
|
||||
PlatformTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Platform
|
||||
python_version_list: ""
|
||||
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
with:
|
||||
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
|
||||
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
PlatformTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
needs:
|
||||
- PlatformTestingParams
|
||||
with:
|
||||
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
|
||||
# tests_directory: ""
|
||||
unittest_directory: platform
|
||||
unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
# Coverage:
|
||||
# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
|
||||
# needs:
|
||||
# - UnitTestingParams
|
||||
# with:
|
||||
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
# artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
# secrets:
|
||||
# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
commands: |
|
||||
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
|
||||
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
|
||||
html_report: 'htmlmypy'
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
DocCoverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@r1
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
directory: ${{ needs.ConfigParams.outputs.package_directors }}
|
||||
# fail_below: 70
|
||||
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
# - Coverage
|
||||
- PlatformTesting
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
|
||||
PublishCoverageResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
# - Coverage
|
||||
with:
|
||||
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
|
||||
needs:
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
with:
|
||||
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
|
||||
|
||||
# VerifyDocs:
|
||||
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
|
||||
# needs:
|
||||
# - UnitTestingParams
|
||||
# with:
|
||||
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
|
||||
Documentation:
|
||||
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
# - VerifyDocs
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
|
||||
# unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
|
||||
IntermediateCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@r1
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PublishCoverageResults
|
||||
- PublishTestResults
|
||||
- Documentation
|
||||
with:
|
||||
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
|
||||
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
|
||||
|
||||
PDFDocumentation:
|
||||
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
with:
|
||||
document: actions
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
# - PDFDocumentation
|
||||
# - Coverage
|
||||
- PublishCoverageResults
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
ReleasePage:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
# - Coverage
|
||||
# - StaticTypeCheck
|
||||
- Package
|
||||
- PublishToGitHubPages
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- ReleasePage
|
||||
# - Package
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
requirements: -r dist/requirements.txt
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PlatformTestingParams
|
||||
- UnitTesting
|
||||
# - Coverage
|
||||
- StaticTypeCheck
|
||||
- PlatformTesting
|
||||
- Documentation
|
||||
# - PDFDocumentation
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
- PublishToGitHubPages
|
||||
- IntermediateCleanUp
|
||||
with:
|
||||
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}-*
|
||||
15 .github/workflows/_Checking_NamespacePackage_Pipeline.yml vendored Normal file
@@ -0,0 +1,15 @@
name: Verification of Pipeline Templates (Namespace Package)

on:
push:
workflow_dispatch:

jobs:
NamespacePackage:
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
with:
package_namespace: pyExamples
package_name: Extensions
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
101 .github/workflows/_Checking_Nightly.yml vendored Normal file
@@ -0,0 +1,101 @@
|
||||
name: Verification of Nightly Releases
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Build:
|
||||
name: Build something
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: 🖉 Build 1
|
||||
run: |
|
||||
echo "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
echo "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
|
||||
echo "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: document
|
||||
path: |
|
||||
document1.txt
|
||||
*.log
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 🖉 Program
|
||||
run: |
|
||||
echo "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
echo "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: other
|
||||
path: |
|
||||
*.txt
|
||||
*.py
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
NightlyPage:
|
||||
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
|
||||
needs:
|
||||
- Build
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
with:
|
||||
prerelease: true
|
||||
replacements: |
|
||||
version=4.2.0
|
||||
tool=myTool
|
||||
prog=program
|
||||
nightly_title: "Nightly Release"
|
||||
nightly_description: |
|
||||
This *nightly* release contains all latest and important artifacts created by GHDL's CI pipeline.
|
||||
|
||||
# GHDL %version%
|
||||
|
||||
GHDL offers the simulator and synthesis tool for VHDL. GHDL can be build for various backends:
|
||||
* `gcc` - using the GCC compiler framework
|
||||
* `mcode` - in memory code generation
|
||||
* `llvm` - using the LLVM compiler framework
|
||||
* `llvm-jit` - using the LLVM compiler framework, but in memory
|
||||
|
||||
The following asset categories are provided for GHDL:
|
||||
* macOS x64-64 builds as TAR/GZ file
|
||||
* macOS aarch64 builds as TAR/GZ file
|
||||
* Ubuntu 24.04 LTS builds as TAR/GZ file
|
||||
* Windows builds for standalone usage (without MSYS2) as ZIP file
|
||||
* MSYS2 packages as TAR/ZST file
|
||||
|
||||
# pyGHDL %version%
|
||||
|
||||
The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
|
||||
In addition to the low-level binding layer, pyGHDL offers:
|
||||
* a Language Server Protocol (LSP) instance for e.g. live code checking by editors
|
||||
* a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)
|
||||
|
||||
The following asset categories are provided for pyGHDL:
|
||||
* Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so`
|
||||
* Platform specific Python wheel package for Windows incl. `pyGHDL...dll`
|
||||
assets: |
|
||||
document: document1.txt: Documentation
|
||||
document: build.log: Logfile - %tool% - %tool%
|
||||
other: document1.txt: SBOM - %version%
|
||||
other: %prog%.py: Application - %tool% - %version%
|
||||
document:!archive1.zip: Archive 1 - zip
|
||||
document:!archive2.tgz: Archive 2 - tgz
|
||||
document:!archive3.tar.gz: Archive 3 - tar.gz
|
||||
document:!archive4.tzst: Archive 4 - tzst
|
||||
document:!archive5.tar.zst:Archive 5 - tar.zst
|
||||
document:$archive6.tgz: Archive 6 - tgz + dir
|
||||
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
|
||||
document:$archive8.tzst: Archive 8 - tzst + dir
|
||||
document:$archive9.tar.zst:Archive 9 - tar.zst + dir
|
||||
514 .github/workflows/_Checking_Parameters.yml vendored Normal file
@@ -0,0 +1,514 @@
|
||||
name: Verification Pipeline for Parameters
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params_Default:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
|
||||
Params_PythonVersions:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"
|
||||
|
||||
Params_Systems:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
system_list: "windows mingw32 mingw64"
|
||||
|
||||
Params_Include:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "ubuntu:3.12 ubuntu:3.13"
|
||||
|
||||
Params_Exclude:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
exclude_list: "windows:3.12 windows:3.13"
|
||||
|
||||
Params_Disable:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
disable_list: "windows:3.12 windows:3.13"
|
||||
|
||||
Params_All:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12 3.13"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "windows:3.10 windows:3.11 windows:3.13"
|
||||
exclude_list: "macos:3.12 macos:3.13"
|
||||
|
||||
Params_Check:
|
||||
needs:
|
||||
- Params_Default
|
||||
- Params_PythonVersions
|
||||
- Params_Systems
|
||||
- Params_Include
|
||||
- Params_Exclude
|
||||
- Params_Disable
|
||||
- Params_All
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
shell: python
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: pip install --disable-pip-version-check --break-system-packages pyTooling
|
||||
# Params_Default
|
||||
- name: Checking results from 'Params_Default'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Default.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Default.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Default.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_PythonVersions
|
||||
- name: Checking results from 'Params_PythonVersions'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.11", "3.12", "pypy-3.9", "pypy-3.10"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.11", "ucrt64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_PythonVersions.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_PythonVersions.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_PythonVersions.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Systems
|
||||
- name: Checking results from 'Params_Systems'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedSystems = ["windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.11", "mingw64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Systems.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Systems.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Systems.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Include
|
||||
- name: Checking results from 'Params_Include'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["ubuntu:3.11", "ubuntu:3.12"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Include.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Include.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Include.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Exclude
|
||||
- name: Checking results from 'Params_Exclude'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Disable
|
||||
- name: Checking results from 'Params_Disable'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_All
|
||||
- name: Checking results from 'Params_All'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "macos-arm", "windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.10", "windows:3.11", "windows:3.13"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_All.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_All.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_All.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
14
.github/workflows/_Checking_SimplePackage_Pipeline.yml
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
name: Verification of Pipeline Templates (Simple Package)
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
SimplePackage:
|
||||
uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
|
||||
with:
|
||||
package_name: pyDummy
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
38
.gitignore
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
# Python cache and object files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
|
||||
# Coverage.py
|
||||
.coverage
|
||||
.cov
|
||||
coverage.xml
|
||||
/report/coverage
|
||||
|
||||
# mypy
|
||||
/report/typing
|
||||
|
||||
# pytest
|
||||
/report/unit
|
||||
/tests/*.github
|
||||
|
||||
# setuptools
|
||||
/build/**/*.*
|
||||
/dist/**/*.*
|
||||
/*.egg-info
|
||||
|
||||
# Dependencies
|
||||
!requirements.txt
|
||||
|
||||
# Sphinx
|
||||
doc/_build/
|
||||
doc/pyDummy/**/*.*
|
||||
!doc/pyDummy/index.rst
|
||||
|
||||
# BuildTheDocs
|
||||
doc/_theme/**/*.*
|
||||
|
||||
# PyCharm project files
|
||||
/.idea/workspace.xml
|
||||
|
||||
# Git files
|
||||
!.git*
|
||||
2
.idea/Actions.iml
generated
@@ -2,7 +2,7 @@
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$" />
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
3
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"files.trimTrailingWhitespace": false,
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
# Development
|
||||
|
||||
## Tagging/versioning
|
||||
|
||||
See context in [#5](https://github.com/pyTooling/Actions/issues/5).
|
||||
|
||||
Tag new releases in the `main` branch using a semver compatible value, starting with `v`:
|
||||
|
||||
```sh
|
||||
git checkout main
|
||||
git tag v0.0.0
|
||||
git push upstream v0.0.0
|
||||
```
|
||||
|
||||
Move the corresponding release branch (starting with `r`) forward by creating a merge commit, and using the merged tag
|
||||
as the commit message:
|
||||
|
||||
```sh
|
||||
git checkout r0
|
||||
git merge --no-ff -m 'v0.0.0' v0.0.0
|
||||
git push upstream r0
|
||||
```
|
||||
@@ -4,7 +4,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -52,18 +52,21 @@ jobs:
|
||||
python-coverage:p
|
||||
python-lxml:p
|
||||
mingw_requirements: '-r tests/requirements.mingw.txt'
|
||||
unittest_directory: 'tests/unit'
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }}
|
||||
tests_directory: 'tests'
|
||||
unittest_directory: 'unit'
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}
|
||||
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: '-r tests/requirements.txt'
|
||||
tests_directory: 'tests'
|
||||
unittest_directory: 'unit'
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
@@ -72,18 +75,22 @@ jobs:
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
commands: |
|
||||
mypy --junit-xml StaticTypingSummary.xml --html-report htmlmypy -p ToolName
|
||||
html_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
junit_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: '-r tests/requirements.txt'
|
||||
report: 'htmlmypy'
|
||||
html_report: 'htmlmypy'
|
||||
junit_report: 'StaticTypingSummary.xml'
|
||||
allow_failure: true
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
|
||||
needs:
|
||||
- UnitTesting
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
# Optional
|
||||
report_files: artifacts/**/*.xml
|
||||
@@ -94,9 +101,9 @@ jobs:
|
||||
- Params
|
||||
- Coverage
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: 'wheel'
|
||||
|
||||
Release:
|
||||
@@ -116,9 +123,9 @@ jobs:
|
||||
- Release
|
||||
- Package
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: 'wheel twine'
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
@@ -129,7 +136,7 @@ jobs:
|
||||
- Params
|
||||
with:
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main
|
||||
@@ -137,7 +144,7 @@ jobs:
|
||||
- Params
|
||||
- VerifyDocs
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
|
||||
@@ -147,10 +154,10 @@ jobs:
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
# Optional
|
||||
coverage: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
typing: ${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
@@ -162,9 +169,10 @@ jobs:
|
||||
- BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }}-*
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }}
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
|
||||
110
README.md
@@ -7,86 +7,15 @@ language for writing reusable CI code.
|
||||
However, Python being equally popular and capable, usage of JS/TS might be bypassed, with some caveats.
|
||||
This repository gathers reusable CI tooling for testing, packaging and distributing Python projects and documentation.
|
||||
|
||||
|
||||
## Context
|
||||
|
||||
GitHub Actions supports five procedures to reuse code:
|
||||
|
||||
- JavaScript Action:
|
||||
- [docs.github.com: actions/creating-actions/creating-a-javascript-action](https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action)
|
||||
- Container Action:
|
||||
- [docs.github.com: actions/creating-actions/creating-a-docker-container-action](https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action)
|
||||
- Container Step:
|
||||
- [docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action](https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action)
|
||||
- [docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs](https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs)
|
||||
- Composite Action:
|
||||
- [docs.github.com: actions/creating-actions/creating-a-composite-action](https://docs.github.com/en/actions/creating-actions/creating-a-composite-action)
|
||||
- [github.blog/changelog: 2020-08-07-github-actions-composite-run-steps](https://github.blog/changelog/2020-08-07-github-actions-composite-run-steps/)
|
||||
- [github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-compositio](https://github.blog/changelog/2021-08-25-github-actions-reduce-duplication-with-action-composition/)
|
||||
- Reusable Workflow:
|
||||
- [docs.github.com: actions/learn-github-actions/reusing-workflows](https://docs.github.com/en/actions/learn-github-actions/reusing-workflows)
|
||||
- [github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows](https://github.blog/changelog/2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows/)
|
||||
|
||||
Container Actions and Container Steps are almost equivalent: Actions use a configuration file (`action.yml`), while
|
||||
Steps do not.
|
||||
Leaving JavaScript and Container Actions and Steps aside, the main differences between Composite Actions and Reusable
|
||||
Workflows are the following:
|
||||
|
||||
- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location.
|
||||
However, Reusable Workflows can only be used through a remote/external path (`{owner}/{repo}/{path}/{filename}@{ref}`),
|
||||
where `{path}` must be `.github/workflows`, and `@{ref}` is required.
|
||||
See [actions/runner#1493](https://github.com/actions/runner/issues/1493).
|
||||
As a result:
|
||||
- Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without
|
||||
checkout.
|
||||
- Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick the modifications by
|
||||
default.
|
||||
- Composite Actions can include multiple steps, but not multiple jobs.
|
||||
Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job.
|
||||
- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository.
|
||||
Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default.
|
||||
|
||||
### Callable vs dispatchable workflows
|
||||
|
||||
Reusable Workflows are defined through the `workflow_call` event kind.
|
||||
Similarly, any "regular" Workflow can be triggered through a `workflow_dispatch` event.
|
||||
Both event kinds support `input` options, which are usable within the Workflow.
|
||||
Therefore, one might intuitively try to write a workflow which is both callable and dispatchable.
|
||||
In other words, which can be either reused from another workflow, or triggered through the API.
|
||||
Unfortunately, that is not the case.
|
||||
Although `input` options can be duplicated for both events, GitHub's backend exposes them through different objects.
|
||||
In dispatchable Workflows, the object is `${{ github.event.inputs }}`, while callable workflows receive `${{ inputs }}`.
|
||||
|
||||
As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required.
|
||||
See, for instance, [hdl/containers: .github/workflows/common.yml](https://github.com/hdl/containers/blob/main/.github/workflows/common.yml) and [hdl/containers: .github/workflows/dispatch.yml](https://github.com/hdl/containers/blob/main/.github/workflows/dispatch.yml).
|
||||
Alternatively, a normalisation job might be used, similar to the `Parameters` in this repo.
|
||||
|
||||
### Call hierarchy
|
||||
|
||||
Reusable Workflows cannot call other Reusable Workflows, however, they can use Composite Actions and Composite Actions
|
||||
can call other Actions.
|
||||
Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along
|
||||
with multiple layers of composite actions.
|
||||
|
||||
### Script with post step
|
||||
|
||||
JavaScript Actions support defining `pre`, `pre-if`, `post` and `post-if` steps, which allow executing steps at the
|
||||
beginning or the end of a job, regardless of intermediate steps failing.
|
||||
Unfortunately, those are not available for any other Action type.
|
||||
|
||||
Action [with-post-step](with-post-step) is a generic JS Action to execute a main command and to set a command as a post
|
||||
step.
|
||||
It allows using the `post` feature with scripts written in bash, python or any other interpreted language available on
|
||||
the environment.
|
||||
See: [actions/runner#1478](https://github.com/actions/runner/issues/1478).
|
||||
|
||||
See [GitHub Actions and GitHub Reusable Workflows](https://pytooling.github.io/Actions/Background.html) for more
|
||||
background information.
|
||||
|
||||
## Reusable workflows
|
||||
|
||||
This repository provides 10+ Reusable Workflows based on the CI pipelines of the repos in this organisation,
|
||||
[EDA²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others.
|
||||
By combining them, Python packages can be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
|
||||
Optionally, coverage and static type check reports can be gathered.
|
||||
This repository provides 10+ *Reusable Workflows* based on the CI pipelines of the repos in this GitHub organisation,
|
||||
[EDA²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others. By combining them, Python packages can
|
||||
be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
|
||||
Optionally, coverage and static type check reports can be gathered and integrated into the online documentation.
|
||||
|
||||
[](ExamplePipeline_dark.png)
|
||||
|
||||
@@ -111,28 +40,6 @@ As shown in the screenshots above, the expected order is:
|
||||
optionally upload results as an HTML report.
|
||||
Example `commands`:
|
||||
|
||||
1. Regular package
|
||||
|
||||
```yml
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
2. Parent namespace package
|
||||
|
||||
```yml
|
||||
commands: |
|
||||
touch Parent/__init__.py
|
||||
mypy --html-report htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
3. Child namespace package
|
||||
|
||||
```yml
|
||||
commands: |
|
||||
cd Parent
|
||||
mypy --html-report ../htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
- [VerifyDocs](.github/workflows/VerifyDocs.yml): extract code examples from the README and test these code snippets.
|
||||
- Packaging and releasing:
|
||||
- [Release](.github/workflows/Release.yml): publish GitHub Release.
|
||||
@@ -162,11 +69,6 @@ Find further usage cases in the following list of projects:
|
||||
- [VHDL/pyVHDLModel](https://github.com/VHDL/pyVHDLModel/tree/main/.github/workflows)
|
||||
|
||||
|
||||
## References
|
||||
|
||||
- [hdl/containers#48](https://github.com/hdl/containers/issues/48)
|
||||
|
||||
|
||||
## Contributors
|
||||
|
||||
* [Patrick Lehmann](https://GitHub.com/Paebbels)
|
||||
|
||||
2
dist/requirements.txt
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
wheel ~= 0.45
|
||||
twine ~= 5.1
|
||||
195
doc/Action/Releaser.rst
Normal file
@@ -0,0 +1,195 @@
|
||||
.. _ACTION/Releaser:
|
||||
|
||||
Releaser
|
||||
########
|
||||
|
||||
**Releaser** is a Docker GitHub Action written in Python.
|
||||
|
||||
**Releaser** allows keeping a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
|
||||
Combined with a workflow that is executed periodically, **Releaser** allows providing a fixed release name for users
|
||||
willing to use daily/nightly artifacts of a project.
|
||||
|
||||
Furthermore, when any `semver <https://semver.org>`__ compliant tagged commit is pushed, **Releaser** can create a
|
||||
release and upload assets.
|
||||
|
||||
Context
|
||||
*******
|
||||
|
||||
GitHub provides official clients for the GitHub API through `github.com/octokit <https://github.com/octokit>`__:
|
||||
|
||||
- `octokit.js <https://github.com/octokit/octokit.js>`__ (`octokit.github.io/rest.js <https://octokit.github.io/rest.js>`__)
|
||||
- `octokit.rb <https://github.com/octokit/octokit.rb>`__ (`octokit.github.io/octokit.rb <http://octokit.github.io/octokit.rb>`__)
|
||||
- `octokit.net <https://github.com/octokit/octokit.net>`__ (`octokitnet.rtfd.io <https://octokitnet.rtfd.io>`__)
|
||||
|
||||
When GitHub Actions was released in 2019, two Actions were made available through
|
||||
`github.com/actions <https://github.com/actions>`__ for dealing with GitHub Releases:
|
||||
|
||||
- `actions/create-release <https://github.com/actions/create-release>`__
|
||||
- `actions/upload-release-asset <https://github.com/actions/upload-release-asset>`__
|
||||
|
||||
However, those Actions were contributed by an employee in their spare time and were not officially supported by GitHub.
|
||||
Therefore, they were already unmaintained before GitHub Actions left its private beta
|
||||
(see `actions/upload-release-asset#58 <https://github.com/actions/upload-release-asset/issues/58>`__)
|
||||
and, a year later, archived.
|
||||
Those Actions are based on `actions/toolkit <https://github.com/actions/toolkit>`__'s hydrated version of octokit.js.
|
||||
|
||||
From a practical point of view, `actions/github-script <https://github.com/actions/github-script>`__ is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
|
||||
Still, it requires writing plain JavaScript.
|
||||
|
||||
Alternatively, there are non-official GitHub API libraries available in other languages (see `docs.github.com: rest/overview/libraries <https://docs.github.com/en/rest/overview/libraries>`__).
|
||||
**Releaser** is based on `PyGithub/PyGithub <https://github.com/PyGithub/PyGithub>`__, a Python client for the GitHub API.
|
||||
|
||||
**Releaser** was originally created in `eine/tip <https://github.com/eine/tip>`__, as an enhanced alternative to using
|
||||
``actions/create-release`` and ``actions/upload-release-asset``, in order to cover certain use cases that were being
|
||||
migrated from Travis CI to GitHub Actions.
|
||||
The main limitation of GitHub's Actions was/is their verbosity and the inability to dynamically define the list of assets
|
||||
to be uploaded.
|
||||
|
||||
On the other hand, GitHub Actions artifacts do require login in order to download them.
|
||||
Conversely, assets of GitHub Releases can be downloaded without login.
|
||||
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
|
||||
as assets.
|
||||
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
|
||||
Thus, the name of the Action.
|
||||
|
||||
GitHub provides an official CLI tool, written in golang: `cli/cli <https://github.com/cli/cli>`__.
|
||||
When the Python version of **Releaser** was written, ``cli`` was evaluated as an alternative to *PyGitHub*.
|
||||
``gh release`` was (and still is) not flexible enough to update the reference of a release, without deleting and
|
||||
recreating it (see `cli.github.com: manual/gh_release_create <https://cli.github.com/manual/gh_release_create>`__).
|
||||
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
|
||||
(see `eine/tip#111 <https://github.com/eine/tip/issues/111>`__).
|
||||
However, `cli.github.com: manual/gh_release_upload <https://cli.github.com/manual/gh_release_upload>`__ handles uploading
|
||||
artifacts as assets faster and with better stability for larger files than *PyGitHub*
|
||||
(see `msys2/msys2-installer#36 <https://github.com/msys2/msys2-installer/pull/36>`__).
|
||||
Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
|
||||
Although ``gh`` does not support login through SSH (see `cli/cli#3715 <https://github.com/cli/cli/issues/3715>`__), on GitHub
|
||||
Actions a token is available as ``${{ github.token }}``.
|
||||
Therefore, **Releaser** uses ``gh release upload`` internally.
|
||||
|
||||
Usage
|
||||
*****
|
||||
|
||||
The following block shows a minimal YAML workflow file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: 'workflow'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * 5'
|
||||
|
||||
jobs:
|
||||
mwe:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
|
||||
# Clone repository
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Build your application, tool, artifacts, etc.
|
||||
- name: Build
|
||||
run: |
|
||||
echo "Build some tool and generate some artifacts" > artifact.txt
|
||||
|
||||
# Update tag and pre-release
|
||||
# - Update (force-push) tag to the commit that is used in the workflow.
|
||||
# - Upload artifacts defined by the user.
|
||||
- uses: pyTooling/Actions/releaser@r0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
artifact.txt
|
||||
README.md
|
||||
|
||||
|
||||
Composite Action
|
||||
================
|
||||
|
||||
The default implementation of **Releaser** is a Container Action.
|
||||
Therefore, a pre-built container image is pulled before starting the job.
|
||||
Alternatively, a Composite Action version is available: ``uses: pyTooling/Actions/releaser/composite@main``.
|
||||
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
|
||||
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
|
||||
users to tweak the version of Python by using `actions/setup-python <https://github.com/actions/setup-python>`__ before.
|
||||
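A minimal sketch of the Composite variant with an explicit Python selection beforehand (the ``actions/setup-python`` step and the chosen version are illustrative, not required):

.. code-block:: yaml

   steps:
     # Optionally pick the Python interpreter used to install Releaser's dependencies on the host.
     - uses: actions/setup-python@v5
       with:
         python-version: '3.13'

     # Same inputs as the Container Action; only the execution environment differs.
     - uses: pyTooling/Actions/releaser/composite@main
       with:
         token: ${{ secrets.GITHUB_TOKEN }}
         files: artifact.txt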
|
||||
Options
|
||||
*******
|
||||
|
||||
All options can be optionally provided as environment variables: ``INPUT_TOKEN``, ``INPUT_FILES``, ``INPUT_TAG``, ``INPUT_RM``
|
||||
and/or ``INPUT_SNAPSHOTS``.
|
||||
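As a hedged sketch of that mechanism (values are examples only), the same settings can be supplied through ``env:`` instead of ``with:``:

.. code-block:: yaml

   - uses: pyTooling/Actions/releaser@r0
     env:
       INPUT_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       INPUT_FILES: artifact.txt
       INPUT_TAG: nightly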
|
||||
token (required)
|
||||
================
|
||||
|
||||
Token to make authenticated API calls; can be passed in using ``${{ secrets.GITHUB_TOKEN }}``.
|
||||
|
||||
files (required)
|
||||
================
|
||||
|
||||
Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
|
||||
hierarchy.
|
||||
|
||||
For creating/updating a release without uploading assets, set ``files: none``.
|
||||
|
||||
tag
|
||||
===
|
||||
|
||||
The default tag name for the tip/nightly pre-release is ``tip``, but it can be optionally overridden through option ``tag``.
|
||||
|
||||
rm
|
||||
==
|
||||
|
||||
Set option ``rm`` to ``true`` for systematically removing previous artifacts (e.g. old versions).
|
||||
Otherwise (by default), all previous artifacts are preserved or overwritten.
|
||||
|
||||
Note:
|
||||
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
|
||||
users until the workflow is successfully run.
|
||||
For instance, Action `setup-ghdl-ci <https://github.com/ghdl/setup-ghdl-ci>`__ uses assets from `ghdl/ghdl: releases/tag/nightly <https://github.com/ghdl/ghdl/releases/tag/nightly>`__.
|
||||
Hence, it is recommended to remove only the conflicting assets, in order to maximise availability.
|
||||
|
||||
snapshots
|
||||
=========
|
||||
|
||||
Whether to create releases from any tag or to treat some as snapshots.
|
||||
By default, all the tags with a non-empty ``prerelease`` field (see `semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string? <https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string>`__)
|
||||
are considered snapshots; neither a release is created nor assets are uploaded.
|
||||
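Putting the options together, a rough sketch might look as follows (the tag name and ``rm`` value are illustrative; ``snapshots`` is left at its default):

.. code-block:: yaml

   - uses: pyTooling/Actions/releaser@r0
     with:
       token: ${{ secrets.GITHUB_TOKEN }}
       tag: nightly   # override the default 'tip' tag
       rm: true       # remove previous assets instead of preserving them
       files: |
         artifact.txt
         README.md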
|
||||
Advanced/complex use cases
|
||||
**************************
|
||||
|
||||
**Releaser** is essentially a very thin wrapper to use the GitHub Actions context data along with the classes
|
||||
and methods of PyGithub.
|
||||
|
||||
Similarly to `actions/github-script <https://github.com/actions/github-script>`__, users with advanced/complex requirements
|
||||
might find it desirable to write their own Python script, instead of using **Releaser**.
|
||||
In fact, since ``shell: python`` is supported in GitHub Actions, using Python does *not* require any Action.
|
||||
For prototyping purposes, the following job might be useful:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
Release:
|
||||
name: '📦 Release'
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- ...
|
||||
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
|
||||
steps:
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
|
||||
- shell: bash
|
||||
run: pip install PyGithub --progress-bar off
|
||||
|
||||
- name: Set list of files for uploading
|
||||
id: files
|
||||
shell: python
|
||||
run: |
|
||||
from github import Github
|
||||
print("· Get GitHub API handler (authenticate)")
|
||||
gh = Github('${{ github.token }}')
|
||||
print("· Get Repository handler")
|
||||
gh_repo = gh.get_repo('${{ github.repository }}')
|
||||
|
||||
Find a non-trivial use case at `msys2/msys2-autobuild <https://github.com/msys2/msys2-autobuild>`__.
|
||||
33
doc/Action/With-post-step.rst
Normal file
@@ -0,0 +1,33 @@
|
||||
.. _ACTION/WithPostStep:
|
||||
|
||||
with-post-step
|
||||
##############
|
||||
|
||||
JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at
|
||||
the beginning or the end of a job, regardless of intermediate steps failing. Unfortunately, those are not available for
|
||||
any other Action type.
|
||||
|
||||
Action **with-post-step** is a generic JavaScript Action to execute a main command and to set a further command as a
|
||||
post step. It allows using the ``post`` feature with scripts written in Bash, Python or any other interpreted language
|
||||
available on the environment.
|
||||
|
||||
**Example Usage:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Image:
|
||||
steps:
|
||||
- ...
|
||||
|
||||
- name: Push container image
|
||||
uses: ./with-post-step
|
||||
with:
|
||||
main: |
|
||||
echo '${{ github.token }}' | docker login ghcr.io -u GitHub-Actions --password-stdin
|
||||
docker push ghcr.io/pytooling/releaser
|
||||
post: docker logout ghcr.io
|
||||
|
||||
.. seealso::
|
||||
|
||||
* `actions/runner#1478 <https://github.com/actions/runner/issues/1478>`__.
|
||||
7
doc/Action/index.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
Overview
|
||||
########
|
||||
|
||||
The following 2 actions are provided by **Actions**:
|
||||
|
||||
* :ref:`ACTION/Releaser`
|
||||
* :ref:`ACTION/WithPostStep`
|
||||
87
doc/Background.rst
Normal file
@@ -0,0 +1,87 @@
|
||||
Background
|
||||
##########
|
||||
|
||||
GitHub Actions supports five procedures to reuse code:
|
||||
|
||||
- JavaScript Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-javascript-action <https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action>`__
|
||||
|
||||
- Container Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-docker-container-action <https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action>`__
|
||||
|
||||
- Container Step:
|
||||
|
||||
- `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action <https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action>`__
|
||||
- `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs <https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs>`__
|
||||
|
||||
- Composite Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-composite-action <https://docs.github.com/en/actions/creating-actions/creating-a-composite-action>`__
|
||||
- `github.blog/changelog: 2020-08-07-github-actions-composite-run-steps <https://github.blog/changelog/2020-08-07-github-actions-composite-run-steps/>`__
|
||||
- `github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-composition <https://github.blog/changelog/2021-08-25-github-actions-reduce-duplication-with-action-composition/>`__
|
||||
|
||||
- Reusable Workflow:
|
||||
|
||||
- `docs.github.com: actions/learn-github-actions/reusing-workflows <https://docs.github.com/en/actions/learn-github-actions/reusing-workflows>`__
|
||||
- `github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows <https://github.blog/changelog/2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows/>`__
|
||||
|
||||
Container Actions and Container Steps are almost equivalent: Actions use a configuration file (``action.yml``), while
|
||||
Steps do not.
|
||||
Leaving JavaScript and Container Actions and Steps aside, the main differences between Composite Actions and Reusable
|
||||
Workflows are the following:
|
||||
|
||||
- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location.
|
||||
However, Reusable Workflows can only be used through a remote/external path (``{owner}/{repo}/{path}/{filename}@{ref}``),
|
||||
where ``{path}`` must be ``.github/workflows``, and ``@{ref}`` is required.
|
||||
See `actions/runner#1493 <https://github.com/actions/runner/issues/1493>`__.
|
||||
As a result:
|
||||
|
||||
- Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without
|
||||
checkout.
|
||||
- Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick the modifications by
|
||||
default.
|
||||
|
||||
- Composite Actions can include multiple steps, but not multiple jobs.
|
||||
Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job.
|
||||
- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository.
|
||||
Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default.
|
||||
|
||||
Callable vs dispatchable workflows
|
||||
**********************************
|
||||
|
||||
Reusable Workflows are defined through the ``workflow_call`` event kind.
|
||||
Similarly, any "regular" Workflow can be triggered through a ``workflow_dispatch`` event.
|
||||
Both event kinds support ``input`` options, which are usable within the Workflow.
|
||||
Therefore, one might intuitively try to write a workflow which is both callable and dispatchable.
|
||||
In other words, which can be either reused from another workflow, or triggered through the API.
|
||||
Unfortunately, that is not the case.
|
||||
Although ``input`` options can be duplicated for both events, GitHub's backend exposes them through different objects.
|
||||
In dispatchable Workflows, the object is ``${{ github.event.inputs }}``, while callable workflows receive ``${{ inputs }}``.
|
||||
|
||||
As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required.
|
||||
See, for instance, `hdl/containers: .github/workflows/common.yml <https://github.com/hdl/containers/blob/main/.github/workflows/common.yml>`__
|
||||
and `hdl/containers: .github/workflows/dispatch.yml <https://github.com/hdl/containers/blob/main/.github/workflows/dispatch.yml>`__.
|
||||
Alternatively, a normalisation job might be used, similar to the ``Parameters`` in this repo.
|
||||
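A minimal sketch of such a wrapper (file names and the ``name`` input are illustrative): the dispatchable workflow merely forwards its ``workflow_dispatch`` inputs to the reusable one.

.. code-block:: yaml

   # dispatch.yml: a thin, dispatchable wrapper around a reusable workflow
   on:
     workflow_dispatch:
       inputs:
         name:
           required: true

   jobs:
     Common:
       uses: <Organization>/<Repository>/.github/workflows/common.yml@main
       with:
         name: ${{ github.event.inputs.name }}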
|
||||
Call hierarchy
|
||||
**************
|
||||
|
||||
Reusable Workflows cannot call other Reusable Workflows, however, they can use Composite Actions and Composite Actions
|
||||
can call other Actions.
|
||||
Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along
|
||||
with multiple layers of composite actions.
|
||||
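For example (a hedged sketch; the composite action path is illustrative), a job inside a Reusable Workflow can delegate its steps to a Composite Action, which in turn may call further Actions:

.. code-block:: yaml

   # Inside a Reusable Workflow (.github/workflows/<Template>.yml)
   jobs:
     Build:
       runs-on: ubuntu-24.04
       steps:
         - uses: actions/checkout@v4
         # One layer down: a composite action, which itself may use other Actions.
         - uses: <Organization>/<Repository>/.github/actions/build@main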
|
||||
Script with post step
|
||||
*********************
|
||||
|
||||
JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at
|
||||
the beginning or the end of a job, regardless of intermediate steps failing.
|
||||
Unfortunately, those are not available for any other Action type.
|
||||
|
||||
Action :ref:`with-post-step <ACTION/WithPostStep>` is a generic JS Action to execute a main command and to set a command as a post
|
||||
step.
|
||||
It allows using the ``post`` feature with scripts written in bash, python or any other interpreted language available on
|
||||
the environment.
|
||||
See: `actions/runner#1478 <https://github.com/actions/runner/issues/1478>`__.
|
||||
30
doc/Dependency.rst
Normal file
@@ -0,0 +1,30 @@
|
||||
Dependencies
|
||||
############
|
||||
|
||||
This is a summary of dependencies used by the provided job templates. For more details, see each job template.
|
||||
|
||||
* Actions provided by GitHub
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/upload-artifact`
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`actions/create-release` (unmaintained)
|
||||
* :gh:`actions/setup-python`
|
||||
|
||||
* BuildTheDocs
|
||||
|
||||
* :gh:`buildthedocs/btd`
|
||||
|
||||
* Code Quality Services
|
||||
|
||||
* :gh:`codecov/codecov-action`
|
||||
* :gh:`codacy/codacy-coverage-reporter-action`
|
||||
|
||||
* Reporting
|
||||
|
||||
* :gh:`dorny/test-reporter`
|
||||
|
||||
* Miscellaneous
|
||||
|
||||
* :gh:`msys2/setup-msys2`
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
4
doc/Deveopment.rst
Normal file
@@ -0,0 +1,4 @@
|
||||
Development
|
||||
###########
|
||||
|
||||
.. todo:: Development - Explain how to write new job templates.
|
||||
@@ -1,8 +1,8 @@
|
||||
.. _DOCLICENSE:
|
||||
|
||||
.. Note:: This is a local copy of the `Creative Commons - Attribution 4.0 International (CC BY 4.0) <https://creativecommons.org/licenses/by/4.0/legalcode>`__.
|
||||
.. note:: This is a local copy of the `Creative Commons - Attribution 4.0 International (CC BY 4.0) <https://creativecommons.org/licenses/by/4.0/legalcode>`__.
|
||||
|
||||
.. Attention:: This **CC BY 4.0** license applies only to the **documentation** of this project.
|
||||
.. attention:: This **CC BY 4.0** license applies only to the **documentation** of this project.
|
||||
|
||||
|
||||
Creative Commons Attribution 4.0 International
|
||||
|
||||
110
doc/Instantiation.rst
Normal file
@@ -0,0 +1,110 @@
|
||||
Instantiation
|
||||
##############
|
||||
|
||||
The job templates (GitHub Action *Reusable Workflows*) need to be stored in the same directory where normal pipelines
|
||||
(GitHub Action *Workflows*) are located: ``.github/workflows/<template>.yml``. These template files are distinguished
|
||||
from a normal pipeline by an ``on:workflow_call:`` section instead of an ``on:push`` section.
|
||||
|
||||
**Job Template Definition:**
|
||||
|
||||
The ``workflow_call`` event allows the definition of input and output parameters.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
<Param1>:
|
||||
# ...
|
||||
outputs:
|
||||
# ...
|
||||
|
||||
jobs:
|
||||
<JobName>:
|
||||
# ...
|
||||
|
||||
**Job Template Instantiation:**
|
||||
|
||||
When instantiating a template, a ``jobs:<Name>:uses`` is used to refer to a template file. Unfortunately, besides the
|
||||
GitHub SLUG (*<Organization>/<Repository>*), the full path to the template needs to be given as well, yet it can't be placed
|
||||
outside of ``.github/workflows`` to create a cleaner repository structure. Finally, the path contains a branch name
|
||||
postfixed by ``@<branch>`` (tags are still not supported by GitHub Actions). A ``jobs:<Name>:with:`` section can be used
|
||||
to hand over input parameters to the template.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
<InstanceName>:
|
||||
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@v0
|
||||
with:
|
||||
<Param1>: <Value>
|
||||
|
||||
|
||||
Example Pipelines
|
||||
*****************
|
||||
|
||||
Documentation Only (Sphinx)
|
||||
===========================
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
|
||||
with:
|
||||
artifact: Documentation
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
|
||||
needs:
|
||||
- BuildTheDocs
|
||||
with:
|
||||
doc: Documentation
|
||||
|
||||
ArtifactCleanUp:
|
||||
name: 🗑️ Artifact Cleanup
|
||||
needs:
|
||||
- BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: 🗑️ Delete artifacts
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: Documentation
|
||||
|
||||
|
||||
Simple Package
|
||||
==============
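
A minimal sketch of such a pipeline, combining the ``Package`` and ``PublishOnPyPI`` job templates documented later; the
artifact name is illustrative:

.. code-block:: yaml

   name: Pipeline

   on:
     push:
     workflow_dispatch:

   jobs:
     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       with:
         artifact: Package

     PublishOnPyPI:
       uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
       if: startsWith(github.ref, 'refs/tags')
       needs:
         - Package
       with:
         artifact: Package
       secrets:
         PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}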
|
||||
|
||||
|
||||
Package with Unit Tests
|
||||
=======================
|
||||
|
||||
|
||||
Package with Code Coverage
|
||||
==========================
|
||||
|
||||
Complex Pipeline
|
||||
================
|
||||
|
||||
|
||||
Further Reference Examples
|
||||
**************************
|
||||
|
||||
Find further usage cases in the following list of projects:
|
||||
|
||||
- `edaa-org/pyEDAA.ProjectModel <https://github.com/edaa-org/pyEDAA.ProjectModel/tree/main/.github/workflows>`__
|
||||
- `edaa-org/pySVModel <https://github.com/edaa-org/pySVModel/tree/main/.github/workflows>`__
|
||||
- `VHDL/pyVHDLModel <https://github.com/VHDL/pyVHDLModel/tree/main/.github/workflows>`__
|
||||
90
doc/JobTemplate/ArtifactCleanUp.rst
Normal file
@@ -0,0 +1,90 @@
|
||||
.. _JOBTMPL/ArtifactCleanup:
|
||||
|
||||
ArtifactCleanUp
|
||||
###############
|
||||
|
||||
This job removes artifacts that were used to exchange data between jobs.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Delete the package artifact if the current pipeline run was not a tagged run.
|
||||
2. Delete all remaining artifacts if given as a parameter.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The simplest variant just uses the artifact name for the package.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0
|
||||
with:
|
||||
package: Package
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- UnitTesting
|
||||
- BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
- PublishTestResults
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
package
|
||||
=======
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| package | yes | string | — — — — |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Artifacts to be removed on non-tagged runs.
|
||||
|
||||
|
||||
remaining
|
||||
=========
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| remaining | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Artifacts to be removed unconditionally.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
75
doc/JobTemplate/BuildTheDocs.rst
Normal file
@@ -0,0 +1,75 @@
|
||||
.. _JOBTMPL/BuildTheDocs:
|
||||
|
||||
BuildTheDocs
|
||||
############
|
||||
|
||||
This job compiles the documentation written in reStructuredText with Sphinx using BuildTheDocs.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository.
|
||||
2. Build the documentation.
|
||||
3. Upload the HTML documentation as an artifact.
|
||||
4. Publish the HTML documentation to GitHub Pages.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`buildthedocs/btd`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | optional | string | ``""`` |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the documentation artifact.
|
||||
|
||||
If no artifact name is given, the job directly publishes the documentation's HTML content to GitHub Pages.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
170
doc/JobTemplate/CoverageCollection.rst
Normal file
@@ -0,0 +1,170 @@
|
||||
.. _JOBTMPL/CodeCoverage:
|
||||
|
||||
CoverageCollection
|
||||
##################
|
||||
|
||||
This job runs the specified unit tests with code coverage collection activated (incl. branch coverage).
|
||||
|
||||
It uses pytest, pytest-cov and coverage.py in a single job run, thus it uses one fixed Python version (usually latest).
|
||||
It generates HTML and Cobertura (XML) reports, then it uploads the HTML report as an artifact and pushes the coverage results
|
||||
(Cobertura XML) to `Codecov <https://about.codecov.io/>`__ and `Codacy <https://www.codacy.com/>`__.
|
||||
|
||||
Configuration options to ``pytest`` and ``coverage.py`` should be given via sections ``[tool.pytest.ini_options]`` and
|
||||
``[tool.coverage.*]`` in a ``pyproject.toml`` file.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Extract configuration from ``pyproject.toml`` or ``.coveragerc``.
|
||||
4. Run unit tests and collect code coverage
|
||||
5. Convert coverage data to a Cobertura XML file
|
||||
6. Convert coverage data to a HTML report
|
||||
7. Upload HTML report as an artifact
|
||||
8. Publish Cobertura file to CodeCov
|
||||
9. Publish Cobertura file to Codacy
|
||||
|
||||
**Preconditions:**
|
||||
|
||||
* A CodeCov account was created.
|
||||
* A Codacy account was created.
|
||||
|
||||
**Requirements:**
|
||||
|
||||
Set up a secret (e.g. ``codacy_token``) in GitHub to hand over the Codacy project token to the job.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
* :gh:`codecov/codecov-action`
|
||||
* :gh:`codacy/codacy-coverage-reporter-action`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
with:
|
||||
artifact: Coverage
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | 3.11 |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version used for running unit tests.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+===============================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
tests_directory
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+=================+==========+==========+===========+
|
||||
| tests_directory | optional | string | ``tests`` |
|
||||
+-----------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing tests (test working directory).
|
||||
|
||||
|
||||
unittest_directory
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+====================+==========+==========+===========+
|
||||
| unittest_directory | optional | string | ``unit`` |
|
||||
+--------------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing unit tests (relative to tests_directory).
|
||||
|
||||
|
||||
coverage_config
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+--------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+=================+==========+==========+====================+
|
||||
| coverage_config | optional | string | ``pyproject.toml`` |
|
||||
+-----------------+----------+----------+--------------------+
|
||||
|
||||
Path to the ``.coveragerc`` file. ``pyproject.toml`` is used by default.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the coverage artifact.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
codacy_token
|
||||
============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Secret Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| codacy_token | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Token to push results to Codacy.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
106
doc/JobTemplate/Package.rst
Normal file
@@ -0,0 +1,106 @@
|
||||
.. _JOBTMPL/Package:
|
||||
|
||||
Package
|
||||
#######
|
||||
|
||||
This job packages the Python source code as a source package (``*.tar.gz``) and a wheel package (``*.whl``) and uploads them
|
||||
as an artifact.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Package Python sources:
|
||||
|
||||
* If parameter ``requirements`` is empty, use the ``build`` package and run ``python -m build``.
|
||||
* If parameter ``requirements`` is ``no-isolation``, use ``build`` package in *no-isolation* mode and run
|
||||
``python -m build``.
|
||||
* If parameter ``requirements`` is non-empty, use ``setuptools`` package and run ``python setup.py``.
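
For instance, a hedged sketch selecting the *no-isolation* mode (the artifact name is illustrative):

.. code-block:: yaml

   jobs:
     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       with:
         # 'no-isolation' selects the build package without an isolated build
         # environment; any other non-empty value is treated as a pip
         # requirements specification and switches to setuptools.
         requirements: no-isolation
         artifact: Package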
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@r0
|
||||
with:
|
||||
artifact: Package
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- Coverage
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: -r build/requirements.txt
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | 3.11 |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| requirements | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python dependencies to be installed through pip; if empty, ``pyproject.toml`` is used via the ``build`` package.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Name of the package artifact.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
413
doc/JobTemplate/Parameters.rst
Normal file
@@ -0,0 +1,413 @@
|
||||
.. _JOBTMPL/Parameters:
|
||||
|
||||
Parameters
|
||||
##########
|
||||
|
||||
The ``Parameters`` job template is a workaround for the limitations of GitHub Actions in handling global variables in
|
||||
GitHub Actions workflows (see `actions/runner#480 <https://github.com/actions/runner/issues/480>`__).
|
||||
|
||||
It generates output parameters with artifact names and a job matrix to be used in subsequent jobs.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
.. todo:: Parameters:Behavior Needs documentation.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
*None*
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The following instantiation example creates a job ``Params`` derived from job template ``Parameters`` version ``r0``. It only
|
||||
requires a ``name`` parameter to create the artifact names.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
The following instantiation example creates 3 jobs from the same template, but with differing input parameters. The
|
||||
first job ``UnitTestingParams`` might be used to create a job matrix of unit tests. It creates the cross of default
|
||||
systems (Windows, Ubuntu, macOS, MinGW64, UCRT64) and the given list of Python versions, including some pypy versions. In
|
||||
addition, a list of excludes (marked as :deletion:`deletions`) and includes (marked as :addition:`additions`) is handed
|
||||
over, resulting in the following combinations:
|
||||
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Version | 3.8 🔴 | 3.9 🟠 | 3.10 🟡 | 3.11 🟢 | 3.12 🟢 | 3.13.a1 🟣 | pypy-3.8 🔴 | pypy-3.9 🟠 | pypy-3.10 🟡 |
|
||||
+============+=============+=============+==============+==============+=========================+============+=============+==============================+===============================+
|
||||
| Windows 🧊 | windows:3.8 | windows:3.9 | windows:3.10 | windows:3.11 | | | | :deletion:`windows:pypy-3.9` | :deletion:`windows:pypy-3.10` |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Ubuntu 🐧 | ubuntu:3.8 | ubuntu:3.9 | ubuntu:3.10 | ubuntu:3.11 | :addition:`ubuntu:3.12` | | | ubuntu:pypy-3.9 | ubuntu:pypy-3.10 |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| macOS 🍎 | macos:3.8 | macos:3.9 | macos:3.10 | macos:3.11 | :addition:`macos:3.12` | | | macos:pypy-3.9 | macos:pypy-3.10 |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MSYS 🟪 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MinGW32 ⬛ | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MinGW64 🟦 | | | | mingw64:3.11 | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Clang32 🟫 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Clang64 🟧 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| UCRT64 🟨 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
UnitTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.8 3.9 3.10 3.11 pypy-3.9 pypy-3.10"
|
||||
include_list: "ubuntu:3.12 macos:3.12"
|
||||
exclude_list: "windows:pypy-3.9 windows:pypy-3.10"
|
||||
|
||||
PerformanceTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.11 3.12"
|
||||
system_list: "ubuntu windows macos"
|
||||
|
||||
PlatformTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
name
|
||||
====
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| name | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
The name of the library or package.
|
||||
|
||||
It's used to create artifact names.
|
||||
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | ``3.12`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version to be used for all jobs requiring a single Python version.
|
||||
|
||||
|
||||
python_version_list
|
||||
===================
|
||||
|
||||
+----------------------+----------+----------+----------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+======================+==========+==========+============================+
|
||||
| python_version_list | optional | string | ``3.8 3.9 3.10 3.11 3.12`` |
|
||||
+----------------------+----------+----------+----------------------------+
|
||||
|
||||
Space-separated list of CPython and/or pypy versions to run tests with.
|
||||
|
||||
**Possible values:**
|
||||
|
||||
* ``3.7``, ``3.8``, ``3.9``, ``3.10``, ``3.11``, ``3.12``, ``3.13``
|
||||
* ``pypy-3.7``, ``pypy-3.8``, ``pypy-3.9``, ``pypy-3.10``
|
||||
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| Icon | Version | Maintained until | Comments |
|
||||
+======+===========+==================+=========================================+
|
||||
| ⚫ | 3.7 | 2023.06.27 | :red:`outdated` |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🔴 | 3.8 | 2024.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟠 | 3.9 | 2025.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟡 | 3.10 | 2026.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟢 | 3.11 | 2027.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟢 | 3.12 | 2028.10 | :green:`latest` |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟣 | 3.13 | 2029.10 | Python 3.13 alpha (or RC) will be used. |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲⚫ | pypy-3.7 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🔴 | pypy-3.8 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🟠 | pypy-3.9 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🟡 | pypy-3.10 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
|
||||
|
||||
system_list
|
||||
===========
|
||||
|
||||
+----------------+----------+----------+-----------------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=========================================+
|
||||
| system_list | optional | string | ``ubuntu windows macos mingw64 ucrt64`` |
|
||||
+----------------+----------+----------+-----------------------------------------+
|
||||
|
||||
Space-separated list of systems to run tests on.
|
||||
|
||||
**Possible values:**
|
||||
|
||||
* Native systems: ``ubuntu``, ``windows``, ``macos``
|
||||
* MSYS2: ``msys``, ``mingw32``, ``mingw64``, ``clang32``, ``clang64``, ``ucrt64``
|
||||
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| Icon | System | Used version | Comments |
|
||||
+======+===========+==============================+=================================================================+
|
||||
| 🧊 | Windows | Windows Server 2022 (latest) | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🐧 | Ubuntu | Ubuntu 22.04 (LTS) (latest) | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🍎 | macOS | macOS Monterey 12 (latest) | While this marked latest, macOS Ventura 13 is already provided. |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟪 | MSYS | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| ⬛ | MinGW32 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟦 | MinGW64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟫 | Clang32 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟧 | Clang64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟨 | UCRT64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
|
||||
Source: `Images provided by GitHub <https://github.com/actions/runner-images>`__
|
||||
|
||||
include_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| include_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be included in the list of tests.
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
include_list: "ubuntu:3.11 macos:3.11"
|
||||
|
||||
|
||||
exclude_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| exclude_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be excluded from the list of tests.
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
exclude_list: "windows:pypy-3.8 windows:pypy-3.9"
|
||||
|
||||
|
||||
disable_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| disable_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be temporarily disabled (removed from the list of tests).
|
||||
|
||||
Each disabled item creates a warning in the workflow log:
|
||||
|
||||
.. image:: /_static/GH_Workflow_DisabledJobsWarnings.png
|
||||
:scale: 80 %
|
||||
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
disable_list: "windows:3.10 windows:3.11"
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
A single string parameter representing the default Python version that should be used across multiple jobs in the same
|
||||
pipeline.
|
||||
|
||||
Such a parameter is needed as a workaround, because GitHub Actions doesn't support proper handling of global pipeline
|
||||
variables. Thus, this job is used to compute an output parameter that can be reused in other jobs.
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
CodeCoverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
|
||||
python_jobs
|
||||
===========
|
||||
|
||||
A list of dictionaries, each containing a job description.
|
||||
|
||||
A job description contains the following key-value pairs:
|
||||
|
||||
* ``sysicon`` - icon to display
|
||||
* ``system`` - name of the system
|
||||
* ``runs-on`` - virtual machine image and base operating system
|
||||
* ``runtime`` - name of the runtime environment if not running natively on the VM image
|
||||
* ``shell`` - name of the shell
|
||||
* ``pyicon`` - icon for CPython or pypy
|
||||
* ``python`` - Python version
|
||||
* ``envname`` - full name of the selected environment
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
jobs: ${{ needs.Params.outputs.python_jobs }}
|
||||
|
||||
This list can be unpacked with ``fromJson(...)`` in a job ``strategy:matrix:include``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
UnitTesting:
|
||||
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests using Python ${{ matrix.python }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
include: ${{ fromJson(inputs.jobs) }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: ${{ matrix.shell }}
|
||||
|
||||
steps:
|
||||
- name: 🐍 Setup Python ${{ matrix.python }}
|
||||
if: matrix.system != 'msys2'
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
|
||||
artifact_names
|
||||
==============
|
||||
|
||||
A dictionary of artifact names sharing a common prefix.
|
||||
|
||||
The supported artifacts are:
|
||||
|
||||
* ``unittesting_xml`` - UnitTesting XML summary report
|
||||
* ``unittesting_html`` - UnitTesting HTML summary report
|
||||
* ``codecoverage_sqlite`` - Code Coverage internal database (SQLite)
|
||||
* ``codecoverage_json`` - Code Coverage JSON report
|
||||
* ``codecoverage_xml`` - Code Coverage XML report
|
||||
* ``codecoverage_html`` - Code Coverage HTML report
|
||||
* ``statictyping_html`` - Static Type Checking HTML report
|
||||
* ``package_all`` - Packaged Python project (multiple formats)
|
||||
* ``documentation_pdf`` - Documentation in PDF format
|
||||
* ``documentation_html`` - Documentation in HTML format
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@dev
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
|
||||
Params
|
||||
======
|
||||
|
||||
.. attention:: ``Params`` is deprecated.
|
||||
|
||||
* ``params['unittesting']`` |rarr| ``artifact_names['unittesting_xml']``
|
||||
* ``params['coverage']`` |rarr| ``artifact_names['codecoverage_xml']``
|
||||
* ``params['typing']`` |rarr| ``artifact_names['statictyping_html']``
|
||||
* ``params['package']`` |rarr| ``artifact_names['package_all']``
|
||||
* ``params['doc']`` |rarr| ``artifact_names['documentation_html']``
|
||||
139
doc/JobTemplate/PublishOnPyPI.rst
Normal file
@@ -0,0 +1,139 @@
|
||||
.. _JOBTMPL/PyPI:
|
||||
|
||||
PublishOnPyPI
|
||||
#############
|
||||
|
||||
Publish a source (``*.tar.gz``) package and/or wheel (``*.whl``) packages to `PyPI <https://pypi.org/>`__.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Download package artifact
|
||||
2. Publish source package(s) (``*.tar.gz``)
|
||||
3. Publish wheel package(s) (``*.whl``)
|
||||
4. Delete the artifact
|
||||
|
||||
**Preconditions:**
|
||||
|
||||
A PyPI account was created and the package name is either not occupied or the user has access rights for that package.
|
||||
|
||||
**Requirements:**
|
||||
|
||||
Set up a secret (e.g. ``PYPI_TOKEN``) in GitHub to hand over the PyPI token to the job.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The following example demonstrates how to publish the artifact named ``Package`` to PyPI on every pipeline run triggered
|
||||
by a Git tag. A secret is forwarded from GitHub secrets to a job secret.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
# ...
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
with:
|
||||
artifact: Package
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
In this more complex example, the job depends on a parameter creation job (``Params``) and a packaging job (``Package``). The
|
||||
Python version used is overwritten by a parameter calculated in the ``Params`` job. Also, the artifact name is managed
|
||||
by that job. Finally, the list of requirements is overwritten to load a list of requirements from ``dist/requirements.txt``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
# ...
|
||||
|
||||
Package:
|
||||
# ...
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Params
|
||||
- Package
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: -r dist/requirements.txt
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | ``3.11`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version used for uploading the package contents via ``twine`` to PyPI.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| requirements | optional | string | ``wheel twine`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
List of requirements to be installed for uploading the package contents to PyPI.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the artifact containing the package(s).
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
PYPI_TOKEN
|
||||
==========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Secret Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| PYPI_TOKEN | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
The token to access the package at PyPI for uploading new data.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
88
doc/JobTemplate/PublishTestResults.rst
Normal file
@@ -0,0 +1,88 @@
|
||||
.. _JOBTMPL/PublishTestResults:
|
||||
|
||||
PublishTestResults
|
||||
##################
|
||||
|
||||
This job downloads all artifacts and uploads jUnit XML reports as a Markdown page to GitHub Actions to visualize the
|
||||
results as an item in the job list. For publishing, :gh:`dorny/test-reporter` is used.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Download (all) artifacts
|
||||
3. Publish test results as a markdown report page to GitHub Actions.
|
||||
|
||||
.. note::
|
||||
|
||||
The :gh:`actions/download-artifact` does not support wildcards to specify a subset of artifacts for downloading.
|
||||
Thus, all artifacts need to be downloaded.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`dorny/test-reporter`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
CodeCoverage:
|
||||
# ...
|
||||
|
||||
UnitTesting:
|
||||
# ...
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0
|
||||
needs:
|
||||
- CodeCoverage
|
||||
- UnitTesting
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
report_files
|
||||
============
|
||||
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================================+
|
||||
| report_files | optional | string | ``artifacts/**/*.xml`` |
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
|
||||
Pattern of jUnit report files to publish as Markdown.
|
||||
|
||||
The parameter can be a comma-separated list. Wildcards are supported.
|
||||
|
||||
.. hint::
|
||||
|
||||
All artifacts are downloaded into directory ``artifacts``, thus the pattern should include this directory as a
|
||||
prefix.
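
For example, a narrowed pattern (the artifact name is illustrative):

.. code-block:: yaml

   jobs:
     PublishTestResults:
       uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0
       with:
         # Illustrative pattern: only jUnit files from unit-testing artifacts,
         # which are downloaded below ./artifacts first.
         report_files: artifacts/UnitTestSummary-*/*.xml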
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
106
doc/JobTemplate/PublishToGitHubPages.rst
Normal file
@@ -0,0 +1,106 @@
|
||||
.. _JOBTMPL/PublishToGitHubPages:
|
||||
|
||||
PublishToGitHubPages
|
||||
####################
|
||||
|
||||
This job publishes HTML content from artifacts of other jobs to GitHub Pages.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository.
|
||||
2. Download artifacts.
|
||||
3. Push HTML files to branch ``gh-pages``.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/download-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
# ...
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
|
||||
needs:
|
||||
- BuildTheDocs
|
||||
with:
|
||||
doc: Documentation
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- BuildTheDocs
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
doc
|
||||
===
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| doc | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the documentation artifact.
|
||||
|
||||
|
||||
coverage
|
||||
========
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| coverage | optional | string | ``""`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Name of the coverage artifact.
|
||||
|
||||
|
||||
typing
|
||||
======
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| typing | optional | string | ``""`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Name of the typing artifact.
|
||||
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
96
doc/JobTemplate/Release.rst
Normal file
@@ -0,0 +1,96 @@
|
||||
.. _JOBTMPL/GitHubReleasePage:
|
||||
|
||||
Release
|
||||
#######
|
||||
|
||||
This job creates a Release Page on GitHub.
|
||||
|
||||
**Release Template in Markdown**:
|
||||
|
||||
.. parsed-literal::
|
||||
|
||||
**Automated Release created on: ${{ steps.getVariables.outputs.datetime }}**
|
||||
|
||||
# New Features
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Changes
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Bug Fixes
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Documentation
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Unit Tests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
----------
|
||||
# Related Issues and Pull-Requests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Extract information from environment variables provided by GitHub Actions.
|
||||
2. Create a Release Page on GitHub.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/create-release` (unmaintained)
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@r0
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Package
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
This job template needs no input parameters.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
182
doc/JobTemplate/StaticTypeCheck.rst
Normal file
@@ -0,0 +1,182 @@
|
||||
.. _JOBTMPL/StaticTypeChecking:
|
||||
|
||||
StaticTypeCheck
|
||||
###############
|
||||
|
||||
This job runs a static type check using mypy and collects the results. These results can be converted to an HTML report
|
||||
and then uploaded as an artifact.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Run type checking command(s).
|
||||
4. Upload type checking report as an artifact
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0
|
||||
with:
|
||||
commands: |
|
||||
touch pyTooling/__init__.py
|
||||
mypy --html-report htmlmypy -p pyTooling
|
||||
report: 'htmlmypy'
|
||||
artifact: TypeChecking
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
commands: |
|
||||
touch pyTooling/__init__.py
|
||||
mypy --html-report htmlmypy -p pyTooling
|
||||
report: 'htmlmypy'
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
Commands
|
||||
========
|
||||
|
||||
Example ``commands``:
|
||||
|
||||
1. Regular package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
|
||||
|
||||
2. Parent namespace package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: |
|
||||
touch Parent/__init__.py
|
||||
mypy --html-report htmlmypy -p ToolName
|
||||
|
||||
3. Child namespace package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: |
|
||||
cd Parent
|
||||
mypy --html-report ../htmlmypy -p ToolName
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| python_version | optional | string | ``3.11`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Python version.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+===============================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
commands
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| commands | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Commands to run the static type checks.
|
||||
|
||||
|
||||
html_report
|
||||
===========
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| report | optional | string | ``htmlmypy`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
HTML output directory to upload as an artifact.
|
||||
|
||||
|
||||
junit_report
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-----------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=============================+
|
||||
| report | optional | string | ``StaticTypingSummary.xml`` |
|
||||
+----------------+----------+----------+-----------------------------+
|
||||
|
||||
jUnit XML file to upload as an artifact.
|
||||
|
||||
|
||||
html_artifact
|
||||
=============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| html_artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the typing artifact (HTML report).
|
||||
|
||||
|
||||
junit_artifact
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| junit_artifact | optional | string | ``""`` |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the typing junit artifact (junit XML).
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
159
doc/JobTemplate/UnitTesting.rst
Normal file
@@ -0,0 +1,159 @@
|
||||
.. _JOBTMPL/UnitTesting:
|
||||
|
||||
UnitTesting
|
||||
###########
|
||||
|
||||
This template runs multiple jobs from a matrix as a cross of Python versions and systems. The summary report in junit
|
||||
XML format is optionally uploaded as an artifact.
|
||||
|
||||
Configuration options to ``pytest`` should be given via section ``[tool.pytest.ini_options]`` in a ``pyproject.toml``
|
||||
file.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Run unit tests using ``pytest``.
|
||||
4. Upload junit test summary as an artifact
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`msys2/setup-msys2`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
# ...
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
jobs: ${{ needs.Params.outputs.python_jobs }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting }}
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
TBD
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
jobs
|
||||
====
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| jobs | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
JSON list with environment fields specifying the systems and Python versions to run tests with.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
pacboy
|
||||
======
|
||||
|
||||
+----------------+----------+----------+-----------+
| Parameter Name | Required | Type     | Default   |
+================+==========+==========+===========+
| pacboy         | optional | string   | ``""``    |
+----------------+----------+----------+-----------+
|
||||
|
||||
Additional MSYS2 dependencies to be installed through pacboy (pacman).
|
||||
|
||||
Internally, a workflow step reads the Python requirements file and compares the requested packages with a list of
packages that should be installed through pacman/pacboy instead of pip. These are mainly core packages or packages
with embedded C code.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
   pacboy: >-
     python-lxml:p
|
||||
|
||||
|
||||
mingw_requirements
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+----------+
| Parameter Name     | Required | Type     | Default  |
+====================+==========+==========+==========+
| mingw_requirements | optional | string   | ``""``   |
+--------------------+----------+----------+----------+
|
||||
|
||||
Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.
|
||||
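
A minimal sketch of overriding the MSYS2-only dependencies. The requirements file name used here is a hypothetical
example, not a convention required by the template:

.. code-block:: yaml

   with:
     # Hypothetical requirements file used only for the MSYS2 (MINGW64) job:
     mingw_requirements: "-r tests/requirements.mingw64.txt"
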
|
||||
|
||||
tests_directory
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+-----------+
| Parameter Name  | Required | Type     | Default   |
+=================+==========+==========+===========+
| tests_directory | optional | string   | ``tests`` |
+-----------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing tests (test working directory).
|
||||
|
||||
|
||||
unittest_directory
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+----------+
| Parameter Name     | Required | Type     | Default  |
+====================+==========+==========+==========+
| unittest_directory | optional | string   | ``unit`` |
+--------------------+----------+----------+----------+
|
||||
|
||||
Path to the directory containing unit tests (relative to tests_directory).
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+----------+
| Parameter Name | Required | Type     | Default  |
+================+==========+==========+==========+
| artifact       | optional | string   | ``""``   |
+----------------+----------+----------+----------+
|
||||
|
||||
Generate a unit test report with ``--junitxml`` and upload the results as an artifact with the given name.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
42
doc/JobTemplate/VerifyDocs.rst
Normal file
@@ -0,0 +1,42 @@
|
||||
.. _JOBTMPL/VerifyDocumentation:
|
||||
|
||||
VerifyDocs
|
||||
##########
|
||||
|
||||
This job extracts code examples from the README and tests these code snippets.
|
||||
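
A minimal instantiation sketch. The workflow file name and revision are assumptions derived from the other job
templates in this repository and are not confirmed by this page; required inputs, if any, are omitted:

.. code-block:: yaml

   VerifyDocs:
     uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r0
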
|
||||
**Behavior:**
|
||||
|
||||
TBD
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
TBD
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. todo:: VerifyDocs:SimpleExample Needs documentation.
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. todo:: VerifyDocs:ComplexExample Needs documentation.
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
.. todo:: VerifyDocs:Parameters Needs documentation.
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
67
doc/JobTemplate/index.rst
Normal file
@@ -0,0 +1,67 @@
|
||||
.. _JOBTMPL:
|
||||
|
||||
Overview
|
||||
########
|
||||
|
||||
The following list categorizes all pre-defined job templates, which can be instantiated in a pipeline (GitHub Actions
workflow). They can also serve as examples for creating or deriving your own job templates.
|
||||
|
||||
**Table of Contents:**
|
||||
|
||||
.. hlist::
|
||||
:columns: 2
|
||||
|
||||
* **Global Templates**
|
||||
|
||||
* :ref:`JOBTMPL/Parameters`
|
||||
|
||||
* **Unit Tests, Code Coverage, Code Quality, ...**
|
||||
|
||||
* :ref:`JOBTMPL/UnitTesting`
|
||||
* :ref:`JOBTMPL/CodeCoverage`
|
||||
* :ref:`JOBTMPL/StaticTypeChecking`
|
||||
* *code formatting (planned)*
|
||||
* *coding style (planned)*
|
||||
* *code linting (planned)*
|
||||
|
||||
* **Build and Packaging**
|
||||
|
||||
* :ref:`JOBTMPL/Package`
|
||||
|
||||
* **Documentation**
|
||||
|
||||
* :ref:`JOBTMPL/VerifyDocumentation`
|
||||
* :ref:`JOBTMPL/BuildTheDocs`
|
||||
|
||||
* **Releasing, Publishing**
|
||||
|
||||
* :ref:`JOBTMPL/GitHubReleasePage`
|
||||
* :ref:`JOBTMPL/PyPI`
|
||||
* :ref:`JOBTMPL/PublishTestResults`
|
||||
* :ref:`JOBTMPL/PublishToGitHubPages`
|
||||
|
||||
* **Cleanups**
|
||||
|
||||
* :ref:`JOBTMPL/ArtifactCleanup`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
When instantiating a template, ``jobs:<Name>:uses`` refers to the template file. Unfortunately, besides the GitHub
slug (*<Organization>/<Repository>*), the full path to the template must be given as well, and the template cannot be
placed outside of ``.github/workflows`` to achieve a cleaner repository structure. Finally, the path is postfixed by
``@<branch>`` with a branch name (tags are still not supported by GitHub Actions). A ``jobs:<Name>:with:`` section can
be used to hand over input parameters to the template.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
   on:
     push:
     workflow_dispatch:

   jobs:
     <InstanceName>:
       uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@v0
       with:
         <Param1>: <Value>
|
||||
136
doc/License.rst
Normal file
@@ -0,0 +1,136 @@
|
||||
.. Note:: This is a local copy of the `Apache License Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>`_.
|
||||
|
||||
Apache License 2.0
|
||||
##################
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
**TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION**
|
||||
|
||||
|
||||
1. Definitions.
|
||||
===============
|
||||
**"License"** shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
**"Licensor"** shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
**"Legal Entity"** shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that
|
||||
entity. For the purposes of this definition, **"control"** means (i) the power, direct or indirect, to cause the direction or management of such entity, whether
|
||||
by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
**"You"** (or **"Your"**) shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
**"Source"** form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and
|
||||
configuration files.
|
||||
|
||||
**"Object"** form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object
|
||||
code, generated documentation, and conversions to other media types.
|
||||
|
||||
**"Work"** shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is
|
||||
included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
**"Derivative Works"** shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
**"Contribution"** shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative
|
||||
Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to
|
||||
submit on behalf of the copyright owner. For the purposes of this definition, **"submitted"** means any form of electronic, verbal, or written communication
|
||||
sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue
|
||||
tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is
|
||||
conspicuously marked or otherwise designated in writing by the copyright owner as **"Not a Contribution."**
|
||||
|
||||
**"Contributor"** shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License.
|
||||
==============================
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License.
|
||||
===========================
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such
|
||||
license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of
|
||||
their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim
|
||||
or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then
|
||||
any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution.
|
||||
==================
|
||||
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form,
|
||||
provided that You meet the following conditions:
|
||||
|
||||
* You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
* You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
* You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source
|
||||
form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
* If the Work includes a **"NOTICE"** text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the
|
||||
attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the
|
||||
following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the
|
||||
Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE
|
||||
file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute,
|
||||
alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or
|
||||
distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise
|
||||
complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions.
|
||||
===============================
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any
|
||||
separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks.
|
||||
==============
|
||||
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable
|
||||
and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty.
|
||||
==========================
|
||||
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT,
|
||||
MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and
|
||||
assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability.
|
||||
===========================
|
||||
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate
|
||||
and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or
|
||||
consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages
|
||||
for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been
|
||||
advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability.
|
||||
==============================================
|
||||
While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other
|
||||
liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole
|
||||
responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
----------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
**Appendix: How to apply the Apache License to your work**
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying
|
||||
information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or
|
||||
class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
20
doc/Makefile
Normal file
@@ -0,0 +1,20 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = .
|
||||
BUILDDIR = _build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
79
doc/Releases.rst
Normal file
@@ -0,0 +1,79 @@
|
||||
Releases Management
|
||||
###################
|
||||
|
||||
Releases
|
||||
********
|
||||
|
||||
r1
|
||||
==
|
||||
|
||||
.. note:: Upcoming release, based on ``v1.x.y``.
|
||||
|
||||
.. attention:: This release introduces breaking changes.
|
||||
|
||||
r0
|
||||
==
|
||||
|
||||
.. todo:: Releases:r0 Needs documentation.
|
||||
|
||||
Versions
|
||||
********
|
||||
|
||||
.. todo:: Releases:Versions Needs documentation.
|
||||
|
||||
Branches
|
||||
********
|
||||
|
||||
.. mermaid::
|
||||
|
||||
   %%{init: { 'logLevel': 'debug', 'theme': 'neutral', 'gitGraph': {'rotateCommitLabel': false} } }%%
   gitGraph
     commit id: "-"
     branch dev
     commit id: "B"
     commit id: "C"
     checkout main
     merge dev tag: "v0.4.0"
     checkout dev
     commit id: "D"
     commit id: "E"
     commit id: "F"
     checkout main
     merge dev tag: "v0.5.0"
|
||||
|
||||
``dev``
|
||||
=======
|
||||
|
||||
Development is done on branch ``dev``.
|
||||
|
||||
All merge requests need to target this branch.
|
||||
|
||||
``main``
|
||||
========
|
||||
|
||||
Finished development is merged to branch ``main``.
|
||||
|
||||
Each merge-commit is tagged with a semantic version.
|
||||
|
||||
|
||||
Tagging
|
||||
*******
|
||||
|
||||
See context in :ghissue:`#5 Tagging/versioning of this repo <5>`.
|
||||
|
||||
Tag new releases in the ``main`` branch using a semver compatible value, starting with ``v``:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
   git checkout main
   git tag v0.0.0
   git push upstream v0.0.0
|
||||
|
||||
Move the corresponding release branch (starting with ``r``) forward by creating a merge commit, and using the merged tag
|
||||
as the commit message:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
   git checkout r0
   git merge --no-ff -m 'v0.0.0' v0.0.0
   git push upstream r0
|
||||
61
doc/RepositoryStructure.rst
Normal file
@@ -0,0 +1,61 @@
|
||||
Repository Structure
|
||||
####################
|
||||
|
||||
pyTooling Actions assumes a certain repository structure and set of technologies. The directory and file names assumed
in the default parameters of the job templates can almost all be overridden if the target repository uses a different
structure.
|
||||
|
||||
* Python source code is located in a directory named after the Python package name.
|
||||
|
||||
* A ``<package>/__init__.py`` should be provided with global package information like: version number, author,
|
||||
copyrights, license, maintainer, ...
|
||||
|
||||
* All tests are located in a ``/tests`` directory and further divided into subdirectories by testing approach.
|
||||
|
||||
* E.g. unit tests are located in a ``/tests/unit`` directory.
|
||||
|
||||
* The package documentation is located in a ``/doc`` directory.
|
||||
|
||||
* Documentation is written with ReStructured Text (ReST) and translated using Sphinx.
|
||||
* Documentation requirements are listed in a ``/doc/requirements.txt``.
|
||||
|
||||
* Dependencies are listed in a ``/requirements.txt``.
|
||||
|
||||
* If the build process requires separate dependencies, a ``/build/requirements.txt`` is used.
|
||||
* If the publishing/distribution process requires separate dependencies, a ``/dist/requirements.txt`` is used.
|
||||
* To reduce duplication of dependencies, dependency files should recursively call each other with ``-r <path>``.
|
||||
|
||||
* All Python project settings are stored in a ``pyproject.toml``.
|
||||
* The Python package is described in a ``setup.py``.
|
||||
* Packages are built with ``build`` instead of ``setuptools``.
|
||||
* A repository overview is given in a ``README.md``.
|
||||
|
||||
.. code-block::
|
||||
|
||||
   <Repository>/
     .github/
       workflows/
         Pipeline.yml
       dependabot.yml
     .vscode/
       settings.json
     build/
       requirements.txt
     dist/
       requirements.txt
     doc/
       conf.py
       index.rst
       requirements.txt
     <package>
       __init__.py
     tests/
       unit/
       requirements.txt
     .editorconfig
     .gitignore
     LICENSE.md
     pyproject.toml
     README.md
     requirements.txt
     setup.py
|
||||
4
doc/TODO.rst
Normal file
@@ -0,0 +1,4 @@
|
||||
TODOs
|
||||
#####
|
||||
|
||||
.. todolist::
|
||||
BIN
doc/_static/GH_Workflow_DisabledJobsWarnings.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 11 KiB |
115
doc/_static/css/override.css
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
/* theme overrides */
|
||||
.rst-content h1,
|
||||
.rst-content h2 {
|
||||
margin-top: 24px;
|
||||
margin-bottom: 6px;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.rst-content h3,
|
||||
.rst-content h4,
|
||||
.rst-content h5,
|
||||
.rst-content h6 {
|
||||
margin-top: 12px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p {
|
||||
margin-bottom: 6px
|
||||
}
|
||||
|
||||
/* general overrides */
|
||||
html {
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
footer {
|
||||
font-size: 95%;
|
||||
text-align: center
|
||||
}
|
||||
|
||||
footer p {
|
||||
margin-bottom: 0px /* 12px */;
|
||||
font-size: 95%
|
||||
}
|
||||
|
||||
section > p,
|
||||
.section p,
|
||||
.simple li {
|
||||
text-align: justify
|
||||
}
|
||||
|
||||
.rst-content .topic-title {
|
||||
font-size: larger;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p.rubric {
|
||||
text-decoration: underline;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
/* wyrm overrides */
|
||||
.wy-menu-vertical header,
|
||||
.wy-menu-vertical p.caption {
|
||||
color: #9b9b9b /* #55a5d9 */;
|
||||
padding: 0 0.809em /* 0 1.618em */;
|
||||
margin: 6px 0 0 0 /* 12px 0 0 */;
|
||||
border-top: 1px solid #9b9b9b;
|
||||
}
|
||||
|
||||
.wy-side-nav-search {
|
||||
margin-bottom: 0 /* .809em */;
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc*/
|
||||
}
|
||||
|
||||
.wy-side-nav-search input[type=text] {
|
||||
border-radius: 0px /* 50px */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search .wy-dropdown > a, .wy-side-nav-search > a {
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc;*/
|
||||
margin-bottom: 0.404em /* .809em */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search > div.version {
|
||||
margin: 0 0 6px 0;
|
||||
/* BTD: */
|
||||
/*margin-top: -.4045em;*/
|
||||
}
|
||||
|
||||
.wy-nav .wy-menu-vertical a:hover {
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: 1600px /* 800px */ ;
|
||||
}
|
||||
|
||||
.wy-nav-top {
|
||||
background: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
/* Sphinx Design */
|
||||
.sd-tab-set {
|
||||
margin: 0
|
||||
}
|
||||
|
||||
.sd-tab-set > label {
|
||||
padding-top: .5em;
|
||||
padding-right: 1em;
|
||||
padding-bottom: .5em;
|
||||
padding-left: 1em
|
||||
}
|
||||
|
||||
.sd-container-fluid {
|
||||
padding-left: 0;
|
||||
padding-right: 0;
|
||||
}
|
||||
BIN
doc/_static/icon.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 43 KiB |
BIN
doc/_static/logo.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
160
doc/_templates/autoapi/module.rst
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
.. # Template modified by Patrick Lehmann
|
||||
* removed automodule on top, because private members are activated for autodoc (no doubled documentation).
|
||||
* Made sections like 'submodules' bold text, but no headlines to reduce number of ToC levels.
|
||||
|
||||
{{ '=' * node.name|length }}
|
||||
{{ node.name }}
|
||||
{{ '=' * node.name|length }}
|
||||
|
||||
.. automodule:: {{ node.name }}
|
||||
|
||||
{##}
|
||||
{%- block modules -%}
|
||||
{%- if subnodes %}
|
||||
|
||||
**Submodules**
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
{% for item in subnodes %}
|
||||
{{ item.name }}
|
||||
{%- endfor %}
|
||||
{##}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
{##}
|
||||
.. currentmodule:: {{ node.name }}
|
||||
{##}
|
||||
|
||||
{%- if node.variables %}
|
||||
|
||||
**Variables**
|
||||
|
||||
{% for item, obj in node.variables.items() -%}
|
||||
- :py:data:`{{ item }}`
|
||||
{#{ obj|summary }#}
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.functions %}
|
||||
|
||||
**Functions**
|
||||
|
||||
{% for item, obj in node.functions.items() -%}
|
||||
- :py:func:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.exceptions %}
|
||||
|
||||
**Exceptions**
|
||||
|
||||
{% for item, obj in node.exceptions.items() -%}
|
||||
- :py:exc:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.classes %}
|
||||
|
||||
**Classes**
|
||||
|
||||
{% for item, obj in node.classes.items() -%}
|
||||
- :py:class:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- block variables -%}
|
||||
{%- if node.variables %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Variables**
|
||||
|
||||
{#% for item, obj in node.variables.items() -%}
|
||||
- :py:data:`{{ item }}`
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item, obj in node.variables.items() %}
|
||||
.. autodata:: {{ item }}
|
||||
:annotation:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
{{ obj|pprint|indent(6) }}
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block functions -%}
|
||||
{%- if node.functions %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Functions**
|
||||
|
||||
{% for item in node.functions %}
|
||||
.. autofunction:: {{ item }}
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block exceptions -%}
|
||||
{%- if node.exceptions %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Exceptions**
|
||||
|
||||
{#% for item, obj in node.exceptions.items() -%}
|
||||
- :py:exc:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item in node.exceptions %}
|
||||
.. autoexception:: {{ item }}
|
||||
|
||||
.. rubric:: Inheritance
|
||||
.. inheritance-diagram:: {{ item }}
|
||||
:parts: 1
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block classes -%}
|
||||
{%- if node.classes %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Classes**
|
||||
|
||||
{#% for item, obj in node.classes.items() -%}
|
||||
- :py:class:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item in node.classes %}
|
||||
.. autoclass:: {{ item }}
|
||||
:members:
|
||||
:private-members:
|
||||
:special-members:
|
||||
:inherited-members:
|
||||
:exclude-members: __weakref__
|
||||
|
||||
.. rubric:: Inheritance
|
||||
.. inheritance-diagram:: {{ item }}
|
||||
:parts: 1
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
14
doc/_templates/autoapi/package.rst
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
.. # Template created by Patrick Lehmann
|
||||
|
||||
Python Class Reference
|
||||
######################
|
||||
|
||||
Reference of all packages and modules:
|
||||
|
||||
.. automodule:: {{ node.name }}
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
{% for item in subnodes %}
|
||||
{{ item.name }}
|
||||
{%- endfor %}
|
||||
305
doc/conf.py
Normal file
@@ -0,0 +1,305 @@
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
from importlib.util import find_spec
|
||||
from sys import path as sys_path
|
||||
from os.path import abspath
|
||||
from pathlib import Path
|
||||
from json import loads
|
||||
|
||||
from pyTooling.Packaging import extractVersionInformation
|
||||
|
||||
ROOT = Path(__file__).resolve().parent
|
||||
|
||||
sys_path.insert(0, abspath("."))
|
||||
sys_path.insert(0, abspath(".."))
|
||||
sys_path.insert(0, abspath("../pyDummy"))
|
||||
# sys_path.insert(0, abspath("_extensions"))
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Project information and versioning
|
||||
# ==============================================================================
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
githubNamespace = "pyTooling"
|
||||
githubProject = "Actions"
|
||||
project = "pyDummy"
|
||||
|
||||
packageInformationFile = Path(f"../{project}/__init__.py")
|
||||
versionInformation = extractVersionInformation(packageInformationFile)
|
||||
|
||||
author = versionInformation.Author
|
||||
copyright = versionInformation.Copyright
|
||||
version = ".".join(versionInformation.Version.split(".")[:2]) # e.g. 2.3 The short X.Y version.
|
||||
release = versionInformation.Version
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Miscellaneous settings
|
||||
# ==============================================================================
|
||||
# The master toctree document.
|
||||
master_doc = "index"
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = [
|
||||
"_build",
|
||||
"_theme",
|
||||
"Thumbs.db",
|
||||
".DS_Store"
|
||||
]
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = "manni"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Restructured Text settings
|
||||
# ==============================================================================
|
||||
prologPath = Path("prolog.inc")
|
||||
try:
	with prologPath.open("r", encoding="utf-8") as fileHandle:
		rst_prolog = fileHandle.read()
except Exception as ex:
	print(f"[ERROR:] While reading '{prologPath}'.")
	print(ex)
	rst_prolog = ""
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Options for HTML output
|
||||
# ==============================================================================
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_options = {
|
||||
"logo_only": True,
|
||||
"vcs_pageview_mode": 'blob',
|
||||
"navigation_depth": 5,
|
||||
}
|
||||
html_css_files = [
|
||||
'css/override.css',
|
||||
]
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ["_static"]
|
||||
|
||||
html_logo = str(Path(html_static_path[0]) / "logo.png")
|
||||
html_favicon = str(Path(html_static_path[0]) / "icon.png")
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = f"{githubProject}Doc"
|
||||
|
||||
# If not None, a 'Last updated on:' timestamp is inserted at every page
|
||||
# bottom, using the given strftime format.
|
||||
# The empty string is equivalent to '%b %d, %Y'.
|
||||
html_last_updated_fmt = "%d.%m.%Y"
|
||||
|
||||
# ==============================================================================
|
||||
# Python settings
|
||||
# ==============================================================================
|
||||
modindex_common_prefix = [
|
||||
f"{project}."
|
||||
]
|
||||
|
||||
# ==============================================================================
|
||||
# Options for LaTeX / PDF output
|
||||
# ==============================================================================
|
||||
from textwrap import dedent
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
"papersize": "a4paper",
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
"preamble": dedent(r"""
|
||||
% ================================================================================
|
||||
% User defined additional preamble code
|
||||
% ================================================================================
|
||||
% Add more Unicode characters for pdfLaTeX.
|
||||
% - Alternatively, compile with XeLaTeX or LuaLaTeX.
|
||||
% - https://GitHub.com/sphinx-doc/sphinx/issues/3511
|
||||
%
|
||||
\ifdefined\DeclareUnicodeCharacter
|
||||
\DeclareUnicodeCharacter{2265}{$\geq$}
|
||||
\DeclareUnicodeCharacter{21D2}{$\Rightarrow$}
|
||||
\fi
|
||||
|
||||
|
||||
% ================================================================================
|
||||
"""),
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
( master_doc,
|
||||
f"{githubProject}.tex",
|
||||
f"The {githubProject} Documentation",
|
||||
f"Patrick Lehmann",
|
||||
f"manual"
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Extensions
|
||||
# ==============================================================================
|
||||
extensions = [
|
||||
# Standard Sphinx extensions
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.extlinks",
|
||||
"sphinx.ext.intersphinx",
|
||||
"sphinx.ext.inheritance_diagram",
|
||||
"sphinx.ext.todo",
|
||||
"sphinx.ext.graphviz",
|
||||
"sphinx.ext.mathjax",
|
||||
"sphinx.ext.ifconfig",
|
||||
"sphinx.ext.viewcode",
|
||||
# SphinxContrib extensions
|
||||
"sphinxcontrib.mermaid",
|
||||
# Other extensions
|
||||
"sphinx_design",
|
||||
"sphinx_copybutton",
|
||||
"sphinx_autodoc_typehints",
|
||||
"autoapi.sphinx",
|
||||
"sphinx_reports",
|
||||
# User defined extensions
|
||||
]
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.InterSphinx
|
||||
# ==============================================================================
|
||||
intersphinx_mapping = {
|
||||
"python": ("https://docs.python.org/3", None),
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.AutoDoc
|
||||
# ==============================================================================
|
||||
# see: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
|
||||
#autodoc_default_options = {
|
||||
# "private-members": True,
|
||||
# "special-members": True,
|
||||
# "inherited-members": True,
|
||||
# "exclude-members": "__weakref__"
|
||||
#}
|
||||
#autodoc_class_signature = "separated"
|
||||
autodoc_member_order = "bysource" # alphabetical, groupwise, bysource
|
||||
autodoc_typehints = "both"
|
||||
#autoclass_content = "both"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.ExtLinks
|
||||
# ==============================================================================
|
||||
extlinks = {
|
||||
"gh": (f"https://GitHub.com/%s", "gh:%s"),
|
||||
"ghissue": (f"https://GitHub.com/{githubNamespace}/{githubProject}/issues/%s", "issue #%s"),
|
||||
"ghpull": (f"https://GitHub.com/{githubNamespace}/{githubProject}/pull/%s", "pull request #%s"),
|
||||
"ghsrc": (f"https://GitHub.com/{githubNamespace}/{githubProject}/blob/main/%s", None),
|
||||
"wiki": (f"https://en.wikipedia.org/wiki/%s", None),
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.Graphviz
|
||||
# ==============================================================================
|
||||
graphviz_output_format = "svg"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# SphinxContrib.Mermaid
|
||||
# ==============================================================================
|
||||
mermaid_params = [
|
||||
'--backgroundColor', 'transparent',
|
||||
]
|
||||
mermaid_verbose = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.Inheritance_Diagram
|
||||
# ==============================================================================
|
||||
inheritance_node_attrs = {
|
||||
# "shape": "ellipse",
|
||||
# "fontsize": 14,
|
||||
# "height": 0.75,
|
||||
"color": "dodgerblue1",
|
||||
"style": "filled"
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.ToDo
|
||||
# ==============================================================================
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
todo_link_only = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# sphinx-reports
|
||||
# ==============================================================================
|
||||
# report_unittest_testsuites = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "xml_report": "../report/unit/unittest.xml",
|
||||
# }
|
||||
# }
|
||||
# report_codecov_packages = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "json_report": "../report/coverage/coverage.json",
|
||||
# "fail_below": 80,
|
||||
# "levels": "default"
|
||||
# }
|
||||
# }
|
||||
# report_doccov_packages = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "directory": f"../{project}",
|
||||
# "fail_below": 80,
|
||||
# "levels": "default"
|
||||
# }
|
||||
# }
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx_Design
|
||||
# ==============================================================================
|
||||
# sd_fontawesome_latex = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# AutoAPI.Sphinx
|
||||
# ==============================================================================
|
||||
autoapi_modules = {
|
||||
f"{project}": {
|
||||
"template": "package",
|
||||
"output": project,
|
||||
"override": True
|
||||
}
|
||||
}
|
||||
|
||||
for directory in [mod for mod in Path(f"../{project}").iterdir() if mod.is_dir() and mod.name != "__pycache__"]:
	print(f"Adding module rule for '{project}.{directory.name}'")
	autoapi_modules[f"{project}.{directory.name}"] = {
		"template": "module",
		"output": project,
		"override": True
	}
|
||||
7
doc/coverage/index.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
Code Coverage Report
|
||||
####################
|
||||
|
||||
Code coverage report generated with `pytest <https://github.com/pytest-dev/pytest>`__ and `Coverage.py <https://github.com/nedbat/coveragepy/tree/master>`__.
|
||||
|
||||
.. #report:code-coverage::
|
||||
:packageid: src
|
||||
195
doc/index.rst
Normal file
@@ -0,0 +1,195 @@
|
||||
.. include:: shields.inc
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Introduction}
|
||||
|
||||
.. only:: html
|
||||
|
||||
| |SHIELD:svg:pyTooling-github| |SHIELD:svg:pyTooling-src-license| |SHIELD:svg:pyTooling-ghp-doc| |SHIELD:svg:pyTooling-doc-license|
|
||||
| |SHIELD:svg:pyTooling-tag| |SHIELD:svg:pyTooling-date|
|
||||
|
||||
.. Disabled shields: |SHIELD:svg:pyTooling-gitter|
|
||||
|
||||
.. only:: latex
|
||||
|
||||
|SHIELD:png:pyTooling-github| |SHIELD:png:pyTooling-src-license| |SHIELD:png:pyTooling-ghp-doc| |SHIELD:png:pyTooling-doc-license|
|
||||
|SHIELD:png:pyTooling-tag| |SHIELD:png:pyTooling-date|
|
||||
|
||||
.. Disabled shields: |SHIELD:svg:pyTooling-gitter|
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pyTooling Actions Documentation
|
||||
###############################
|
||||
|
||||
**pyTooling Actions** are reusable steps and workflows for GitHub Actions easing the creation and maintenance of
|
||||
workflows for Python projects on GitHub.
|
||||
|
||||
Introduction
|
||||
************
|
||||
|
||||
GitHub Actions workflows, actions, and documentation are mostly focused on JavaScript/TypeScript as the scripting
language for writing reusable CI code.
However, since Python is equally popular and capable, JavaScript/TypeScript can be bypassed, with some caveats.
This repository gathers reusable CI tooling for testing, packaging, and distributing Python projects and their documentation.
|
||||
|
||||
|
||||
GitHub Action Job Templates
|
||||
***************************
|
||||
|
||||
The following list categorizes all pre-defined job templates, which can be instantiated in a pipeline (GitHub Action
|
||||
Workflow):
|
||||
|
||||
.. hlist::
|
||||
:columns: 2
|
||||
|
||||
* **Global Templates**
|
||||
|
||||
* :ref:`JOBTMPL/Parameters`
|
||||
|
||||
* **Unit Tests, Code Coverage, Code Quality, ...**
|
||||
|
||||
* :ref:`JOBTMPL/UnitTesting`
|
||||
* :ref:`JOBTMPL/CodeCoverage`
|
||||
* :ref:`JOBTMPL/StaticTypeChecking`
|
||||
* *code formatting (planned)*
|
||||
* *coding style (planned)*
|
||||
* *code linting (planned)*
|
||||
|
||||
* **Build and Packaging**
|
||||
|
||||
* :ref:`JOBTMPL/Package`
|
||||
|
||||
* **Documentation**
|
||||
|
||||
* :ref:`JOBTMPL/VerifyDocumentation`
|
||||
* :ref:`JOBTMPL/BuildTheDocs`
|
||||
|
||||
* **Releasing, Publishing**
|
||||
|
||||
* :ref:`JOBTMPL/GitHubReleasePage`
|
||||
* :ref:`JOBTMPL/PyPI`
|
||||
* :ref:`JOBTMPL/PublishTestResults`
|
||||
* :ref:`JOBTMPL/PublishToGitHubPages`
|
||||
|
||||
* **Cleanups**
|
||||
|
||||
* :ref:`JOBTMPL/ArtifactCleanup`
|
||||
|
||||
|
||||
Example Pipelines
|
||||
=================
|
||||
|
||||
``ExamplePipeline.yml`` is an example workflow which uses all of the reusable workflows.
Python package/tool developers can copy it into their repositories in order to use all the reusable workflows straight away.
The minimal required modifications are the following (see the sketch after this list):
|
||||
|
||||
- Set the ``name`` input of job ``Parameters``.
|
||||
- Specify the ``commands`` input of job ``StaticTypeCheck``.
|
||||
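
A minimal sketch of these two modifications. The job names follow the examples in this documentation, but the
workflow file names, the package name, and the ``mypy`` command are assumptions/placeholders to be adapted; further
inputs of both jobs are omitted:

.. code-block:: yaml

   jobs:
     Params:
       uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
       with:
         name: myPackage          # 1. set the package name

     StaticTypeCheck:
       uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0
       needs:
         - Params
       with:
         # 2. specify the type checking command(s); the command below is only an example
         commands: |
           mypy --html-report htmlmypy -p myPackage
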
|
||||
|
||||
GitHub Actions
|
||||
**************
|
||||
|
||||
* :ref:`ACTION/Releaser`
|
||||
* :ref:`ACTION/WithPostStep`
|
||||
|
||||
References
|
||||
**********
|
||||
|
||||
- `hdl/containers#48 <https://github.com/hdl/containers/issues/48>`__
|
||||
|
||||
|
||||
.. _CONTRIBUTORS:
|
||||
|
||||
Contributors
|
||||
************
|
||||
|
||||
* `Patrick Lehmann <https://GitHub.com/Paebbels>`__
|
||||
* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__ (Maintainer)
|
||||
* `and more... <https://GitHub.com/pyTooling/Actions/graphs/contributors>`__
|
||||
|
||||
|
||||
.. _LICENSE:
|
||||
|
||||
License
|
||||
*******
|
||||
|
||||
.. only:: html
|
||||
|
||||
This Python package (source code) is licensed under `Apache License 2.0 <Code-License.html>`__. |br|
|
||||
The accompanying documentation is licensed under `Creative Commons - Attribution 4.0 (CC-BY 4.0) <Doc-License.html>`__.
|
||||
|
||||
.. only:: latex
|
||||
|
||||
This Python package (source code) is licensed under **Apache License 2.0**. |br|
|
||||
The accompanying documentation is licensed under **Creative Commons - Attribution 4.0 (CC-BY 4.0)**.
|
||||
|
||||
|
||||
.. toctree::
|
||||
:caption: Introduction
|
||||
:hidden:
|
||||
|
||||
Background
|
||||
RepositoryStructure
|
||||
Instantiation
|
||||
Deveopment
|
||||
Dependency
|
||||
Releases
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Main Documentation}
|
||||
|
||||
.. toctree::
|
||||
:caption: Actions
|
||||
:hidden:
|
||||
|
||||
Action/index
|
||||
Action/Releaser
|
||||
Action/With-post-step
|
||||
|
||||
.. toctree::
|
||||
:caption: Job Templates
|
||||
:hidden:
|
||||
|
||||
JobTemplate/index
|
||||
JobTemplate/Parameters
|
||||
JobTemplate/CoverageCollection
|
||||
JobTemplate/UnitTesting
|
||||
JobTemplate/StaticTypeCheck
|
||||
JobTemplate/PublishTestResults
|
||||
JobTemplate/Package
|
||||
JobTemplate/PublishOnPyPI
|
||||
JobTemplate/VerifyDocs
|
||||
JobTemplate/BuildTheDocs
|
||||
JobTemplate/PublishToGitHubPages
|
||||
JobTemplate/Release
|
||||
JobTemplate/ArtifactCleanUp
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{pyDummy Example}
|
||||
|
||||
.. toctree::
|
||||
:caption: pyDummy Example
|
||||
:hidden:
|
||||
|
||||
pyDummy/pyDummy
|
||||
unittests/index
|
||||
coverage/index
|
||||
Doc. Coverage Report <DocCoverage>
|
||||
Static Type Check Report ➚ <typing/index>
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Appendix}
|
||||
|
||||
.. toctree::
|
||||
:caption: Appendix
|
||||
:hidden:
|
||||
|
||||
License
|
||||
Doc-License
|
||||
TODO
|
||||
36
doc/make.bat
Normal file
@@ -0,0 +1,36 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=.
|
||||
set BUILDDIR=_build
|
||||
set SPHINXOPTS=-v
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
||||
59
doc/prolog.inc
Normal file
@@ -0,0 +1,59 @@
|
||||
.. # Load pre-defined aliases and graphical characters like © from docutils
|
||||
# <file> is used to denote the special path
|
||||
# <Python>\Lib\site-packages\docutils\parsers\rst\include
|
||||
.. include:: <isonum.txt>
|
||||
.. include:: <mmlalias.txt>
|
||||
|
||||
.. # define a hard line break for HTML
|
||||
.. |br| raw:: html
|
||||
|
||||
<br />
|
||||
|
||||
.. # define horizontal line for HTML
|
||||
.. |hr| raw:: html
|
||||
|
||||
<hr />
|
||||
|
||||
.. # define additional CSS based styles and ReST roles for HTML
|
||||
.. raw:: html
|
||||
|
||||
<style type="text/css">
|
||||
span.bolditalic {font-weight: bold; font-style: italic; }
|
||||
span.underline {text-decoration: underline; }
|
||||
span.strike {text-decoration: line-through; }
|
||||
span.xlarge {font-size: x-large; }
|
||||
span.colorred {color: #CC0000; }
|
||||
span.colorgreen {color: #009933; }
|
||||
span.colorblue {color: #0066FF; }
|
||||
span.colorpurple {color: #9900CC; }
|
||||
</style>
|
||||
|
||||
.. role:: bolditalic
|
||||
:class: bolditalic
|
||||
|
||||
.. role:: underline
|
||||
:class: underline
|
||||
|
||||
.. role:: strike
|
||||
:class: strike
|
||||
|
||||
.. role:: xlarge
|
||||
:class: xlarge
|
||||
|
||||
.. role:: red
|
||||
:class: colorred
|
||||
.. role:: green
|
||||
:class: colorgreen
|
||||
.. role:: blue
|
||||
:class: colorblue
|
||||
.. role:: purple
|
||||
:class: colorpurple
|
||||
|
||||
.. role:: deletion
|
||||
:class: colorred strike
|
||||
.. role:: addition
|
||||
:class: colorgreen
|
||||
|
||||
.. role:: pycode(code)
|
||||
:language: python
|
||||
:class: highlight
|
||||
0
doc/pyDummy/.gitempty
Normal file
19
doc/requirements.txt
Normal file
@@ -0,0 +1,19 @@
|
||||
-r ../requirements.txt
|
||||
|
||||
pyTooling ~= 8.0
|
||||
|
||||
# Enforce latest version on ReadTheDocs
|
||||
sphinx ~= 8.1
|
||||
docutils ~= 0.21
|
||||
docutils_stubs ~= 0.0.22
|
||||
|
||||
# ReadTheDocs Theme
|
||||
sphinx_rtd_theme ~= 3.0
|
||||
|
||||
# Sphinx Extensions
|
||||
sphinxcontrib-mermaid>=0.9.2
|
||||
autoapi >= 2.0.1
|
||||
sphinx_design ~= 0.6.1
|
||||
sphinx-copybutton >= 0.5.2
|
||||
sphinx_autodoc_typehints ~= 2.5
|
||||
sphinx_reports ~= 0.7
|
||||
74
doc/shields.inc
Normal file
@@ -0,0 +1,74 @@
|
||||
.. # Use http://b64.io/ to encode any image to base64. Then replace `/` with
|
||||
# `%2F` and `+` with `%2B` (or use http://meyerweb.com/eric/tools/dencoder/).
|
||||
# Beware that `?logo=data:image/png;base64,` must also be converted to
|
||||
# percent encoding so that the URL is properly parsed.
|
||||
|
||||
.. # Sourcecode link to GitHub
|
||||
.. |SHIELD:svg:pyTooling-github| image:: https://img.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub
|
||||
:alt: Sourcecode on GitHub
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions
|
||||
.. |SHIELD:png:pyTooling-github| image:: https://raster.shields.io/badge/pyTooling-Actions-63bf7f.svg?longCache=true&style=flat-square&longCache=true&logo=GitHub
|
||||
:alt: Sourcecode on GitHub
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions
|
||||
|
||||
.. # Sourcecode license
|
||||
.. |SHIELD:svg:pyTooling-src-license| image:: https://img.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code
|
||||
:alt: Code license
|
||||
:height: 22
|
||||
:target: Code-License.html
|
||||
.. |SHIELD:png:pyTooling-src-license| image:: https://img.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code
|
||||
:alt: Code license
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/blob/main/LICENSE.md
|
||||
|
||||
.. # GitHub tag
|
||||
.. |SHIELD:svg:pyTooling-tag| image:: https://img.shields.io/github/v/tag/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub&include_prereleases
|
||||
:alt: GitHub tag (latest SemVer incl. pre-release)
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/tags
|
||||
.. |SHIELD:png:pyTooling-tag| image:: https://raster.shields.io/github/v/tag/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub&include_prereleases
|
||||
:alt: GitHub tag (latest SemVer incl. pre-release)
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/tags
|
||||
|
||||
.. # GitHub release date
|
||||
.. |SHIELD:svg:pyTooling-date| image:: https://img.shields.io/github/release-date/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub
|
||||
:alt: GitHub release date
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/releases
|
||||
.. |SHIELD:png:pyTooling-date| image:: https://raster.shields.io/github/release-date/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub
|
||||
:alt: GitHub release date
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/releases
|
||||
|
||||
.. # Documentation license
|
||||
.. |SHIELD:svg:pyTooling-doc-license| image:: https://img.shields.io/badge/doc-CC--BY%204.0-green?longCache=true&style=flat-square&logo=CreativeCommons&logoColor=fff
|
||||
:alt: Documentation License
|
||||
:height: 22
|
||||
:target: License.html
|
||||
.. |SHIELD:png:pyTooling-doc-license| image:: https://raster.shields.io/badge/doc-CC--BY%204.0-green?longCache=true&style=flat-square&logo=CreativeCommons&logoColor=fff
|
||||
:alt: Documentation License
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/blob/main/doc/License.rst
|
||||
|
||||
.. # GHPages - read now
|
||||
.. |SHIELD:svg:pyTooling-ghp-doc| image:: https://img.shields.io/website?longCache=true&style=flat-square&label=pyTooling.github.io%2FpyTooling&logo=GitHub&logoColor=fff&up_color=blueviolet&up_message=Read%20now%20%E2%9E%9A&url=https%3A%2F%2FpyTooling.github.io%2FpyTooling%2Findex.html
|
||||
:alt: Documentation - Read Now!
|
||||
:height: 22
|
||||
:target: https://pyTooling.github.io/pyTooling/
|
||||
.. |SHIELD:png:pyTooling-ghp-doc| image:: https://raster.shields.io/website?longCache=true&style=flat-square&label=pyTooling.github.io%2FpyTooling&logo=GitHub&logoColor=fff&up_color=blueviolet&up_message=Read%20now%20%E2%9E%9A&url=https%3A%2F%2FpyTooling.github.io%2FpyTooling%2Findex.html
|
||||
:alt: Documentation - Read Now!
|
||||
:height: 22
|
||||
:target: https://pyTooling.github.io/pyTooling/
|
||||
|
||||
.. # Gitter
|
||||
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.svg?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
|
||||
:alt: Chat on Gitter
|
||||
:height: 22
|
||||
:target: https://gitter.im/hdl/community
|
||||
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.svg?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
|
||||
:alt: Chat on Gitter
|
||||
:height: 22
|
||||
:target: https://gitter.im/hdl/community
|
||||
8
doc/typing/index.rst
Normal file
@@ -0,0 +1,8 @@
|
||||
Static Type Checking Report
|
||||
###########################
|
||||
|
||||
*Placeholder for the Static Type Checking report generated with* ``mypy``.
|
||||
|
||||
.. #raw:: html
|
||||
|
||||
<iframe src="../../../../report/typing/index.html" width="100%" height="500px" style="border:none;"/>
|
||||
7
doc/unittests/index.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
Unittest Summary Report
|
||||
#######################
|
||||
|
||||
Unittest report generated with `pytest <https://github.com/pytest-dev/pytest>`__.
|
||||
|
||||
.. #report:unittest-summary::
|
||||
:reportid: src
|
||||
101
pyDummy/__init__.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""
|
||||
A module for a set of dummy classes.
|
||||
"""
|
||||
|
||||
__author__ = "Patrick Lehmann"
|
||||
__email__ = "Paebbels@gmail.com"
|
||||
__copyright__ = "2017-2024, Patrick Lehmann"
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__version__ = "0.4.4"
|
||||
__keywords__ = ["GitHub Actions"]
|
||||
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"
|
||||
|
||||
from pyTooling.Decorators import export, readonly
|
||||
from pyTooling.Platform import Platform
|
||||
|
||||
|
||||
@export
|
||||
class Base:
|
||||
"""
|
||||
A base-class for dummy applications.
|
||||
"""
|
||||
|
||||
_value: int #: An internal value.
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the base-class.
|
||||
"""
|
||||
self._value = 0
|
||||
|
||||
@readonly
|
||||
def Value(self) -> int:
|
||||
"""
|
||||
Read-only property to return the internal value.
|
||||
|
||||
:return: Internal value.
|
||||
"""
|
||||
return self._value
|
||||
|
||||
|
||||
@export
|
||||
class Application(Base):
|
||||
"""
|
||||
A dummy application for demonstration purposes.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the dummy application.
|
||||
"""
|
||||
super().__init__()
|
||||
|
||||
platform = Platform()
|
||||
if platform.IsNativeLinux:
|
||||
self._value += 1
|
||||
elif platform.IsNativeMacOS:
|
||||
self._value += 2
|
||||
elif platform.IsNativeWindows:
|
||||
self._value += 3
|
||||
elif platform.IsMSYSOnWindows:
|
||||
self._value += 11
|
||||
elif platform.IsMinGW32OnWindows:
|
||||
self._value += 12
|
||||
elif platform.IsMinGW64OnWindows:
|
||||
self._value += 13
|
||||
elif platform.IsUCRT64OnWindows:
|
||||
self._value += 14
|
||||
elif platform.IsClang32OnWindows:
|
||||
self._value += 15
|
||||
elif platform.IsClang64OnWindows:
|
||||
self._value += 16
|
||||
0
pyDummy/py.typed
Normal file
101
pyExamples/Extensions/__init__.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""
|
||||
A module for a set of dummy classes.
|
||||
"""
|
||||
|
||||
__author__ = "Patrick Lehmann"
|
||||
__email__ = "Paebbels@gmail.com"
|
||||
__copyright__ = "2017-2024, Patrick Lehmann"
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__version__ = "0.14.8"
|
||||
__keywords__ = ["GitHub Actions"]
|
||||
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"
|
||||
|
||||
from pyTooling.Decorators import export, readonly
|
||||
from pyTooling.Platform import Platform
|
||||
|
||||
|
||||
@export
|
||||
class Base:
|
||||
"""
|
||||
A base-class for dummy applications.
|
||||
"""
|
||||
|
||||
_value: int #: An internal value.
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the base-class.
|
||||
"""
|
||||
self._value = 0
|
||||
|
||||
@readonly
|
||||
def Value(self) -> int:
|
||||
"""
|
||||
Read-only property to return the internal value.
|
||||
|
||||
:return: Internal value.
|
||||
"""
|
||||
return self._value
|
||||
|
||||
|
||||
@export
|
||||
class Application(Base):
|
||||
"""
|
||||
A dummy application for demonstration purposes.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the dummy application.
|
||||
"""
|
||||
super().__init__()
|
||||
|
||||
platform = Platform()
|
||||
if platform.IsNativeLinux:
|
||||
self._value += 1
|
||||
elif platform.IsNativeMacOS:
|
||||
self._value += 2
|
||||
elif platform.IsNativeWindows:
|
||||
self._value += 3
|
||||
elif platform.IsMSYSOnWindows:
|
||||
self._value += 11
|
||||
elif platform.IsMinGW32OnWindows:
|
||||
self._value += 12
|
||||
elif platform.IsMinGW64OnWindows:
|
||||
self._value += 13
|
||||
elif platform.IsUCRT64OnWindows:
|
||||
self._value += 14
|
||||
elif platform.IsClang32OnWindows:
|
||||
self._value += 15
|
||||
elif platform.IsClang64OnWindows:
|
||||
self._value += 16
|
||||
0
pyExamples/Extensions/py.typed
Normal file
73
pyproject.toml
Normal file
@@ -0,0 +1,73 @@
|
||||
[build-system]
|
||||
requires = [
|
||||
"setuptools ~= 75.5",
|
||||
"wheel ~= 0.45",
|
||||
"pyTooling ~= 8.0"
|
||||
]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.black]
|
||||
line-length = 120
|
||||
|
||||
[tool.mypy]
|
||||
files = ["pyDummy"]
|
||||
python_version = "3.12"
|
||||
#ignore_missing_imports = true
|
||||
strict = true
|
||||
pretty = true
|
||||
show_error_context = true
|
||||
show_error_codes = true
|
||||
namespace_packages = true
|
||||
html_report = "report/typing"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--tb=native"
|
||||
# Don't set 'python_classes = *'; otherwise, pytest doesn't search for classes
|
||||
# derived from unittest.TestCase
|
||||
python_files = "*"
|
||||
python_functions = "test_*"
|
||||
filterwarnings = [
|
||||
"error::DeprecationWarning",
|
||||
"error::PendingDeprecationWarning"
|
||||
]
|
||||
|
||||
[tool.interrogate]
|
||||
color = true
|
||||
verbose = 1 # possible values: 0 (minimal output), 1 (-v), 2 (-vv)
|
||||
fail-under = 59
|
||||
generate-badge = "."
|
||||
badge-format = "png"
|
||||
ignore-setters = true
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
relative_files = true
|
||||
omit = [
|
||||
"*site-packages*",
|
||||
"setup.py",
|
||||
"tests/benchmark/*",
|
||||
"tests/performance/*",
|
||||
"tests/platform/*",
|
||||
"tests/unit/*"
|
||||
]
|
||||
|
||||
[tool.coverage.report]
|
||||
skip_covered = false
|
||||
skip_empty = true
|
||||
exclude_lines = [
|
||||
"pragma: no cover",
|
||||
"raise NotImplementedError"
|
||||
]
|
||||
omit = [
|
||||
"tests/*"
|
||||
]
|
||||
|
||||
[tool.coverage.xml]
|
||||
output = "report/coverage/coverage.xml"
|
||||
|
||||
[tool.coverage.json]
|
||||
output = "report/coverage/coverage.json"
|
||||
|
||||
[tool.coverage.html]
|
||||
directory = "report/coverage/html"
|
||||
title = "Code Coverage of pyDummy"
|
||||
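The ``python_classes`` remark in the ``[tool.pytest.ini_options]`` table above is worth a concrete illustration. A minimal sketch, assuming a hypothetical test module under ``tests/unit/`` (the file name and method below are invented for illustration): with ``python_classes`` left at its default, pytest still collects classes derived from ``unittest.TestCase`` even though their names do not start with ``Test``, which is what test classes such as ``Instantiation`` rely on.

```python
# tests/unit/ExampleSketch.py -- hypothetical module name; matched by python_files = "*"
from unittest import TestCase


class Instantiation(TestCase):        # not named "Test*", but collected anyway,
    def test_Truth(self) -> None:     # because it derives from unittest.TestCase
        self.assertTrue(True)         # and the method matches python_functions = "test_*"
```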
@@ -1,4 +1,4 @@
|
||||
FROM python:3.9-slim-bullseye
|
||||
FROM python:3.12-slim-bookworm
|
||||
COPY releaser.py /releaser.py
|
||||
RUN pip install PyGithub --progress-bar off \
|
||||
&& apt update -qq \
|
||||
|
||||
@@ -75,11 +75,11 @@ on:
|
||||
|
||||
jobs:
|
||||
mwe:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
|
||||
# Clone repository
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Build your application, tool, artifacts, etc.
|
||||
- name: Build
|
||||
@@ -156,13 +156,13 @@ For prototyping purposes, the following job might be useful:
|
||||
```yml
|
||||
Release:
|
||||
name: '📦 Release'
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- ...
|
||||
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
|
||||
steps:
|
||||
|
||||
- uses: actions/download-artifact@v2
|
||||
- uses: actions/download-artifact@v3
|
||||
|
||||
- shell: bash
|
||||
run: pip install PyGithub --progress-bar off
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
@@ -45,7 +45,9 @@ runs:
|
||||
steps:
|
||||
|
||||
- shell: bash
|
||||
run: pip install PyGithub --progress-bar off
|
||||
run: |
|
||||
[ "$(source /etc/os-release && echo $VERSION_ID)" == "24.04" ] && UBUNTU_2404_ARGS='--break-system-packages' || unset UBUNTU_2404_ARGS
|
||||
pip install --disable-pip-version-check --progress-bar off $UBUNTU_2404_ARGS PyGithub
|
||||
|
||||
- shell: bash
|
||||
run: '''${{ github.action_path }}/../releaser.py'''
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2022 The pyTooling Authors #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
|
||||
1
requirements.txt
Normal file
@@ -0,0 +1 @@
|
||||
pyTooling ~= 8.0
|
||||
316
run.ps1
Normal file
@@ -0,0 +1,316 @@
|
||||
[CmdletBinding()]
|
||||
Param(
|
||||
# Clean up all files and directories
|
||||
[switch]$clean,
|
||||
|
||||
# Commands
|
||||
[switch]$all,
|
||||
[switch]$copyall,
|
||||
|
||||
[switch]$doc,
|
||||
[switch]$livedoc,
|
||||
[switch]$doccov,
|
||||
|
||||
[switch]$unit,
|
||||
[switch]$liveunit,
|
||||
[switch]$copyunit,
|
||||
|
||||
[switch]$cov,
|
||||
[switch]$livecov,
|
||||
[switch]$copycov,
|
||||
|
||||
[switch]$type,
|
||||
[switch]$livetype,
|
||||
[switch]$copytype,
|
||||
|
||||
[switch]$nooutput,
|
||||
|
||||
[switch]$build,
|
||||
[switch]$install,
|
||||
|
||||
# Display this help
|
||||
[switch]$help
|
||||
)
|
||||
|
||||
$PackageName = "Actions"
|
||||
|
||||
# set default values
|
||||
$EnableDebug = [bool]$PSCmdlet.MyInvocation.BoundParameters["Debug"]
|
||||
$EnableVerbose = [bool]$PSCmdlet.MyInvocation.BoundParameters["Verbose"] -or $EnableDebug
|
||||
|
||||
# Display help if no command was selected
|
||||
$help = $help -or ( -not(
|
||||
$all -or $copyall -or
|
||||
$clean -or
|
||||
$doc -or $livedoc -or $doccov -or
|
||||
$unit -or $liveunit -or $copyunit -or
|
||||
$cov -or $livecov -or $copycov -or
|
||||
$type -or $livetype -or $copytype -or
|
||||
$build -or $install
|
||||
)
|
||||
)
|
||||
|
||||
Write-Host "================================================================================" -ForegroundColor Magenta
|
||||
Write-Host "$PackageName Documentation Compilation and Assembly Tool" -ForegroundColor Magenta
|
||||
Write-Host "================================================================================" -ForegroundColor Magenta
|
||||
|
||||
if ($help)
|
||||
{ Get-Help $MYINVOCATION.MyCommand.Path -Detailed
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($all)
|
||||
{ $doc = $true
|
||||
$unit = $true
|
||||
# $copyunit = $true
|
||||
$cov = $true
|
||||
# $copycov = $true
|
||||
$type = $true
|
||||
$copytype = $true
|
||||
}
|
||||
if ($copyall)
|
||||
{# $copyunit = $true
|
||||
# $copycov = $true
|
||||
$copytype = $true
|
||||
}
|
||||
|
||||
if ($clean)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Cleaning documentation directories ..."
|
||||
rm -Force .\doc\$PackageName\*
|
||||
.\doc\make.bat clean
|
||||
Write-Host -ForegroundColor DarkYellow "[live][BUILD] Cleaning build directories ..."
|
||||
rm -Force .\build\bdist.win-amd64
|
||||
rm -Force .\build\lib
|
||||
}
|
||||
|
||||
if ($build)
|
||||
{ Write-Host -ForegroundColor Yellow "[live][BUILD] Cleaning build directories ..."
|
||||
rm -Force .\build\bdist.win-amd64
|
||||
rm -Force .\build\lib
|
||||
Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..."
|
||||
py -3.12 -m build --wheel
|
||||
|
||||
Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished"
|
||||
}
|
||||
if ($install)
|
||||
{ if (!([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator"))
|
||||
{ Write-Host -ForegroundColor Yellow "[live][INSTALL] Installing $PackageName with administrator rights ..."
|
||||
$proc = Start-Process pwsh.exe "-NoProfile -ExecutionPolicy Bypass -WorkingDirectory `"$PSScriptRoot`" -File `"$PSCommandPath`" `"-install`"" -Verb RunAs -Wait
|
||||
|
||||
# Write-Host -ForegroundColor Yellow "[live][INSTALL] Wait on administrator console ..."
|
||||
# Wait-Process -Id $proc.Id
|
||||
}
|
||||
else
|
||||
{ Write-Host -ForegroundColor Cyan "[ADMIN][UNINSTALL] Uninstalling $PackageName ..."
|
||||
py -3.12 -m pip uninstall -y $PackageName
|
||||
Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Installing $PackageName from wheel ..."
|
||||
py -3.12 -m pip install .\dist\$PackageName-6.7.0-py3-none-any.whl
|
||||
|
||||
Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Closing window in 5 seconds ..."
|
||||
Start-Sleep -Seconds 5
|
||||
}
|
||||
}
|
||||
|
||||
$jobs = @()
|
||||
|
||||
if ($livedoc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Building documentation using Sphinx ..."
|
||||
|
||||
.\doc\make.bat html --verbose
|
||||
|
||||
Write-Host -ForegroundColor DarkYellow "[live][DOC] Documentation finished"
|
||||
}
|
||||
elseif ($doc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Building documentation using Sphinx ..."
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting Documentation job ..."
|
||||
|
||||
# Compile documentation
|
||||
$compileDocFunc = {
|
||||
.\doc\make.bat html --verbose
|
||||
}
|
||||
$docJob = Start-Job -Name "Documentation" -ScriptBlock $compileDocFunc
|
||||
# $jobs += $docJob
|
||||
}
|
||||
|
||||
|
||||
if ($doccov)
|
||||
{
|
||||
.\doc\make.bat coverage
|
||||
}
|
||||
|
||||
if ($liveunit)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][UNIT] Running Unit Tests using pytest ..."
|
||||
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit
|
||||
|
||||
if ($copyunit)
|
||||
{ cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
|
||||
Write-Host -ForegroundColor DarkBlue "[live][UNIT] Copied unit testing report to 'unittests' directory in HTML directory"
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkYellow "[live][UNIT] Unit Tests finished"
|
||||
}
|
||||
elseif ($unit)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[Job2][UNIT] Running Unit Tests using pytest ..."
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting UnitTests jobs ..."
|
||||
|
||||
# Run unit tests
|
||||
$runUnitFunc = {
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit
|
||||
}
|
||||
$unitJob = Start-Job -Name "UnitTests" -ScriptBlock $runUnitFunc
|
||||
$jobs += $unitJob
|
||||
}
|
||||
|
||||
if ($livecov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "[live][COV] Running Unit Tests with coverage ..."
|
||||
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to HTML ..."
|
||||
coverage html
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to XML (Cobertura) ..."
|
||||
coverage xml
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to JSON ..."
|
||||
coverage json
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Write coverage report to console ..."
|
||||
coverage report
|
||||
|
||||
if ($copycov)
|
||||
{ cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Coverage finished"
|
||||
}
|
||||
elseif ($cov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Running Unit Tests with coverage ..."
|
||||
Write-Host -ForegroundColor DarkMagenta "[SCRIPT] Starting Coverage jobs ..."
|
||||
|
||||
# Collect coverage
|
||||
$collectCovFunc = {
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to HTML ..."
|
||||
coverage html
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to XML (Cobertura) ..."
|
||||
coverage xml
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to JSON ..."
|
||||
coverage json
|
||||
}
|
||||
$covJob = Start-Job -Name "Coverage" -ScriptBlock $collectCovFunc
|
||||
$jobs += $covJob
|
||||
}
|
||||
|
||||
if ($livetype)
|
||||
{ Write-Host -ForegroundColor DarkCyan "[live][TYPE] Running static type analysis using mypy ..."
|
||||
|
||||
$env:MYPY_FORCE_COLOR = 1
|
||||
mypy.exe -p $PackageName
|
||||
|
||||
if ($copytype)
|
||||
{ cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
|
||||
Write-Host -ForegroundColor DarkCyan "[live][TYPE] Copied typing report to 'typing' directory in HTML directory."
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkCyan "[live][TYPE] Static type analysis finished"
|
||||
}
|
||||
elseif ($type)
|
||||
{ Write-Host -ForegroundColor DarkCyan "[Job4][TYPE] Running static type analysis using mypy ..."
|
||||
Write-Host -ForegroundColor DarkCyan "[SCRIPT] Starting Typing jobs ..."
|
||||
|
||||
# Analyze types
|
||||
$analyzeTypesFunc = {
|
||||
$env:MYPY_FORCE_COLOR = 1
|
||||
mypy.exe -p $PackageName
|
||||
}
|
||||
$typeJob = Start-Job -Name "Typing" -ScriptBlock $analyzeTypesFunc
|
||||
$jobs += $typeJob
|
||||
}
|
||||
|
||||
|
||||
if ($doc)
|
||||
{ Write-Host -ForegroundColor DarkGreen "[SCRIPT] Waiting on Documentation job ..."
|
||||
Wait-Job -Job $docJob
|
||||
Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Documentation finished"
|
||||
}
|
||||
if ($jobs.Count -ne 0)
|
||||
{
|
||||
Write-Host -ForegroundColor DarkGreen ( "[SCRIPT] Waiting on {0} jobs ({1}) ..." -f $jobs.Count, (($jobs | %{ $_.Name }) -join ", "))
|
||||
Wait-Job -Job $jobs
|
||||
}
|
||||
|
||||
|
||||
if (-not $liveunit -and $copyunit)
|
||||
{
|
||||
# if ($unit)
|
||||
# { Wait-Job -Job $unitJob
|
||||
# Write-Host -ForegroundColor DarkBlue "[Job2][UNIT] Unit tests finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
|
||||
Write-Host -ForegroundColor DarkBlue "[post][UNIT] Copied unit testing report to 'unittests' directory in HTML directory"
|
||||
}
|
||||
if (-not ($livecov -or $cov) -and $copycov)
|
||||
{
|
||||
# if ($cov)
|
||||
# { Wait-Job -Job $unitJob
|
||||
# Write-Host -ForegroundColor DarkMagenta "[Job3][UNIT] Coverage collection finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[post][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
if (-not $livetype -and $copytype)
|
||||
{
|
||||
# if ($type)
|
||||
# { Wait-Job -Job $typeJob
|
||||
# Write-Host -ForegroundColor DarkCyan "[Job4][UNIT] Static type analysis finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
|
||||
Write-Host -ForegroundColor DarkCyan "[post][TYPE] Copied typing report to 'typing' directory in HTML directory."
|
||||
}
|
||||
|
||||
|
||||
if ($type)
|
||||
{ Write-Host -ForegroundColor DarkCyan "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $typeJob
|
||||
}
|
||||
Remove-Job -Job $typeJob
|
||||
}
|
||||
if ($doc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $docJob
|
||||
}
|
||||
Remove-Job -Job $docJob
|
||||
}
|
||||
if ($unit)
|
||||
{ Write-Host -ForegroundColor DarkBlue "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $unitJob
|
||||
}
|
||||
Remove-Job -Job $unitJob
|
||||
}
|
||||
if ($cov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $covJob
|
||||
}
|
||||
Remove-Job -Job $covJob
|
||||
|
||||
if ($copycov)
|
||||
{ cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[post][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
}
|
||||
Write-Host -ForegroundColor DarkGreen "================================================================================"
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Finished"
|
||||
51
setup.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""Package installer for 'pyDummy'."""
|
||||
from setuptools import setup
|
||||
|
||||
from pathlib import Path
|
||||
from pyTooling.Packaging import DescribePythonPackageHostedOnGitHub
|
||||
|
||||
gitHubNamespace = "pyTooling"
|
||||
packageName = "pyDummy"
|
||||
packageDirectory = packageName
|
||||
packageInformationFile = Path(f"{packageDirectory}/__init__.py")
|
||||
|
||||
setup(**DescribePythonPackageHostedOnGitHub(
|
||||
packageName=packageName,
|
||||
description="pyDummy is a test package to verify GitHub actions for Python projects.",
|
||||
gitHubNamespace=gitHubNamespace,
|
||||
unittestRequirementsFile=Path("tests/requirements.txt"),
|
||||
sourceFileWithVersion=packageInformationFile,
|
||||
dataFiles={
|
||||
packageName: ["py.typed"]
|
||||
}
|
||||
))
|
||||
91
tests/pacman_packages.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from re import compile
|
||||
from sys import version
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
|
||||
def loadRequirementsFile(requirementsFile: Path):
|
||||
requirements = []
|
||||
with requirementsFile.open("r", encoding="utf-8") as file:
|
||||
for line in file.readlines():
|
||||
line = line.strip()
|
||||
if line.startswith("#") or line.startswith("https") or line == "":
|
||||
continue
|
||||
elif line.startswith("-r"):
|
||||
# Remove the first word/argument (-r)
|
||||
requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
|
||||
else:
|
||||
requirements.append(line)
|
||||
|
||||
return requirements
|
||||
|
||||
|
||||
requirements = "-r ../tests/requirements.txt"
|
||||
if requirements.startswith("-r"):
|
||||
requirementsFile = Path(requirements[2:].lstrip())
|
||||
try:
|
||||
dependencies = loadRequirementsFile(requirementsFile)
|
||||
except FileNotFoundError as ex:
|
||||
print(f"::error title=FileNotFound::{ex}")
|
||||
exit(1)
|
||||
else:
|
||||
dependencies = [req.strip() for req in requirements.split(" ")]
|
||||
|
||||
packages = {
|
||||
"coverage": "python-coverage:p",
|
||||
"igraph": "igraph:p",
|
||||
"jinja2": "python-markupsafe:p",
|
||||
"lxml": "python-lxml:p",
|
||||
"numpy": "python-numpy:p",
|
||||
"markupsafe": "python-markupsafe:p",
|
||||
"pip": "python-pip:p",
|
||||
"ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
"sphinx": "python-markupsafe:p",
|
||||
"tomli": "python-tomli:p",
|
||||
"wheel": "python-wheel:p",
|
||||
"pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
}
|
||||
subPackages = {
|
||||
"pytooling": {
|
||||
"yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
},
|
||||
}
|
||||
|
||||
regExp = compile(
|
||||
r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")
|
||||
|
||||
pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
|
||||
print(f"Processing dependencies ({len(dependencies)}):")
|
||||
for dependency in dependencies:
|
||||
print(f" {dependency}")
|
||||
|
||||
match = regExp.match(dependency.lower())
|
||||
if not match:
|
||||
print(f" Wrong format: {dependency}")
|
||||
print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
|
||||
continue
|
||||
|
||||
package = match["PackageName"]
|
||||
if package in packages:
|
||||
rewrite = packages[package]
|
||||
print(f" Found rewrite rule for '{package}': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
if match["SubPackages"] and package in subPackages:
|
||||
for subPackage in match["SubPackages"].split(","):
|
||||
if subPackage in subPackages[package]:
|
||||
rewrite = subPackages[package][subPackage]
|
||||
print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")
|
||||
|
||||
print(f"GITHUB_OUTPUT:")
|
||||
print(f"pacboy_packages={' '.join(pacboyPackages)}\n")
|
||||
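A rough, self-contained sketch of the translation performed by ``tests/pacman_packages.py`` above (the dependency strings and the two rewrite rules below are illustrative samples, not the workflow's real inputs): the regular expression splits a pip requirement specifier into name, comparator, and version, and the ``packages`` table maps known names to MSYS2 pacboy packages.

```python
from re import compile

# Same specifier pattern as in the script above.
regExp = compile(
    r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?"
    r"(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")

packages = {"lxml": "python-lxml:p", "wheel": "python-wheel:p"}  # sample rewrite rules

for dependency in ("lxml ~= 5.3", "wheel ~= 0.45", "pytest ~= 8.3"):
    match = regExp.match(dependency.lower())
    rewrite = packages.get(match["PackageName"], "<no rewrite rule, pip package kept>")
    print(f"{dependency:<15} -> {rewrite}")
```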
98
tests/platform/Specific.py
Normal file
@@ -0,0 +1,98 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
from unittest import TestCase
|
||||
|
||||
from pytest import mark
|
||||
from pyTooling.Platform import CurrentPlatform
|
||||
|
||||
from pyDummy import Application
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
print("ERROR: you called a testcase declaration file as an executable module.")
|
||||
print("Use: 'python -m unittest <testcase module>'")
|
||||
exit(1)
|
||||
|
||||
|
||||
class PlatformTesting(TestCase):
|
||||
@mark.skipif(not CurrentPlatform.IsNativeLinux, reason="Skipped, if current platform isn't native Linux.")
|
||||
def test_ApplicationOnNativeLinux(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(1, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsNativeMacOS, reason="Skipped, if current platform isn't native macOS.")
|
||||
def test_ApplicationOnNativeMacOS(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(2, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsNativeWindows, reason="Skipped, if current platform isn't native Windows.")
|
||||
def test_ApplicationOnNativeWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(3, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsMSYSOnWindows, reason="Skipped, if current platform isn't MSYS on Windows.")
|
||||
def test_ApplicationOnMSYS2OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(11, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsMinGW32OnWindows, reason="Skipped, if current platform isn't MinGW32 on Windows.")
|
||||
def test_ApplicationOnMinGW32OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(12, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsMinGW64OnWindows, reason="Skipped, if current platform isn't MinGW64 on Windows.")
|
||||
def test_ApplicationOnMinGW64OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(13, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsUCRT64OnWindows, reason="Skipped, if current platform isn't UCRT64 on Windows.")
|
||||
def test_ApplicationOnUCRT64OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(14, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsClang32OnWindows, reason="Skipped, if current platform isn't Clang32 on Windows.")
|
||||
def test_ApplicationOnClang32OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(15, app.Value)
|
||||
|
||||
@mark.skipif(not CurrentPlatform.IsClang64OnWindows, reason="Skipped, if current platform isn't Clang64 on Windows.")
|
||||
def test_ApplicationOnClang64OnWindows(self):
|
||||
app = Application()
|
||||
|
||||
self.assertEqual(16, app.Value)
|
||||
216
tests/python_jobs.py
Normal file
@@ -0,0 +1,216 @@
|
||||
from json import dumps as json_dumps
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from typing import Iterable
|
||||
|
||||
name = "example".strip()
|
||||
python_version = "3.12".strip()
|
||||
systems = "ubuntu windows macos-arm mingw64 ucrt64".strip()
|
||||
versions = "3.8 3.9 3.10 3.11 3.12".strip()
|
||||
include_list = "".strip()
|
||||
exclude_list = "".strip()
|
||||
disable_list = "".strip()
|
||||
|
||||
currentMSYS2Version = "3.11"
|
||||
currentAlphaVersion = "3.13"
|
||||
currentAlphaRelease = "3.13.0-alpha.1"
|
||||
|
||||
if systems == "":
|
||||
print("::error title=Parameter::system_list is empty.")
|
||||
else:
|
||||
systems = [sys.strip() for sys in systems.split(" ")]
|
||||
|
||||
if versions == "":
|
||||
versions = [python_version]
|
||||
else:
|
||||
versions = [ver.strip() for ver in versions.split(" ")]
|
||||
|
||||
if include_list == "":
|
||||
includes = []
|
||||
else:
|
||||
includes = [tuple(include.strip().split(":")) for include in include_list.split(" ")]
|
||||
|
||||
if exclude_list == "":
|
||||
excludes = []
|
||||
else:
|
||||
excludes = [exclude.strip() for exclude in exclude_list.split(" ")]
|
||||
|
||||
if disable_list == "":
|
||||
disabled = []
|
||||
else:
|
||||
disabled = [disable.strip() for disable in disable_list.split(" ")]
|
||||
|
||||
if "3.7" in versions:
|
||||
print("::warning title=Deprecated::Support for Python 3.7 ended in 2023.06.27.")
|
||||
if "msys2" in systems:
|
||||
print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.")
|
||||
if currentAlphaVersion in versions:
|
||||
print(f"::notice title=Experimental::Python {currentAlphaVersion} ({currentAlphaRelease}) is a pre-release.")
|
||||
for disable in disabled:
|
||||
print(f"::warning title=Disabled Python Job::System '{disable}' temporarily disabled.")
|
||||
|
||||
# see https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
|
||||
data = {
|
||||
# Python and PyPy versions supported by "setup-python" action
|
||||
"python": {
|
||||
"3.7": {"icon": "⚫", "until": "2023.06.27"},
|
||||
"3.8": {"icon": "🔴", "until": "2024.10"},
|
||||
"3.9": {"icon": "🟠", "until": "2025.10"},
|
||||
"3.10": {"icon": "🟡", "until": "2026.10"},
|
||||
"3.11": {"icon": "🟢", "until": "2027.10"},
|
||||
"3.12": {"icon": "🟢", "until": "2028.10"},
|
||||
# "3.13": { "icon": "🟣", "until": "2028.10" },
|
||||
"pypy-3.7": {"icon": "⟲⚫", "until": "????.??"},
|
||||
"pypy-3.8": {"icon": "⟲🔴", "until": "????.??"},
|
||||
"pypy-3.9": {"icon": "⟲🟠", "until": "????.??"},
|
||||
"pypy-3.10": {"icon": "⟲🟡", "until": "????.??"},
|
||||
},
|
||||
# Runner systems (runner images) supported by GitHub Actions
|
||||
"sys": {
|
||||
"ubuntu": {"icon": "🐧", "runs-on": "ubuntu-24.04", "shell": "bash", "name": "Linux (x86-64)"},
|
||||
"windows": {"icon": "🪟", "runs-on": "windows-latest", "shell": "pwsh", "name": "Windows (x86-64)"},
|
||||
"macos": {"icon": "🍎", "runs-on": "macos-latest-large", "shell": "bash", "name": "macOS (x86-64)"},
|
||||
"macos-arm": {"icon": "🍏", "runs-on": "macos-latest", "shell": "bash", "name": "macOS (aarch64)"},
|
||||
},
|
||||
# Runtimes provided by MSYS2
|
||||
"runtime": {
|
||||
"msys": {"icon": "🪟🟪", "name": "Windows+MSYS2 (x86-64) - MSYS"},
|
||||
"mingw32": {"icon": "🪟⬛", "name": "Windows+MSYS2 (x86-64) - MinGW32"},
|
||||
"mingw64": {"icon": "🪟🟦", "name": "Windows+MSYS2 (x86-64) - MinGW64"},
|
||||
"clang32": {"icon": "🪟🟫", "name": "Windows+MSYS2 (x86-64) - Clang32"},
|
||||
"clang64": {"icon": "🪟🟧", "name": "Windows+MSYS2 (x86-64) - Clang64"},
|
||||
"ucrt64": {"icon": "🪟🟨", "name": "Windows+MSYS2 (x86-64) - UCRT64"},
|
||||
}
|
||||
}
|
||||
|
||||
print(f"includes ({len(includes)}):")
|
||||
for system, version in includes:
|
||||
print(f"- {system}:{version}")
|
||||
print(f"excludes ({len(excludes)}):")
|
||||
for exclude in excludes:
|
||||
print(f"- {exclude}")
|
||||
print(f"disabled ({len(disabled)}):")
|
||||
for disable in disabled:
|
||||
print(f"- {disable}")
|
||||
|
||||
|
||||
def match(combination: str, pattern: str) -> bool:
|
||||
system, version = combination.split(":")
|
||||
sys, ver = pattern.split(":")
|
||||
|
||||
if sys == "*":
|
||||
return (ver == "*") or (version == ver)
|
||||
elif system == sys:
|
||||
return (ver == "*") or (version == ver)
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def notIn(combination: str, patterns: Iterable[str]) -> bool:
|
||||
for pattern in patterns:
|
||||
if match(combination, pattern):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
combinations = [
|
||||
(system, version)
|
||||
for system in systems
|
||||
if system in data["sys"]
|
||||
for version in versions
|
||||
if version in data["python"]
|
||||
and notIn(f"{system}:{version}", excludes)
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
] + [
|
||||
(system, currentMSYS2Version)
|
||||
for system in systems
|
||||
if system in data["runtime"]
|
||||
and notIn(f"{system}:{currentMSYS2Version}", excludes)
|
||||
and notIn(f"{system}:{currentMSYS2Version}", disabled)
|
||||
] + [
|
||||
(system, version)
|
||||
for system, version in includes
|
||||
if system in data["sys"]
|
||||
and version in data["python"]
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
]
|
||||
print(f"Combinations ({len(combinations)}):")
|
||||
for system, version in combinations:
|
||||
print(f"- {system}:{version}")
|
||||
|
||||
jobs = [
|
||||
{
|
||||
"sysicon": data["sys"][system]["icon"],
|
||||
"system": system,
|
||||
"runs-on": data["sys"][system]["runs-on"],
|
||||
"runtime": "native",
|
||||
"shell": data["sys"][system]["shell"],
|
||||
"pyicon": data["python"][version]["icon"],
|
||||
"python": currentAlphaRelease if version == currentAlphaVersion else version,
|
||||
"envname": data["sys"][system]["name"],
|
||||
}
|
||||
for system, version in combinations if system in data["sys"]
|
||||
] + [
|
||||
{
|
||||
"sysicon": data["runtime"][runtime]["icon"],
|
||||
"system": "msys2",
|
||||
"runs-on": "windows-latest",
|
||||
"runtime": runtime.upper(),
|
||||
"shell": "msys2 {0}",
|
||||
"pyicon": data["python"][currentMSYS2Version]["icon"],
|
||||
"python": version,
|
||||
"envname": data["runtime"][runtime]["name"],
|
||||
}
|
||||
for runtime, version in combinations if runtime not in data["sys"]
|
||||
]
|
||||
|
||||
artifact_names = {
|
||||
"unittesting_xml": f"{name}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{name}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{name}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{name}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{name}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{name}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{name}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{name}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{name}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{name}-StaticTyping-HTML",
|
||||
"package_all": f"{name}-Packages",
|
||||
"documentation_html": f"{name}-Documentation-HTML",
|
||||
"documentation_latex": f"{name}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{name}-Documentation-PDF",
|
||||
}
|
||||
|
||||
# Deprecated structure
|
||||
params = {
|
||||
"python_version": python_version,
|
||||
"artifacts": {
|
||||
"unittesting": f"{artifact_names['unittesting_xml']}",
|
||||
"coverage": f"{artifact_names['codecoverage_html']}",
|
||||
"typing": f"{artifact_names['statictyping_html']}",
|
||||
"package": f"{artifact_names['package_all']}",
|
||||
"doc": f"{artifact_names['documentation_html']}",
|
||||
}
|
||||
}
|
||||
|
||||
print("Parameters:")
|
||||
print(f" python_version: {python_version}")
|
||||
print(f" python_jobs ({len(jobs)}):\n" +
|
||||
"".join(
|
||||
[f" {{ " + ", ".join([f"\"{key}\": \"{value}\"" for key, value in job.items()]) + f" }},\n" for job in jobs])
|
||||
)
|
||||
print(f" artifact_names ({len(artifact_names)}):")
|
||||
for id, name in artifact_names.items():
|
||||
print(f" {id:>20}: {name}")
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
python_version={python_version}
|
||||
python_jobs={json_dumps(jobs)}
|
||||
artifact_names={json_dumps(artifact_names)}
|
||||
params={json_dumps(params)}
|
||||
"""))
|
||||
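To make the ``exclude_list``/``include_list``/``disable_list`` pattern syntax used by ``tests/python_jobs.py`` above easier to follow, here is a small sketch (the pattern strings are invented examples, not values taken from any workflow) of how ``match`` and ``notIn`` evaluate ``system:version`` combinations against ``*`` wildcards:

```python
from typing import Iterable


def match(combination: str, pattern: str) -> bool:
    # "ubuntu:3.8" matched against patterns such as "ubuntu:*", "*:3.8" or "windows:3.12".
    system, version = combination.split(":")
    sys, ver = pattern.split(":")
    if sys == "*" or system == sys:
        return (ver == "*") or (version == ver)
    return False


def notIn(combination: str, patterns: Iterable[str]) -> bool:
    return not any(match(combination, pattern) for pattern in patterns)


excludes = ["windows:3.8", "macos-arm:*"]      # example patterns only
print(notIn("ubuntu:3.8", excludes))           # True  -> combination is kept
print(notIn("windows:3.8", excludes))          # False -> filtered out by "windows:3.8"
print(notIn("macos-arm:3.12", excludes))       # False -> any version on macos-arm is excluded
```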
13
tests/requirements.txt
Normal file
@@ -0,0 +1,13 @@
|
||||
-r ../requirements.txt
|
||||
|
||||
# Coverage collection
|
||||
Coverage ~= 7.6
|
||||
|
||||
# Test Runner
|
||||
pytest ~= 8.3
|
||||
pytest-cov ~= 6.0
|
||||
|
||||
# Static Type Checking
|
||||
mypy ~= 1.13
|
||||
typing_extensions ~= 4.12
|
||||
lxml ~= 5.3
|
||||
40
tests/unit/Dummy.py
Normal file
@@ -0,0 +1,40 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
from unittest import TestCase
|
||||
|
||||
from pyDummy import Application
|
||||
|
||||
|
||||
class Instantiation(TestCase):
|
||||
def test_Application(self):
|
||||
app = Application()
|
||||
|
||||
self.assertGreater(app.Value, 0)
|
||||
31
tests/unit/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""Test code for pyDummy."""
|
||||
1
tests/unit/requirements.txt
Normal file
@@ -0,0 +1 @@
|
||||
-r ../requirements.txt
|
||||