Mirror of https://github.com/pyTooling/Actions.git — synced 2026-02-14 12:06:56 +08:00
Compare commits — 448 commits
.editorconfig — new file, 30 lines
@@ -0,0 +1,30 @@
root = true

[*]
charset = utf-8
# end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = tab
indent_size = 2
tab_width = 2


[*.py]
indent_style = tab
indent_size = 2

[*.{yml,yaml}]
indent_style = space
indent_size = 2

[*.{json,ini}]
indent_style = tab
indent_size = 2

[*.md]
trim_trailing_whitespace = false

[*.rst]
indent_style = space
indent_size = 3
.github/dependabot.yml — new file, 16 lines (vendored)
@@ -0,0 +1,16 @@
version: 2
updates:
  # Maintain GitHub Action runners
  - package-ecosystem: "github-actions"
    directory: "/"
    target-branch: dev
    commit-message:
      prefix: "[Dependabot]"
    labels:
      - Dependencies
    assignees:
      - Paebbels
    reviewers:
      - Paebbels
    schedule:
      interval: "daily" # Checks on Monday trough Friday.
.github/pull_request_template.md — new file, 30 lines (vendored)
@@ -0,0 +1,30 @@
# New Features

* tbd
* tbd

# Changes

* tbd
* tbd

# Bug Fixes

* tbd
* tbd

# Documentation

* tbd
* tbd

# Unit Tests

* tbd
* tbd

----------

# Related Issues and Pull-Requests

* tbd
* tbd
.github/workflows/ApplicationTesting.yml — new file, 264 lines (vendored)
@@ -0,0 +1,264 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Application Testing

on:
  workflow_call:
    inputs:
      jobs:
        description: 'JSON list with environment fields, telling the system and Python versions to run tests with.'
        required: true
        type: string
      wheel:
        description: "Wheel package as input artifact."
        required: false
        default: ''
        type: string
      requirements:
        description: 'Python dependencies to be installed through pip.'
        required: false
        default: '-r tests/requirements.txt'
        type: string
      pacboy:
        description: 'MSYS2 dependencies to be installed through pacboy (pacman).'
        required: false
        default: ""
        type: string
      mingw_requirements:
        description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
        required: false
        default: ''
        type: string
      root_directory:
        description: 'Working directory for running tests.'
        required: false
        default: ''
        type: string
      tests_directory:
        description: 'Path to the directory containing tests (relative to root_directory).'
        required: false
        default: 'tests'
        type: string
      apptest_directory:
        description: 'Path to the directory containing application tests (relative to tests_directory).'
        required: false
        default: 'app'
        type: string
      apptest_xml_artifact:
        description: "Generate application test report with junitxml and upload results as an artifact."
        required: false
        default: ''
        type: string

jobs:
  ApplicationTesting:
    name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Application Tests using Python ${{ matrix.python }}
    runs-on: ${{ matrix.runs-on }}

    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(inputs.jobs) }}

    defaults:
      run:
        shell: ${{ matrix.shell }}

    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4

      - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job
        uses: pyTooling/download-artifact@v4
        with:
          name: ${{ inputs.wheel }}
          path: install

      - name: Compute pacman/pacboy packages
        id: pacboy
        if: matrix.system == 'msys2'
        shell: python
        run: |
          from os import getenv
          from pathlib import Path
          from re import compile
          from sys import version

          print(f"Python: {version}")

          def loadRequirementsFile(requirementsFile: Path):
            requirements = []
            with requirementsFile.open("r") as file:
              for line in file.readlines():
                line = line.strip()
                if line.startswith("#") or line.startswith("https") or line == "":
                  continue
                elif line.startswith("-r"):
                  # Remove the first word/argument (-r)
                  requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
                else:
                  requirements.append(line)

            return requirements

          requirements = "${{ inputs.requirements }}"
          if requirements.startswith("-r"):
            requirementsFile = Path(requirements[2:].lstrip())
            try:
              dependencies = loadRequirementsFile(requirementsFile)
            except FileNotFoundError as ex:
              print(f"::error title=FileNotFoundError::{ex}")
              exit(1)
          else:
            dependencies = [req.strip() for req in requirements.split(" ")]

          packages = {
            "coverage": "python-coverage:p",
            "docstr_coverage": "python-pyyaml:p",
            "igraph": "igraph:p",
            "jinja2": "python-markupsafe:p",
            "lxml": "python-lxml:p",
            "numpy": "python-numpy:p",
            "markupsafe": "python-markupsafe:p",
            "pip": "python-pip:p",
            "pyyaml": "python-pyyaml:p",
            "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
            "sphinx": "python-markupsafe:p",
            "tomli": "python-tomli:p",
            "wheel": "python-wheel:p",
            "pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
            "pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
          }
          subPackages = {
            "pytooling": {
              "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
            }
          }

          regExp = compile(r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")

          pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
          print(f"Processing dependencies ({len(dependencies)}):")
          for dependency in dependencies:
            print(f" {dependency}")

            match = regExp.match(dependency.lower())
            if not match:
              print(f" Wrong format: {dependency}")
              print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
              continue

            package = match["PackageName"]
            if package in packages:
              rewrite = packages[package]
              print(f" Found rewrite rule for '{package}': {rewrite}")
              pacboyPackages.add(rewrite)

            if match["SubPackages"] and package in subPackages:
              for subPackage in match["SubPackages"].split(","):
                if subPackage in subPackages[package]:
                  rewrite = subPackages[package][subPackage]
                  print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
                  pacboyPackages.add(rewrite)

          # Write jobs to special file
          github_output = Path(getenv("GITHUB_OUTPUT"))
          print(f"GITHUB_OUTPUT: {github_output}")
          with github_output.open("a+") as f:
            f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")

      - name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
        uses: msys2/setup-msys2@v2
        if: matrix.system == 'msys2'
        with:
          msystem: ${{ matrix.runtime }}
          update: true
          pacboy: >-
            ${{ steps.pacboy.outputs.pacboy_packages }}
            ${{ inputs.pacboy }}

      - name: 🐍 Setup Python ${{ matrix.python }}
        uses: actions/setup-python@v5
        if: matrix.system != 'msys2'
        with:
          python-version: ${{ matrix.python }}

      - name: 🔧 Install wheel and pip dependencies (native)
        if: matrix.system != 'msys2'
        run: |
          python -m pip install --disable-pip-version-check -U wheel
          python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

      - name: 🔧 Install pip dependencies (MSYS2)
        if: matrix.system == 'msys2'
        run: |
          if [ -n '${{ inputs.mingw_requirements }}' ]; then
            python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
          else
            python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
          fi

      - name: 🔧 Install wheel from artifact
        run: |
          ls -l install
          python -m pip install --disable-pip-version-check -U install/*.whl

      - name: ✅ Run application tests (Ubuntu/macOS)
        if: matrix.system != 'windows'
        run: |
          export ENVIRONMENT_NAME="${{ matrix.envname }}"

          cd "${{ inputs.root_directory || '.' }}"
          [ -n '${{ inputs.apptest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS
          if [ -n '${{ inputs.coverage_config }}' ]; then
            printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
            coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
          else
            printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
            python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
          fi

      - name: ✅ Run application tests (Windows)
        if: matrix.system == 'windows'
        run: |
          $env:ENVIRONMENT_NAME = "${{ matrix.envname }}"

          cd "${{ inputs.root_directory || '.' }}"
          $PYTEST_ARGS = if ("${{ inputs.apptest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" }
          if ("${{ inputs.coverage_config }}") {
            Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
            coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
          } else {
            Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}"
            python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}
          }

      - name: 📤 Upload 'TestReportSummary.xml' artifact
        if: inputs.apptest_xml_artifact != ''
        uses: pyTooling/upload-artifact@v4
        with:
          name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
          working-directory: report/unit
          path: TestReportSummary.xml
          if-no-files-found: error
          retention-days: 1
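ApplicationTesting.yml is a reusable workflow (`workflow_call`): a downstream repository invokes it with `uses:` and passes a JSON job matrix plus the wheel artifact produced by its packaging job. Below is a minimal caller sketch, not taken from this diff: it assumes a hypothetical downstream pipeline whose `Parameters.yml` and `Package.yml` jobs provide the `python_jobs` and `package_all` values referenced here, and the package name is a placeholder.

```yaml
# Hypothetical caller sketch — package name, artifact names and refs are assumptions.
name: Pipeline

on:
  push:

jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_name: myPackage                     # assumed package name

  Package:
    uses: pyTooling/Actions/.github/workflows/Package.yml@main
    needs:
      - Params
    with:
      artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}

  AppTesting:
    uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
    needs:
      - Params
      - Package
    with:
      jobs: ${{ needs.Params.outputs.python_jobs }}   # required JSON job matrix
      wheel: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
      requirements: '-r tests/requirements.txt'
      apptest_directory: 'app'
      apptest_xml_artifact: AppTestReports          # placeholder artifact name
```

The commented-out AppTesting job in CompletePipeline.yml, further down in this diff, wires the same three core inputs (jobs, wheel, apptest_xml_artifact).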
.github/workflows/ArtifactCleanUp.yml — 35 changed lines (vendored)
@@ -1,8 +1,35 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: ArtifactCleanUp

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      package:
        description: 'Artifacts to be removed on not tagged runs.'
        required: true
@@ -14,21 +41,19 @@ on:
        type: string

jobs:

  ArtifactCleanUp:
    name: 🗑️ Artifact Cleanup
    runs-on: ubuntu-latest
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

    steps:

      - name: 🗑️ Delete package Artifacts
        uses: geekyeggo/delete-artifact@v5
        if: ${{ ! startsWith(github.ref, 'refs/tags') }}
        uses: geekyeggo/delete-artifact@v1
        with:
          name: ${{ inputs.package }}

      - name: 🗑️ Delete remaining Artifacts
        uses: geekyeggo/delete-artifact@v5
        if: ${{ inputs.remaining != '' }}
        uses: geekyeggo/delete-artifact@v1
        with:
          name: ${{ inputs.remaining }}
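In a caller pipeline, this cleanup workflow is typically the last job. A hedged sketch of such a fragment (placed inside a caller's `jobs:` section; the job names it needs and the artifact names are placeholders):

```yaml
# Assumed caller fragment — upstream job names and artifact names are placeholders.
  ArtifactCleanUp:
    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
    needs:
      - Package
      - PublishToGitHubPages
    with:
      package: myPackage-wheel          # deleted only on untagged runs
      remaining: |
        UnitTestReports-*
        CoverageReports-*
```

`package` is only deleted when the run is not a tag build (see the `startsWith(github.ref, 'refs/tags')` guard above); `remaining` takes a multi-line list of artifact names, and the CompletePipeline.yml file later in this diff also passes `-*` suffixed entries.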
.github/workflows/BuildTheDocs.yml — 53 changed lines (vendored)
@@ -1,3 +1,25 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Documentation

on:
@@ -5,18 +27,21 @@ on:
    inputs:
      artifact:
        description: 'Name of the documentation artifact.'
        required: true
        required: false
        default: ''
        type: string

jobs:

  BuildTheDocs:
    name: 📓 Run BuildTheDocs
    runs-on: ubuntu-latest
    runs-on: ubuntu-24.04

    steps:
      - name: '❗ Deprecation message'
        run: printf "%s\n" "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'."

      - name: ⏬ Checkout repository
        uses: actions/checkout@v2
        uses: actions/checkout@v4

      - name: 🛳️ Build documentation
        uses: buildthedocs/btd@v0
@@ -24,8 +49,24 @@ jobs:
          skip-deploy: true

      - name: 📤 Upload 'documentation' artifacts
        uses: actions/upload-artifact@master
        uses: pyTooling/upload-artifact@v4
        if: inputs.artifact != ''
        with:
          name: ${{ inputs.artifact }}
          path: doc/_build/html
          working-directory: doc/_build/html
          path: '*'
          retention-days: 1

      - name: '📓 Publish site to GitHub Pages'
        if: inputs.artifact == '' && github.event_name != 'pull_request'
        run: |
          cp --recursive -T doc/_build/html public
          cd public
          touch .nojekyll
          git init
          cp ../.git/config ./.git/config
          git add .
          git config --local user.email "BuildTheDocs@GitHubActions"
          git config --local user.name "GitHub Actions"
          git commit -a -m "update ${{ github.sha }}"
          git push -u origin +HEAD:gh-pages
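The deprecation warning above points callers at SphinxDocumentation.yml, LaTeXDocumentation.yml and ExtractConfiguration.yml. A hedged migration sketch of a caller fragment, modelled on the wiring visible in CompletePipeline.yml later in this diff (package and artifact names are placeholders, not values from this diff):

```yaml
# Assumed caller fragment replacing a BuildTheDocs call — names are placeholders.
  ConfigParams:
    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
    with:
      package_name: myPackage               # assumed package name

  Documentation:
    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
    needs:
      - ConfigParams
    with:
      html_artifact: Documentation-HTML     # placeholder
      latex_artifact: Documentation-LaTeX   # placeholder

  PDFDocumentation:
    uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
    needs:
      - Documentation
    with:
      document: myPackage                   # assumed document name
      latex_artifact: Documentation-LaTeX
      pdf_artifact: Documentation-PDF       # placeholder
```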
.github/workflows/CheckDocumentation.yml — new file, 72 lines (vendored)
@@ -0,0 +1,72 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Check Documentation

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
        default: '3.12'
        type: string
      directory:
        description: 'Source code directory to check.'
        required: true
        type: string
      fail_under:
        description: 'Minimum required documentation coverage level'
        required: false
        default: 80
        type: string

jobs:
  DocCoverage:
    name: 👀 Check documentation coverage
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4

      - name: 🐍 Setup Python ${{ inputs.python_version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python_version }}

      - name: 🔧 Install wheel,tomli and pip dependencies (native)
        run: |
          python -m pip install --disable-pip-version-check -U docstr_coverage interrogate[png]

      - name: Run 'interrogate' Documentation Coverage Check
        continue-on-error: true
        run: |
          interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && printf "%s\n" "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"

      - name: Run 'docstr_coverage' Documentation Coverage Check
        continue-on-error: true
        run: |
          docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && printf "%s\n" "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})"
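Both checks run with `continue-on-error: true`, so the job annotates problems without failing the pipeline. A minimal caller sketch (assumed fragment of a downstream `jobs:` section; the package path and threshold are placeholders, the input names come from the workflow above):

```yaml
# Assumed caller fragment — directory layout and threshold are placeholders.
  DocCoverage:
    uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
    with:
      python_version: '3.12'
      directory: myNamespace/myPackage    # assumed source directory to check
      fail_under: 85                      # stricter than the default of 80
```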
.github/workflows/CompletePipeline.yml — new file, 323 lines (vendored)
@@ -0,0 +1,323 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Namespace Package

on:
  workflow_call:
    inputs:
      package_namespace:
        description: 'Name of the tool''s namespace.'
        required: false
        default: ''
        type: string
      package_name:
        description: 'Name of the tool''s package.'
        required: true
        type: string
      unittest_python_version:
        description: 'Python version.'
        required: false
        default: '3.13'
        type: string
      unittest_python_version_list:
        description: 'Space separated list of Python versions to run tests with.'
        required: false
        default: '3.9 3.10 3.11 3.12 3.13'
        type: string
      unittest_system_list:
        description: 'Space separated list of systems to run tests on.'
        required: false
        default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
        type: string
      unittest_include_list:
        description: 'Space separated list of system:python items to be included into the list of test.'
        required: false
        default: ''
        type: string
      unittest_exclude_list:
        description: 'Space separated list of system:python items to be excluded from the list of test.'
        required: false
        default: ''
        type: string
      unittest_disable_list:
        description: 'Space separated list of system:python items to be disabled from the list of test.'
        required: false
        default: ''
        type: string
      apptest_python_version:
        description: 'Python version.'
        required: false
        default: '3.13'
        type: string
      apptest_python_version_list:
        description: 'Space separated list of Python versions to run tests with.'
        required: false
        default: ""
        type: string
      apptest_system_list:
        description: 'Space separated list of systems to run tests on.'
        required: false
        default: 'ubuntu windows macos macos-arm ucrt64'
        type: string
      apptest_include_list:
        description: 'Space separated list of system:python items to be included into the list of test.'
        required: false
        default: ''
        type: string
      apptest_exclude_list:
        description: 'Space separated list of system:python items to be excluded from the list of test.'
        required: false
        default: ''
        type: string
      apptest_disable_list:
        description: 'Space separated list of system:python items to be disabled from the list of test.'
        required: false
        default: ''
        type: string
    secrets:
      PYPI_TOKEN:
        description: "Token for pushing releases to PyPI."
        required: false
      CODACY_PROJECT_TOKEN:
        description: "Token for pushing coverage results to Codacy."
        required: false

jobs:
  ConfigParams:
    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
    with:
      package_namespace: ${{ inputs.package_namespace }}
      package_name: ${{ inputs.package_name }}

  UnitTestingParams:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_namespace: ${{ inputs.package_namespace }}
      package_name: ${{ inputs.package_name }}
      python_version: ${{ inputs.unittest_python_version }}
      python_version_list: ${{ inputs.unittest_python_version_list }}
      system_list: ${{ inputs.unittest_system_list }}
      include_list: ${{ inputs.unittest_include_list }}
      exclude_list: ${{ inputs.unittest_exclude_list }}
      disable_list: ${{ inputs.unittest_disable_list }}

  AppTestingParams:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      package_namespace: ${{ inputs.package_namespace }}
      package_name: ${{ inputs.package_name }}
      python_version: ${{ inputs.apptest_python_version }}
      python_version_list: ${{ inputs.apptest_python_version_list }}
      system_list: ${{ inputs.apptest_system_list }}
      include_list: ${{ inputs.apptest_include_list }}
      exclude_list: ${{ inputs.apptest_exclude_list }}
      disable_list: ${{ inputs.apptest_disable_list }}

  UnitTesting:
    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
    needs:
      - ConfigParams
      - UnitTestingParams
    with:
      jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
      requirements: "-r tests/unit/requirements.txt"
      # pacboy: "msys/git python-lxml:p"
      unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
      unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
      coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
      unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
      coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}

  StaticTypeCheck:
    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
    needs:
      - ConfigParams
      - UnitTestingParams
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      commands: |
        ${{ needs.ConfigParams.outputs.mypy_prepare_command }}
        mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }}
      html_report: 'report/typing'
      html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

  DocCoverage:
    uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
    needs:
      - ConfigParams
      - UnitTestingParams
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }}
      # fail_below: 70

  Package:
    uses: pyTooling/Actions/.github/workflows/Package.yml@main
    needs:
      - UnitTestingParams
      - UnitTesting
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}

  # AppTesting:
  #   uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@main
  #   needs:
  #     - AppTestingParams
  #     - UnitTestingParams
  #     - Package
  #   with:
  #     jobs: ${{ needs.AppTestingParams.outputs.python_jobs }}
  #     wheel: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
  #     apptest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}

  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
    needs:
      - UnitTestingParams
      - UnitTesting
    with:
      # coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
      # coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
      coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
      coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
    secrets:
      codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
    needs:
      - UnitTestingParams
      - UnitTesting
    with:
      additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
      merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}

  # VerifyDocs:
  #   uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
  #   needs:
  #     - UnitTestingParams
  #   with:
  #     python_version: ${{ needs.UnitTestingParams.outputs.python_version }}

  Documentation:
    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
    needs:
      - ConfigParams
      - UnitTestingParams
      - PublishTestResults
      - PublishCoverageResults
      # - VerifyDocs
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
      unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-ubuntu-native-3.12
      coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
      html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
      latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}

  IntermediateCleanUp:
    uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
    needs:
      - UnitTestingParams
      - PublishCoverageResults
      - PublishTestResults
      - Documentation
    with:
      sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
      xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-

  # PDFDocumentation:
  #   uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
  #   needs:
  #     - UnitTestingParams
  #     - Documentation
  #   with:
  #     document: pyEDAA.ProjectModel
  #     latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
  #     pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}

  PublishToGitHubPages:
    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
    needs:
      - UnitTestingParams
      - Documentation
      # - PDFDocumentation
      - PublishCoverageResults
      - StaticTypeCheck
    with:
      doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
      coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
      typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}

  ReleasePage:
    uses: pyTooling/Actions/.github/workflows/Release.yml@main
    if: startsWith(github.ref, 'refs/tags')
    needs:
      - Package
      # - AppTesting
      - PublishToGitHubPages

  PublishOnPyPI:
    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
    if: startsWith(github.ref, 'refs/tags')
    needs:
      - UnitTestingParams
      - ReleasePage
    with:
      python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
      requirements: -r dist/requirements.txt
      artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

  ArtifactCleanUp:
    uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
    needs:
      - UnitTestingParams
      - UnitTesting
      - StaticTypeCheck
      - Documentation
      # - PDFDocumentation
      - PublishTestResults
      - PublishCoverageResults
      - PublishToGitHubPages
      # - PublishOnPyPI
      - IntermediateCleanUp
    with:
      package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
      remaining: |
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
        ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
        # ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).apptesting_xml }}-*
        # ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
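CompletePipeline.yml bundles the repository's individual reusable workflows (parameters, unit testing, type checking, packaging, documentation, publishing, cleanup) into a single `workflow_call` entry point. A sketch of how a downstream namespace package might invoke it; the namespace, package name, version list and secret availability are assumptions, not values from this diff:

```yaml
# Hypothetical downstream pipeline invoking the complete pipeline — names are placeholders.
name: Pipeline

on:
  push:
  workflow_dispatch:

jobs:
  CompletePipeline:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_namespace: myNamespace                  # assumed namespace
      package_name: myPackage                         # assumed package name
      unittest_python_version_list: '3.11 3.12 3.13'  # subset of the default list
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
```

Both secrets are declared optional above, so a caller that only wants testing and documentation can omit them.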
.github/workflows/CoverageCollection.yml — 139 changed lines (vendored)
@@ -1,18 +1,60 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# Unai Martinez-Corral #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Coverage Collection

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
        default: '3.10'
        default: '3.11'
        type: string
      requirements:
        description: 'Python dependencies to be installed through pip.'
        required: false
        default: '-r tests/requirements.txt'
        type: string
      tests_directory:
        description: 'Path to the directory containing tests (test working directory).'
        required: false
        default: 'tests'
        type: string
      unittest_directory:
        description: 'Path to the directory containing unit tests (relative to tests_directory).'
        required: false
        default: 'unit'
        type: string
      coverage_config:
        description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
        required: false
        default: 'pyproject.toml'
        type: string
      artifact:
        description: 'Name of the coverage artifact.'
        required: true
@@ -26,55 +68,120 @@ jobs:

  Coverage:
    name: 📈 Collect Coverage Data using Python ${{ inputs.python_version }}
    runs-on: ubuntu-latest
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"

    steps:
      - name: '❗ Deprecation message'
        run: printf "%s\n" "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'."

      - name: ⏬ Checkout repository
        uses: actions/checkout@v2
        uses: actions/checkout@v4
        with:
          lfs: true
          submodules: true

      - name: 🐍 Setup Python ${{ inputs.python_version }}
        uses: actions/setup-python@v2
        uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python_version }}

      - name: 🗂 Install dependencies
        run: |
          python -m pip install -U pip
          python -m pip install ${{ inputs.requirements }}
          python -m pip install --disable-pip-version-check tomli
          python -m pip install --disable-pip-version-check ${{ inputs.requirements }}

      - name: 🔁 Extract configurations from pyproject.toml
        id: getVariables
        shell: python
        run: |
          from os import getenv
          from pathlib import Path
          from tomli import load as tomli_load
          from textwrap import dedent

          htmlDirectory = 'htmlcov'
          xmlFile = './coverage.xml'
          coverageRC = "${{ inputs.coverage_config }}".strip()

          # Read output paths from 'pyproject.toml' file
          if coverageRC == "pyproject.toml":
            pyProjectFile = Path("pyproject.toml")
            if pyProjectFile.exists():
              with pyProjectFile.open("rb") as file:
                pyProjectSettings = tomli_load(file)

              htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
              xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]
            else:
              print(f"File '{pyProjectFile}' not found.")
              print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
              exit(1)

          # Read output paths from '.coveragerc' file
          elif len(coverageRC) > 0:
            coverageRCFile = Path(coverageRC)
            if coverageRCFile.exists():
              with coverageRCFile.open("rb") as file:
                coverageRCSettings = tomli_load(file)

              htmlDirectory = coverageRCSettings["html"]["directory"]
              xmlFile = coverageRCSettings["xml"]["output"]
            else:
              print(f"File '{coverageRCFile}' not found.")
              print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
              exit(1)

          # Write jobs to special file
          github_output = Path(getenv("GITHUB_OUTPUT"))
          print(f"GITHUB_OUTPUT: {github_output}")
          with github_output.open("a+", encoding="utf-8") as f:
            f.write(dedent(f"""\
              coverage_report_html_directory={htmlDirectory}
              coverage_report_xml={xmlFile}
            """))

          print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}")

      - name: Collect coverage
        continue-on-error: true
        run: |
          python -m pytest -rA --cov=.. --cov-config=tests/.coveragerc tests/unit --color=yes
          export ENVIRONMENT_NAME="Linux (x86-64)"
          export PYTHONPATH=$(pwd)
          ABSDIR=$(pwd)
          cd "${{ inputs.tests_directory || '.' }}"
          [ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS
          printf "%s\n" "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes"
          python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes

      - name: Convert to cobertura format
        run: coverage xml
        run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage

      - name: Convert to HTML format
        run: |
          coverage html
          rm htmlcov/.gitignore
          coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }}
          rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore

      - name: 📤 Upload 'Coverage Report' artifact
        continue-on-error: true
        uses: actions/upload-artifact@v2
        uses: pyTooling/upload-artifact@v4
        with:
          name: ${{ inputs.artifact }}
          path: htmlcov
          working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
          path: '*'
          if-no-files-found: error
          retention-days: 1

      - name: 📊 Publish coverage at CodeCov
        continue-on-error: true
        uses: codecov/codecov-action@v1
        uses: codecov/codecov-action@v5
        with:
          file: ./coverage.xml
          files: ${{ steps.getVariables.outputs.coverage_report_xml }}
          flags: unittests
          env_vars: PYTHON

      - name: 📉 Publish coverage at Codacy
        continue-on-error: true
        uses: codacy/codacy-coverage-reporter-action@master
        uses: codacy/codacy-coverage-reporter-action@v1
        with:
          project-token: ${{ secrets.codacy_token }}
          coverage-reports: ./coverage.xml
          coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
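The deprecation message above directs callers to UnitTesting.yml, PublishCoverageResults.yml and PublishTestResults.yml. A hedged migration sketch of a caller fragment, following the wiring shown in CompletePipeline.yml earlier in this diff (it assumes a Parameters.yml job named UnitTestingParams exists in the caller; artifact names are placeholders):

```yaml
# Assumed caller fragment replacing a CoverageCollection call — artifact names are placeholders.
  UnitTesting:
    uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
    needs:
      - UnitTestingParams
    with:
      jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
      requirements: '-r tests/unit/requirements.txt'
      coverage_sqlite_artifact: CodeCoverage-SQLite    # placeholder

  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
    needs:
      - UnitTesting
    with:
      coverage_html_artifact: CodeCoverage-HTML        # placeholder
    secrets:
      codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
    needs:
      - UnitTesting
    with:
      merged_junit_artifact: UnitTestReports-XML       # placeholder
```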
.github/workflows/ExtractConfiguration.yml — new file, 221 lines (vendored)
@@ -0,0 +1,221 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Extract Configuration

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      python_version:
        description: 'Python version.'
        required: false
        default: '3.12'
        type: string
      package_namespace:
        description: 'Name of the tool''s namespace.'
        required: false
        default: ''
        type: string
      package_name:
        description: 'Name of the tool''s package.'
        required: true
        type: string
      coverage_config:
        description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
        required: false
        default: 'pyproject.toml'
        type: string

    outputs:
      package_fullname:
        description: ""
        value: ${{ jobs.Extract.outputs.package_fullname }}
      package_directory:
        description: ""
        value: ${{ jobs.Extract.outputs.package_directory }}
      mypy_prepare_command:
        description: ""
        value: ${{ jobs.Extract.outputs.mypy_prepare_command }}
      unittest_report_xml_directory:
        description: ""
        value: ${{ jobs.Extract.outputs.unittest_report_xml_directory }}
      unittest_report_xml_filename:
        description: ""
        value: ${{ jobs.Extract.outputs.unittest_report_xml_filename }}
      unittest_report_xml:
        description: ""
        value: ${{ jobs.Extract.outputs.unittest_report_xml }}
      coverage_report_html_directory:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_html_directory }}
      coverage_report_xml_directory:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_xml_directory }}
      coverage_report_xml_filename:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_xml_filename }}
      coverage_report_xml:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_xml }}
      coverage_report_json_directory:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_json_directory }}
      coverage_report_json_filename:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_json_filename }}
      coverage_report_json:
        description: ""
        value: ${{ jobs.Extract.outputs.coverage_report_json }}

jobs:
  Extract:
    name: 📓 Extract configurations from pyproject.toml
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    outputs:
      package_fullname: ${{ steps.getPackageName.outputs.package_fullname }}
      package_directory: ${{ steps.getPackageName.outputs.package_directory }}
      mypy_prepare_command: ${{ steps.getPackageName.outputs.mypy_prepare_command }}
      unittest_report_xml_directory: ${{ steps.getVariables.outputs.unittest_report_xml_directory }}
      unittest_report_xml_filename: ${{ steps.getVariables.outputs.unittest_report_xml_filename }}
      unittest_report_xml: ${{ steps.getVariables.outputs.unittest_report_xml }}
      coverage_report_html_directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
      coverage_report_xml_directory: ${{ steps.getVariables.outputs.coverage_report_xml_directory }}
      coverage_report_xml_filename: ${{ steps.getVariables.outputs.coverage_report_xml_filename }}
      coverage_report_xml: ${{ steps.getVariables.outputs.coverage_report_xml }}
      coverage_report_json_directory: ${{ steps.getVariables.outputs.coverage_report_json_directory }}
      coverage_report_json_filename: ${{ steps.getVariables.outputs.coverage_report_json_filename }}
      coverage_report_json: ${{ steps.getVariables.outputs.coverage_report_json }}

    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4

      - name: 🐍 Setup Python ${{ inputs.python_version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ inputs.python_version }}

      - name: 🔧 Install wheel,tomli and pip dependencies (native)
        run: |
          python -m pip install --disable-pip-version-check -U wheel tomli

      - name: 🔁 Full package name and directory
        id: getPackageName
        shell: python
        run: |
          from os import getenv
          from pathlib import Path
          from textwrap import dedent

          namespace = "${{ inputs.package_namespace }}".strip()
          name = "${{ inputs.package_name }}".strip()

          if namespace == "" or namespace == ".":
            fullname = f"{name}"
            directory = f"{name}"
            mypy_prepare_command = ""
          else:
            fullname = f"{namespace}.{name}"
            directory = f"{namespace}/{name}"
            mypy_prepare_command = f"touch {namespace}/__init__.py"

          github_output = Path(getenv("GITHUB_OUTPUT"))
          print(f"GITHUB_OUTPUT: {github_output}")
          with github_output.open("a+", encoding="utf-8") as f:
            f.write(dedent(f"""\
              package_fullname={fullname}
              package_directory={directory}
              mypy_prepare_command={mypy_prepare_command}
            """))

      - name: 🔁 Extract configurations from pyproject.toml
        id: getVariables
        shell: python
        run: |
          from os import getenv
          from pathlib import Path
          from sys import version
          from textwrap import dedent

          print(f"Python: {version}")

          from tomli import load as tomli_load

          unittestXMLFile = Path("./unittest.xml")
          coverageHTMLDirectory = Path("htmlcov")
          coverageXMLFile = Path("./coverage.xml")
          coverageJSONFile = Path("./coverage.json")
          coverageRC = "${{ inputs.coverage_config }}".strip()

          # Read output paths from 'pyproject.toml' file
          if coverageRC == "pyproject.toml":
            pyProjectFile = Path("pyproject.toml")
            if pyProjectFile.exists():
              with pyProjectFile.open("rb") as file:
|
||||
pyProjectSettings = tomli_load(file)
|
||||
|
||||
unittestXMLFile = Path(pyProjectSettings["tool"]["pytest"]["junit_xml"])
|
||||
coverageHTMLDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
|
||||
coverageXMLFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
|
||||
coverageJSONFile= Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
|
||||
else:
|
||||
print(f"File '{pyProjectFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Read output paths from '.coveragerc' file
|
||||
elif len(coverageRC) > 0:
|
||||
coverageRCFile = Path(coverageRC)
|
||||
if coverageRCFile.exists():
|
||||
with coverageRCFile.open("rb") as file:
|
||||
coverageRCSettings = tomli_load(file)
|
||||
|
||||
coverageHTMLDirectory = Path(coverageRCSettings["html"]["directory"])
|
||||
coverageXMLFile = Path(coverageRCSettings["xml"]["output"])
|
||||
coverageJSONFile = Path(coverageRCSettings["json"]["output"])
|
||||
else:
|
||||
print(f"File '{coverageRCFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
unittest_report_xml_directory={unittestXMLFile.parent.as_posix()}
|
||||
unittest_report_xml_filename={unittestXMLFile.name}
|
||||
unittest_report_xml={unittestXMLFile.as_posix()}
|
||||
coverage_report_html_directory={coverageHTMLDirectory.as_posix()}
|
||||
coverage_report_xml_directory={coverageXMLFile.parent.as_posix()}
|
||||
coverage_report_xml_filename={coverageXMLFile.name}
|
||||
coverage_report_xml={coverageXMLFile.as_posix()}
|
||||
coverage_report_json_directory={coverageJSONFile.parent.as_posix()}
|
||||
coverage_report_json_filename={coverageJSONFile.name}
|
||||
coverage_report_json={coverageJSONFile.as_posix()}
|
||||
"""))
|
||||
|
||||
print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}")
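Note (editorial sketch, not part of this commit range): the reusable workflow above is consumed via workflow_call; a minimal caller might look as follows. The @main ref, the job names, and the pytest invocation are illustrative assumptions only.

# Hypothetical caller workflow; ref, job names and the pytest call are assumptions for illustration.
name: Example Pipeline
on: push

jobs:
  ConfigParams:
    uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main   # assumed ref
    with:
      package_name: myPackage                                                 # assumed package name

  UnitTesting:
    needs: ConfigParams
    runs-on: ubuntu-24.04
    steps:
      - name: ⏬ Checkout repository
        uses: actions/checkout@v4
      # The extracted paths are consumed as regular job outputs of the reusable workflow.
      - name: Run pytest with the extracted JUnit report path
        run: |
          python -m pip install --disable-pip-version-check pytest
          python -m pytest --junitxml=${{ needs.ConfigParams.outputs.unittest_report_xml }} tests/unit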

.github/workflows/IntermediateCleanUp.yml (new file, 58 lines)
@@ -0,0 +1,58 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: Intermediate Cleanup

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      sqlite_coverage_artifacts_prefix:
        description: 'Prefix for SQLite coverage artifacts'
        required: false
        type: string
      xml_unittest_artifacts_prefix:
        description: 'Prefix for XML unittest artifacts'
        required: false
        type: string

jobs:
  IntermediateCleanUp:
    name: 🗑️ Intermediate Artifact Cleanup
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    steps:
      - name: 🗑️ Delete SQLite coverage artifacts from matrix jobs
        uses: geekyeggo/delete-artifact@v5
        if: inputs.sqlite_coverage_artifacts_prefix != ''
        continue-on-error: true
        with:
          name: ${{ inputs.sqlite_coverage_artifacts_prefix }}*

      - name: 🗑️ Delete JUnit XML artifacts from matrix jobs
        uses: geekyeggo/delete-artifact@v5
        if: inputs.xml_unittest_artifacts_prefix != ''
        continue-on-error: true
        with:
          name: ${{ inputs.xml_unittest_artifacts_prefix }}*

.github/workflows/LaTeXDocumentation.yml (new file, 82 lines)
@@ -0,0 +1,82 @@
# ==================================================================================================================== #
# Authors: #
# Patrick Lehmann #
# #
# ==================================================================================================================== #
# Copyright 2020-2024 The pyTooling Authors #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# SPDX-License-Identifier: Apache-2.0 #
# ==================================================================================================================== #
name: LaTeX Documentation

on:
  workflow_call:
    inputs:
      ubuntu_image_version:
        description: 'Ubuntu image version.'
        required: false
        default: '24.04'
        type: string
      document:
        description: 'LaTeX root document without *.tex extension.'
        required: true
        type: string
      latex_artifact:
        description: 'Name of the LaTeX documentation artifact.'
        required: false
        default: ''
        type: string
      pdf_artifact:
        description: 'Name of the PDF documentation artifact.'
        required: false
        default: ''
        type: string

jobs:
  PDFDocumentation:
    name: 📓 Converting LaTeX Documentation to PDF
    runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
    steps:
      - name: 📥 Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job
        uses: pyTooling/download-artifact@v4
        with:
          name: ${{ inputs.latex_artifact }}
          path: latex

      - name: Debug
        run: |
          tree -pash .

      - name: Build LaTeX document using 'pytooling/miktex:sphinx'
        uses: addnab/docker-run-action@v3
        with:
          image: pytooling/miktex:sphinx
          options: -v ${{ github.workspace }}/latex:/latex --workdir /latex
          run: |
            which pdflatex
            pwd
            ls -lAh

            latexmk -xelatex ${{ inputs.document }}.tex

      - name: 📤 Upload 'PDF Documentation' artifact
        uses: pyTooling/upload-artifact@v4
        if: inputs.pdf_artifact != ''
        with:
          name: ${{ inputs.pdf_artifact }}
          working-directory: latex
          path: ${{ inputs.document }}.pdf
          if-no-files-found: error
          retention-days: 1

.github/workflows/NightlyRelease.yml (new file, 525 lines)
@@ -0,0 +1,525 @@
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Nightly
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image:
|
||||
description: 'Name of the Ubuntu image.'
|
||||
required: false
|
||||
default: 'ubuntu-24.04'
|
||||
type: string
|
||||
nightly_name:
|
||||
description: 'Name of the nightly release.'
|
||||
required: false
|
||||
default: 'nightly'
|
||||
type: string
|
||||
nightly_title:
|
||||
description: 'Title of the nightly release.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
nightly_description:
|
||||
description: 'Description of the nightly release.'
|
||||
required: false
|
||||
default: 'Release of artifacts from latest CI pipeline.'
|
||||
type: string
|
||||
draft:
|
||||
description: 'Specify if this is a draft.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
prerelease:
|
||||
description: 'Specify if this is a pre-release.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
latest:
|
||||
description: 'Specify if this is the latest release.'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
replacements:
|
||||
description: 'Multi-line string containing search=replace patterns.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
assets:
|
||||
description: 'Multi-line string containing artifact:file:title asset descriptions.'
|
||||
required: true
|
||||
type: string
|
||||
inventory-json:
|
||||
type: string
|
||||
required: false
|
||||
default: ''
|
||||
inventory-version:
|
||||
type: string
|
||||
required: false
|
||||
default: ''
|
||||
inventory-categories:
|
||||
type: string
|
||||
required: false
|
||||
default: ''
|
||||
tarball-name:
|
||||
type: string
|
||||
required: false
|
||||
default: '__pyTooling_upload_artifact__.tar'
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
name: 📝 Update 'Nightly Page' on GitHub
|
||||
runs-on: ${{ inputs.ubuntu_image }}
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# The command 'git describe' (used for version) needs the history.
|
||||
fetch-depth: 0
|
||||
|
||||
- name: 🔧 Install zstd
|
||||
run: sudo apt-get install -y --no-install-recommends zstd
|
||||
|
||||
- name: 📑 Delete (old) Release Page
|
||||
id: deleteReleasePage
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||
ANSI_LIGHT_YELLOW=$'\x1b[93m'
|
||||
ANSI_NOCOLOR=$'\x1b[0m'
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
printf "%s" "Deleting release '${{ inputs.nightly_name }}' ... "
|
||||
message="$(gh release delete ${{ inputs.nightly_name }} --yes 2>&1)"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
elif [[ "${message}" == "release not found" ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📑 (Re)create (new) Release Page
|
||||
id: createReleasePage
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||
ANSI_NOCOLOR=$'\x1b[0m'
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
addDraft="--draft"
|
||||
|
||||
if [[ "${{ inputs.prerelease }}" == "true" ]]; then
|
||||
addPreRelease="--prerelease"
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.latest }}" == "false" ]]; then
|
||||
addLatest="--latest=false"
|
||||
fi
|
||||
|
||||
if [[ "${{ inputs.nightly_title }}" != "" ]]; then
|
||||
addTitle=("--title" "${{ inputs.nightly_title }}")
|
||||
fi
|
||||
|
||||
cat <<'EOF' > __NoTeS__.md
|
||||
${{ inputs.nightly_description }}
|
||||
EOF
|
||||
if [[ -s __NoTeS__.md ]]; then
|
||||
addNotes=("--notes-file" "__NoTeS__.md")
|
||||
fi
|
||||
|
||||
# Apply replacements
|
||||
while IFS=$'\r\n' read -r patternLine; do
|
||||
# skip empty lines
|
||||
[[ "$patternLine" == "" ]] && continue
|
||||
|
||||
pattern="${patternLine%%=*}"
|
||||
replacement="${patternLine#*=}"
|
||||
sed -i -e "s/%$pattern%/$replacement/g" "__NoTeS__.md"
|
||||
done <<<'${{ inputs.replacements }}'
|
||||
|
||||
# Add footer line
|
||||
cat <<EOF >> __NoTeS__.md
|
||||
|
||||
--------
|
||||
Published from [${{ github.workflow }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) workflow triggered by @${{ github.actor }} on $(date '+%Y-%m-%d %H:%M:%S %Z').
|
||||
EOF
|
||||
|
||||
printf "%s\n" "Creating release '${{ inputs.nightly_name }}' ... "
|
||||
message="$(gh release create "${{ inputs.nightly_name }}" --verify-tag $addDraft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📥 Download artifacts and upload as assets
|
||||
id: uploadAssets
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||
ANSI_LIGHT_YELLOW=$'\x1b[93m'
|
||||
ANSI_LIGHT_BLUE="\e[94m"
|
||||
ANSI_NOCOLOR=$'\x1b[0m'
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
Replace() {
|
||||
line="$1"
|
||||
while IFS=$'\r\n' read -r patternLine; do
|
||||
# skip empty lines
|
||||
[[ "$patternLine" == "" ]] && continue
|
||||
|
||||
pattern="${patternLine%%=*}"
|
||||
replacement="${patternLine#*=}"
|
||||
line="${line//"%$pattern%"/"$replacement"}"
|
||||
done <<<'${{ inputs.replacements }}'
|
||||
printf "%s\n" "$line"
|
||||
}
|
||||
|
||||
# Create JSON inventory
|
||||
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
||||
VERSION="1.0"
|
||||
|
||||
# Split categories by ',' into a Bash array.
|
||||
# See https://stackoverflow.com/a/45201229/3719459
|
||||
if [[ "${{ inputs.inventory-categories }}" != "" ]]; then
|
||||
readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }},"
|
||||
unset 'inventoryCategories[-1]'
|
||||
declare -p inventoryCategories
|
||||
else
|
||||
inventoryCategories=""
|
||||
fi
|
||||
|
||||
jsonInventory=$(jq -c -n \
|
||||
--arg version "${VERSION}" \
|
||||
--arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
|
||||
--argjson jsonMeta "$(jq -c -n \
|
||||
--arg tag "${{ inputs.nightly_name }}" \
|
||||
--arg version "${{ inputs.inventory-version }}" \
|
||||
--arg hash "${{ github.sha }}" \
|
||||
--arg repo "${{ github.server_url }}/${{ github.repository }}" \
|
||||
--arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
|
||||
--argjson categories "$(jq -c -n \
|
||||
'$ARGS.positional' \
|
||||
--args "${inventoryCategories[@]}" \
|
||||
)" \
|
||||
'{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \
|
||||
)" \
|
||||
'{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}'
|
||||
)
|
||||
fi
|
||||
|
||||
ERRORS=0
|
||||
# A dictionary of 0/1 to avoid duplicate downloads
|
||||
declare -A downloadedArtifacts
|
||||
# A dictionary to check for duplicate asset files in release
|
||||
declare -A assetFilenames
|
||||
while IFS=$'\r\n' read -r assetLine; do
|
||||
if [[ "${assetLine}" == "" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# split assetLine colon separated triple: artifact:asset:title
|
||||
artifact="${assetLine%%:*}"
|
||||
assetLine="${assetLine#*:}"
|
||||
asset="${assetLine%%:*}"
|
||||
assetLine="${assetLine#*:}"
|
||||
if [[ "${{ inputs.inventory-json }}" == "" ]]; then
|
||||
categories=""
|
||||
title="${assetLine##*:}"
|
||||
else
|
||||
categories="${assetLine%%:*}"
|
||||
title="${assetLine##*:}"
|
||||
fi
|
||||
|
||||
# remove leading whitespace
|
||||
asset="${asset#"${asset%%[![:space:]]*}"}"
|
||||
categories="${categories#"${categories%%[![:space:]]*}"}"
|
||||
title="${title#"${title%%[![:space:]]*}"}"
|
||||
|
||||
# apply replacements
|
||||
asset="$(Replace "${asset}")"
|
||||
title="$(Replace "${title}")"
|
||||
|
||||
printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
|
||||
printf " %s" "Checked asset for duplicates ... "
|
||||
if [[ -n "${assetFilenames[$asset]}" ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
assetFilenames[$asset]=1
|
||||
fi
|
||||
|
||||
# Download artifact by artifact name
|
||||
if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
|
||||
printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
echo " downloading '${artifact}' ... "
|
||||
printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
|
||||
gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
downloadedArtifacts[$artifact]=1
|
||||
|
||||
printf " %s" "Checking for embedded tarball ... "
|
||||
if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"
|
||||
|
||||
pushd "${artifact}" > /dev/null
|
||||
|
||||
printf " %s" "Extracting embedded tarball ... "
|
||||
tar -xf "${{ inputs.tarball-name }}"
|
||||
if [[ $? -ne 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
|
||||
printf " %s" "Removing temporary tarball ... "
|
||||
rm -f "${{ inputs.tarball-name }}"
|
||||
if [[ $? -ne 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
|
||||
popd > /dev/null
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
|
||||
printf " %s" "checking asset '${artifact}/${asset}' ... "
|
||||
if [[ "${asset}" == !*.zip ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
|
||||
asset="${asset##*!}"
|
||||
printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
zip -r "../${asset}" *
|
||||
)
|
||||
retCode=$?
|
||||
printf "::endgroup::\n"
|
||||
if [[ $retCode -eq 0 ]]; then
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}"
|
||||
|
||||
if [[ "${asset:0:1}" == "\$" ]]; then
|
||||
asset="${asset##*$}"
|
||||
dirName="${asset%.*}"
|
||||
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
|
||||
retCode=$?
|
||||
else
|
||||
asset="${asset##*!}"
|
||||
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
|
||||
)
|
||||
retCode=$?
|
||||
fi
|
||||
|
||||
if [[ $retCode -eq 0 ]]; then
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}"
|
||||
|
||||
if [[ "${asset:0:1}" == "\$" ]]; then
|
||||
asset="${asset##*$}"
|
||||
dirName="${asset%.*}"
|
||||
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
|
||||
retCode=$?
|
||||
else
|
||||
asset="${asset##*!}"
|
||||
printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
|
||||
(
|
||||
cd "${artifact}" && \
|
||||
tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
|
||||
)
|
||||
retCode=$?
|
||||
fi
|
||||
|
||||
if [[ $retCode -eq 0 ]]; then
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${asset}"
|
||||
else
|
||||
printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
elif [[ -e "${artifact}/${asset}" ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
uploadFile="${artifact}/${asset}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
|
||||
# Add asset to JSON inventory
|
||||
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
||||
if [[ "${categories}" != "${title}" ]]; then
|
||||
printf " %s\n" "adding file '${uploadFile#*/}' with '${categories//;/ → }' to JSON inventory ..."
|
||||
category=""
|
||||
jsonEntry=$(jq -c -n \
|
||||
--arg title "${title}" \
|
||||
--arg file "${uploadFile#*/}" \
|
||||
'{"file": $file, "title": $title}' \
|
||||
)
|
||||
|
||||
while [[ "${categories}" != "${category}" ]]; do
|
||||
category="${categories##*,}"
|
||||
categories="${categories%,*}"
|
||||
jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
|
||||
done
|
||||
|
||||
jsonInventory=$(jq -c -n \
|
||||
--argjson inventory "${jsonInventory}" \
|
||||
--argjson file "${jsonEntry}" \
|
||||
'$inventory * {"files": $file}' \
|
||||
)
|
||||
else
|
||||
printf " %s\n" "adding file '${uploadFile#*/}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Upload asset to existing release page
|
||||
printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
|
||||
gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
done <<<'${{ inputs.assets }}'
|
||||
|
||||
if [[ "${{ inputs.inventory-json }}" != "" ]]; then
|
||||
inventoryTitle="Release Inventory (JSON)"
|
||||
|
||||
printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
|
||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
|
||||
printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
|
||||
cat "${{ inputs.inventory-json }}"
|
||||
printf "::endgroup::\n"
|
||||
|
||||
# Upload inventory asset to existing release page
|
||||
printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
|
||||
gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
|
||||
ERRORS=1
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
|
||||
printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
|
||||
tree -pash -L 3 .
|
||||
printf "::endgroup::\n"
|
||||
|
||||
if [[ $ERRORS -ne 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 📑 Remove draft state from Release Page
|
||||
if: ${{ ! inputs.draft }}
|
||||
run: |
|
||||
set +e
|
||||
|
||||
ANSI_LIGHT_RED=$'\x1b[91m'
|
||||
ANSI_LIGHT_GREEN=$'\x1b[92m'
|
||||
ANSI_NOCOLOR=$'\x1b[0m'
|
||||
|
||||
export GH_TOKEN=${{ github.token }}
|
||||
|
||||
# Remove draft-state from release page
|
||||
printf "%s" "Remove draft-state from release '${{ inputs.nightly_name }}' ... "
|
||||
gh release edit --draft=false "${{ inputs.nightly_name }}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
|
||||
else
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
|
||||
printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'."
|
||||
fi

.github/workflows/Package.yml (90 changed lines)
@@ -1,17 +1,44 @@
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Package
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.'
|
||||
required: false
|
||||
default: 'wheel'
|
||||
default: ''
|
||||
type: string
|
||||
artifact:
|
||||
description: 'Name of the package artifact.'
|
||||
@@ -22,32 +49,67 @@ jobs:
|
||||
|
||||
Package:
|
||||
name: 📦 Package in Source and Wheel Format
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🔧 Install dependencies for packaging and release
|
||||
run: |
|
||||
python -m pip install -U pip
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
# build
|
||||
|
||||
- name: 🔨 Build Python package (source distribution)
|
||||
- name: 🔧 [build] Install dependencies for packaging and release
|
||||
if: inputs.requirements == ''
|
||||
run: python -m pip install --disable-pip-version-check build
|
||||
|
||||
- name: 🔨 [build] Build Python package (source distribution)
|
||||
if: inputs.requirements == ''
|
||||
run: python -m build --sdist
|
||||
|
||||
- name: 🔨 [build] Build Python package (binary distribution - wheel)
|
||||
if: inputs.requirements == ''
|
||||
run: python -m build --wheel
|
||||
|
||||
# build (not isolated)
|
||||
|
||||
- name: 🔧 [build] Install dependencies for packaging and release
|
||||
if: inputs.requirements == 'no-isolation'
|
||||
run: python -m pip install --disable-pip-version-check build
|
||||
|
||||
- name: 🔨 [build] Build Python package (source distribution)
|
||||
if: inputs.requirements == 'no-isolation'
|
||||
run: python -m build --no-isolation --sdist
|
||||
|
||||
- name: 🔨 [build] Build Python package (binary distribution - wheel)
|
||||
if: inputs.requirements == 'no-isolation'
|
||||
run: python -m build --no-isolation --wheel
|
||||
|
||||
# setuptools
|
||||
|
||||
- name: 🔧 [setuptools] Install dependencies for packaging and release
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 🔨 [setuptools] Build Python package (source distribution)
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
run: python setup.py sdist
|
||||
|
||||
- name: 🔨 Build Python package (binary distribution - wheel)
|
||||
- name: 🔨 [setuptools] Build Python package (binary distribution - wheel)
|
||||
if: inputs.requirements != '' && inputs.requirements != 'no-isolation'
|
||||
run: python setup.py bdist_wheel
|
||||
|
||||
- name: 📤 Upload wheel artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
path: dist/
|
||||
working-directory: dist
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1

.github/workflows/Parameters.yml (new file, 343 lines)
@@ -0,0 +1,343 @@
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Parameters
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
name:
|
||||
description: 'Name of the tool.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
package_namespace:
|
||||
description: 'Name of the tool''s namespace.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
package_name:
|
||||
description: 'Name of the tool''s package.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.13'
|
||||
type: string
|
||||
python_version_list:
|
||||
description: 'Space separated list of Python versions to run tests with.'
|
||||
required: false
|
||||
default: '3.9 3.10 3.11 3.12 3.13'
|
||||
type: string
|
||||
system_list:
|
||||
description: 'Space separated list of systems to run tests on.'
|
||||
required: false
|
||||
default: 'ubuntu windows macos macos-arm mingw64 ucrt64'
|
||||
type: string
|
||||
include_list:
|
||||
description: 'Space separated list of system:python items to be included in the list of tests.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
exclude_list:
|
||||
description: 'Space separated list of system:python items to be excluded from the list of tests.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
disable_list:
|
||||
description: 'Space separated list of system:python items to be disabled in the list of tests.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ubuntu_image:
|
||||
description: 'The used GitHub Action image for Ubuntu based jobs.'
|
||||
required: false
|
||||
default: 'ubuntu-24.04'
|
||||
type: string
|
||||
windows_image:
|
||||
description: 'The used GitHub Action image for Windows based jobs.'
|
||||
required: false
|
||||
default: 'windows-2022'
|
||||
type: string
|
||||
macos_intel_image:
|
||||
description: 'The used GitHub Action image for macOS (Intel x86-64) based jobs.'
|
||||
required: false
|
||||
default: 'macos-13'
|
||||
type: string
|
||||
macos_arm_image:
|
||||
description: 'The used GitHub Action image for macOS (ARM aarch64) based jobs.'
|
||||
required: false
|
||||
default: 'macos-14'
|
||||
type: string
|
||||
|
||||
outputs:
|
||||
python_version:
|
||||
description: "Default Python version for other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.python_version }}
|
||||
python_jobs:
|
||||
description: "List of Python versions (and system combinations) to be used in the matrix of other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.python_jobs }}
|
||||
artifact_names:
|
||||
description: "Pre-defined artifact names for other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.artifact_names }}
|
||||
params:
|
||||
description: "Parameters to be used in other jobs."
|
||||
value: ${{ jobs.Parameters.outputs.params }}
|
||||
|
||||
jobs:
|
||||
Parameters:
|
||||
name: ✎ Generate pipeline parameters
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
outputs:
|
||||
python_version: ${{ steps.params.outputs.python_version }}
|
||||
python_jobs: ${{ steps.params.outputs.python_jobs }}
|
||||
artifact_names: ${{ steps.params.outputs.artifact_names }}
|
||||
params: ${{ steps.params.outputs.params }}
|
||||
|
||||
steps:
|
||||
- name: Generate 'params' and 'python_jobs'
|
||||
id: params
|
||||
shell: python
|
||||
run: |
|
||||
from json import dumps as json_dumps
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from typing import Iterable
|
||||
|
||||
package_namespace = "${{ inputs.package_namespace }}".strip()
|
||||
package_name = "${{ inputs.package_name }}".strip()
|
||||
name = "${{ inputs.name }}".strip()
|
||||
python_version = "${{ inputs.python_version }}".strip()
|
||||
systems = "${{ inputs.system_list }}".strip()
|
||||
versions = "${{ inputs.python_version_list }}".strip()
|
||||
include_list = "${{ inputs.include_list }}".strip()
|
||||
exclude_list = "${{ inputs.exclude_list }}".strip()
|
||||
disable_list = "${{ inputs.disable_list }}".strip()
|
||||
|
||||
if name == "":
|
||||
if package_namespace == "" or package_namespace == ".":
|
||||
name = f"{package_name}"
|
||||
else:
|
||||
name = f"{package_namespace}.{package_name}"
|
||||
|
||||
currentMSYS2Version = "3.12"
|
||||
currentAlphaVersion = "3.14"
|
||||
currentAlphaRelease = "3.14.0-alpha.1"
|
||||
|
||||
if systems == "":
|
||||
print("::error title=Parameter::system_list is empty.")
|
||||
else:
|
||||
systems = [sys.strip() for sys in systems.split(" ")]
|
||||
|
||||
if versions == "":
|
||||
versions = [ python_version ]
|
||||
else:
|
||||
versions = [ver.strip() for ver in versions.split(" ")]
|
||||
|
||||
if include_list == "":
|
||||
includes = []
|
||||
else:
|
||||
includes = [tuple(include.strip().split(":")) for include in include_list.split(" ")]
|
||||
|
||||
if exclude_list == "":
|
||||
excludes = []
|
||||
else:
|
||||
excludes = [exclude.strip() for exclude in exclude_list.split(" ")]
|
||||
|
||||
if disable_list == "":
|
||||
disabled = []
|
||||
else:
|
||||
disabled = [disable.strip() for disable in disable_list.split(" ")]
|
||||
|
||||
if "3.8" in versions:
|
||||
print("::warning title=Deprecated::Support for Python 3.8 ended in 2024.10.")
|
||||
if "msys2" in systems:
|
||||
print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.")
|
||||
if currentAlphaVersion in versions:
|
||||
print(f"::notice title=Experimental::Python {currentAlphaVersion} ({currentAlphaRelease}) is a pre-release.")
|
||||
for disable in disabled:
|
||||
print(f"::warning title=Disabled Python Job::System '{disable}' temporarily disabled.")
|
||||
|
||||
# see https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
|
||||
data = {
|
||||
# Python and PyPy versions supported by "setup-python" action
|
||||
"python": {
|
||||
"3.8": { "icon": "⚫", "until": "2024.10" },
|
||||
"3.9": { "icon": "🔴", "until": "2025.10" },
|
||||
"3.10": { "icon": "🟠", "until": "2026.10" },
|
||||
"3.11": { "icon": "🟡", "until": "2027.10" },
|
||||
"3.12": { "icon": "🟢", "until": "2028.10" },
|
||||
"3.13": { "icon": "🟢", "until": "2029.10" },
|
||||
"3.14": { "icon": "🟣", "until": "2030.10" },
|
||||
"pypy-3.7": { "icon": "⟲⚫", "until": "????.??" },
|
||||
"pypy-3.8": { "icon": "⟲🔴", "until": "????.??" },
|
||||
"pypy-3.9": { "icon": "⟲🟠", "until": "????.??" },
|
||||
"pypy-3.10": { "icon": "⟲🟡", "until": "????.??" },
|
||||
},
|
||||
# Runner systems (runner images) supported by GitHub Actions
|
||||
"sys": {
|
||||
"ubuntu": { "icon": "🐧", "runs-on": "${{ inputs.ubuntu_image }}", "shell": "bash", "name": "Linux (x86-64)" },
|
||||
"windows": { "icon": "🪟", "runs-on": "${{ inputs.windows_image }}", "shell": "pwsh", "name": "Windows (x86-64)" },
|
||||
"macos": { "icon": "🍎", "runs-on": "${{ inputs.macos_intel_image }}", "shell": "bash", "name": "macOS (x86-64)" },
|
||||
"macos-arm": { "icon": "🍏", "runs-on": "${{ inputs.macos_arm_image }}", "shell": "bash", "name": "macOS (aarch64)" },
|
||||
},
|
||||
# Runtimes provided by MSYS2
|
||||
"runtime": {
|
||||
"msys": { "icon": "🪟🟪", "name": "Windows+MSYS2 (x86-64) - MSYS" },
|
||||
"mingw32": { "icon": "🪟⬛", "name": "Windows+MSYS2 (x86-64) - MinGW32" },
|
||||
"mingw64": { "icon": "🪟🟦", "name": "Windows+MSYS2 (x86-64) - MinGW64" },
|
||||
"clang32": { "icon": "🪟🟫", "name": "Windows+MSYS2 (x86-64) - Clang32" },
|
||||
"clang64": { "icon": "🪟🟧", "name": "Windows+MSYS2 (x86-64) - Clang64" },
|
||||
"ucrt64": { "icon": "🪟🟨", "name": "Windows+MSYS2 (x86-64) - UCRT64" },
|
||||
}
|
||||
}
|
||||
|
||||
print(f"includes ({len(includes)}):")
|
||||
for system,version in includes:
|
||||
print(f"- {system}:{version}")
|
||||
print(f"excludes ({len(excludes)}):")
|
||||
for exclude in excludes:
|
||||
print(f"- {exclude}")
|
||||
print(f"disabled ({len(disabled)}):")
|
||||
for disable in disabled:
|
||||
print(f"- {disable}")
|
||||
|
||||
def match(combination: str, pattern: str) -> bool:
|
||||
system, version = combination.split(":")
|
||||
sys, ver = pattern.split(":")
|
||||
|
||||
if sys == "*":
|
||||
return (ver == "*") or (version == ver)
|
||||
elif system == sys:
|
||||
return (ver == "*") or (version == ver)
|
||||
else:
|
||||
return False
|
||||
|
||||
def notIn(combination: str, patterns: Iterable[str]) -> bool:
|
||||
for pattern in patterns:
|
||||
if match(combination, pattern):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
combinations = [
|
||||
(system, version)
|
||||
for system in systems
|
||||
if system in data["sys"]
|
||||
for version in versions
|
||||
if version in data["python"]
|
||||
and notIn(f"{system}:{version}", excludes)
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
] + [
|
||||
(system, currentMSYS2Version)
|
||||
for system in systems
|
||||
if system in data["runtime"]
|
||||
and notIn(f"{system}:{currentMSYS2Version}", excludes)
|
||||
and notIn(f"{system}:{currentMSYS2Version}", disabled)
|
||||
] + [
|
||||
(system, version)
|
||||
for system, version in includes
|
||||
if system in data["sys"]
|
||||
and version in data["python"]
|
||||
and notIn(f"{system}:{version}", disabled)
|
||||
]
|
||||
print(f"Combinations ({len(combinations)}):")
|
||||
for system, version in combinations:
|
||||
print(f"- {system}:{version}")
|
||||
|
||||
jobs = [
|
||||
{
|
||||
"sysicon": data["sys"][system]["icon"],
|
||||
"system": system,
|
||||
"runs-on": data["sys"][system]["runs-on"],
|
||||
"runtime": "native",
|
||||
"shell": data["sys"][system]["shell"],
|
||||
"pyicon": data["python"][version]["icon"],
|
||||
"python": currentAlphaRelease if version == currentAlphaVersion else version,
|
||||
"envname": data["sys"][system]["name"],
|
||||
}
|
||||
for system, version in combinations if system in data["sys"]
|
||||
] + [
|
||||
{
|
||||
"sysicon": data["runtime"][runtime]["icon"],
|
||||
"system": "msys2",
|
||||
"runs-on": "windows-latest",
|
||||
"runtime": runtime.upper(),
|
||||
"shell": "msys2 {0}",
|
||||
"pyicon": data["python"][currentMSYS2Version]["icon"],
|
||||
"python": version,
|
||||
"envname": data["runtime"][runtime]["name"],
|
||||
}
|
||||
for runtime, version in combinations if runtime not in data["sys"]
|
||||
]
|
||||
|
||||
artifact_names = {
|
||||
"unittesting_xml": f"{name}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{name}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{name}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{name}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{name}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{name}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{name}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{name}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{name}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{name}-StaticTyping-HTML",
|
||||
"package_all": f"{name}-Packages",
|
||||
"documentation_html": f"{name}-Documentation-HTML",
|
||||
"documentation_latex": f"{name}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{name}-Documentation-PDF",
|
||||
}
|
||||
|
||||
print("Parameters:")
|
||||
print(f" python_version: {python_version}")
|
||||
print(f" python_jobs ({len(jobs)}):\n" +
|
||||
"".join([f" {{ " + ", ".join([f"\"{key}\": \"{value}\"" for key, value in job.items()]) + f" }},\n" for job in jobs])
|
||||
)
|
||||
print(f" artifact_names ({len(artifact_names)}):")
|
||||
for id, name in artifact_names.items():
|
||||
print(f" {id:>20}: {name}")
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
python_version={python_version}
|
||||
python_jobs={json_dumps(jobs)}
|
||||
artifact_names={json_dumps(artifact_names)}
|
||||
"""))
|
||||
|
||||
- name: Verify out parameters
|
||||
id: verify
|
||||
run: |
|
||||
printf "python_version: %s\n" '${{ steps.params.outputs.python_version }}'
|
||||
printf "python_jobs: %s\n" '${{ steps.params.outputs.python_jobs }}'
|
||||
printf "artifact_names: %s\n" '${{ steps.params.outputs.artifact_names }}'
|
||||
printf "params: %s\n" '${{ steps.params.outputs.params }}'
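Note (editorial sketch, not part of this diff): the JSON-encoded python_jobs output produced above is intended to be expanded into a test matrix via fromJSON. A minimal caller, with an assumed workflow ref and assumed job names, might look like this:

# Hypothetical caller workflow; ref and job names are assumptions for illustration.
name: Example Pipeline
on: push

jobs:
  Params:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main   # assumed ref
    with:
      name: myPackage                                               # assumed tool name

  UnitTesting:
    needs: Params
    strategy:
      fail-fast: false
      matrix:
        # Each entry carries system, runs-on, shell and python as generated by the Parameters job.
        include: ${{ fromJSON(needs.Params.outputs.python_jobs) }}
    runs-on: ${{ matrix.runs-on }}
    defaults:
      run:
        shell: ${{ matrix.shell }}
    steps:
      - name: 🐍 Setup Python ${{ matrix.python }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}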

.github/workflows/Params.yml (deleted, 69 lines)
@@ -1,69 +0,0 @@
name: Params
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
type: string
|
||||
python_version_list:
|
||||
description: 'Space separated list of Python versions to run tests with.'
|
||||
required: false
|
||||
default: '3.6 3.7 3.8 3.9 3.10'
|
||||
type: string
|
||||
name:
|
||||
description: 'Name of the tool.'
|
||||
required: true
|
||||
type: string
|
||||
outputs:
|
||||
params:
|
||||
description: "Parameters to be used in other jobs."
|
||||
value: ${{ jobs.Params.outputs.params }}
|
||||
python_jobs:
|
||||
description: "List of Python versions to be used in the matrix of other jobs."
|
||||
value: ${{ jobs.Params.outputs.python_jobs }}
|
||||
|
||||
jobs:
|
||||
|
||||
Params:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
params: ${{ steps.params.outputs.params }}
|
||||
python_jobs: ${{ steps.params.outputs.python_jobs }}
|
||||
steps:
|
||||
|
||||
- name: Generate 'params' and 'python_jobs'
|
||||
id: params
|
||||
shell: python
|
||||
run: |
|
||||
name = '${{ inputs.name }}'
|
||||
params = {
|
||||
'python_version': '${{ inputs.python_version }}',
|
||||
'artifacts': {
|
||||
'unittesting': f'{name}-TestReport',
|
||||
'coverage': f'{name}-coverage',
|
||||
'typing': f'{name}-typing',
|
||||
'package': f'{name}-package',
|
||||
'doc': f'{name}-doc',
|
||||
}
|
||||
}
|
||||
print(f'::set-output name=params::{params!s}')
|
||||
print("Params:")
|
||||
print(params)
|
||||
|
||||
data = {
|
||||
'3.6': { 'icon': '🔴', 'until': '23.12.2021' },
|
||||
'3.7': { 'icon': '🟠', 'until': '27.06.2023' },
|
||||
'3.8': { 'icon': '🟡', 'until': 'Oct. 2024' },
|
||||
'3.9': { 'icon': '🟢', 'until': 'Oct. 2025' },
|
||||
'3.10': { 'icon': '🟢', 'until': 'Oct. 2026' },
|
||||
}
|
||||
jobs = [
|
||||
{'python': version, 'icon': data[version]['icon']}
|
||||
for version in '${{ inputs.python_version_list }}'.split(' ')
|
||||
]
|
||||
print(f'::set-output name=python_jobs::{jobs!s}')
|
||||
print("Python jobs:")
|
||||
print(jobs)

.github/workflows/PublishCoverageResults.yml (new file, 236 lines)
@@ -0,0 +1,236 @@
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Publish Code Coverage Results
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
coverage_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-CodeCoverage-SQLite-*'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
|
||||
required: false
|
||||
default: 'pyproject.toml'
|
||||
type: string
|
||||
coverage_sqlite_artifact:
|
||||
description: 'Name of the SQLite coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_xml_artifact:
|
||||
description: 'Name of the XML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the JSON coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_html_artifact:
|
||||
description: 'Name of the HTML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
secrets:
|
||||
codacy_token:
|
||||
description: 'Token to push result to codacy.'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
PublishCoverageResults:
|
||||
name: 📊 Publish Code Coverage Results
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 📥 Download Artifacts
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.coverage_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -pash artifacts
|
||||
|
||||
- name: 🔧 Install coverage and tomli
|
||||
run: |
|
||||
python -m pip install -U --disable-pip-version-check --break-system-packages coverage[toml] tomli
|
||||
|
||||
- name: 🔁 Extract configurations from pyproject.toml
|
||||
id: getVariables
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from sys import version
|
||||
from textwrap import dedent
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
from tomli import load as tomli_load
|
||||
|
||||
htmlDirectory = Path("htmlcov")
|
||||
xmlFile = Path("./coverage.xml")
|
||||
jsonFile = Path("./coverage.json")
|
||||
coverageRC = "${{ inputs.coverage_config }}".strip()
|
||||
|
||||
# Read output paths from 'pyproject.toml' file
|
||||
if coverageRC == "pyproject.toml":
|
||||
pyProjectFile = Path("pyproject.toml")
|
||||
if pyProjectFile.exists():
|
||||
with pyProjectFile.open("rb") as file:
|
||||
pyProjectSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"])
|
||||
xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"])
|
||||
jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"])
|
||||
else:
|
||||
print(f"File '{pyProjectFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{pyProjectFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Read output paths from '.coveragerc' file
|
||||
elif len(coverageRC) > 0:
|
||||
coverageRCFile = Path(coverageRC)
|
||||
if coverageRCFile.exists():
|
||||
with coverageRCFile.open("rb") as file:
|
||||
coverageRCSettings = tomli_load(file)
|
||||
|
||||
htmlDirectory = Path(coverageRCSettings["html"]["directory"])
|
||||
xmlFile = Path(coverageRCSettings["xml"]["output"])
|
||||
jsonFile = Path(coverageRCSettings["json"]["output"])
|
||||
else:
|
||||
print(f"File '{coverageRCFile}' not found.")
|
||||
print(f"::error title=FileNotFoundError::File '{coverageRCFile}' not found.")
|
||||
exit(1)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+", encoding="utf-8") as f:
|
||||
f.write(dedent(f"""\
|
||||
coverage_report_html_directory={htmlDirectory.as_posix()}
|
||||
coverage_report_xml={xmlFile}
|
||||
coverage_report_json={jsonFile}
|
||||
"""))
|
||||
|
||||
print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}")
|
||||
|
||||
- name: Rename .coverage files and move them all into 'coverage/'
|
||||
run: |
|
||||
mkdir -p coverage
|
||||
find artifacts/ -type f -path "*SQLite*.coverage" -exec sh -c 'cp -v $0 "coverage/$(basename $0).$(basename $(dirname $0))"' {} ';'
|
||||
tree -pash coverage
|
||||
|
||||
- name: Combine SQLite files (using Coverage.py)
|
||||
run: coverage combine --data-file=.coverage coverage/
|
||||
|
||||
- name: Report code coverage
|
||||
run: coverage report --rcfile=pyproject.toml --data-file=.coverage
|
||||
|
||||
- name: Convert to XML format (Cobertura)
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
run: coverage xml --data-file=.coverage
|
||||
|
||||
- name: Convert to JSON format
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
run: coverage json --data-file=.coverage
|
||||
|
||||
- name: Convert to HTML format
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
run: |
|
||||
coverage html --data-file=.coverage -d report/coverage/html
|
||||
rm report/coverage/html/.gitignore
|
||||
tree -pash report/coverage/html
|
||||
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}
|
||||
path: .coverage
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📊 Publish code coverage at CodeCov
|
||||
uses: codecov/codecov-action@v5
|
||||
if: inputs.CodeCov == true
|
||||
continue-on-error: true
|
||||
with:
|
||||
files: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
flags: unittests
|
||||
env_vars: PYTHON
|
||||
|
||||
- name: 📉 Publish code coverage at Codacy
|
||||
uses: codacy/codacy-coverage-reporter-action@v1
|
||||
if: inputs.Codacy == true
|
||||
continue-on-error: true
|
||||
with:
|
||||
project-token: ${{ secrets.codacy_token }}
|
||||
coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
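Usage sketch (not part of the diff above): a caller repository could invoke this reusable workflow roughly as follows. The `@r1` ref, the job name under `needs:`, the artifact names, and the `CODACY_TOKEN` secret name are placeholders/assumptions.

  PublishCoverageResults:
    uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@r1   # ref is an assumption
    needs:
      - UnitTesting                                       # caller's test job; name is a placeholder
    with:
      coverage_config: 'pyproject.toml'
      coverage_sqlite_artifact: 'CodeCoverage-SQLite'
      coverage_xml_artifact: 'CodeCoverage-XML'
      coverage_json_artifact: 'CodeCoverage-JSON'
      coverage_html_artifact: 'CodeCoverage-HTML'
    secrets:
      codacy_token: ${{ secrets.CODACY_TOKEN }}           # secret name in the caller is a placeholder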
43 .github/workflows/PublishOnPyPI.yml (vendored)
@@ -1,12 +1,39 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Publish on PyPI
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
@@ -26,24 +53,22 @@ jobs:
|
||||
|
||||
PublishOnPyPI:
|
||||
name: 🚀 Publish to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: 📥 Download artifacts '${{ inputs.artifact }}' from 'Package' job
|
||||
uses: actions/download-artifact@v2
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
path: dist/
|
||||
path: dist
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: ⚙ Install dependencies for packaging and release
|
||||
run: |
|
||||
python -m pip install -U pip
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: ⤴ Release Python source package to PyPI
|
||||
env:
|
||||
@@ -58,6 +83,6 @@ jobs:
|
||||
run: twine upload dist/*.whl
|
||||
|
||||
- name: 🗑️ Delete packaging Artifacts
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
|
||||
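Usage sketch (not part of the diff above); the `@r1` ref and all input values are placeholders, and the PyPI token secret expected by this workflow is not visible in this excerpt, hence `secrets: inherit`.

  PublishOnPyPI:
    uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r1   # ref is an assumption
    if: startsWith(github.ref, 'refs/tags/v')
    needs:
      - Package                                                      # caller's packaging job; name is a placeholder
    with:
      python_version: '3.12'
      requirements: 'wheel twine'                                    # placeholder dependency list
      artifact: 'Package'                                            # placeholder artifact name
    secrets: inherit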
109 .github/workflows/PublishTestResults.yml (vendored, new file)
@@ -0,0 +1,109 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Publish Unit Test Results
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
unittest_artifacts_pattern:
|
||||
required: false
|
||||
default: '*-UnitTestReportSummary-XML-*'
|
||||
type: string
|
||||
merged_junit_artifact:
|
||||
description: 'Name of the merged JUnit Test Summary artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
additional_merge_args:
|
||||
description: 'Additional merging arguments.'
|
||||
required: false
|
||||
default: '"--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"'
|
||||
type: string
|
||||
publish:
|
||||
description: 'Publish test report summary via Dorny Test-Reporter'
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
report_title:
|
||||
description: 'Title of the summary report in the pipeline''s sidebar'
|
||||
required: false
|
||||
default: 'Unit Test Results'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
PublishTestResults:
|
||||
name: 📊 Publish Test Results
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 📥 Download Artifacts
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
pattern: ${{ inputs.unittest_artifacts_pattern }}
|
||||
path: artifacts
|
||||
|
||||
- name: 🔎 Inspect extracted artifact (tarball)
|
||||
run: |
|
||||
tree -pash artifacts
|
||||
|
||||
- name: 🔧 Install pyEDAA.Reports (JUnit Parser and Merger)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check --break-system-packages -U pyEDAA.Reports
|
||||
|
||||
- name: Rename JUnit files and move them all into 'junit/'
|
||||
run: |
|
||||
mkdir -p junit
|
||||
find artifacts/ -type f -path "*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';'
|
||||
tree -pash junit
|
||||
|
||||
- name: 🔁 Merge JUnit Unit Test Summaries
|
||||
run: |
|
||||
pyedaa-reports -v unittest "--merge=pyTest-JUnit:junit/*.xml" ${{ inputs.additional_merge_args }} "--output=pyTest-JUnit:Unittesting.xml"
|
||||
printf "%s\n" "cat Unittesting.xml"
|
||||
cat Unittesting.xml
|
||||
|
||||
- name: 📊 Publish Unit Test Results
|
||||
uses: dorny/test-reporter@v1
|
||||
if: inputs.publish && inputs.report_title != ''
|
||||
with:
|
||||
name: ${{ inputs.report_title }}
|
||||
path: Unittesting.xml
|
||||
reporter: java-junit
|
||||
|
||||
- name: 📤 Upload merged 'JUnit Test Summary' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.merged_junit_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.merged_junit_artifact }}
|
||||
path: Unittesting.xml
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
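Usage sketch (not part of the diff above); the `@r1` ref, the job name and the artifact names are placeholders.

  PublishTestResults:
    uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r1   # ref is an assumption
    needs:
      - UnitTesting                                                       # caller's test job; name is a placeholder
    with:
      merged_junit_artifact: 'UnitTestReportSummary-XML'
      report_title: 'Unit Test Results'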
39 .github/workflows/PublishToGitHubPages.yml (vendored)
@@ -1,8 +1,35 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Publish to GitHub Pages
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
doc:
|
||||
description: 'Name of the documentation artifact.'
|
||||
required: true
|
||||
@@ -22,28 +49,28 @@ jobs:
|
||||
|
||||
PublishToGitHubPages:
|
||||
name: 📚 Publish to GH-Pages
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job
|
||||
uses: actions/download-artifact@v2
|
||||
- name: 📥 Download artifacts '${{ inputs.doc }}' from 'SphinxDocumentation' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.doc }}
|
||||
path: public
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage }}' from 'Coverage' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: ${{ inputs.coverage != '' }}
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: ${{ inputs.coverage }}
|
||||
path: public/coverage
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: ${{ inputs.typing != '' }}
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: ${{ inputs.typing }}
|
||||
path: public/typing
|
||||
|
||||
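Usage sketch (not part of the diff above); the `@r1` ref, the job names and the artifact names are placeholders.

  PublishToGitHubPages:
    uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r1   # ref is an assumption
    needs:
      - SphinxDocumentation
      - PublishCoverageResults
      - StaticTypeCheck
    with:
      doc: 'Documentation-HTML'
      coverage: 'CodeCoverage-HTML'
      typing: 'StaticTyping-HTML'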
63 .github/workflows/Release.yml (vendored)
@@ -1,13 +1,40 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
|
||||
Release:
|
||||
name: 📝 Create 'Release Page' on GitHub
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: 🔁 Extract Git tag from GITHUB_REF
|
||||
@@ -17,13 +44,13 @@ jobs:
|
||||
RELEASE_VERSION=${GIT_TAG#v}
|
||||
RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')"
|
||||
# write to step outputs
|
||||
echo ::set-output name=gitTag::${GIT_TAG}
|
||||
echo ::set-output name=version::${RELEASE_VERSION}
|
||||
echo ::set-output name=datetime::${RELEASE_DATETIME}
|
||||
echo "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT
|
||||
echo "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: 📑 Create Release Page
|
||||
id: createReleasePage
|
||||
uses: actions/create-release@v1
|
||||
id: createReleasePage
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
@@ -33,12 +60,34 @@ jobs:
|
||||
**Automated Release created on: ${{ steps.getVariables.outputs.datetime }}**
|
||||
|
||||
# New Features
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Changes
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Bug Fixes
|
||||
|
||||
* tbd
|
||||
draft: false
|
||||
* tbd
|
||||
|
||||
# Documentation
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Unit Tests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
----------
|
||||
# Related Issues and Pull-Requests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
draft: true
|
||||
prerelease: false
|
||||
|
||||
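Usage sketch (not part of the diff above); the `@r1` ref is an assumption and the tag filter is a placeholder condition.

  Release:
    uses: pyTooling/Actions/.github/workflows/Release.yml@r1   # ref is an assumption
    if: startsWith(github.ref, 'refs/tags/v')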
274 .github/workflows/SphinxDocumentation.yml (vendored, new file)
@@ -0,0 +1,274 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Documentation
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
required: false
|
||||
default: '-r doc/requirements.txt'
|
||||
type: string
|
||||
doc_directory:
|
||||
description: 'Path to the directory containing documentation (Sphinx working directory).'
|
||||
required: false
|
||||
default: 'doc'
|
||||
type: string
|
||||
coverage_report_json_directory:
|
||||
description: ''
|
||||
required: true
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the coverage JSON artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
unittest_xml_artifact:
|
||||
description: 'Name of the unittest XML artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
unittest_xml_directory:
|
||||
description: 'Directory where unittest XML artifact is extracted.'
|
||||
required: false
|
||||
default: 'report/unit'
|
||||
type: string
|
||||
html_artifact:
|
||||
description: 'Name of the HTML documentation artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
latex_artifact:
|
||||
description: 'Name of the LaTeX documentation artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
Sphinx-HTML:
|
||||
name: 📓 HTML Documentation using Sphinx and Python ${{ inputs.python_version }}
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🔧 Install graphviz
|
||||
run: sudo apt-get install -y --no-install-recommends graphviz
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check -U wheel
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.unittest_xml_artifact }}
|
||||
path: ${{ inputs.unittest_xml_directory }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ inputs.coverage_report_json_directory }}
|
||||
|
||||
- name: ☑ Generate HTML documentation
|
||||
if: inputs.html_artifact != ''
|
||||
run: |
|
||||
export PYTHONPATH=$(pwd)
|
||||
|
||||
cd "${{ inputs.doc_directory || '.' }}"
|
||||
sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html
|
||||
|
||||
- name: 📤 Upload 'HTML Documentation' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.html_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.html_artifact }}
|
||||
working-directory: ${{ inputs.doc_directory }}/_build/html
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
Sphinx-LaTeX:
|
||||
name: 📓 LaTeX Documentation using Sphinx and Python ${{ inputs.python_version }}
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
- name: 🔧 Install graphviz
|
||||
run: sudo apt-get install -y --no-install-recommends graphviz
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
run: |
|
||||
python -m pip install --disable-pip-version-check -U wheel
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.unittest_xml_artifact }}
|
||||
path: ${{ inputs.unittest_xml_directory }}
|
||||
|
||||
- name: 📥 Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job
|
||||
uses: pyTooling/download-artifact@v4
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}
|
||||
path: ${{ inputs.coverage_report_json_directory }}
|
||||
|
||||
- name: ☑ Generate LaTeX documentation
|
||||
if: inputs.latex_artifact != ''
|
||||
# continue-on-error: true
|
||||
run: |
|
||||
export PYTHONPATH=$(pwd)
|
||||
|
||||
cd "${{ inputs.doc_directory || '.' }}"
|
||||
sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex
|
||||
# --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . _build/html
|
||||
|
||||
- name: Workaround I - https://github.com/sphinx-doc/sphinx/issues/13190
|
||||
if: inputs.latex_artifact != ''
|
||||
run: |
|
||||
printf "Changing directory to 'doc/_build/latex' ...\n"
|
||||
cd doc/_build/latex
|
||||
|
||||
MIMETYPE_EXTENSIONS=(
|
||||
"image/png:png"
|
||||
"image/jpeg:jpg"
|
||||
"image/svg+xml:svg"
|
||||
)
|
||||
|
||||
printf "Changing file extension according to MIME type ...\n"
|
||||
while IFS=$'\n' read -r file; do
|
||||
printf " Checking '%s' ... " "${file}"
|
||||
mime="$(file --mime-type -b "${file}")"
|
||||
printf "[%s]\n" "${mime}"
|
||||
|
||||
found=0
|
||||
for MIME in "${MIMETYPE_EXTENSIONS[@]}"; do
|
||||
mimetype="${MIME%%:*}"
|
||||
extension="${MIME#*:}"
|
||||
|
||||
if [[ "${mime}" == "${mimetype}" && "${file##*.}" != "${extension}" ]]; then
|
||||
printf " Rename file to '%s' " "${file}.${extension}"
|
||||
mv "${file}" "${file}.${extension}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "[OK]\n"
|
||||
else
|
||||
printf "[FAILED]\n"
|
||||
fi
|
||||
|
||||
printf " Patching LaTeX file for '%s' " "${file}"
|
||||
sed -i "s:{{${file%.*}}\.${file##*.}}:{{${file}}.${extension}}:g" *.tex
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "[OK]\n"
|
||||
else
|
||||
printf "[FAILED]\n"
|
||||
fi
|
||||
|
||||
found=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [[ $found -eq 0 ]]; then
|
||||
printf "[SKIPPED]\n"
|
||||
fi
|
||||
done <<<$(find . -type f -not -iname "*.cls" -not -iname "*.sty" -not -iname "*.xdy" -not -iname "*.svg" -not -iname "*.png" -not -iname "*.jpg" | sed 's:./::')
|
||||
|
||||
- name: Workaround II - https://github.com/sphinx-doc/sphinx/issues/13189
|
||||
if: inputs.latex_artifact != ''
|
||||
run: |
|
||||
printf "Changing directory to 'doc/_build/latex' ...\n"
|
||||
cd doc/_build/latex
|
||||
|
||||
printf "Searching for downloaded images, that need normalization ...\n"
|
||||
for imageExt in png svg jpg jpeg; do
|
||||
printf " Processing '%s' ...\n" "${imageExt}"
|
||||
while IFS=$'\n' read -r imageFile; do
|
||||
newFile="${imageFile//%/_}";
|
||||
|
||||
printf " %s\n" "$imageFile";
|
||||
if [[ "${imageFile}" != "${newFile}" ]]; then
|
||||
printf " Rename file to '%s' " "${newFile}"
|
||||
mv "${imageFile}" "${newFile}"
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "[OK]\n"
|
||||
else
|
||||
printf "[FAILED]\n"
|
||||
fi
|
||||
|
||||
printf " Patching LaTeX file for '%s' " "${newFile}"
|
||||
sed -i "s:{{${imageFile%.*}}\.${imageFile##*.}}:{{${newFile%.*}}.${newFile##*.}}:g" *.tex
|
||||
if [[ $? -eq 0 ]]; then
|
||||
printf "[OK]\n"
|
||||
else
|
||||
printf "[FAILED]\n"
|
||||
fi
|
||||
fi
|
||||
done <<<$(find . -type f -iname "*.$imageExt" | sed 's:./::')
|
||||
done
|
||||
|
||||
- name: 📤 Upload 'LaTeX Documentation' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.latex_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.latex_artifact }}
|
||||
working-directory: ${{ inputs.doc_directory }}/_build/latex
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
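Usage sketch (not part of the diff above); the `@r1` ref, the directory paths and the artifact names are placeholders.

  SphinxDocumentation:
    uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@r1   # ref is an assumption
    needs:
      - PublishTestResults
      - PublishCoverageResults
    with:
      doc_directory: 'doc'
      coverage_report_json_directory: 'report/coverage'
      coverage_json_artifact: 'CodeCoverage-JSON'
      unittest_xml_artifact: 'UnitTestReportSummary-XML'
      html_artifact: 'Documentation-HTML'
      latex_artifact: 'Documentation-LaTeX'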
84 .github/workflows/StaticTypeCheck.yml (vendored)
@@ -1,62 +1,108 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Static Type Check
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
required: false
|
||||
default: '-r tests/requirements.txt'
|
||||
type: string
|
||||
report:
|
||||
description: 'Directory to upload as an artifact.'
|
||||
required: false
|
||||
default: 'htmlmypy'
|
||||
type: string
|
||||
commands:
|
||||
description: 'Commands to run the static type checks.'
|
||||
required: true
|
||||
type: string
|
||||
artifact:
|
||||
description: 'Name of the typing artifact.'
|
||||
html_report:
|
||||
description: 'Directory to upload as an artifact.'
|
||||
required: false
|
||||
default: 'htmlmypy'
|
||||
type: string
|
||||
junit_report:
|
||||
description: 'junit file to upload as an artifact.'
|
||||
required: false
|
||||
default: 'StaticTypingSummary.xml'
|
||||
type: string
|
||||
html_artifact:
|
||||
description: 'Name of the typing artifact (HTML report).'
|
||||
required: true
|
||||
type: string
|
||||
junit_artifact:
|
||||
description: 'Name of the typing junit artifact (junit XML).'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
|
||||
StaticTypeCheck:
|
||||
name: 👀 Check Static Typing using Python ${{ inputs.python_version }}
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 🐍 Setup Python ${{ inputs.python_version }}
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
- name: 🗂 Install dependencies
|
||||
run: |
|
||||
python -m pip install -U pip
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: Check Static Typing
|
||||
continue-on-error: true
|
||||
run: ${{ inputs.commands }}
|
||||
|
||||
- name: 📤 Upload 'Static Typing Report' artifact
|
||||
if: ${{ inputs.artifact != '' }}
|
||||
- name: 📤 Upload 'Static Typing Report' HTML artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: ${{ inputs.html_artifact != '' }}
|
||||
continue-on-error: true
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: ${{ inputs.artifact }}
|
||||
path: ${{ inputs.report }}
|
||||
name: ${{ inputs.html_artifact }}
|
||||
working-directory: ${{ inputs.html_report }}
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Static Typing Report' JUnit artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: ${{ inputs.junit_artifact != '' }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.junit_artifact }}
|
||||
path: ${{ inputs.junit_report }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
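Usage sketch (not part of the diff above); the `@r1` ref, the package name and the artifact names are placeholders.

  StaticTypeCheck:
    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r1   # ref is an assumption
    with:
      commands: |
        mypy --html-report htmlmypy -p myPackage                      # package name is a placeholder
      html_report: 'htmlmypy'
      html_artifact: 'StaticTyping-HTML'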
176 .github/workflows/TestReleaser.yml (vendored, new file)
@@ -0,0 +1,176 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Test Releaser
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
- '!tip'
|
||||
- '!v*'
|
||||
branches:
|
||||
- '**'
|
||||
- '!r*'
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * 4'
|
||||
|
||||
env:
|
||||
CI: true
|
||||
|
||||
jobs:
|
||||
|
||||
|
||||
Image:
|
||||
runs-on: ubuntu-24.04
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build container image
|
||||
run: docker build -t ghcr.io/pytooling/releaser -f releaser/Dockerfile releaser
|
||||
|
||||
- name: Push container image
|
||||
uses: ./with-post-step
|
||||
with:
|
||||
main: |
|
||||
echo '${{ github.token }}' | docker login ghcr.io -u GitHub-Actions --password-stdin
|
||||
docker push ghcr.io/pytooling/releaser
|
||||
post: docker logout ghcr.io
|
||||
|
||||
|
||||
Composite:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt
|
||||
|
||||
- name: Single
|
||||
uses: ./releaser/composite
|
||||
with:
|
||||
rm: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifact-*.txt
|
||||
|
||||
- name: List
|
||||
uses: ./releaser/composite
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
artifact-*.txt
|
||||
README.md
|
||||
|
||||
- name: Add artifacts/*.txt
|
||||
run: |
|
||||
mkdir artifacts
|
||||
printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt
|
||||
touch artifacts/empty_file.txt
|
||||
|
||||
- name: Single in subdir
|
||||
uses: ./releaser/composite
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/artifact.txt
|
||||
|
||||
- name: Add artifacts/*.md
|
||||
run: |
|
||||
printf "%s\n" "releaser hello" > artifacts/hello.md
|
||||
printf "%s\n" "releaser world" > artifacts/world.md
|
||||
|
||||
- name: Directory wildcard
|
||||
uses: ./releaser/composite
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/*
|
||||
|
||||
- name: Add artifacts/subdir
|
||||
run: |
|
||||
mkdir artifacts/subdir
|
||||
printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt
|
||||
|
||||
- name: Directory wildcard (recursive)
|
||||
uses: ./releaser/composite
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/**
|
||||
|
||||
|
||||
Test:
|
||||
needs:
|
||||
- Image
|
||||
- Composite
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- run: printf "%s\n" "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt
|
||||
|
||||
- name: Single
|
||||
uses: ./releaser
|
||||
with:
|
||||
rm: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifact-*.txt
|
||||
|
||||
- name: List
|
||||
uses: ./releaser
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
artifact-*.txt
|
||||
README.md
|
||||
|
||||
- name: Add artifacts/*.txt
|
||||
run: |
|
||||
mkdir artifacts
|
||||
printf "%s\n" "Build some tool and generate some artifacts" > artifacts/artifact.txt
|
||||
touch artifacts/empty_file.txt
|
||||
|
||||
- name: Single in subdir
|
||||
uses: ./releaser
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/artifact.txt
|
||||
|
||||
- name: Add artifacts/*.md
|
||||
run: |
|
||||
printf "%s\n" "releaser hello" > artifacts/hello.md
|
||||
printf "%s\n" "releaser world" > artifacts/world.md
|
||||
|
||||
- name: Directory wildcard
|
||||
uses: ./releaser
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/*
|
||||
|
||||
- name: Add artifacts/subdir
|
||||
run: |
|
||||
mkdir artifacts/subdir
|
||||
printf "%s\n" "Test recursive glob" > artifacts/subdir/deep_file.txt
|
||||
|
||||
- name: Directory wildcard (recursive)
|
||||
uses: ./releaser
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: artifacts/**
|
||||
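The workflow above exercises the local `./releaser` action and its composite variant; from another repository the same action would be referenced by its full path. Sketch only (the `@r1` ref is an assumption; it uses just the `token`, `rm` and `files` inputs shown in this excerpt):

      - name: Attach artifacts to a release
        uses: pyTooling/Actions/releaser@r1   # ref is an assumption
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          rm: true
          files: artifacts/**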
437 .github/workflows/UnitTesting.yml (vendored)
@@ -1,58 +1,455 @@
|
||||
name: Unit Testing
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Unit Testing (Matrix)
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
jobs:
|
||||
description: 'Space separated list of Python versions to run tests with.'
|
||||
description: 'JSON list with environment fields, telling the system and Python versions to run tests with.'
|
||||
required: true
|
||||
type: string
|
||||
apt:
|
||||
description: 'Ubuntu dependencies to be installed through apt.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
brew:
|
||||
description: 'macOS dependencies to be installed through brew.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
pacboy:
|
||||
description: 'MSYS2 dependencies to be installed through pacboy (pacman).'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
requirements:
|
||||
description: 'Python dependencies to be installed through pip.'
|
||||
required: false
|
||||
default: '-r tests/requirements.txt'
|
||||
type: string
|
||||
artifact:
|
||||
mingw_requirements:
|
||||
description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
macos_before_script:
|
||||
description: 'Scripts to execute before pytest on macOS (Intel).'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
macos_arm_before_script:
|
||||
description: 'Scripts to execute before pytest on macOS (ARM).'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ubuntu_before_script:
|
||||
description: 'Scripts to execute before pytest on Ubuntu.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
mingw64_before_script:
|
||||
description: 'Scripts to execute before pytest on Windows within MSYS2 MinGW64.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
ucrt64_before_script:
|
||||
description: 'Scripts to execute before pytest on Windows within MSYS2 UCRT64.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
root_directory:
|
||||
description: 'Working directory for running tests.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
tests_directory:
|
||||
description: 'Path to the directory containing tests (relative to root_directory).'
|
||||
required: false
|
||||
default: 'tests'
|
||||
type: string
|
||||
unittest_directory:
|
||||
description: 'Path to the directory containing unit tests (relative to tests_directory).'
|
||||
required: false
|
||||
default: 'unit'
|
||||
type: string
|
||||
unittest_report_xml_directory:
|
||||
description: 'Path where to save the unittest summary report XML.'
|
||||
required: false
|
||||
default: 'report/unit'
|
||||
type: string
|
||||
unittest_report_xml_filename:
|
||||
description: 'Filename of the unittest summary report XML.'
|
||||
required: false
|
||||
default: 'TestReportSummary.xml'
|
||||
type: string
|
||||
coverage_config:
|
||||
description: 'Path to the .coveragerc file. Use pyproject.toml by default.'
|
||||
required: false
|
||||
default: 'pyproject.toml'
|
||||
type: string
|
||||
coverage_report_html_directory:
|
||||
description: ''
|
||||
required: false
|
||||
default: 'report/coverage/html'
|
||||
type: string
|
||||
unittest_xml_artifact:
|
||||
description: "Generate unit test report with junitxml and upload results as an artifact."
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
unittest_html_artifact:
|
||||
description: "Generate unit test report with junitxml and upload results as an artifact."
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_sqlite_artifact:
|
||||
description: 'Name of the SQLite coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_xml_artifact:
|
||||
description: 'Name of the XML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_json_artifact:
|
||||
description: 'Name of the JSON coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
coverage_html_artifact:
|
||||
description: 'Name of the HTML coverage artifact.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
|
||||
UnitTesting:
|
||||
name: ${{ matrix.icon }} Unit Tests using Python ${{ matrix.python }}
|
||||
runs-on: ubuntu-latest
|
||||
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests - Python ${{ matrix.python }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(inputs.jobs) }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: ${{ matrix.shell }}
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: true
|
||||
submodules: true
|
||||
|
||||
# Package Manager steps
|
||||
- name: 🔧 Install homebrew dependencies on macOS
|
||||
if: ( matrix.system == 'macos' || matrix.system == 'macos-arm' ) && inputs.brew != ''
|
||||
run: brew install ${{ inputs.brew }}
|
||||
|
||||
- name: 🔧 Install apt dependencies on Ubuntu
|
||||
if: matrix.system == 'ubuntu' && inputs.apt != ''
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends ${{ inputs.apt }}
|
||||
|
||||
# Compute Dependencies for MSYS2 steps
|
||||
|
||||
- name: 🔧 Install dependencies (system Python for Python shell)
|
||||
if: matrix.system == 'msys2'
|
||||
shell: pwsh
|
||||
run: |
|
||||
py -3.9 -m pip install --disable-pip-version-check -U tomli
|
||||
|
||||
- name: Compute pacman/pacboy packages
|
||||
id: pacboy
|
||||
if: matrix.system == 'msys2'
|
||||
shell: python
|
||||
run: |
|
||||
from os import getenv
|
||||
from pathlib import Path
|
||||
from re import compile
|
||||
from sys import version
|
||||
|
||||
print(f"Python: {version}")
|
||||
|
||||
def loadRequirementsFile(requirementsFile: Path):
|
||||
requirements = []
|
||||
with requirementsFile.open("r") as file:
|
||||
for line in file.readlines():
|
||||
line = line.strip()
|
||||
if line.startswith("#") or line.startswith("https") or line == "":
|
||||
continue
|
||||
elif line.startswith("-r"):
|
||||
# Remove the first word/argument (-r)
|
||||
requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
|
||||
else:
|
||||
requirements.append(line)
|
||||
|
||||
return requirements
|
||||
|
||||
requirements = "${{ inputs.requirements }}"
|
||||
if requirements.startswith("-r"):
|
||||
requirementsFile = Path(requirements[2:].lstrip())
|
||||
try:
|
||||
dependencies = loadRequirementsFile(requirementsFile)
|
||||
except FileNotFoundError as ex:
|
||||
print(f"::error title=FileNotFoundError::{ex}")
|
||||
exit(1)
|
||||
else:
|
||||
dependencies = [req.strip() for req in requirements.split(" ")]
|
||||
|
||||
packages = {
|
||||
"coverage": "python-coverage:p",
|
||||
"docstr_coverage": "python-pyaml:p",
|
||||
"igraph": "igraph:p",
|
||||
"jinja2": "python-markupsafe:p",
|
||||
"lxml": "python-lxml:p",
|
||||
"numpy": "python-numpy:p",
|
||||
"markupsafe": "python-markupsafe:p",
|
||||
"pip": "python-pip:p",
|
||||
"pyyaml": "python-pyyaml:p",
|
||||
"ruamel.yaml": "python-ruamel-yaml:p",
|
||||
# "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
"sphinx": "python-markupsafe:p",
|
||||
"tomli": "python-tomli:p",
|
||||
"wheel": "python-wheel:p",
|
||||
"pyedaa.projectmodel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
"pyedaa.reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p",
|
||||
}
|
||||
subPackages = {
|
||||
"pytooling": {
|
||||
"yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
|
||||
},
|
||||
}
|
||||
|
||||
regExp = compile(r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Comperator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<VersionExtension>\w+))?")
|
||||
|
||||
pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
|
||||
print(f"Processing dependencies ({len(dependencies)}):")
|
||||
for dependency in dependencies:
|
||||
print(f" {dependency}")
|
||||
|
||||
match = regExp.match(dependency.lower())
|
||||
if not match:
|
||||
print(f" Wrong format: {dependency}")
|
||||
print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
|
||||
continue
|
||||
|
||||
package = match["PackageName"]
|
||||
if package in packages:
|
||||
rewrite = packages[package]
|
||||
print(f" Found rewrite rule for '{package}': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
if match["SubPackages"] and package in subPackages:
|
||||
for subPackage in match["SubPackages"].split(","):
|
||||
if subPackage in subPackages[package]:
|
||||
rewrite = subPackages[package][subPackage]
|
||||
print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
|
||||
pacboyPackages.add(rewrite)
|
||||
|
||||
# Write jobs to special file
|
||||
github_output = Path(getenv("GITHUB_OUTPUT"))
|
||||
print(f"GITHUB_OUTPUT: {github_output}")
|
||||
with github_output.open("a+") as f:
|
||||
f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n")
|
||||
|
||||
# Python setup
|
||||
|
||||
- name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}'
|
||||
uses: msys2/setup-msys2@v2
|
||||
if: matrix.system == 'msys2'
|
||||
with:
|
||||
msystem: ${{ matrix.runtime }}
|
||||
update: true
|
||||
pacboy: >-
|
||||
${{ steps.pacboy.outputs.pacboy_packages }}
|
||||
${{ inputs.pacboy }}
|
||||
|
||||
- name: 🐍 Setup Python ${{ matrix.python }}
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
if: matrix.system != 'msys2'
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
- name: 🔧 Install dependencies
|
||||
run: |
|
||||
python -m pip install -U pip
|
||||
python -m pip install ${{ inputs.requirements }}
|
||||
# Python Dependency steps
|
||||
|
||||
- name: ☑ Run unit tests
|
||||
- name: 🔧 Install wheel,tomli and pip dependencies (native)
|
||||
if: matrix.system != 'msys2'
|
||||
run: |
|
||||
[ 'x${{ inputs.artifact }}' != 'x' ] && PYTEST_ARGS='--junitxml=TestReport.xml' || unset PYTEST_ARGS
|
||||
python -m pytest -rA tests/unit $PYTEST_ARGS --color=yes
|
||||
python -m pip install --disable-pip-version-check -U wheel tomli
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
|
||||
- name: 📤 Upload 'TestReport.xml' artifact
|
||||
if: inputs.TestReport == 'true'
|
||||
uses: actions/upload-artifact@v2
|
||||
- name: 🔧 Install pip dependencies (MSYS2)
|
||||
if: matrix.system == 'msys2'
|
||||
run: |
|
||||
if [ -n '${{ inputs.mingw_requirements }}' ]; then
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }}
|
||||
else
|
||||
python -m pip install --disable-pip-version-check ${{ inputs.requirements }}
|
||||
fi
|
||||
|
||||
# Before scripts
|
||||
|
||||
- name: 🍎 macOS (Intel) before scripts
|
||||
if: matrix.system == 'macos' && inputs.macos_before_script != ''
|
||||
run: ${{ inputs.macos_before_script }}
|
||||
|
||||
- name: 🍏 macOS (ARM) before scripts
|
||||
if: matrix.system == 'macos-arm' && inputs.macos_arm_before_script != ''
|
||||
run: ${{ inputs.macos_arm_before_script }}
|
||||
|
||||
- name: 🐧 Ubuntu before scripts
|
||||
if: matrix.system == 'ubuntu' && inputs.ubuntu_before_script != ''
|
||||
run: ${{ inputs.ubuntu_before_script }}
|
||||
|
||||
# Windows before script
|
||||
|
||||
- name: 🪟🟦 MinGW64 before scripts
|
||||
if: matrix.system == 'msys2' && matrix.runtime == 'MINGW64' && inputs.mingw64_before_script != ''
|
||||
run: ${{ inputs.mingw64_before_script }}
|
||||
|
||||
- name: 🪟🟨 UCRT64 before scripts
|
||||
if: matrix.system == 'msys2' && matrix.runtime == 'UCRT64' && inputs.ucrt64_before_script != ''
|
||||
run: ${{ inputs.ucrt64_before_script }}
|
||||
|
||||
# Run pytests
|
||||
|
||||
- name: ✅ Run unit tests (Ubuntu/macOS)
|
||||
if: matrix.system != 'windows'
|
||||
run: |
|
||||
export ENVIRONMENT_NAME="${{ matrix.envname }}"
|
||||
export PYTHONPATH=$(pwd)
|
||||
|
||||
cd "${{ inputs.root_directory || '.' }}"
|
||||
[ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}' || unset PYTEST_ARGS
|
||||
if [ -n '${{ inputs.coverage_config }}' ]; then
|
||||
printf "%s\n" "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
else
|
||||
printf "%s\n" "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
fi
|
||||
|
||||
- name: ✅ Run unit tests (Windows)
|
||||
if: matrix.system == 'windows'
|
||||
run: |
|
||||
$env:ENVIRONMENT_NAME = "${{ matrix.envname }}"
|
||||
$env:PYTHONPATH = (Get-Location).ToString()
|
||||
|
||||
cd "${{ inputs.root_directory || '.' }}"
|
||||
$PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=${{ inputs.unittest_report_xml_directory }}/${{ inputs.unittest_report_xml_filename }}" } else { "" }
|
||||
if ("${{ inputs.coverage_config }}") {
|
||||
Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
} else {
|
||||
Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}"
|
||||
python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}
|
||||
}
|
||||
|
||||
- name: Convert coverage to XML format (Cobertura)
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
run: coverage xml --data-file=.coverage
|
||||
|
||||
- name: Convert coverage to JSON format
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
run: coverage json --data-file=.coverage
|
||||
|
||||
- name: Convert coverage to HTML format
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
run: |
|
||||
coverage html --data-file=.coverage -d ${{ inputs.coverage_report_html_directory }}
|
||||
rm ${{ inputs.coverage_report_html_directory }}/.gitignore
|
||||
|
||||
# Upload artifacts
|
||||
|
||||
- name: 📤 Upload '${{ inputs.unittest_report_xml_filename }}' artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
if: inputs.unittest_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: ${{ inputs.artifact }}-${{ matrix.python }}
|
||||
path: TestReport.xml
|
||||
name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: ${{ inputs.unittest_report_xml_directory }}
|
||||
path: ${{ inputs.unittest_report_xml_filename }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
# - name: 📤 Upload 'Unit Tests HTML Report' artifact
|
||||
# if: inputs.unittest_html_artifact != ''
|
||||
# continue-on-error: true
|
||||
# uses: pyTooling/upload-artifact@v4
|
||||
# with:
|
||||
# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
# path: ${{ steps.getVariables.outputs.unittest_report_html_directory }}
|
||||
# if-no-files-found: error
|
||||
# retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage SQLite Database' artifact
|
||||
if: inputs.coverage_sqlite_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: .coverage
|
||||
include-hidden-files: true
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage XML Report' artifact
|
||||
if: inputs.coverage_xml_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_xml }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage JSON Report' artifact
|
||||
if: inputs.coverage_json_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
path: ${{ steps.getVariables.outputs.coverage_report_json }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 📤 Upload 'Coverage HTML Report' artifact
|
||||
if: inputs.coverage_html_artifact != ''
|
||||
continue-on-error: true
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }}
|
||||
working-directory: ${{ steps.getVariables.outputs.coverage_report_html_directory }}
|
||||
path: '*'
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
.github/workflows/VerifyDocs.yml (vendored, 37 changed lines)
@@ -1,26 +1,53 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: Verify examples
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ubuntu_image_version:
|
||||
description: 'Ubuntu image version.'
|
||||
required: false
|
||||
default: '24.04'
|
||||
type: string
|
||||
python_version:
|
||||
description: 'Python version.'
|
||||
required: false
|
||||
default: '3.10'
|
||||
default: '3.12'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
|
||||
VerifyDocs:
|
||||
name: 👍 Verify example snippets using Python ${{ inputs.python_version }}
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}"
|
||||
|
||||
steps:
|
||||
- name: ⏬ Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 🐍 Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python_version }}
|
||||
|
||||
@@ -50,7 +77,7 @@ jobs:
|
||||
- name: Print example.py
|
||||
run: cat tests/docs/example.py
|
||||
|
||||
- name: ☑ Run example snippet
|
||||
- name: ✅ Run example snippet
|
||||
working-directory: tests/docs
|
||||
run: |
|
||||
python3 example.py
|
||||
|
||||
.github/workflows/_Checking_ArtifactCleanup.yml (new file, vendored, 61 lines)
@@ -0,0 +1,61 @@
|
||||
name: Verification Pipeline for ArtifactCleanup
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12 3.13"
|
||||
system_list: "ubuntu windows"
|
||||
|
||||
Testing:
|
||||
name: Artifact generation ${{ matrix.system }}-${{ matrix.python }}
|
||||
needs:
|
||||
- Params
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
strategy:
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.Params.outputs.python_jobs) }}
|
||||
steps:
|
||||
- name: Content creation for ${{ matrix.system }}-${{ matrix.python }}
|
||||
run: printf "%s\n" "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt
|
||||
|
||||
- name: 📤 Upload artifact for ${{ matrix.system }}-${{ matrix.python }}
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }}
|
||||
path: artifact.txt
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
Package:
|
||||
name: Package generation
|
||||
needs:
|
||||
- Params
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Package creation
|
||||
run: printf "%s\n" "Package" >> package.txt
|
||||
|
||||
- name: 📤 Upload package artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
path: package.txt
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
needs:
|
||||
- Params
|
||||
- Testing
|
||||
- Package
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
|
||||
.github/workflows/_Checking_JobTemplates.yml (new file, vendored, 236 lines)
@@ -0,0 +1,236 @@
|
||||
name: Verification of Job Templates
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ConfigParams:
|
||||
uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@main
|
||||
with:
|
||||
package_name: pyDummy
|
||||
|
||||
UnitTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: pyDummy
|
||||
python_version_list: "3.9 3.10 3.11 3.12 3.13 pypy-3.9 pypy-3.10"
|
||||
# disable_list: "windows:pypy-3.10"
|
||||
|
||||
PlatformTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Platform
|
||||
python_version_list: ""
|
||||
system_list: "ubuntu windows macos mingw64 clang64 ucrt64"
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }}
|
||||
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
|
||||
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
|
||||
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
|
||||
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
PlatformTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- PlatformTestingParams
|
||||
with:
|
||||
jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }}
|
||||
# tests_directory: ""
|
||||
unittest_directory: platform
|
||||
unittest_report_xml_directory: ${{ needs.ConfigParams.outputs.unittest_report_xml_directory }}
|
||||
unittest_report_xml_filename: ${{ needs.ConfigParams.outputs.unittest_report_xml_filename }}
|
||||
coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }}
|
||||
unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
commands: |
|
||||
${{ needs.ConfigParams.outputs.mypy_prepare_command }}
|
||||
mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }}
|
||||
html_report: 'htmlmypy'
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
DocCoverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CheckDocumentation.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
directory: ${{ needs.ConfigParams.outputs.package_directory }}
|
||||
# fail_below: 70
|
||||
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
|
||||
PublishCoverageResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
with:
|
||||
coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
with:
|
||||
additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit;reduce-depth:pytest.tests.platform"'
|
||||
merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
|
||||
# VerifyDocs:
|
||||
# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
|
||||
# needs:
|
||||
# - UnitTestingParams
|
||||
# with:
|
||||
# python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
|
||||
Documentation:
|
||||
uses: pyTooling/Actions/.github/workflows/SphinxDocumentation.yml@main
|
||||
needs:
|
||||
- ConfigParams
|
||||
- UnitTestingParams
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
# - VerifyDocs
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
coverage_report_json_directory: ${{ needs.ConfigParams.outputs.coverage_report_json_directory }}
|
||||
unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
|
||||
IntermediateCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/IntermediateCleanUp.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PublishCoverageResults
|
||||
- PublishTestResults
|
||||
- Documentation
|
||||
with:
|
||||
sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-
|
||||
xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-
|
||||
|
||||
PDFDocumentation:
|
||||
uses: pyTooling/Actions/.github/workflows/LaTeXDocumentation.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
with:
|
||||
document: Actions
|
||||
latex_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
pdf_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_pdf }}
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- Documentation
|
||||
- PDFDocumentation
|
||||
- PublishCoverageResults
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
ReleasePage:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTesting
|
||||
- PlatformTesting
|
||||
# - StaticTypeCheck
|
||||
- Package
|
||||
- PublishToGitHubPages
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- ReleasePage
|
||||
# - Package
|
||||
with:
|
||||
python_version: ${{ needs.UnitTestingParams.outputs.python_version }}
|
||||
requirements: -r dist/requirements.txt
|
||||
artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
needs:
|
||||
- UnitTestingParams
|
||||
- PlatformTestingParams
|
||||
- UnitTesting
|
||||
- StaticTypeCheck
|
||||
- PlatformTesting
|
||||
- Documentation
|
||||
- PDFDocumentation
|
||||
- PublishTestResults
|
||||
- PublishCoverageResults
|
||||
- PublishToGitHubPages
|
||||
- IntermediateCleanUp
|
||||
with:
|
||||
package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-*
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }}
|
||||
${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_latex }}
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}-*
|
||||
${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}-*
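The 'remaining:' list mixes exact artifact names with trailing '-*' patterns, so the per-job artifacts (whose names end in a system/runtime/Python suffix) are caught by a single prefix entry. A minimal sketch of that kind of matching, assuming shell-style globbing comparable to what the ArtifactCleanUp workflow does internally (the artifact names below are made-up examples):

# Illustrative only: match artifact names against glob patterns like those in
# the 'remaining:' list above. The artifact names here are made-up examples.
from fnmatch import fnmatch

patterns = ["pyDummy-UnitTestReportSummary-XML-*", "pyDummy-Packages"]
artifacts = [
    "pyDummy-UnitTestReportSummary-XML-ubuntu-3.13",
    "pyDummy-Packages",
    "pyDummy-Documentation-HTML",
]
to_delete = [name for name in artifacts if any(fnmatch(name, p) for p in patterns)]
print(to_delete)  # the first two names match; the documentation artifact stays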
|
||||
.github/workflows/_Checking_NamespacePackage_Pipeline.yml (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
name: Verification of Pipeline Templates (Namespace Package)

on:
  push:
  workflow_dispatch:

jobs:
  NamespacePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_namespace: pyExamples
      package_name: Extensions
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
.github/workflows/_Checking_Nightly.yml (new file, vendored, 120 lines)
@@ -0,0 +1,120 @@
|
||||
name: Verification of Nightly Releases
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Build:
|
||||
name: Build something
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- name: 🖉 Build 1
|
||||
run: |
|
||||
printf "%s\n" "Document 1 $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
printf "%s\n" "Analysis log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > analysis.log
|
||||
printf "%s\n" "Build log $(date --utc '+%d.%m.%Y - %H:%M:%S')" > build.log
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: pyTooling/upload-artifact@v4
|
||||
with:
|
||||
name: document
|
||||
path: |
|
||||
document1.txt
|
||||
*.log
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- name: 🖉 Program
|
||||
run: |
|
||||
printf "%s\n" "Document other $(date --utc '+%d.%m.%Y - %H:%M:%S')" > document1.txt
|
||||
printf "%s\n" "Program $(date --utc '+%d.%m.%Y - %H:%M:%S')" > program.py
|
||||
|
||||
- name: 📤 Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: other
|
||||
path: |
|
||||
*.txt
|
||||
*.py
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
NightlyPage:
|
||||
uses: pyTooling/Actions/.github/workflows/NightlyRelease.yml@main
|
||||
needs:
|
||||
- Build
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
with:
|
||||
prerelease: true
|
||||
replacements: |
|
||||
version=4.2.0
|
||||
tool=myTool
|
||||
prog=program
|
||||
nightly_title: "Nightly Test Release"
|
||||
nightly_description: |
|
||||
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
|
||||
|
||||
# %tool% %version%
|
||||
|
||||
* %prog%
|
||||
assets: |
|
||||
document: document1.txt: Documentation
|
||||
document: build.log: Logfile - %tool% - %tool%
|
||||
other: document1.txt: SBOM - %version%
|
||||
other: %prog%.py: Application - %tool% - %version%
|
||||
document:!archive1.zip: Archive 1 - zip
|
||||
document:!archive2.tgz: Archive 2 - tgz
|
||||
document:!archive3.tar.gz: Archive 3 - tar.gz
|
||||
document:!archive4.tzst: Archive 4 - tzst
|
||||
document:!archive5.tar.zst:Archive 5 - tar.zst
|
||||
document:$archive6.tgz: Archive 6 - tgz + dir
|
||||
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
|
||||
document:$archive8.tzst: Archive 8 - tzst + dir
|
||||
document:$archive9.tar.zst:Archive 9 - tar.zst + dir
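Both nightly jobs rely on the same two conventions: 'replacements:' defines name=value pairs, and %name% tokens in the title, description and asset lines are substituted before the release is assembled. A minimal sketch of that substitution, assuming plain textual replacement (the NightlyRelease workflow may implement it differently):

# Illustrative sketch of the %name% placeholder substitution driven by the
# 'replacements:' input above; the real logic lives in NightlyRelease.yml.
replacements = {}
for line in "version=4.2.0\ntool=myTool\nprog=program".splitlines():
    key, _, value = line.partition("=")
    replacements[key.strip()] = value.strip()

def substitute(text: str) -> str:
    for key, value in replacements.items():
        text = text.replace(f"%{key}%", value)
    return text

print(substitute("other: %prog%.py: Application - %tool% - %version%"))
# -> other: program.py: Application - myTool - 4.2.0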
|
||||
|
||||
NightlyPageWithInventory:
|
||||
uses: ./.github/workflows/NightlyRelease.yml
|
||||
needs:
|
||||
- Build
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
# attestations: write
|
||||
with:
|
||||
replacements: |
|
||||
version=4.2.0
|
||||
tool=myTool
|
||||
prog=program
|
||||
nightly_name: inventory
|
||||
nightly_title: "Nightly Test Release with Inventory"
|
||||
nightly_description: |
|
||||
This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
|
||||
|
||||
# %tool% %version%
|
||||
|
||||
* %prog%
|
||||
* inventory.json
|
||||
inventory-json: "inventory.json"
|
||||
inventory-version: 4.2.5
|
||||
inventory-categories: "kind1,kind2"
|
||||
assets: |
|
||||
document: document1.txt: doc,html: Documentation
|
||||
document: build.log: build,log: Logfile - %tool% - %tool%
|
||||
other: document1.txt: build,SBOM:SBOM - %version%
|
||||
other: %prog%.py: app,binary:Application - %tool% - %version%
|
||||
document:!archive1.zip: Archive 1 - zip
|
||||
document:!archive2.tgz: Archive 2 - tgz
|
||||
document:!archive3.tar.gz: Archive 3 - tar.gz
|
||||
document:!archive4.tzst: Archive 4 - tzst
|
||||
document:!archive5.tar.zst: Archive 5 - tar.zst
|
||||
document:$archive6.tgz: Archive 6 - tgz + dir
|
||||
document:$archive7.tar.gz: Archive 7 - tar.gz + dir
|
||||
document:$archive8.tzst: Archive 8 - tzst + dir
|
||||
document:$archive9.tar.zst: Archive 9 - tar.zst + dir
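Each asset line follows the colon-separated pattern 'artifact: file[: categories]: title', where the optional categories field only appears when an inventory is generated, and a '!' or '$' prefix on the file name appears to request packing into the named archive ('$' additionally keeping the directory level, judging by the titles). A rough parser sketch under those assumptions; the authoritative syntax is defined by the NightlyRelease workflow:

# Rough, illustrative parser for the asset lines above; the '!'/'$' prefixes
# and the optional categories field are interpreted here as assumptions.
def parse_asset(line: str) -> dict:
    parts = [part.strip() for part in line.split(":")]
    artifact, file = parts[0], parts[1]
    categories = parts[2] if len(parts) == 4 else None  # e.g. "doc,html"
    return {"artifact": artifact, "file": file,
            "categories": categories, "title": parts[-1]}

print(parse_asset("document: document1.txt: doc,html: Documentation"))
print(parse_asset("document:!archive1.zip: Archive 1 - zip"))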
|
||||
.github/workflows/_Checking_Parameters.yml (new file, vendored, 514 lines)
@@ -0,0 +1,514 @@
|
||||
name: Verification Pipeline for Parameters
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params_Default:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
|
||||
Params_PythonVersions:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11 3.12 pypy-3.9 pypy-3.10"
|
||||
|
||||
Params_Systems:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
system_list: "windows mingw32 mingw64"
|
||||
|
||||
Params_Include:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.11"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "ubuntu:3.12 ubuntu:3.13"
|
||||
|
||||
Params_Exclude:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
exclude_list: "windows:3.12 windows:3.13"
|
||||
|
||||
Params_Disable:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
disable_list: "windows:3.12 windows:3.13"
|
||||
|
||||
Params_All:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
|
||||
with:
|
||||
name: Example
|
||||
python_version_list: "3.12 3.13"
|
||||
system_list: "ubuntu windows macos macos-arm"
|
||||
include_list: "windows:3.10 windows:3.11 windows:3.13"
|
||||
exclude_list: "macos:3.12 macos:3.13"
|
||||
|
||||
Params_Check:
|
||||
needs:
|
||||
- Params_Default
|
||||
- Params_PythonVersions
|
||||
- Params_Systems
|
||||
- Params_Include
|
||||
- Params_Exclude
|
||||
- Params_Disable
|
||||
- Params_All
|
||||
runs-on: ubuntu-24.04
|
||||
defaults:
|
||||
run:
|
||||
shell: python
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: pip install --disable-pip-version-check --break-system-packages pyTooling
|
||||
# Params_Default
|
||||
- name: Checking results from 'Params_Default'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
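# Note: the Parameters workflow emits Python-style, single-quoted literals;
# swapping the quotes below turns them into JSON that json_loads() accepts.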
actualPythonVersion = """${{ needs.Params_Default.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Default.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Default.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_PythonVersions
|
||||
- name: Checking results from 'Params_PythonVersions'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.11", "3.12", "pypy-3.9", "pypy-3.10"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.12", "ucrt64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_PythonVersions.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_PythonVersions.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_PythonVersions.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Systems
|
||||
- name: Checking results from 'Params_Systems'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
expectedSystems = ["windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.12", "mingw64:3.11"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Systems.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Systems.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Systems.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Include
|
||||
- name: Checking results from 'Params_Include'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "windows", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["ubuntu:3.11", "ubuntu:3.12"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Include.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Include.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Include.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Exclude
|
||||
- name: Checking results from 'Params_Exclude'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_Disable
|
||||
- name: Checking results from 'Params_Disable'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12"]
|
||||
expectedSystems = ["ubuntu", "macos", "macos-arm"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_Disable.outputs.python_version }}"""
actualPythonJobs = json_loads("""${{ needs.Params_Disable.outputs.python_jobs }}""".replace("'", '"'))
actualArtifactNames = json_loads("""${{ needs.Params_Disable.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
|
||||
|
||||
# Params_All
|
||||
- name: Checking results from 'Params_All'
|
||||
run: |
|
||||
from json import loads as json_loads
|
||||
from sys import exit
|
||||
|
||||
from pyTooling.Common import zipdicts
|
||||
|
||||
expectedPythonVersion = "3.13"
|
||||
expectedPythons = ["3.12", "3.13"]
|
||||
expectedSystems = ["ubuntu", "macos-arm", "windows"]
|
||||
expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.10", "windows:3.11", "windows:3.13"]
|
||||
expectedName = "Example"
|
||||
expectedArtifacts = {
|
||||
"unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML",
|
||||
"unittesting_html": f"{expectedName}-UnitTestReportSummary-HTML",
|
||||
"perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML",
|
||||
"benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML",
|
||||
"apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML",
|
||||
"codecoverage_sqlite": f"{expectedName}-CodeCoverage-SQLite",
|
||||
"codecoverage_xml": f"{expectedName}-CodeCoverage-XML",
|
||||
"codecoverage_json": f"{expectedName}-CodeCoverage-JSON",
|
||||
"codecoverage_html": f"{expectedName}-CodeCoverage-HTML",
|
||||
"statictyping_html": f"{expectedName}-StaticTyping-HTML",
|
||||
"package_all": f"{expectedName}-Packages",
|
||||
"documentation_html": f"{expectedName}-Documentation-HTML",
|
||||
"documentation_latex": f"{expectedName}-Documentation-LaTeX",
|
||||
"documentation_pdf": f"{expectedName}-Documentation-PDF",
|
||||
}
|
||||
|
||||
actualPythonVersion = """${{ needs.Params_All.outputs.python_version }}"""
|
||||
actualPythonJobs = json_loads("""${{ needs.Params_All.outputs.python_jobs }}""".replace("'", '"'))
|
||||
actualArtifactNames = json_loads("""${{ needs.Params_All.outputs.artifact_names }}""".replace("'", '"'))
|
||||
errors = 0
|
||||
|
||||
if actualPythonVersion != expectedPythonVersion:
|
||||
print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.")
|
||||
errors += 1
|
||||
if len(actualPythonJobs) != len(expectedJobs):
|
||||
print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.")
|
||||
print("Actual jobs:")
|
||||
for job in actualPythonJobs:
|
||||
if job['system'] == "msys2":
|
||||
print(f" {job['runtime'].lower()}:{job['python']}")
|
||||
else:
|
||||
print(f" {job['system']}:{job['python']}")
|
||||
print("Expected jobs:")
|
||||
for job in expectedJobs:
|
||||
print(f" {job}")
|
||||
errors += 1
|
||||
if len(actualArtifactNames) != len(expectedArtifacts):
|
||||
print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.")
|
||||
errors += 1
|
||||
else:
|
||||
for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts):
|
||||
if actual != expected:
|
||||
print(f"Artifact name '{key}' does not match: {actual} != {expected}.")
|
||||
errors += 1
|
||||
|
||||
if errors == 0:
|
||||
print(f"All checks PASSED.")
|
||||
exit(errors)
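All six check steps above compare the expected and actual artifact-name dictionaries with zipdicts from pyTooling.Common, which iterates several dictionaries in lockstep and yields the shared key plus one value per dictionary. A minimal usage sketch matching the unpacking pattern above, assuming both dictionaries carry the same keys (which the preceding length check guarantees):

# Minimal sketch of the zipdicts iteration pattern used by the check steps
# above: each iteration yields the key plus one value per input dictionary.
from pyTooling.Common import zipdicts

actual   = {"unittesting_xml": "Example-UnitTestReportSummary-XML"}
expected = {"unittesting_xml": "Example-UnitTestReportSummary-XML"}

for key, actualName, expectedName in zipdicts(actual, expected):
    print(f"{key}: {'OK' if actualName == expectedName else 'MISMATCH'}")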
|
||||
.github/workflows/_Checking_SimplePackage_Pipeline.yml (new file, vendored, 14 lines)
@@ -0,0 +1,14 @@
name: Verification of Pipeline Templates (Simple Package)

on:
  push:
  workflow_dispatch:

jobs:
  SimplePackage:
    uses: pyTooling/Actions/.github/workflows/CompletePipeline.yml@main
    with:
      package_name: pyDummy
    secrets:
      PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
      CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }}
.gitignore (new file, vendored, 38 lines)
@@ -0,0 +1,38 @@
# Python cache and object files
__pycache__/
*.py[cod]

# Coverage.py
.coverage
.cov
coverage.xml
/report/coverage

# mypy
/report/typing

# pytest
/report/unit
/tests/*.github

# setuptools
/build/**/*.*
/dist/**/*.*
/*.egg-info

# Dependencies
!requirements.txt

# Sphinx
doc/_build/
doc/pyDummy/**/*.*
!doc/pyDummy/index.rst

# BuildTheDocs
doc/_theme/**/*.*

# PyCharm project files
/.idea/workspace.xml

# Git files
!.git*
.idea/Actions.iml (new file, generated, 8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.13" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
.idea/inspectionProfiles/profiles_settings.xml (new file, generated, 6 lines)
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
.idea/modules.xml (new file, generated, 8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/Actions.iml" filepath="$PROJECT_DIR$/.idea/Actions.iml" />
    </modules>
  </component>
</project>
.vscode/settings.json (new file, vendored, 3 lines)
@@ -0,0 +1,3 @@
{
  "files.trimTrailingWhitespace": false,
}
Binary file not shown. (removed image; previous size: 74 KiB)
@@ -1,130 +0,0 @@
|
||||
name: Unit Testing, Coverage Collection, Package, Release, Documentation and Publish
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
|
||||
# This job is a workaround for global variables
|
||||
# See https://github.com/actions/runner/issues/480
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Params.yml@main
|
||||
with:
|
||||
name: ToolName
|
||||
# Optional
|
||||
python_version: '3.10'
|
||||
python_version_list: '3.8 3.9 3.10'
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@main
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
jobs: ${{ needs.Params.outputs.python_jobs }}
|
||||
# Optional
|
||||
requirements: '-r tests/requirements.txt'
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }}
|
||||
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
requirements: '-r tests/requirements.txt'
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
requirements: '-r tests/requirements.txt'
|
||||
report: 'htmlmypy'
|
||||
|
||||
Release:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- UnitTesting
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Params
|
||||
- Coverage
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
requirements: 'wheel'
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@main
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Params
|
||||
- Release
|
||||
- Package
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
requirements: 'wheel twine'
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
VerifyDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@main
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
# Optional
|
||||
python_version: ${{ fromJson(needs.Params.outputs.params).python_version }}
|
||||
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main
|
||||
needs:
|
||||
- Params
|
||||
- VerifyDocs
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main
|
||||
needs:
|
||||
- Params
|
||||
- BuildTheDocs
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
# Optional
|
||||
coverage: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
typing: ${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main
|
||||
needs:
|
||||
- Params
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
- BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.params).artifacts.package }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }}-*
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.coverage }}
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.typing }}
|
||||
${{ fromJson(needs.Params.outputs.params).artifacts.doc }}
|
||||
LICENSE.md (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
This is a local copy of the Apache License Version 2.0.
|
||||
The original can be obtained here: [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
# Apache License
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
## TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
### 1. Definitions.
|
||||
|
||||
*"License"* shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
*"Licensor"* shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
*"Legal Entity"* shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
*"You"* (or *"Your"*) shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
*"Source"* form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
*"Object"* form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
*"Work"* shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
*"Derivative Works"* shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
*"Contribution"* shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, *"submitted"* means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as *"Not a Contribution."*
|
||||
|
||||
*"Contributor"* shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
### 2. Grant of Copyright License.
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
### 3. Grant of Patent License.
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
### 4. Redistribution.
|
||||
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
- You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
- You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
- You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
- If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
### 5. Submission of Contributions.
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
### 6. Trademarks.
|
||||
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
### 7. Disclaimer of Warranty.
|
||||
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
### 8. Limitation of Liability.
|
||||
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
### 9. Accepting Warranty or Additional Liability.
|
||||
While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
|
||||
## Appendix: How to apply the Apache License to your work
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
README.md (204 changes)
@@ -7,139 +7,104 @@ language for writing reusable CI code.
|
||||
However, since Python is equally popular and capable, the use of JS/TS can be bypassed, with some caveats.
|
||||
This repository gathers reusable CI tooling for testing, packaging and distributing Python projects and documentation.
|
||||
|
||||
## Context
|
||||
See [GitHub Actions and GitHub Reusable Workflows](https://pytooling.github.io/Actions/Background.html) for more
|
||||
background information.
|
||||
|
||||
GitHub Actions supports four types of reusable code:
|
||||
## Reusable Actions
|
||||
|
||||
- JavaScript Action.
|
||||
- [docs.github.com: actions/creating-actions/creating-a-javascript-action](https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action)
|
||||
- Container Action.
|
||||
- [docs.github.com: actions/creating-actions/creating-a-docker-container-action](https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action)
|
||||
- Composite Action.
|
||||
- [docs.github.com: actions/creating-actions/creating-a-composite-action](https://docs.github.com/en/actions/creating-actions/creating-a-composite-action)
|
||||
- [github.blog/changelog: 2020-08-07-github-actions-composite-run-steps](https://github.blog/changelog/2020-08-07-github-actions-composite-run-steps/)
|
||||
- [github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-compositio](https://github.blog/changelog/2021-08-25-github-actions-reduce-duplication-with-action-composition/)
|
||||
- Reusable Workflows.
|
||||
- [docs.github.com: actions/learn-github-actions/reusing-workflows](https://docs.github.com/en/actions/learn-github-actions/reusing-workflows)
|
||||
- [github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows](https://github.blog/changelog/2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows/)
|
||||
- **Artifacts:**
|
||||
[**pyTooling/upload-artifact**](https://github.com/pyTooling/upload-artifact): The upload-artifact action will
|
||||
preserve file attributes like permissions.
|
||||
|
||||
Leaving JavaScript and Container Actions aside, the main differences between Composite Actions and Reusable Workflows
|
||||
are the following:
|
||||
[**pyTooling/download-artifact**](https://github.com/pyTooling/download-artifact): The download-artifact action will
|
||||
preserve file attributes like permissions.
|
||||
|
||||
- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location.
|
||||
However, Reusable Workflows can only be used through a remote/external path (`{owner}/{repo}/{path}/{filename}@{ref}`),
|
||||
where `{path}` must be `.github/workflows`, and `@{ref}` is required.
|
||||
See [actions/runner#1493](https://github.com/actions/runner/issues/1493).
|
||||
As a result:
|
||||
- Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without
|
||||
checkout.
|
||||
- Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick the modifications by
|
||||
default.
|
||||
- Composite Actions can include multiple steps, but not multiple jobs.
|
||||
Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job.
|
||||
- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository.
|
||||
Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default.
|
||||
## Predefined Docker Images
|
||||
|
||||
### Callable vs dispatchable workflows
|
||||
- **Documentation:**
|
||||
[**MikTeX**](https://github.com/pyTooling/MikTeX): A predefined MikTeX image based on Debian Bookworm + Python 3.13
|
||||
with specific tools for documentation generation using e.g. Sphinx and related extensions.
|
||||
|
||||
Reusable Workflows are defined through the `workflow_call` event kind.
|
||||
Similarly, any "regular" Workflow can be triggered through a `workflow_dispatch` event.
|
||||
Both event kinds support `input` options, which are usable within the Workflow.
|
||||
Therefore, one might intuitively try to write a workflow which is both callable and dispatchable.
|
||||
In other words, which can be either reused from another workflow, or triggered through the API.
|
||||
Unfortunately, that is not the case.
|
||||
Although `input` options can be duplicated for both events, GitHub's backend exposes them through different objects.
|
||||
In dispatchable Workflows, the object is `${{ github.event.inputs }}`, while callable workflows receive `${{ inputs }}`.
|
||||
## Reusable Workflows
|
||||
|
||||
As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required.
|
||||
See, for instance, [hdl/containers: .github/workflows/common.yml](https://github.com/hdl/containers/blob/main/.github/workflows/common.yml) and [hdl/containers: .github/workflows/dispatch.yml](https://github.com/hdl/containers/blob/main/.github/workflows/dispatch.yml).
|
||||
Alternatively, a normalisation job might be used, similar to the `Params` in this repo.
|
||||
This repository provides 10+ *Reusable Workflows* based on the CI pipelines of the repos in this GitHub organisation,
|
||||
[EDA²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others. By combining them, Python packages can
|
||||
be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
|
||||
Optionally, coverage and static type check reports can be gathered and integrated into the online documentation.
|
||||
|
||||
### Call hierarchy
|
||||
[Screenshot: example pipeline of a simple package](doc/_static/pyTooling-Actions-SimplePackage.png)
|
||||
|
||||
Reusable Workflows cannot call other Reusable Workflows, however, they can use Composite Actions and Composite Actions
|
||||
can call other Actions.
|
||||
Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along
|
||||
with multiple layers of composite actions.
|
||||
As shown in the screenshots above, the expected order is:
|
||||
|
||||
### Script with post step
|
||||
- **Global:**
|
||||
[**Parameters**](.github/workflows/Parameters.yml): It generates output parameters with artifact names and job matrices
|
||||
to be used in later running jobs.
|
||||
It's a workaround for the limitations to handle global variables in GitHub Actions workflows (see
|
||||
[actions/runner#480](https://github.com/actions/runner/issues/480)).
|
||||
|
||||
[**ExtractConfiguration**](.github/workflows/ExtractConfiguration.yml): extracts configuration values from
|
||||
`pyproject.toml` and exposes configured paths and filenames as job output parameters.
|
||||
- **Predefined pipelines:**
|
||||
[**CompletePipeline**](.github/workflows/CompletePipeline.yml): is a predefined pipeline for typical Python projects
|
||||
using all predefined job templates of pyTooling at once (unit testing, code coverage, static typing, documentation
|
||||
report generation and publishing, packaging, releasing, ...)
|
||||
- **Code testing/analysis:**
|
||||
[**ApplicationTesting**](.github/workflows/ApplicationTesting.yml): like UnitTesting, but running tests using an
|
||||
installed Python package.
|
||||
|
||||
[**UnitTesting**](.github/workflows/UnitTesting.yml): run unit test with `pytest` using multiple versions of Python, and
|
||||
optionally upload results as XML reports. Configuration options to `pytest` should be given via section
|
||||
`[tool.pytest.ini_options]` in a `pyproject.toml` file.
|
||||
Besides test results, also code coverage data (incl. branch coverage) can be collected using
|
||||
`pytest`/`pytest-cov`/`coverage.py`. Configuration options to `coverage.py` should be given via section
|
||||
`[tool.coverage.*]` in a `pyproject.toml` file.
|
||||
While multiple report formats can be created in the job, it's recommended to use `PublishTestResults` and/or
|
||||
`PublishCoverageResults` to merge results from matrix runs and then generate final reports as XML, JSON or HTML.
|
||||
Finally, reports can be published to GitHub Pages or cloud services like Codecov and Codacy.
|
||||
|
||||
[**StaticTypeCheck**](.github/workflows/StaticTypeCheck.yml): collect static type check result with `mypy`, and
|
||||
optionally upload results as an HTML report.
|
||||
|
||||
[**VerifyDocs**](.github/workflows/VerifyDocs.yml): extract code examples from the README and test these code snippets.
|
||||
- **Packaging and releasing:**
|
||||
[**Package**](.github/workflows/Package.yml): generate source and wheel packages, and upload them as an artifact.
|
||||
|
||||
JavaScript Actions support defining `pre`, `pre-if`, `post` and `post-if` steps, which allow executing steps at the
|
||||
beginning or the end of a job, regardless of intermediate steps failing.
|
||||
Unfortunately, those are not available for any other Action type.
|
||||
[**PublishOnPyPI**](.github/workflows/PublishOnPyPI.yml): publish source and wheel packages to PyPI.
|
||||
|
||||
Action [with-post-step](with-post-step) is a generic JS Action to execute a main command and to set a command as a post
|
||||
step.
|
||||
It allows using the `post` feature with scripts written in bash, python or any other interpreted language available on
|
||||
the environment.
|
||||
See: [actions/runner#1478](https://github.com/actions/runner/issues/1478).
|
||||
[**PublishTestResults**](.github/workflows/PublishTestResults.yml): publish unit test results through GH action `dorny/test-reporter`.
|
||||
|
||||
## Reusable workflows
|
||||
[**PublishCoverageResults**](.github/workflows/PublishCoverageResults.yml): publish code coverage results.
|
||||
|
||||
This repository provides 10+ Reusable Workflows based on the CI pipelines of the repos in this organisation,
|
||||
[EDA²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others.
|
||||
By combining them, Python packages can be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
|
||||
Optionally, coverage and static type check reports can be gathered.
|
||||
[**NightlyRelease**](.github/workflows/NightlyRelease.yml): publish GitHub Release.
|
||||
|
||||
[Screenshot: example pipeline](ExamplePipeline.png)
|
||||
[**Release**](.github/workflows/Release.yml): publish GitHub Release.
|
||||
- **Documentation:**
|
||||
[**SphinxDocumentation**](.github/workflows/SphinxDocumentation.yml): create HTML and LaTeX documentation using
|
||||
Sphinx.
|
||||
|
||||
[**LaTeXDocumentation**](.github/workflows/LaTeXDocumentation.yml): compile LaTeX documentation to a PDF file using
|
||||
MikTeX.
|
||||
|
||||
[**PublishToGitHubPages**](.github/workflows/PublishToGitHubPages.yml): publish HTML documentation to GitHub Pages.
|
||||
- **Cleanup:**
|
||||
[**IntermediateCleanUp**](.github/workflows/IntermediateCleanUp.yml): delete intermediate artifacts.
|
||||
|
||||
[**ArtifactCleanUp**](.github/workflows/ArtifactCleanUp.yml): delete artifacts.
|
||||
- **⚠ Deprecated ⚠:**
|
||||
[**CoverageCollection**](.github/workflows/CoverageCollection.yml): Use `UnitTesting`, because it can collect code
|
||||
coverage too. This avoids code duplication in job templates.
|
||||
|
||||
[**BuildTheDocs**](.github/workflows/BuildTheDocs.yml): Use `SphinxDocumentation`, `LaTeXDocumentation` and
|
||||
`PublishToGitHubPages`. BuildTheDocs isn't maintained anymore.
|
||||
|
||||
As shown in the screenshot above, the expected order is:
|
||||
|
||||
- Global:
|
||||
- [Params](.github/workflows/Params.yml): a workaround for the limitations to handle global variables in
|
||||
GitHub Actions workflows (see [actions/runner#480](https://github.com/actions/runner/issues/480)).
|
||||
It generates outputs with artifact names and job matrices to be used in other jobs.
|
||||
- Code testing/analysis:
|
||||
- [UnitTesting](.github/workflows/UnitTesting.yml): run unit test with `pytest` using multiple versions of Python, and
|
||||
optionally upload results as XML reports.
|
||||
- [CoverageCollection](.github/workflows/CoverageCollection.yml): collect coverage data with `pytest` using a single
|
||||
version of Python, generate HTML and Cobertura (XML) reports, upload the HTML report as an artifact, and upload the
|
||||
results to Codecov and Codacy.
|
||||
- [StaticTypeCheck](.github/workflows/StaticTypeCheck.yml): collect static type check result with `mypy`, and
|
||||
optionally upload results as an HTML report.
|
||||
Example `commands`:
|
||||
|
||||
1. Regular package
|
||||
|
||||
```yml
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
2. Parent namespace package
|
||||
|
||||
```yml
|
||||
commands: |
|
||||
touch Parent/__init__.py
|
||||
mypy --html-report htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
3. Child namespace package
|
||||
|
||||
```yml
|
||||
commands: |
|
||||
cd Parent
|
||||
mypy --html-report ../htmlmypy -p ToolName
|
||||
```
|
||||
|
||||
- [VerifyDocs](.github/workflows/VerifyDocs.yml): extract code examples from the README and test.
|
||||
- Packaging and releasing:
|
||||
- [Release](.github/workflows/Release.yml): publish GitHub Release.
|
||||
- [Package](.github/workflows/Package.yml): generate source and wheel packages, and upload them as an artifact.
|
||||
- [PublishOnPyPI](.github/workflows/PublishOnPyPI.yml): publish source and wheel packages to PyPI.
|
||||
- Documentation:
|
||||
- [BuildTheDocs](.github/workflows/BuildTheDocs.yml): build Sphinx documentation with BuildTheDocs, and upload HTML as
|
||||
an artifact.
|
||||
- [PublishToGitHubPages](.github/workflows/PublishToGitHubPages.yml): publish HTML documentation to GitHub Pages.
|
||||
- Cleanup:
|
||||
- [ArtifactCleanUp](.github/workflows/ArtifactCleanUp.yml): delete artifacts.
|
||||
|
||||
### Example pipeline
|
||||
|
||||
[ExamplePipeline.yml](ExamplePipeline.yml) is an example Workflow which uses all of the Reusable Workflows.
|
||||
ExamplePipeline.yml is an example Workflow which uses all of the Reusable Workflows.
|
||||
Python package/tool developers can copy it into their repos, in order to use all the reusable workflows straightaway.
|
||||
Minimal required modifications are the following:
|
||||
|
||||
- Set the `name` input of job `Params`.
|
||||
- Set the `name` input of job `Parameters`.
|
||||
- Specify the `commands` input of job `StaticTypeCheck` (see the sketch below).
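For orientation, a hedged sketch of those two inputs in a caller workflow; the `@main` ref and `ToolName` are placeholders, only the two inputs named above are shown, and real pipelines typically wire further outputs between jobs ([ExamplePipeline.yml](ExamplePipeline.yml) remains the authoritative reference):

```yml
jobs:
  Parameters:
    uses: pyTooling/Actions/.github/workflows/Parameters.yml@main
    with:
      name: ToolName   # set to your package/tool name

  StaticTypeCheck:
    uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@main
    needs: Parameters
    with:
      commands: mypy --html-report htmlmypy -p ToolName
```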
|
||||
|
||||
Find further usage cases in the following list of projects:
|
||||
@@ -147,3 +112,20 @@ Find further usage cases in the following list of projects:
|
||||
- [edaa-org/pyEDAA.ProjectModel](https://github.com/edaa-org/pyEDAA.ProjectModel/tree/main/.github/workflows)
|
||||
- [edaa-org/pySVModel](https://github.com/edaa-org/pySVModel/tree/main/.github/workflows)
|
||||
- [VHDL/pyVHDLModel](https://github.com/VHDL/pyVHDLModel/tree/main/.github/workflows)
|
||||
|
||||
|
||||
## Contributors
|
||||
|
||||
* [Patrick Lehmann](https://GitHub.com/Paebbels)
|
||||
* [Unai Martinez-Corral](https://GitHub.com/umarcor) (Maintainer)
|
||||
* [and more...](https://GitHub.com/pyTooling/Actions/graphs/contributors)
|
||||
|
||||
|
||||
## License
|
||||
|
||||
This Python package (source code) licensed under [Apache License 2.0](LICENSE.md).
|
||||
The accompanying documentation is licensed under [Creative Commons - Attribution 4.0 (CC-BY 4.0)](doc/Doc-License.rst).
|
||||
|
||||
---
|
||||
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
dist/requirements.txt (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
wheel ~= 0.45
|
||||
twine ~= 6.0
|
||||
doc/Action/Releaser.rst (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
.. _ACTION/Releaser:
|
||||
|
||||
Releaser
|
||||
########
|
||||
|
||||
**Releaser** is a Docker GitHub Action written in Python.
|
||||
|
||||
**Releaser** allows keeping a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
|
||||
Combined with a periodically executed workflow, **Releaser** allows providing a fixed release name for users
|
||||
willing to use daily/nightly artifacts of a project.
|
||||
|
||||
Furthermore, when any `semver <https://semver.org>`__ compliant tagged commit is pushed, **Releaser** can create a
|
||||
release and upload assets.
|
||||
|
||||
Context
|
||||
*******
|
||||
|
||||
GitHub provides official clients for the GitHub API through `github.com/octokit <https://github.com/octokit>`__:
|
||||
|
||||
- `octokit.js <https://github.com/octokit/octokit.js>`__ (`octokit.github.io/rest.js <https://octokit.github.io/rest.js>`__)
|
||||
- `octokit.rb <https://github.com/octokit/octokit.rb>`__ (`octokit.github.io/octokit.rb <http://octokit.github.io/octokit.rb>`__)
|
||||
- `octokit.net <https://github.com/octokit/octokit.net>`__ (`octokitnet.rtfd.io <https://octokitnet.rtfd.io>`__)
|
||||
|
||||
When GitHub Actions was released in 2019, two Actions were made available through
|
||||
`github.com/actions <https://github.com/actions>`__ for dealing with GitHub Releases:
|
||||
|
||||
- `actions/create-release <https://github.com/actions/create-release>`__
|
||||
- `actions/upload-release-asset <https://github.com/actions/upload-release-asset>`__
|
||||
|
||||
However, those Actions were contributed by an employee in their spare time and were not officially supported by GitHub.
|
||||
Therefore, they were already unmaintained before GitHub Actions left its private beta
|
||||
(see `actions/upload-release-asset#58 <https://github.com/actions/upload-release-asset/issues/58>`__)
|
||||
and, a year later, archived.
|
||||
Those Actions are based on `actions/toolkit <https://github.com/actions/toolkit>`__'s hydrated version of octokit.js.
|
||||
|
||||
From a practical point of view, `actions/github-script <https://github.com/actions/github-script>`__ is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
|
||||
Still, it requires writing plain JavaScript.
|
||||
|
||||
Alternatively, there are non-official GitHub API libraries available in other languages (see `docs.github.com: rest/overview/libraries <https://docs.github.com/en/rest/overview/libraries>`__).
|
||||
**Releaser** is based on `PyGithub/PyGithub <https://github.com/PyGithub/PyGithub>`__, a Python client for the GitHub API.
|
||||
|
||||
**Releaser** was originally created in `eine/tip <https://github.com/eine/tip>`__, as an enhanced alternative to using
|
||||
``actions/create-release`` and ``actions/upload-release-asset``, in order to cover certain use cases that were being
|
||||
migrated from Travis CI to GitHub Actions.
|
||||
The main limitation of GitHub's Actions was/is their verbosity and the inability to dynamically define the list of assets
|
||||
to be uploaded.
|
||||
|
||||
On the other hand, GitHub Actions artifacts do require login in order to download them.
|
||||
Conversely, assets of GitHub Releases can be downloaded without login.
|
||||
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
|
||||
as assets.
|
||||
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
|
||||
Thus, the name of the Action.
|
||||
|
||||
GitHub provides an official CLI tool, written in golang: `cli/cli <https://github.com/cli/cli>`__.
|
||||
When the Python version of **Releaser** was written, ``cli`` was evaluated as an alternative to *PyGitHub*.
|
||||
``gh release`` was (and still is) not flexible enough to update the reference of a release without deleting and
|
||||
recreating it (see `cli.github.com: manual/gh_release_create <https://cli.github.com/manual/gh_release_create>`__).
|
||||
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
|
||||
(see `eine/tip#111 <https://github.com/eine/tip/issues/111>`__).
|
||||
However, `cli.github.com: manual/gh_release_upload <https://cli.github.com/manual/gh_release_upload>`__ handles uploading
|
||||
artifacts as assets faster and with better stability for larger files than *PyGitHub*
|
||||
(see `msys2/msys2-installer#36 <https://github.com/msys2/msys2-installer/pull/36>`__).
|
||||
Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
|
||||
Although ``gh`` does not support login through SSH (see `cli/cli#3715 <https://github.com/cli/cli/issues/3715>`__), on GitHub
|
||||
Actions a token is available as ``${{ github.token }}``.
|
||||
Therefore, **Releaser** uses ``gh release upload`` internally.
|
||||
|
||||
Usage
|
||||
*****
|
||||
|
||||
The following block shows a minimal YAML workflow file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: 'workflow'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * 5'
|
||||
|
||||
jobs:
|
||||
mwe:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
|
||||
# Clone repository
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Build your application, tool, artifacts, etc.
|
||||
- name: Build
|
||||
run: |
|
||||
echo "Build some tool and generate some artifacts" > artifact.txt
|
||||
|
||||
# Update tag and pre-release
|
||||
# - Update (force-push) tag to the commit that is used in the workflow.
|
||||
# - Upload artifacts defined by the user.
|
||||
- uses: pyTooling/Actions/releaser@r0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
artifact.txt
|
||||
README.md
|
||||
|
||||
|
||||
Composite Action
|
||||
================
|
||||
|
||||
The default implementation of **Releaser** is a Container Action.
|
||||
Therefore, a pre-built container image is pulled before starting the job.
|
||||
Alternatively, a Composite Action version is available: ``uses: pyTooling/Actions/releaser/composite@main``.
|
||||
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
|
||||
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
|
||||
users to tweak the version of Python by using `actions/setup-python <https://github.com/actions/setup-python>`__ beforehand.
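For illustration, a hedged sketch of such a combination; the Python version, the action refs and the asset file name are placeholders:

.. code-block:: yaml

   steps:
     - uses: actions/checkout@v4

     # Select the Python interpreter used by the Composite Action
     - uses: actions/setup-python@v5
       with:
         python-version: '3.12'

     # Composite variant of Releaser (installs its dependencies on the runner)
     - uses: pyTooling/Actions/releaser/composite@main
       with:
         token: ${{ secrets.GITHUB_TOKEN }}
         files: artifact.txt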
|
||||
|
||||
Options
|
||||
*******
|
||||
|
||||
All options can be optionally provided as environment variables: ``INPUT_TOKEN``, ``INPUT_FILES``, ``INPUT_TAG``, ``INPUT_RM``
|
||||
and/or ``INPUT_SNAPSHOTS``.
|
||||
|
||||
token (required)
|
||||
================
|
||||
|
||||
Token to make authenticated API calls; can be passed in using ``${{ secrets.GITHUB_TOKEN }}``.
|
||||
|
||||
files (required)
|
||||
================
|
||||
|
||||
Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
|
||||
hierarchy.
|
||||
|
||||
For creating/updating a release without uploading assets, set ``files: none``.
|
||||
|
||||
tag
|
||||
===
|
||||
|
||||
The default tag name for the tip/nightly pre-release is ``tip``, but it can be optionally overridden through option ``tag``.
|
||||
|
||||
rm
|
||||
==
|
||||
|
||||
Set option ``rm`` to ``true`` for systematically removing previous artifacts (e.g. old versions).
|
||||
Otherwise (the default), all previous artifacts are preserved or overwritten.
|
||||
|
||||
Note:
|
||||
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
|
||||
users until the workflow is successfully run.
|
||||
For instance, Action `setup-ghdl-ci <https://github.com/ghdl/setup-ghdl-ci>`__ uses assets from `ghdl/ghdl: releases/tag/nightly <https://github.com/ghdl/ghdl/releases/tag/nightly>`__.
|
||||
Hence, it is recommended to remove only the conflicting assets, in order to maximise availability.
|
||||
|
||||
snapshots
|
||||
=========
|
||||
|
||||
Whether to create releases from any tag or to treat some as snapshots.
|
||||
By default, all the tags with non-empty ``prerelease`` field (see `semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string? <https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string>`__)
|
||||
are considered snapshots; for these, no release is created and no assets are uploaded.
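As a summary, a hedged sketch combining the options described above; the values are placeholders and ``snapshots`` is left at its default:

.. code-block:: yaml

   - uses: pyTooling/Actions/releaser@r0
     with:
       token: ${{ secrets.GITHUB_TOKEN }}
       tag: nightly      # override the default 'tip' tag
       rm: true          # remove previous assets before uploading new ones
       files: |
         artifact.txt
         README.md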
|
||||
|
||||
Advanced/complex use cases
|
||||
**************************
|
||||
|
||||
**Releaser** is essentially a very thin wrapper to use the GitHub Actions context data along with the classes
|
||||
and methods of PyGithub.
|
||||
|
||||
Similarly to `actions/github-script <https://github.com/actions/github-script>`__, users with advanced/complex requirements
|
||||
might find it desirable to write their own Python script, instead of using **Releaser**.
|
||||
In fact, since ``shell: python`` is supported in GitHub Actions, using Python does *not* require any Action.
|
||||
For prototyping purposes, the following job might be useful:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
Release:
|
||||
name: '📦 Release'
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- ...
|
||||
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
|
||||
steps:
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
|
||||
- shell: bash
|
||||
run: pip install PyGithub --progress-bar off
|
||||
|
||||
- name: Set list of files for uploading
|
||||
id: files
|
||||
shell: python
|
||||
run: |
|
||||
from github import Github
|
||||
print("· Get GitHub API handler (authenticate)")
|
||||
gh = Github('${{ github.token }}')
|
||||
print("· Get Repository handler")
|
||||
gh_repo = gh.get_repo('${{ github.repository }}')
|
||||
|
||||
Find a non-trivial use case at `msys2/msys2-autobuild <https://github.com/msys2/msys2-autobuild>`__.
|
||||
doc/Action/With-post-step.rst (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
.. _ACTION/WithPostStep:
|
||||
|
||||
with-post-step
|
||||
##############
|
||||
|
||||
JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at
|
||||
the beginning or the end of a job, regardless of intermediate steps failing. Unfortunately, those are not available for
|
||||
any other Action type.
|
||||
|
||||
Action **with-post-step** is a generic JavaScript Action to execute a main command and to set a further command as a
|
||||
post step. It allows using the ``post`` feature with scripts written in Bash, Python or any other interpreted language
|
||||
available on the environment.
|
||||
|
||||
**Example Usage:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Image:
|
||||
steps:
|
||||
- ...
|
||||
|
||||
- name: Push container image
|
||||
uses: ./with-post-step
|
||||
with:
|
||||
main: |
|
||||
echo '${{ github.token }}' | docker login ghcr.io -u GitHub-Actions --password-stdin
|
||||
docker push ghcr.io/pytooling/releaser
|
||||
post: docker logout ghcr.io
|
||||
|
||||
.. seealso::
|
||||
|
||||
* `actions/runner#1478 <https://github.com/actions/runner/issues/1478>`__.
|
||||
doc/Action/index.rst (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
Overview
|
||||
########
|
||||
|
||||
The following two Actions are provided by **Actions**:
|
||||
|
||||
* :ref:`ACTION/Releaser`
|
||||
* :ref:`ACTION/WithPostStep`
|
||||
doc/Background.rst (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
Background
|
||||
##########
|
||||
|
||||
GitHub Actions supports five procedures to reuse code:
|
||||
|
||||
- JavaScript Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-javascript-action <https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action>`__
|
||||
|
||||
- Container Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-docker-container-action <https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action>`__
|
||||
|
||||
- Container Step:
|
||||
|
||||
- `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action <https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action>`__
|
||||
- `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs <https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs>`__
|
||||
|
||||
- Composite Action:
|
||||
|
||||
- `docs.github.com: actions/creating-actions/creating-a-composite-action <https://docs.github.com/en/actions/creating-actions/creating-a-composite-action>`__
|
||||
- `github.blog/changelog: 2020-08-07-github-actions-composite-run-steps <https://github.blog/changelog/2020-08-07-github-actions-composite-run-steps/>`__
|
||||
- `github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-composition <https://github.blog/changelog/2021-08-25-github-actions-reduce-duplication-with-action-composition/>`__
|
||||
|
||||
- Reusable Workflow:
|
||||
|
||||
- `docs.github.com: actions/learn-github-actions/reusing-workflows <https://docs.github.com/en/actions/learn-github-actions/reusing-workflows>`__
|
||||
- `github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows <https://github.blog/changelog/2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows/>`__
|
||||
|
||||
Container Actions and Container Steps are almost equivalent: Actions use a configuration file (``action.yml``), while
|
||||
Steps do not.
|
||||
Leaving JavaScript and Container Actions and Steps aside, the main differences between Composite Actions and Reusable
|
||||
Workflows are the following:
|
||||
|
||||
- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location.
|
||||
However, Reusable Workflows can only be used through a remote/external path (``{owner}/{repo}/{path}/{filename}@{ref}``),
|
||||
where ``{path}`` must be ``.github/workflows``, and ``@{ref}`` is required.
|
||||
See `actions/runner#1493 <https://github.com/actions/runner/issues/1493>`__.
|
||||
As a result:
|
||||
|
||||
- Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without
|
||||
checkout.
|
||||
- Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick up the modifications by
|
||||
default.
|
||||
|
||||
- Composite Actions can include multiple steps, but not multiple jobs.
|
||||
Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job.
|
||||
- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository.
|
||||
Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default.
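To make the difference concrete, a minimal sketch with hypothetical repository and action names: a Reusable Workflow is referenced at the job level via a remote path and ref, while a local Composite Action is referenced at the step level after a checkout.

.. code-block:: yaml

   jobs:
     # Reusable Workflow: job-level 'uses', remote path and '@{ref}' required
     CalledWorkflow:
       uses: octo-org/octo-repo/.github/workflows/example.yml@main

     # Composite Action: step-level 'uses'; a local path works after checkout
     WithCompositeAction:
       runs-on: ubuntu-latest
       steps:
         - uses: actions/checkout@v4
         - uses: ./.github/actions/example-composite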
|
||||
|
||||
Callable vs dispatchable workflows
|
||||
**********************************
|
||||
|
||||
Reusable Workflows are defined through the ``workflow_call`` event kind.
|
||||
Similarly, any "regular" Workflow can be triggered through a ``workflow_dispatch`` event.
|
||||
Both event kinds support ``input`` options, which are usable within the Workflow.
|
||||
Therefore, one might intuitively try to write a workflow which is both callable and dispatchable.
|
||||
In other words, which can be either reused from another workflow, or triggered through the API.
|
||||
Unfortunately, that is not the case.
|
||||
Although ``input`` options can be duplicated for both events, GitHub's backend exposes them through different objects.
|
||||
In dispatchable Workflows, the object is ``${{ github.event.inputs }}``, while callable workflows receive ``${{ inputs }}``.
|
||||
|
||||
As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required.
|
||||
See, for instance, `hdl/containers: .github/workflows/common.yml <https://github.com/hdl/containers/blob/main/.github/workflows/common.yml>`__
|
||||
and `hdl/containers: .github/workflows/dispatch.yml <https://github.com/hdl/containers/blob/main/.github/workflows/dispatch.yml>`__.
|
||||
Alternatively, a normalisation job might be used, similar to the ``Parameters`` in this repo.
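A hedged sketch of such a normalisation job, assuming the distinction between the two input objects described above; the ``name`` input is only an example:

.. code-block:: yaml

   on:
     workflow_call:
       inputs:
         name:
           type: string
     workflow_dispatch:
       inputs:
         name:
           type: string

   jobs:
     Normalize:
       runs-on: ubuntu-latest
       outputs:
         name: ${{ steps.pick.outputs.name }}
       steps:
         - id: pick
           # 'inputs' is populated for workflow_call, 'github.event.inputs' for workflow_dispatch
           run: echo "name=${{ inputs.name || github.event.inputs.name }}" >> "$GITHUB_OUTPUT"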
|
||||
|
||||
Call hierarchy
|
||||
**************
|
||||
|
||||
Reusable Workflows cannot call other Reusable Workflows; however, they can use Composite Actions, and Composite Actions
|
||||
can call other Actions.
|
||||
Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along
|
||||
with multiple layers of composite actions.
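A hedged sketch of that layering, with hypothetical repository and file names: the top-level pipeline only orchestrates jobs through Reusable Workflows, while the steps inside each called workflow delegate to Composite Actions.

.. code-block:: yaml

   # Pipeline.yml: orchestration layer built from Reusable Workflows
   jobs:
     UnitTesting:
       uses: octo-org/octo-actions/.github/workflows/UnitTesting.yml@main

     Package:
       needs: UnitTesting
       uses: octo-org/octo-actions/.github/workflows/Package.yml@main

   # Inside UnitTesting.yml, the individual steps then use Composite Actions, e.g.
   #   - uses: octo-org/octo-actions/.github/actions/setup-environment@main
   # and those Composite Actions may in turn call further Actions.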
|
||||
|
||||
Script with post step
|
||||
*********************
|
||||
|
||||
JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at
|
||||
the beginning or the end of a job, regardless of intermediate steps failing.
|
||||
Unfortunately, those are not available for any other Action type.
|
||||
|
||||
Action ``with-post-step`` is a generic JS Action to execute a main command and to set a command as a post
|
||||
step.
|
||||
It allows using the ``post`` feature with scripts written in bash, python or any other interpreted language available on
|
||||
the environment.
|
||||
See: `actions/runner#1478 <https://github.com/actions/runner/issues/1478>`__.
|
||||
doc/Dependency.rst (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
Dependencies
|
||||
############
|
||||
|
||||
This is a summary of dependencies used by the provided job templates. For more details, see each job template.
|
||||
|
||||
* Actions provided by GitHub
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/upload-artifact`
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`actions/create-release` (unmaintained)
|
||||
* :gh:`actions/setup-python`
|
||||
|
||||
* BuildTheDocs
|
||||
|
||||
* :gh:`buildthedocs/btd`
|
||||
|
||||
* Code Quality Services
|
||||
|
||||
* :gh:`codecov/codecov-action`
|
||||
* :gh:`codacy/codacy-coverage-reporter-action`
|
||||
|
||||
* Reporting
|
||||
|
||||
* :gh:`dorny/test-reporter`
|
||||
|
||||
* Miscellaneous
|
||||
|
||||
* :gh:`msys2/setup-msys2`
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
doc/Deveopment.rst (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
Development
|
||||
###########
|
||||
|
||||
.. todo:: Development - Explain how to write new job templates.
|
||||
doc/Doc-License.rst (new file, 353 lines)
@@ -0,0 +1,353 @@
|
||||
.. _DOCLICENSE:
|
||||
|
||||
.. note:: This is a local copy of the `Creative Commons - Attribution 4.0 International (CC BY 4.0) <https://creativecommons.org/licenses/by/4.0/legalcode>`__.
|
||||
|
||||
.. attention:: This **CC BY 4.0** license applies only to the **documentation** of this project.
|
||||
|
||||
|
||||
Creative Commons Attribution 4.0 International
|
||||
##############################################
|
||||
|
||||
Creative Commons Corporation (“Creative Commons”) is not a law firm and does not
|
||||
provide legal services or legal advice. Distribution of Creative Commons public
|
||||
licenses does not create a lawyer-client or other relationship. Creative Commons
|
||||
makes its licenses and related information available on an “as-is” basis.
|
||||
Creative Commons gives no warranties regarding its licenses, any material
|
||||
licensed under their terms and conditions, or any related information. Creative
|
||||
Commons disclaims all liability for damages resulting from their use to the
|
||||
fullest extent possible.
|
||||
|
||||
.. topic:: Using Creative Commons Public Licenses
|
||||
|
||||
Creative Commons public licenses provide a standard set of terms and conditions
|
||||
that creators and other rights holders may use to share original works of
|
||||
authorship and other material subject to copyright and certain other rights
|
||||
specified in the public license below. The following considerations are for
|
||||
informational purposes only, are not exhaustive, and do not form part of our
|
||||
licenses.
|
||||
|
||||
* **Considerations for licensors:** Our public licenses are intended for use
|
||||
by those authorized to give the public permission to use material in ways
|
||||
otherwise restricted by copyright and certain other rights. Our licenses are
|
||||
irrevocable. Licensors should read and understand the terms and conditions
|
||||
of the license they choose before applying it. Licensors should also secure
|
||||
all rights necessary before applying our licenses so that the public can reuse
|
||||
the material as expected. Licensors should clearly mark any material not
|
||||
subject to the license. This includes other CC-licensed material, or material
|
||||
used under an exception or limitation to copyright.
|
||||
`More considerations for licensors <http://wiki.creativecommons.org/Considerations_for_licensors_and_licensees#Considerations_for_licensors>`__.
|
||||
|
||||
* **Considerations for the public:** By using one of our public licenses, a
|
||||
licensor grants the public permission to use the licensed material under
|
||||
specified terms and conditions. If the licensor’s permission is not necessary
|
||||
for any reason–for example, because of any applicable exception or limitation
|
||||
to copyright–then that use is not regulated by the license. Our licenses grant
|
||||
only permissions under copyright and certain other rights that a licensor has
|
||||
authority to grant. Use of the licensed material may still be restricted for
|
||||
other reasons, including because others have copyright or other rights in the
|
||||
material. A licensor may make special requests, such as asking that all
|
||||
changes be marked or described. Although not required by our licenses, you are
|
||||
encouraged to respect those requests where reasonable.
|
||||
`More considerations for the public <http://wiki.creativecommons.org/Considerations_for_licensors_and_licensees#Considerations_for_licensees>`__.
|
||||
|
||||
:xlarge:`Creative Commons Attribution 4.0 International Public License`
|
||||
|
||||
By exercising the Licensed Rights (defined below), You accept and agree to be
|
||||
bound by the terms and conditions of this Creative Commons Attribution 4.0
|
||||
International Public License ("Public License"). To the extent this Public
|
||||
License may be interpreted as a contract, You are granted the Licensed Rights
|
||||
in consideration of Your acceptance of these terms and conditions, and the
|
||||
Licensor grants You such rights in consideration of benefits the Licensor
|
||||
receives from making the Licensed Material available under these terms and
|
||||
conditions.
|
||||
|
||||
Section 1 – Definitions.
|
||||
========================
|
||||
|
||||
a. **Adapted Material** means material subject to Copyright and Similar
|
||||
Rights that is derived from or based upon the Licensed Material and in
|
||||
which the Licensed Material is translated, altered, arranged, transformed, or
|
||||
otherwise modified in a manner requiring permission under the Copyright and
|
||||
Similar Rights held by the Licensor. For purposes of this Public License,
|
||||
where the Licensed Material is a musical work, performance, or sound
|
||||
recording, Adapted Material is always produced where the Licensed Material
|
||||
is synched in timed relation with a moving image.
|
||||
|
||||
b. **Adapter's License** means the license You apply to Your Copyright and
|
||||
Similar Rights in Your contributions to Adapted Material in accordance with
|
||||
the terms and conditions of this Public License.
|
||||
|
||||
c. **Copyright and Similar Rights** means copyright and/or similar rights
|
||||
closely related to copyright including, without limitation, performance,
|
||||
broadcast, sound recording, and Sui Generis Database Rights, without regard
|
||||
to how the rights are labeled or categorized. For purposes of this Public
|
||||
License, the rights specified in Section 2(b)(1)-(2) are not Copyright and
|
||||
Similar Rights.
|
||||
|
||||
d. **Effective Technological Measures** means those measures that, in the
|
||||
absence of proper authority, may not be circumvented under laws fulfilling
|
||||
obligations under Article 11 of the WIPO Copyright Treaty adopted on
|
||||
December 20, 1996, and/or similar international agreements.
|
||||
|
||||
e. **Exceptions and Limitations** means fair use, fair dealing, and/or any
|
||||
other exception or limitation to Copyright and Similar Rights that applies to
|
||||
Your use of the Licensed Material.
|
||||
|
||||
f. **Licensed Material** means the artistic or literary work, database, or
|
||||
other material to which the Licensor applied this Public License.
|
||||
|
||||
g. **Licensed Rights** means the rights granted to You subject to the terms
|
||||
and conditions of this Public License, which are limited to all Copyright and
|
||||
Similar Rights that apply to Your use of the Licensed Material and that the
|
||||
Licensor has authority to license.
|
||||
|
||||
h. **Licensor** means the individual(s) or entity(ies) granting rights under
|
||||
this Public License.
|
||||
|
||||
i. **Share** means to provide material to the public by any means or process
|
||||
that requires permission under the Licensed Rights, such as reproduction,
|
||||
public display, public performance, distribution, dissemination,
|
||||
communication, or importation, and to make material available to the public
|
||||
including in ways that members of the public may access the material from a
|
||||
place and at a time individually chosen by them.
|
||||
|
||||
j. **Sui Generis Database Rights** means rights other than copyright
|
||||
resulting from Directive 96/9/EC of the European Parliament and of the
|
||||
Council of 11 March 1996 on the legal protection of databases, as amended
|
||||
and/or succeeded, as well as other essentially equivalent rights anywhere
|
||||
in the world.
|
||||
|
||||
k. **You** means the individual or entity exercising the Licensed Rights
|
||||
under this Public License. **Your** has a corresponding meaning.
|
||||
|
||||
Section 2 – Scope.
|
||||
==================
|
||||
|
||||
a. **License grant.**
|
||||
|
||||
1. Subject to the terms and conditions of this Public License, the Licensor
|
||||
hereby grants You a worldwide, royalty-free, non-sublicensable,
|
||||
non-exclusive, irrevocable license to exercise the Licensed Rights in the
|
||||
Licensed Material to:
|
||||
|
||||
A. reproduce and Share the Licensed Material, in whole or in part; and
|
||||
|
||||
B. produce, reproduce, and Share Adapted Material.
|
||||
|
||||
2. :underline:`Exceptions and Limitations.` For the avoidance of doubt, where
|
||||
Exceptions and Limitations apply to Your use, this Public License does not
|
||||
apply, and You do not need to comply with its terms and conditions.
|
||||
|
||||
3. :underline:`Term.` The term of this Public License is specified in Section 6(a).
|
||||
|
||||
4. :underline:`Media and formats`; :underline:`technical modifications allowed.` The Licensor
|
||||
authorizes You to exercise the Licensed Rights in all media and formats
|
||||
whether now known or hereafter created, and to make technical
|
||||
modifications necessary to do so. The Licensor waives and/or agrees not to
|
||||
assert any right or authority to forbid You from making technical
|
||||
modifications necessary to exercise the Licensed Rights, including
|
||||
technical modifications necessary to circumvent Effective Technological
|
||||
Measures. For purposes of this Public License, simply making modifications
|
||||
authorized by this Section 2(a)(4) never produces Adapted Material.
|
||||
|
||||
5. :underline:`Downstream recipients.`
|
||||
|
||||
A. :underline:`Offer from the Licensor – Licensed Material.` Every recipient of
|
||||
the Licensed Material automatically receives an offer from the
|
||||
Licensor to exercise the Licensed Rights under the terms and
|
||||
conditions of this Public License.
|
||||
|
||||
B. :underline:`No downstream restrictions.` You may not offer or impose any
|
||||
additional or different terms or conditions on, or apply any Effective
|
||||
Technological Measures to, the Licensed Material if doing so restricts
|
||||
exercise of the Licensed Rights by any recipient of the Licensed
|
||||
Material.
|
||||
|
||||
6. :underline:`No endorsement.` Nothing in this Public License constitutes or may
|
||||
be construed as permission to assert or imply that You are, or that Your
|
||||
use of the Licensed Material is, connected with, or sponsored, endorsed,
|
||||
or granted official status by, the Licensor or others designated to
|
||||
receive attribution as provided in Section 3(a)(1)(A)(i).
|
||||
|
||||
b. **Other rights.**
|
||||
|
||||
1. Moral rights, such as the right of integrity, are not licensed under this
|
||||
Public License, nor are publicity, privacy, and/or other similar
|
||||
personality rights; however, to the extent possible, the Licensor waives
|
||||
and/or agrees not to assert any such rights held by the Licensor to the
|
||||
limited extent necessary to allow You to exercise the Licensed Rights, but
|
||||
not otherwise.
|
||||
|
||||
2. Patent and trademark rights are not licensed under this Public License.
|
||||
|
||||
3. To the extent possible, the Licensor waives any right to collect royalties
|
||||
from You for the exercise of the Licensed Rights, whether directly or
|
||||
through a collecting society under any voluntary or waivable statutory or
|
||||
compulsory licensing scheme. In all other cases the Licensor expressly
|
||||
reserves any right to collect such royalties.
|
||||
|
||||
Section 3 – License Conditions.
|
||||
===============================
|
||||
|
||||
Your exercise of the Licensed Rights is expressly made subject to the following conditions.
|
||||
|
||||
a. **Attribution.**
|
||||
|
||||
1. If You Share the Licensed Material (including in modified form), You must:
|
||||
|
||||
A. retain the following if it is supplied by the Licensor with the
|
||||
Licensed Material:
|
||||
|
||||
i. identification of the creator(s) of the Licensed Material and any
|
||||
others designated to receive attribution, in any reasonable manner
|
||||
requested by the Licensor (including by pseudonym if designated);
|
||||
|
||||
ii. a copyright notice;
|
||||
|
||||
iii. a notice that refers to this Public License;
|
||||
|
||||
iv. a notice that refers to the disclaimer of warranties;
|
||||
|
||||
v. a URI or hyperlink to the Licensed Material to the extent reasonably
|
||||
practicable;
|
||||
|
||||
B. indicate if You modified the Licensed Material and retain an
|
||||
indication of any previous modifications; and
|
||||
|
||||
C. indicate the Licensed Material is licensed under this Public License,
|
||||
and include the text of, or the URI or hyperlink to, this Public
|
||||
License.
|
||||
|
||||
2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner
|
||||
based on the medium, means, and context in which You Share the Licensed
|
||||
Material. For example, it may be reasonable to satisfy the conditions by
|
||||
providing a URI or hyperlink to a resource that includes the required
|
||||
information.
|
||||
|
||||
3. If requested by the Licensor, You must remove any of the information
|
||||
required by Section 3(a)(1)(A) to the extent reasonably practicable.
|
||||
|
||||
4. If You Share Adapted Material You produce, the Adapter's License You apply
|
||||
must not prevent recipients of the Adapted Material from complying with
|
||||
this Public License.
|
||||
|
||||
Section 4 – Sui Generis Database Rights.
|
||||
========================================
|
||||
|
||||
Where the Licensed Rights include Sui Generis Database Rights that apply to Your
|
||||
use of the Licensed Material:
|
||||
|
||||
a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract,
|
||||
reuse, reproduce, and Share all or a substantial portion of the contents of
|
||||
the database;
|
||||
|
||||
b. if You include all or a substantial portion of the database contents in a
|
||||
database in which You have Sui Generis Database Rights, then the database
|
||||
in which You have Sui Generis Database Rights (but not its individual
|
||||
contents) is Adapted Material; and
|
||||
|
||||
c. You must comply with the conditions in Section 3(a) if You Share all or a
|
||||
substantial portion of the contents of the database.
|
||||
|
||||
For the avoidance of doubt, this Section 4 supplements and does not replace
|
||||
Your obligations under this Public License where the Licensed Rights include
|
||||
other Copyright and Similar Rights.
|
||||
|
||||
Section 5 – Disclaimer of Warranties and Limitation of Liability.
|
||||
=================================================================
|
||||
|
||||
a. **Unless otherwise separately undertaken by the Licensor, to the extent
|
||||
possible, the Licensor offers the Licensed Material as-is and as-available,
|
||||
and makes no representations or warranties of any kind concerning the
|
||||
Licensed Material, whether express, implied, statutory, or other. This
|
||||
includes, without limitation, warranties of title, merchantability,
|
||||
fitness for a particular purpose, non-infringement, absence of latent or
|
||||
other defects, accuracy, or the presence or absence of errors, whether or
|
||||
not known or discoverable. Where disclaimers of warranties are not allowed
|
||||
in full or in part, this disclaimer may not apply to You.**
|
||||
|
||||
b. **To the extent possible, in no event will the Licensor be liable to You
|
||||
on any legal theory (including, without limitation, negligence) or
|
||||
otherwise for any direct, special, indirect, incidental, consequential,
|
||||
punitive, exemplary, or other losses, costs, expenses, or damages arising
|
||||
out of this Public License or use of the Licensed Material, even if the
|
||||
Licensor has been advised of the possibility of such losses, costs, expenses,
|
||||
or damages. Where a limitation of liability is not allowed in full or in
|
||||
part, this limitation may not apply to You.**
|
||||
|
||||
c. The disclaimer of warranties and limitation of liability provided above
|
||||
shall be interpreted in a manner that, to the extent possible, most
|
||||
closely approximates an absolute disclaimer and waiver of all liability.
|
||||
|
||||
Section 6 – Term and Termination.
|
||||
=================================
|
||||
|
||||
a. This Public License applies for the term of the Copyright and Similar Rights
|
||||
licensed here. However, if You fail to comply with this Public License, then
|
||||
Your rights under this Public License terminate automatically.
|
||||
|
||||
b. Where Your right to use the Licensed Material has terminated under
|
||||
Section 6(a), it reinstates:
|
||||
|
||||
1. automatically as of the date the violation is cured, provided it is cured
|
||||
within 30 days of Your discovery of the violation; or
|
||||
|
||||
2. upon express reinstatement by the Licensor.
|
||||
|
||||
For the avoidance of doubt, this Section 6(b) does not affect any right the
|
||||
Licensor may have to seek remedies for Your violations of this Public License.
|
||||
|
||||
c. For the avoidance of doubt, the Licensor may also offer the Licensed Material
|
||||
under separate terms or conditions or stop distributing the Licensed Material
|
||||
at any time; however, doing so will not terminate this Public License.
|
||||
|
||||
d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License.
|
||||
|
||||
Section 7 – Other Terms and Conditions.
|
||||
=======================================
|
||||
|
||||
a. The Licensor shall not be bound by any additional or different terms or
|
||||
conditions communicated by You unless expressly agreed.
|
||||
|
||||
b. Any arrangements, understandings, or agreements regarding the Licensed
|
||||
Material not stated herein are separate from and independent of the terms
|
||||
and conditions of this Public License.
|
||||
|
||||
Section 8 – Interpretation.
|
||||
===========================
|
||||
|
||||
a. For the avoidance of doubt, this Public License does not, and shall not be
|
||||
interpreted to, reduce, limit, restrict, or impose conditions on any use of
|
||||
the Licensed Material that could lawfully be made without permission under
|
||||
this Public License.
|
||||
|
||||
b. To the extent possible, if any provision of this Public License is deemed
|
||||
unenforceable, it shall be automatically reformed to the minimum extent
|
||||
necessary to make it enforceable. If the provision cannot be reformed, it
|
||||
shall be severed from this Public License without affecting the
|
||||
enforceability of the remaining terms and conditions.
|
||||
|
||||
c. No term or condition of this Public License will be waived and no failure to
|
||||
comply consented to unless expressly agreed to by the Licensor.
|
||||
|
||||
d. Nothing in this Public License constitutes or may be interpreted as a
|
||||
limitation upon, or waiver of, any privileges and immunities that apply to
|
||||
the Licensor or You, including from the legal processes of any jurisdiction
|
||||
or authority.
|
||||
|
||||
------------------
|
||||
|
||||
Creative Commons is not a party to its public licenses. Notwithstanding,
|
||||
Creative Commons may elect to apply one of its public licenses to material it
|
||||
publishes and in those instances will be considered the “Licensor.” Except for
|
||||
the limited purpose of indicating that material is shared under a Creative
|
||||
Commons public license or as otherwise permitted by the Creative Commons
|
||||
policies published at `creativecommons.org/policies <http://creativecommons.org/policies>`__,
|
||||
Creative Commons does not authorize the use of the trademark “Creative Commons”
|
||||
or any other trademark or logo of Creative Commons without its prior written
|
||||
consent including, without limitation, in connection with any unauthorized
|
||||
modifications to any of its public licenses or any other arrangements,
|
||||
understandings, or agreements concerning use of licensed material. For the
|
||||
avoidance of doubt, this paragraph does not form part of the public licenses.
|
||||
|
||||
Creative Commons may be contacted at `creativecommons.org <https://creativecommons.org/>`__
|
||||
110
doc/Instantiation.rst
Normal file
@@ -0,0 +1,110 @@
Instantiation
#############

The job templates (GitHub Action *Reusable Workflows*) need to be stored in the same directory where normal pipelines
(GitHub Action *Workflows*) are located: ``.github/workflows/<template>.yml``. A template file is distinguished from a
normal pipeline by an ``on:workflow_call:`` section instead of an ``on:push:`` section.

**Job Template Definition:**

The ``workflow_call`` allows the definition of input and output parameters.

.. code-block:: yaml

   on:
     workflow_call:
       inputs:
         <Param1>:
           # ...
       outputs:
         # ...

   jobs:
     <JobName>:
       # ...

**Job Template Instantiation:**

When instantiating a template, ``jobs:<Name>:uses`` refers to the template file. Unfortunately, besides the GitHub SLUG
(*<Organization>/<Repository>*), the full path to the template file must be given as well, and the template can't be
placed outside of ``.github/workflows`` to achieve a cleaner repository structure. Finally, the path is suffixed with a
branch name as ``@<branch>`` (tags are still not supported by GitHub Actions). A ``jobs:<Name>:with:`` section can be
used to hand over input parameters to the template.

.. code-block:: yaml

   on:
     push:
     workflow_dispatch:

   jobs:
     <InstanceName>:
       uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@v0
       with:
         <Param1>: <Value>


Example Pipelines
*****************

Documentation Only (Sphinx)
===========================

.. code-block:: yaml

   name: Pipeline

   on:
     push:
     workflow_dispatch:

   jobs:
     BuildTheDocs:
       uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
       with:
         artifact: Documentation

     PublishToGitHubPages:
       uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
       needs:
         - BuildTheDocs
       with:
         doc: Documentation

     ArtifactCleanUp:
       name: 🗑️ Artifact Cleanup
       needs:
         - BuildTheDocs
         - PublishToGitHubPages
       runs-on: ubuntu-24.04

       steps:
         - name: 🗑️ Delete artifacts
           uses: geekyeggo/delete-artifact@v5
           with:
             name: Documentation


Simple Package
==============
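
A minimal sketch of such a pipeline (it combines the ``Package`` and ``PublishOnPyPI`` job templates documented later;
the artifact name ``Package`` and the secret name ``PYPI_TOKEN`` are placeholders):

.. code-block:: yaml

   name: Pipeline

   on:
     push:
     workflow_dispatch:

   jobs:
     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       with:
         artifact: Package

     PublishOnPyPI:
       uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
       if: startsWith(github.ref, 'refs/tags')
       needs:
         - Package
       with:
         artifact: Package
       secrets:
         PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
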
Package with Unit Tests
=======================
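
A sketch combining the ``Parameters``, ``UnitTesting`` and ``Package`` job templates documented later (the project name
``MyProject`` is a placeholder; artifact names are taken from the ``Params`` job):

.. code-block:: yaml

   name: Pipeline

   on:
     push:
     workflow_dispatch:

   jobs:
     Params:
       uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
       with:
         name: MyProject

     UnitTesting:
       uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0
       needs:
         - Params
       with:
         jobs: ${{ needs.Params.outputs.python_jobs }}
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}

     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       needs:
         - Params
         - UnitTesting
       with:
         python_version: ${{ needs.Params.outputs.python_version }}
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
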
Package with Code Coverage
==========================
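
A sketch extending the previous example with the ``CoverageCollection`` job template (the secret name
``CODACY_PROJECT_TOKEN`` follows the CoverageCollection examples below):

.. code-block:: yaml

   jobs:
     Params:
       uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
       with:
         name: MyProject

     Coverage:
       uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
       needs:
         - Params
       with:
         python_version: ${{ needs.Params.outputs.python_version }}
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
       secrets:
         codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       needs:
         - Params
         - Coverage
       with:
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
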
Complex Pipeline
================
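
A sketch of a fuller pipeline wiring several of the documented job templates together (an outline only, not a complete
configuration; all artifact names come from the ``Params`` job and the secret names are placeholders):

.. code-block:: yaml

   jobs:
     Params:
       uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
       with:
         name: MyProject

     UnitTesting:
       uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0
       needs:
         - Params
       with:
         jobs: ${{ needs.Params.outputs.python_jobs }}

     Coverage:
       uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
       needs:
         - Params
       with:
         python_version: ${{ needs.Params.outputs.python_version }}
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
       secrets:
         codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}

     Package:
       uses: pyTooling/Actions/.github/workflows/Package.yml@r0
       needs:
         - Params
         - UnitTesting
         - Coverage
       with:
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}

     PublishOnPyPI:
       uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
       if: startsWith(github.ref, 'refs/tags')
       needs:
         - Params
         - Package
       with:
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
       secrets:
         PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

     ArtifactCleanUp:
       uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0
       needs:
         - Params
         - Package
         - PublishOnPyPI
       with:
         package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
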
Further Reference Examples
**************************

Find further usage examples in the following projects:

- `edaa-org/pyEDAA.ProjectModel <https://github.com/edaa-org/pyEDAA.ProjectModel/tree/main/.github/workflows>`__
- `edaa-org/pySVModel <https://github.com/edaa-org/pySVModel/tree/main/.github/workflows>`__
- `VHDL/pyVHDLModel <https://github.com/VHDL/pyVHDLModel/tree/main/.github/workflows>`__
90
doc/JobTemplate/ArtifactCleanUp.rst
Normal file
@@ -0,0 +1,90 @@
|
||||
.. _JOBTMPL/ArtifactCleanup:
|
||||
|
||||
ArtifactCleanUp
|
||||
###############
|
||||
|
||||
This job removes artifacts used to exchange data from job to job.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Delete the package artifact if the current pipeline run was not a tagged run.
|
||||
2. Delete all remaining artifacts if given as a parameter.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The simplest variant just uses the artifact name for the package.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0
|
||||
with:
|
||||
package: Package
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
ArtifactCleanUp:
|
||||
uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- UnitTesting
|
||||
- BuildTheDocs
|
||||
- PublishToGitHubPages
|
||||
- PublishTestResults
|
||||
with:
|
||||
package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
remaining: |
|
||||
${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-*
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
package
|
||||
=======
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| package | yes | string | — — — — |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Artifacts to be removed on non-tagged runs.
|
||||
|
||||
|
||||
remaining
|
||||
=========
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| remaining | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Artifacts to be removed unconditionally.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
75
doc/JobTemplate/BuildTheDocs.rst
Normal file
@@ -0,0 +1,75 @@
|
||||
.. _JOBTMPL/BuildTheDocs:
|
||||
|
||||
BuildTheDocs
|
||||
############
|
||||
|
||||
This job compiles the documentation written in ReStructuredText with Sphinx using BuildTheDocs.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository.
|
||||
2. Build the documentation.
|
||||
3. Upload the HTML documentation as an artifact.
|
||||
4. Publish the HTML documentation to GitHub Pages.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`buildthedocs/btd`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | optional | string | ``""`` |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the documentation artifact.
|
||||
|
||||
If no artifact name is given, the job directly publishes the documentation's HTML content to GitHub Pages.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
170
doc/JobTemplate/CoverageCollection.rst
Normal file
@@ -0,0 +1,170 @@
|
||||
.. _JOBTMPL/CodeCoverage:
|
||||
|
||||
CoverageCollection
|
||||
##################
|
||||
|
||||
This job runs the specified unit tests with code coverage collection enabled (incl. branch coverage).
|
||||
|
||||
It uses pytest, pytest-cov, and coverage.py in a single job run, so it uses one fixed Python version (usually the
latest). It generates an HTML report and a Cobertura (XML) report, then uploads the HTML report as an artifact and
publishes the Cobertura coverage report to `Codecov <https://about.codecov.io/>`__ and `Codacy <https://www.codacy.com/>`__.
|
||||
|
||||
Configuration options to ``pytest`` and ``coverage.py`` should be given via sections ``[tool.pytest.ini_options]`` and
|
||||
``[tool.coverage.*]`` in a ``pyproject.toml`` file.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Extract configuration from ``pyproject.toml`` or ``.coveragerc``.
|
||||
4. Run unit tests and collect code coverage
|
||||
5. Convert coverage data to a Cobertura XML file
|
||||
6. Convert coverage data to an HTML report
|
||||
7. Upload HTML report as an artifact
|
||||
8. Publish Cobertura file to CodeCov
|
||||
9. Publish Cobertura file to Codacy
|
||||
|
||||
**Preconditions:**
|
||||
|
||||
* A CodeCov account was created.
|
||||
* A Codacy account was created.
|
||||
|
||||
**Requirements:**
|
||||
|
||||
Set up a secret (e.g. ``codacy_token``) in GitHub to hand over the Codacy project token to the job.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
* :gh:`codecov/codecov-action`
|
||||
* :gh:`codacy/codacy-coverage-reporter-action`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
with:
|
||||
artifact: Coverage
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
secrets:
|
||||
codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | 3.11 |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version used for running unit tests.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+===============================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
tests_directory
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+=================+==========+==========+===========+
|
||||
| tests_directory | optional | string | ``tests`` |
|
||||
+-----------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing tests (test working directory).
|
||||
|
||||
|
||||
unittest_directory
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+====================+==========+==========+===========+
|
||||
| unittest_directory | optional | string | ``unit`` |
|
||||
+--------------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing unit tests (relative to tests_directory).
|
||||
|
||||
|
||||
coverage_config
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+--------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+=================+==========+==========+====================+
|
||||
| coverage_config | optional | string | ``pyproject.toml`` |
|
||||
+-----------------+----------+----------+--------------------+
|
||||
|
||||
Path to the ``.coveragerc`` file; ``pyproject.toml`` is used by default.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the coverage artifact.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
codacy_token
|
||||
============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Secret Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| codacy_token | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Token to push results to Codacy.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
106
doc/JobTemplate/Package.rst
Normal file
@@ -0,0 +1,106 @@
|
||||
.. _JOBTMPL/Package:
|
||||
|
||||
Package
|
||||
#######
|
||||
|
||||
This job packages the Python source code as a source package (``*.tar.gz``) and a wheel package (``*.whl``) and uploads
them as an artifact.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Package Python sources:
|
||||
|
||||
* If parameter ``requirements`` is empty, use the ``build`` package and run ``python -m build``.
* If parameter ``requirements`` is ``no-isolation``, use the ``build`` package in *no-isolation* mode and run
  ``python -m build``.
* If parameter ``requirements`` is non-empty, use the ``setuptools`` package and run ``python setup.py``.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@r0
|
||||
with:
|
||||
artifact: Package
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Package:
|
||||
uses: pyTooling/Actions/.github/workflows/Package.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- Coverage
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: -r build/requirements.txt
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | 3.11 |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| requirements | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python dependencies to be installed through pip. If empty, ``pyproject.toml`` is used via the ``build`` package.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Name of the package artifact.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
413
doc/JobTemplate/Parameters.rst
Normal file
@@ -0,0 +1,413 @@
|
||||
.. _JOBTMPL/Parameters:
|
||||
|
||||
Parameters
|
||||
##########
|
||||
|
||||
The ``Parameters`` job template is a workaround for the limitation that GitHub Actions cannot handle global variables
in workflows (see `actions/runner#480 <https://github.com/actions/runner/issues/480>`__).
|
||||
|
||||
It generates output parameters with artifact names and a job matrix, which are used by jobs running later in the pipeline.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
.. todo:: Parameters:Behavior Needs documentation.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
*None*
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The following instantiation example creates a job ``Params`` derived from job template ``Parameters`` version ``r0``.
It only requires a ``name`` parameter to create the artifact names.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
The following instantiation example creates three jobs from the same template, but with differing input parameters. The
first job ``UnitTestingParams`` might be used to create a job matrix of unit tests. It creates the cross product of the
default systems (Windows, Ubuntu, macOS, MinGW64, UCRT64) and the given list of Python versions, including some pypy
versions. In addition, a list of excludes (marked as :deletion:`deletions`) and includes (marked as
:addition:`additions`) is handed over, resulting in the following combinations:
|
||||
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Version | 3.8 🔴 | 3.9 🟠 | 3.10 🟡 | 3.11 🟢 | 3.12 🟢 | 3.13.a1 🟣 | pypy-3.8 🔴 | pypy-3.9 🟠 | pypy-3.10 🟡 |
|
||||
+============+=============+=============+==============+==============+=========================+============+=============+==============================+===============================+
|
||||
| Windows 🧊 | windows:3.8 | windows:3.9 | windows:3.10 | windows:3.11 | | | | :deletion:`windows:pypy-3.9` | :deletion:`windows:pypy-3.10` |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Ubuntu 🐧 | ubuntu:3.8 | ubuntu:3.9 | ubuntu:3.10 | ubuntu:3.11 | :addition:`ubuntu:3.12` | | | ubuntu:pypy-3.9 | ubuntu:pypy-3.10 |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| macOS 🍎 | macos:3.8 | macos:3.9 | macos:3.10 | macos:3.11 | :addition:`macos:3.12` | | | macos:pypy-3.9 | macos:pypy-3.10 |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MSYS 🟪 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MinGW32 ⬛ | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| MinGW64 🟦 | | | | mingw64:3.11 | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Clang32 🟫 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| Clang64 🟧 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
| UCRT64 🟨 | | | | | | | | | |
|
||||
+------------+-------------+-------------+--------------+--------------+-------------------------+------------+-------------+------------------------------+-------------------------------+
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
UnitTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.8 3.9 3.10 3.11 pypy-3.9 pypy-3.10"
|
||||
include_list: "ubuntu:3.12 macos:3.12"
|
||||
exclude_list: "windows:pypy-3.9 windows:pypy-3.10"
|
||||
|
||||
PerformanceTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.11 3.12"
|
||||
system_list: "ubuntu windows macos"
|
||||
|
||||
PlatformTestingParams:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev
|
||||
with:
|
||||
name: pyTooling
|
||||
python_version_list: "3.12"
|
||||
system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
name
|
||||
====
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| name | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
The name of the library or package.
|
||||
|
||||
It's used to create artifact names.
|
||||
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | ``3.12`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version to be used for all jobs requiring a single Python version.
|
||||
|
||||
|
||||
python_version_list
|
||||
===================
|
||||
|
||||
+----------------------+----------+----------+----------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+======================+==========+==========+============================+
|
||||
| python_version_list | optional | string | ``3.8 3.9 3.10 3.11 3.12`` |
|
||||
+----------------------+----------+----------+----------------------------+
|
||||
|
||||
Space-separated list of CPython and/or pypy versions to run tests with.
|
||||
|
||||
**Possible values:**
|
||||
|
||||
* ``3.7``, ``3.8``, ``3.9``, ``3.10`` , ``3.11``, ``3.12``, ``3.13``
|
||||
* ``pypy-3.7``, ``pypy-3.8``, ``pypy-3.9``, ``pypy-3.10``
|
||||
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| Icon | Version | Maintained until | Comments |
|
||||
+======+===========+==================+=========================================+
|
||||
| ⚫ | 3.7 | 2023.06.27 | :red:`outdated` |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🔴 | 3.8 | 2024.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟠 | 3.9 | 2025.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟡 | 3.10 | 2026.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟢 | 3.11 | 2027.10 | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟢 | 3.12 | 2028.10 | :green:`latest` |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| 🟣 | 3.13 | 2029.10 | Python 3.13 alpha (or RC) will be used. |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲⚫ | pypy-3.7 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🔴 | pypy-3.8 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🟠 | pypy-3.9 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
| ⟲🟡 | pypy-3.10 | ????.?? | |
|
||||
+------+-----------+------------------+-----------------------------------------+
|
||||
|
||||
|
||||
system_list
|
||||
===========
|
||||
|
||||
+----------------+----------+----------+-----------------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=========================================+
|
||||
| system_list | optional | string | ``ubuntu windows macos mingw64 ucrt64`` |
|
||||
+----------------+----------+----------+-----------------------------------------+
|
||||
|
||||
Space-separated list of systems to run tests on.
|
||||
|
||||
**Possible values:**
|
||||
|
||||
* Native systems: ``ubuntu``, ``windows``, ``macos``
|
||||
* MSYS2: ``msys``, ``mingw32``, ``mingw64``, ``clang32``, ``clang64``, ``ucrt64``
|
||||
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| Icon | System | Used version | Comments |
|
||||
+======+===========+==============================+=================================================================+
|
||||
| 🧊 | Windows | Windows Server 2022 (latest) | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🐧 | Ubuntu | Ubuntu 22.04 (LTS) (latest) | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🍎 | macOS | macOS Monterey 12 (latest) | While this marked latest, macOS Ventura 13 is already provided. |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟪 | MSYS | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| ⬛ | MinGW32 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟦 | MinGW64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟫 | Clang32 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟧 | Clang64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
| 🟨 | UCRT64 | | |
|
||||
+------+-----------+------------------------------+-----------------------------------------------------------------+
|
||||
|
||||
Source: `Images provided by GitHub <https://github.com/actions/runner-images>`__
|
||||
|
||||
include_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| include_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be included in the list of tests.
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
include_list: "ubuntu:3.11 macos:3.11"
|
||||
|
||||
|
||||
exclude_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| exclude_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be excluded from the list of tests.
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
exclude_list: "windows:pypy-3.8 windows:pypy-3.9"
|
||||
|
||||
|
||||
disable_list
|
||||
============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| disable_list | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Space-separated list of ``system:python`` items to be temporarily disabled from the list of tests.
|
||||
|
||||
Each disabled item creates a warning in the workflow log:
|
||||
|
||||
.. image:: /_static/GH_Workflow_DisabledJobsWarnings.png
|
||||
:scale: 80 %
|
||||
|
||||
|
||||
**Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
disable_list: "windows:3.10 windows:3.11"
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
A single string parameter representing the default Python version that should be used across multiple jobs in the same
|
||||
pipeline.
|
||||
|
||||
Such a parameter is needed as a workaround, because GitHub Actions doesn't support proper handling of global pipeline
|
||||
variables. Thus, this job is used to compute an output parameter that can be reused in other jobs.
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
CodeCoverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
|
||||
python_jobs
|
||||
===========
|
||||
|
||||
A list of dictionaries, each containing a job description.
|
||||
|
||||
A job description contains the following key-value pairs:
|
||||
|
||||
* ``sysicon`` - icon to display
|
||||
* ``system`` - name of the system
|
||||
* ``runs-on`` - virtual machine image and base operating system
|
||||
* ``runtime`` - name of the runtime environment if not running natively on the VM image
|
||||
* ``shell`` - name of the shell
|
||||
* ``pyicon`` - icon for CPython or pypy
|
||||
* ``python`` - Python version
|
||||
* ``envname`` - full name of the selected environment
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
jobs: ${{ needs.Params.outputs.python_jobs }}
|
||||
|
||||
This list can be unpacked with ``fromJson(...)`` in a job ``strategy:matrix:include``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
UnitTesting:
|
||||
name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests using Python ${{ matrix.python }}
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
include: ${{ fromJson(inputs.jobs) }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: ${{ matrix.shell }}
|
||||
|
||||
steps:
|
||||
- name: 🐍 Setup Python ${{ matrix.python }}
|
||||
if: matrix.system != 'msys2'
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
|
||||
artifact_names
|
||||
==============
|
||||
|
||||
A dictionary of artifact names sharing a common prefix.
|
||||
|
||||
The supported artifacts are:
|
||||
|
||||
* ``unittesting_xml`` - UnitTesting XML summary report
|
||||
* ``unittesting_html`` - UnitTesting HTML summary report
|
||||
* ``codecoverage_sqlite`` - Code Coverage internal database (SQLite)
|
||||
* ``codecoverage_json`` - Code Coverage JSON report
|
||||
* ``codecoverage_xml`` - Code Coverage XML report
|
||||
* ``codecoverage_html`` - Code Coverage HTML report
|
||||
* ``statictyping_html`` - Static Type Checking HTML report
|
||||
* ``package_all`` - Packaged Python project (multiple formats)
|
||||
* ``documentation_pdf`` - Documentation in PDF format
|
||||
* ``documentation_html`` - Documentation in HTML format
|
||||
|
||||
**Usage Example:**
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0
|
||||
with:
|
||||
name: pyTooling
|
||||
|
||||
Coverage:
|
||||
uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@dev
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
|
||||
|
||||
Params
|
||||
======
|
||||
|
||||
.. attention:: ``Params`` is deprecated.
|
||||
|
||||
* ``params['unittesting']`` |rarr| ``artifact_names['unittesting_xml']``
|
||||
* ``params['coverage']`` |rarr| ``artifact_names['codecoverage_xml']``
|
||||
* ``params['typing']`` |rarr| ``artifact_names['statictyping_html']``
|
||||
* ``params['package']`` |rarr| ``artifact_names['package_all']``
|
||||
* ``params['doc']`` |rarr| ``artifact_names['documentation_html']``
|
||||
139
doc/JobTemplate/PublishOnPyPI.rst
Normal file
@@ -0,0 +1,139 @@
|
||||
.. _JOBTMPL/PyPI:
|
||||
|
||||
PublishOnPyPI
|
||||
#############
|
||||
|
||||
Publish a source (``*.tar.gz``) package and/or wheel (``*.whl``) packages to `PyPI <https://pypi.org/>`__.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Download package artifact
|
||||
2. Publish source package(s) (``*.tar.gz``)
|
||||
3. Publish wheel package(s) (``*.whl``)
|
||||
4. Delete the artifact
|
||||
|
||||
**Preconditions:**
|
||||
|
||||
A PyPI account was created and the package name is either not occupied or the user has access rights for that package.
|
||||
|
||||
**Requirements:**
|
||||
|
||||
Set up a secret (e.g. ``PYPI_TOKEN``) in GitHub to hand over the PyPI token to the job.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`geekyeggo/delete-artifact`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
The following example demonstrates how to publish the artifact named ``Package`` to PyPI on every pipeline run triggered
|
||||
by a Git tag. A secret is forwarded from GitHub secrets to a job secret.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
# ...
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
with:
|
||||
artifact: Package
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
In this more complex example, the job depends on a parameter creation job (``Params``) and a packaging job
(``Package``). The used Python version is overridden by a parameter calculated in the ``Params`` job, and the artifact
name is managed by that job as well. Finally, the list of requirements is overridden to load the requirements from
``dist/requirements.txt``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
# ...
|
||||
|
||||
Package:
|
||||
# ...
|
||||
|
||||
PublishOnPyPI:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Params
|
||||
- Package
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
requirements: -r dist/requirements.txt
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }}
|
||||
secrets:
|
||||
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| python_version | optional | string | ``3.11`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Python version used for uploading the package contents via ``twine`` to PyPI.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| requirements | optional | string | ``wheel twine`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
List of requirements to be installed for uploading the package contents to PyPI.
|
||||
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the artifact containing the package(s).
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
PYPI_TOKEN
|
||||
==========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Secret Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| PYPI_TOKEN | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
The token to access the package at PyPI for uploading new data.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
88
doc/JobTemplate/PublishTestResults.rst
Normal file
@@ -0,0 +1,88 @@
|
||||
.. _JOBTMPL/PublishTestResults:
|
||||
|
||||
PublishTestResults
|
||||
##################
|
||||
|
||||
This job downloads all artifacts and publishes the jUnit XML reports as a Markdown page in GitHub Actions to visualize
the results as an item in the job list. For publishing, :gh:`dorny/test-reporter` is used.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Download (all) artifacts
|
||||
3. Publish test results as a markdown report page to GitHub Actions.
|
||||
|
||||
.. note::
|
||||
|
||||
The :gh:`actions/download-artifact` action does not support wildcards to specify a subset of artifacts for downloading.
|
||||
Thus, all artifacts need to be downloaded.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/download-artifact`
|
||||
* :gh:`dorny/test-reporter`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
CodeCoverage:
|
||||
# ...
|
||||
|
||||
UnitTesting:
|
||||
# ...
|
||||
|
||||
PublishTestResults:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@r0
|
||||
needs:
|
||||
- CodeCoverage
|
||||
- UnitTesting
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
report_files
|
||||
============
|
||||
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================================+
|
||||
| report_files | optional | string | ``artifacts/**/*.xml`` |
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
|
||||
Pattern of jUnit report files to publish as Markdown.
|
||||
|
||||
The parameter can be a comma-separated list. Wildcards are supported.
|
||||
|
||||
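
For example (a sketch only; the report file names below are placeholders, not names produced by the templates):

.. code-block:: yaml

   with:
     # Placeholder file names; adjust to the reports your jobs actually upload into 'artifacts/'.
     report_files: artifacts/**/UnittestReportSummary.xml, artifacts/**/StaticTypingSummary.xml
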
.. hint::
|
||||
|
||||
All artifacts are downloaded into directory ``artifacts``, thus the pattern should include this directory as a
|
||||
prefix.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
106
doc/JobTemplate/PublishToGitHubPages.rst
Normal file
@@ -0,0 +1,106 @@
|
||||
.. _JOBTMPL/PublishToGitHubPages:
|
||||
|
||||
PublishToGitHubPages
|
||||
####################
|
||||
|
||||
This job publishes HTML content from artifacts of other jobs to GitHub Pages.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository.
|
||||
2. Download artifacts.
|
||||
3. Push HTML files to branch ``gh-pages``.
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/download-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
BuildTheDocs:
|
||||
# ...
|
||||
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
|
||||
needs:
|
||||
- BuildTheDocs
|
||||
with:
|
||||
doc: Documentation
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
PublishToGitHubPages:
|
||||
uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
- BuildTheDocs
|
||||
- Coverage
|
||||
- StaticTypeCheck
|
||||
with:
|
||||
doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }}
|
||||
coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }}
|
||||
typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
doc
|
||||
===
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| doc | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the documentation artifact.
|
||||
|
||||
|
||||
coverage
|
||||
========
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| coverage | optional | string | ``""`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Name of the coverage artifact.
|
||||
|
||||
|
||||
typing
|
||||
======
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| typing | optional | string | ``""`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Name of the typing artifact.
|
||||
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
96
doc/JobTemplate/Release.rst
Normal file
@@ -0,0 +1,96 @@
|
||||
.. _JOBTMPL/GitHubReleasePage:
|
||||
|
||||
Release
|
||||
#######
|
||||
|
||||
This job creates a Release Page on GitHub.
|
||||
|
||||
**Release Template in Markdown**:
|
||||
|
||||
.. parsed-literal::
|
||||
|
||||
**Automated Release created on: ${{ steps.getVariables.outputs.datetime }}**
|
||||
|
||||
# New Features
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Changes
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Bug Fixes
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Documentation
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
# Unit Tests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
----------
|
||||
# Related Issues and Pull-Requests
|
||||
|
||||
* tbd
|
||||
* tbd
|
||||
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Extract information from environment variables provided by GitHub Actions.
|
||||
2. Create a Release Page on GitHub
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/create-release` (unmaintained)
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@r0
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Release:
|
||||
uses: pyTooling/Actions/.github/workflows/Release.yml@r0
|
||||
if: startsWith(github.ref, 'refs/tags')
|
||||
needs:
|
||||
- Package
|
||||
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
This job template needs no input parameters.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
182
doc/JobTemplate/StaticTypeCheck.rst
Normal file
@@ -0,0 +1,182 @@
|
||||
.. _JOBTMPL/StaticTypeChecking:
|
||||
|
||||
StaticTypeCheck
|
||||
###############
|
||||
|
||||
This job runs a static type check using mypy and collects the results. These results can be converted to an HTML
report and then uploaded as an artifact.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Run type checking command(s).
|
||||
4. Upload type checking report as an artifact
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0
|
||||
with:
|
||||
commands: |
|
||||
touch pyTooling/__init__.py
|
||||
mypy --html-report htmlmypy -p pyTooling
|
||||
report: 'htmlmypy'
|
||||
artifact: TypeChecking
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
StaticTypeCheck:
|
||||
uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
python_version: ${{ needs.Params.outputs.python_version }}
|
||||
commands: |
|
||||
touch pyTooling/__init__.py
|
||||
mypy --html-report htmlmypy -p pyTooling
|
||||
report: 'htmlmypy'
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }}
|
||||
|
||||
Commands
|
||||
========
|
||||
|
||||
Example ``commands``:
|
||||
|
||||
1. Regular package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: mypy --html-report htmlmypy -p ToolName
|
||||
|
||||
|
||||
2. Parent namespace package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: |
|
||||
touch Parent/__init__.py
|
||||
mypy --html-report htmlmypy -p ToolName
|
||||
|
||||
3. Child namespace package
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
commands: |
|
||||
cd Parent
|
||||
mypy --html-report ../htmlmypy -p ToolName
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
python_version
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| python_version | optional | string | ``3.11`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
Python version.
|
||||
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+===============================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+-------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
commands
|
||||
========
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| commands | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Commands to run the static type checks.
|
||||
|
||||
|
||||
html_report
|
||||
===========
|
||||
|
||||
+----------------+----------+----------+-----------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================+
|
||||
| report | optional | string | ``htmlmypy`` |
|
||||
+----------------+----------+----------+-----------------+
|
||||
|
||||
HTML output directory to upload as an artifact.
|
||||
|
||||
|
||||
junit_report
|
||||
============
|
||||
|
||||
+----------------+----------+----------+-----------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=============================+
|
||||
| report | optional | string | ``StaticTypingSummary.xml`` |
|
||||
+----------------+----------+----------+-----------------------------+
|
||||
|
||||
jUnit XML file to upload as an artifact.
|
||||
|
||||
|
||||
html_artifact
|
||||
=============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| html_artifact | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the typing artifact (HTML report).
|
||||
|
||||
|
||||
junit_artifact
|
||||
==============
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| junit_artifact | optional | string | ``""`` |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
Name of the typing junit artifact (junit XML).
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
doc/JobTemplate/UnitTesting.rst (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
.. _JOBTMPL/UnitTesting:
|
||||
|
||||
UnitTesting
|
||||
###########
|
||||
|
||||
This template runs multiple jobs from a matrix spanning Python versions and operating systems. The summary report in
junit XML format is optionally uploaded as an artifact.
|
||||
|
||||
Configuration options to ``pytest`` should be given via section ``[tool.pytest.ini_options]`` in a ``pyproject.toml``
|
||||
file.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
1. Checkout repository
|
||||
2. Setup Python and install dependencies
|
||||
3. Run unit tests using ``pytest``.
|
||||
4. Upload junit test summary as an artifact
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
* :gh:`actions/checkout`
|
||||
* :gh:`msys2/setup-msys2`
|
||||
* :gh:`actions/setup-python`
|
||||
* :gh:`actions/upload-artifact`
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
jobs:
|
||||
Params:
|
||||
# ...
|
||||
|
||||
UnitTesting:
|
||||
uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0
|
||||
needs:
|
||||
- Params
|
||||
with:
|
||||
jobs: ${{ needs.Params.outputs.python_jobs }}
|
||||
artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting }}
|
||||
|
||||
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
TBD
|
||||
|
||||
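
Until the complex example above is documented, the following sketch shows how the optional parameters documented below
could be combined. It is a sketch only: the workflow reference and the ``jobs``/``artifact`` expressions follow the
simple example, while the remaining values (requirements file, pacboy package, test directories) are assumptions.

.. code-block:: yaml

   jobs:
     UnitTesting:
       uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@r0
       needs:
         - Params
       with:
         jobs: ${{ needs.Params.outputs.python_jobs }}
         requirements: '-r tests/requirements.txt'    # default, shown explicitly
         pacboy: 'python-lxml:p'                      # assumed extra MSYS2 package (see 'pacboy' below)
         tests_directory: tests                       # default test working directory
         unittest_directory: unit                     # default unit test subdirectory
         artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting }}
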
Parameters
|
||||
**********
|
||||
|
||||
jobs
|
||||
====
|
||||
|
||||
+----------------+----------+----------+--------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==============+
|
||||
| jobs | yes | string | — — — — |
|
||||
+----------------+----------+----------+--------------+
|
||||
|
||||
JSON list of environment entries, specifying the operating systems and Python versions to run the tests with.
|
||||
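
For illustration only, such a JSON list could look like the sketch below. The field names shown here are hypothetical;
the actual structure is produced by the Parameters job template.

.. code-block:: yaml

   # Hypothetical field names; the real structure is defined by the Parameters template.
   jobs: >-
     [
       {"system": "ubuntu",  "runs-on": "ubuntu-latest",  "python": "3.11"},
       {"system": "windows", "runs-on": "windows-latest", "python": "3.12"}
     ]
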
|
||||
|
||||
requirements
|
||||
============
|
||||
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+=================================+
|
||||
| requirements | optional | string | ``-r tests/requirements.txt`` |
|
||||
+----------------+----------+----------+---------------------------------+
|
||||
|
||||
Python dependencies to be installed through pip.
|
||||
|
||||
|
||||
pacboy
|
||||
======
|
||||
|
||||
+----------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+===========+
|
||||
| pacboy | optional | string | ``""`` |
|
||||
+----------------+----------+----------+-----------+
|
||||
|
||||
Additional MSYS2 dependencies to be installed through pacboy (pacman).
|
||||
|
||||
Internally, a workflow step reads the Python requirements file and compares the requested packages against a list of
packages that should be installed through pacman/pacboy instead of pip. These are mainly core packages or packages
with embedded C code.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
pacboy: >-
|
||||
python-lxml:p
|
||||
|
||||
|
||||
mingw_requirements
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+====================+==========+==========+==========+
|
||||
| mingw_requirements | optional | string | ``""`` |
|
||||
+--------------------+----------+----------+----------+
|
||||
|
||||
Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.
|
||||
|
||||
|
||||
tests_directory
|
||||
===============
|
||||
|
||||
+-----------------+----------+----------+-----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+=================+==========+==========+===========+
|
||||
| tests_directory | optional | string | ``tests`` |
|
||||
+-----------------+----------+----------+-----------+
|
||||
|
||||
Path to the directory containing tests (test working directory).
|
||||
|
||||
|
||||
unittest_directory
|
||||
==================
|
||||
|
||||
+--------------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+====================+==========+==========+==========+
|
||||
| unittest_directory | optional | string | ``unit`` |
|
||||
+--------------------+----------+----------+----------+
|
||||
|
||||
Path to the directory containing unit tests (relative to tests_directory).
|
||||
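
As a sketch of how these two directory parameters relate (using the documented defaults), the unit tests are expected
in ``<tests_directory>/<unittest_directory>``, i.e. ``tests/unit``:

.. code-block:: yaml

   with:
     tests_directory: tests       # test working directory (default)
     unittest_directory: unit     # unit tests live in tests/unit (default)
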
|
||||
|
||||
artifact
|
||||
========
|
||||
|
||||
+----------------+----------+----------+----------+
|
||||
| Parameter Name | Required | Type | Default |
|
||||
+================+==========+==========+==========+
|
||||
| artifact | optional | string | ``""`` |
|
||||
+----------------+----------+----------+----------+
|
||||
|
||||
Generate unit test report with junitxml and upload results as an artifact.
|
||||
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
doc/JobTemplate/VerifyDocs.rst (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
.. _JOBTMPL/VerifyDocumentation:
|
||||
|
||||
VerifyDocs
|
||||
##########
|
||||
|
||||
This job extracts code examples from the README and tests these code snippets.
|
||||
|
||||
**Behavior:**
|
||||
|
||||
TBD
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
TBD
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
Simple Example
|
||||
==============
|
||||
|
||||
.. todo:: VerifyDocs:SimpleExample Needs documentation.
|
||||
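
Until this example is documented, a minimal instantiation would presumably look like the sketch below. The workflow
file name is assumed from the template name, and inputs are omitted because they are not documented yet.

.. code-block:: yaml

   jobs:
     VerifyDocs:
       # Assumed file name; check .github/workflows/ for the authoritative template interface.
       uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@r0
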
|
||||
Complex Example
|
||||
===============
|
||||
|
||||
.. todo:: VerifyDocs:ComplexExample Needs documentation.
|
||||
|
||||
Parameters
|
||||
**********
|
||||
|
||||
.. todo:: VerifyDocs:Parameters Needs documentation.
|
||||
|
||||
Secrets
|
||||
*******
|
||||
|
||||
This job template needs no secrets.
|
||||
|
||||
Results
|
||||
*******
|
||||
|
||||
This job template has no output parameters.
|
||||
doc/JobTemplate/index.rst (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
.. _JOBTMPL:
|
||||
|
||||
Overview
|
||||
########
|
||||
|
||||
The following list categorizes all pre-defined job templates, which can be instantiated in a pipeline (GitHub Action
|
||||
Workflow). They can also serve as an example for creating or deriving your own job templates.
|
||||
|
||||
**Table of Contents:**
|
||||
|
||||
.. hlist::
|
||||
:columns: 2
|
||||
|
||||
* **Global Templates**
|
||||
|
||||
* :ref:`JOBTMPL/Parameters`
|
||||
|
||||
* **Unit Tests, Code Coverage, Code Quality, ...**
|
||||
|
||||
* :ref:`JOBTMPL/UnitTesting`
|
||||
* :ref:`JOBTMPL/CodeCoverage`
|
||||
* :ref:`JOBTMPL/StaticTypeChecking`
|
||||
* *code formatting (planned)*
|
||||
* *coding style (planned)*
|
||||
* *code linting (planned)*
|
||||
|
||||
* **Build and Packaging**
|
||||
|
||||
* :ref:`JOBTMPL/Package`
|
||||
|
||||
* **Documentation**
|
||||
|
||||
* :ref:`JOBTMPL/VerifyDocumentation`
|
||||
* :ref:`JOBTMPL/BuildTheDocs`
|
||||
|
||||
* **Releasing, Publishing**
|
||||
|
||||
* :ref:`JOBTMPL/GitHubReleasePage`
|
||||
* :ref:`JOBTMPL/PyPI`
|
||||
* :ref:`JOBTMPL/PublishTestResults`
|
||||
* :ref:`JOBTMPL/PublishToGitHubPages`
|
||||
|
||||
* **Cleanups**
|
||||
|
||||
* :ref:`JOBTMPL/ArtifactCleanup`
|
||||
|
||||
|
||||
Instantiation
|
||||
*************
|
||||
|
||||
When instantiating a template, ``jobs:<Name>:uses`` refers to the template file. Unfortunately, besides the GitHub
SLUG (*<Organization>/<Repository>*), the full path to the template needs to be given, and the template cannot be
placed outside of ``.github/workflows`` to achieve a cleaner repository structure. Finally, the path is postfixed by a
branch name as ``@<branch>`` (tags are still not supported by GitHub Actions). A ``jobs:<Name>:with:`` section can be
used to hand over input parameters to the template.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
on:
|
||||
push:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
<InstanceName>:
|
||||
uses: <GitHubOrganization>/<Repository>/.github/workflows/<Template>.yml@v0
|
||||
with:
|
||||
<Param1>: <Value>
|
||||
doc/License.rst (new file, 136 lines)
@@ -0,0 +1,136 @@
|
||||
.. Note:: This is a local copy of the `Apache License Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0>`_.
|
||||
|
||||
Apache License 2.0
|
||||
##################
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
**TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION**
|
||||
|
||||
|
||||
1. Definitions.
|
||||
===============
|
||||
**"License"** shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
**"Licensor"** shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
**"Legal Entity"** shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that
|
||||
entity. For the purposes of this definition, **"control"** means (i) the power, direct or indirect, to cause the direction or management of such entity, whether
|
||||
by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
**"You"** (or **"Your"**) shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
**"Source"** form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and
|
||||
configuration files.
|
||||
|
||||
**"Object"** form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object
|
||||
code, generated documentation, and conversions to other media types.
|
||||
|
||||
**"Work"** shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is
|
||||
included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
**"Derivative Works"** shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
**"Contribution"** shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative
|
||||
Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to
|
||||
submit on behalf of the copyright owner. For the purposes of this definition, **"submitted"** means any form of electronic, verbal, or written communication
|
||||
sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue
|
||||
tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is
|
||||
conspicuously marked or otherwise designated in writing by the copyright owner as **"Not a Contribution."**
|
||||
|
||||
**"Contributor"** shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License.
|
||||
==============================
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License.
|
||||
===========================
|
||||
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such
|
||||
license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of
|
||||
their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim
|
||||
or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then
|
||||
any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution.
|
||||
==================
|
||||
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form,
|
||||
provided that You meet the following conditions:
|
||||
|
||||
* You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
* You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
* You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source
|
||||
form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
* If the Work includes a **"NOTICE"** text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the
|
||||
attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the
|
||||
following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the
|
||||
Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE
|
||||
file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute,
|
||||
alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or
|
||||
distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise
|
||||
complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions.
|
||||
===============================
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any
|
||||
separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks.
|
||||
==============
|
||||
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable
|
||||
and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty.
|
||||
==========================
|
||||
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT,
|
||||
MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and
|
||||
assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability.
|
||||
===========================
|
||||
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate
|
||||
and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or
|
||||
consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages
|
||||
for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been
|
||||
advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability.
|
||||
==============================================
|
||||
While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other
|
||||
liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole
|
||||
responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
----------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
**Appendix: How to apply the Apache License to your work**
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying
|
||||
information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or
|
||||
class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
doc/Makefile (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = .
|
||||
BUILDDIR = _build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
doc/Releases.rst (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
Releases Management
|
||||
###################
|
||||
|
||||
Releases
|
||||
********
|
||||
|
||||
r1
|
||||
==
|
||||
|
||||
.. note:: Upcoming next release, based on ``v1.x.y``.
|
||||
|
||||
.. attention:: This release introduces breaking changes.
|
||||
|
||||
r0
|
||||
==
|
||||
|
||||
.. todo:: Releases:r0 Needs documentation.
|
||||
|
||||
Versions
|
||||
********
|
||||
|
||||
.. todo:: Releases:Versions Needs documentation.
|
||||
|
||||
Branches
|
||||
********
|
||||
|
||||
.. mermaid::
|
||||
|
||||
%%{init: { 'logLevel': 'debug', 'theme': 'neutral', 'gitGraph': {'rotateCommitLabel': false} } }%%
|
||||
gitGraph
|
||||
commit id: "-"
|
||||
branch dev
|
||||
commit id: "B"
|
||||
commit id: "C"
|
||||
checkout main
|
||||
merge dev tag: "v0.4.0"
|
||||
checkout dev
|
||||
commit id: "D"
|
||||
commit id: "E"
|
||||
commit id: "F"
|
||||
checkout main
|
||||
merge dev tag: "v0.5.0"
|
||||
|
||||
``dev``
|
||||
=======
|
||||
|
||||
Development is done on branch ``dev``.
|
||||
|
||||
All merge requests need to target this branch.
|
||||
|
||||
``main``
|
||||
========
|
||||
|
||||
Finished development is merged to branch ``main``.
|
||||
|
||||
Each merge-commit is tagged with a semantic version.
|
||||
|
||||
|
||||
Tagging
|
||||
*******
|
||||
|
||||
See context in :ghissue:`#5 Tagging/versioning of this repo <5>`.
|
||||
|
||||
Tag new releases in the ``main`` branch using a semver-compatible value, starting with ``v``:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git checkout main
|
||||
git tag v0.0.0
|
||||
git push upstream v0.0.0
|
||||
|
||||
Move the corresponding release branch (starting with ``r``) forward by creating a merge commit, and using the merged tag
|
||||
as the commit message:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git checkout r0
|
||||
git merge --no-ff -m 'v0.0.0' v0.0.0
|
||||
git push upstream r0
|
||||
doc/RepositoryStructure.rst (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
Repository Structure
|
||||
####################
|
||||
|
||||
pyTooling Actions assumes a certain repository structure and set of technologies. Apart from the directory and file
names assumed in the default parameters of the job templates, almost all settings can be overridden if the target
repository has a different structure.
|
||||
|
||||
* Python source code is located in a directory named after the Python package name.
|
||||
|
||||
* A ``<package>/__init__.py`` should be provided with global package information like: version number, author,
|
||||
copyrights, license, maintainer, ...
|
||||
|
||||
* All tests are located in a ``/tests`` directory and further divided into subdirectories by testing approach.
|
||||
|
||||
* E.g. unit tests are located in a ``/tests/unit`` directory.
|
||||
|
||||
* The package documentation is located in a ``/doc`` directory.
|
||||
|
||||
* Documentation is written with ReStructured Text (ReST) and translated using Sphinx.
|
||||
* Documentation requirements are listed in a ``/doc/requirements.txt``.
|
||||
|
||||
* Dependencies are listed in a ``/requirements.txt``.
|
||||
|
||||
* If the build process requires separate dependencies, a ``/build/requirements.txt`` is used.
|
||||
* If the publishing/distribution process requires separate dependencies, a ``/dist/requirements.txt`` is used.
|
||||
* To reduce duplication of dependencies, dependency files should include each other recursively with ``-r <path>``.
|
||||
|
||||
* All Python project settings are stored in a ``pyproject.toml``.
|
||||
* The Python package is described in a ``setup.py``.
|
||||
* Packages are built with ``build`` instead of ``setuptools``.
|
||||
* A repository overview is given in a ``README.md``.
|
||||
|
||||
.. code-block::
|
||||
|
||||
<Repository>/
|
||||
.github/
|
||||
workflows/
|
||||
Pipeline.yml
|
||||
dependabot.yml
|
||||
.vscode/
|
||||
settings.json
|
||||
build/
|
||||
requirements.txt
|
||||
dist/
|
||||
requirements.txt
|
||||
doc/
|
||||
conf.py
|
||||
index.rst
|
||||
requirements.txt
|
||||
<package>
|
||||
__init__.py
|
||||
tests/
|
||||
unit/
|
||||
requirements.txt
|
||||
.editorconfig
|
||||
.gitignore
|
||||
LICENSE.md
|
||||
pyproject.toml
|
||||
README.md
|
||||
requirements.txt
|
||||
setup.py
|
||||
doc/TODO.rst (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
TODOs
|
||||
#####
|
||||
|
||||
.. todolist::
|
||||
doc/_static/GH_Workflow_DisabledJobsWarnings.png (new binary file, vendored, 11 KiB; content not shown)
doc/_static/css/override.css (new file, 115 lines, vendored)
@@ -0,0 +1,115 @@
|
||||
/* theme overrides */
|
||||
.rst-content h1,
|
||||
.rst-content h2 {
|
||||
margin-top: 24px;
|
||||
margin-bottom: 6px;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.rst-content h3,
|
||||
.rst-content h4,
|
||||
.rst-content h5,
|
||||
.rst-content h6 {
|
||||
margin-top: 12px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p {
|
||||
margin-bottom: 6px
|
||||
}
|
||||
|
||||
/* general overrides */
|
||||
html {
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
footer {
|
||||
font-size: 95%;
|
||||
text-align: center
|
||||
}
|
||||
|
||||
footer p {
|
||||
margin-bottom: 0px /* 12px */;
|
||||
font-size: 95%
|
||||
}
|
||||
|
||||
section > p,
|
||||
.section p,
|
||||
.simple li {
|
||||
text-align: justify
|
||||
}
|
||||
|
||||
.rst-content .topic-title {
|
||||
font-size: larger;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
|
||||
.rst-content p.rubric {
|
||||
text-decoration: underline;
|
||||
font-weight: 700;
|
||||
margin-top: 18px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
/* wyrm overrides */
|
||||
.wy-menu-vertical header,
|
||||
.wy-menu-vertical p.caption {
|
||||
color: #9b9b9b /* #55a5d9 */;
|
||||
padding: 0 0.809em /* 0 1.618em */;
|
||||
margin: 6px 0 0 0 /* 12px 0 0 */;
|
||||
border-top: 1px solid #9b9b9b;
|
||||
}
|
||||
|
||||
.wy-side-nav-search {
|
||||
margin-bottom: 0 /* .809em */;
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc*/
|
||||
}
|
||||
|
||||
.wy-side-nav-search input[type=text] {
|
||||
border-radius: 0px /* 50px */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search .wy-dropdown > a, .wy-side-nav-search > a {
|
||||
/* BTD: */
|
||||
/*color: #fcfcfc;*/
|
||||
margin-bottom: 0.404em /* .809em */;
|
||||
}
|
||||
|
||||
.wy-side-nav-search > div.version {
|
||||
margin: 0 0 6px 0;
|
||||
/* BTD: */
|
||||
/*margin-top: -.4045em;*/
|
||||
}
|
||||
|
||||
.wy-nav .wy-menu-vertical a:hover {
|
||||
background-color: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: 1600px /* 800px */ ;
|
||||
}
|
||||
|
||||
.wy-nav-top {
|
||||
background: #333333 /* #2980b9 */;
|
||||
}
|
||||
|
||||
/* Sphinx Design */
|
||||
.sd-tab-set {
|
||||
margin: 0
|
||||
}
|
||||
|
||||
.sd-tab-set > label {
|
||||
padding-top: .5em;
|
||||
padding-right: 1em;
|
||||
padding-bottom: .5em;
|
||||
padding-left: 1em
|
||||
}
|
||||
|
||||
.sd-container-fluid {
|
||||
padding-left: 0;
|
||||
padding-right: 0;
|
||||
}
|
||||
doc/_static/icon.png (new binary file, vendored, 43 KiB; content not shown)
doc/_static/logo.png (new binary file, vendored, 61 KiB; content not shown)
doc/_static/pyTooling-Actions-SimplePackage.png (new binary file, vendored, 555 KiB; content not shown)
doc/_templates/autoapi/module.rst (new file, 160 lines, vendored)
@@ -0,0 +1,160 @@
|
||||
.. # Template modified by Patrick Lehmann
|
||||
* Removed automodule at the top, because private members are activated for autodoc (avoids duplicated documentation).
* Made sections like 'submodules' bold text, but not headlines, to reduce the number of ToC levels.
|
||||
|
||||
{{ '=' * node.name|length }}
|
||||
{{ node.name }}
|
||||
{{ '=' * node.name|length }}
|
||||
|
||||
.. automodule:: {{ node.name }}
|
||||
|
||||
{##}
|
||||
{%- block modules -%}
|
||||
{%- if subnodes %}
|
||||
|
||||
**Submodules**
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
{% for item in subnodes %}
|
||||
{{ item.name }}
|
||||
{%- endfor %}
|
||||
{##}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
{##}
|
||||
.. currentmodule:: {{ node.name }}
|
||||
{##}
|
||||
|
||||
{%- if node.variables %}
|
||||
|
||||
**Variables**
|
||||
|
||||
{% for item, obj in node.variables.items() -%}
|
||||
- :py:data:`{{ item }}`
|
||||
{#{ obj|summary }#}
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.functions %}
|
||||
|
||||
**Functions**
|
||||
|
||||
{% for item, obj in node.functions.items() -%}
|
||||
- :py:func:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.exceptions %}
|
||||
|
||||
**Exceptions**
|
||||
|
||||
{% for item, obj in node.exceptions.items() -%}
|
||||
- :py:exc:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- if node.classes %}
|
||||
|
||||
**Classes**
|
||||
|
||||
{% for item, obj in node.classes.items() -%}
|
||||
- :py:class:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%}
|
||||
{%- endif -%}
|
||||
|
||||
{%- block variables -%}
|
||||
{%- if node.variables %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Variables**
|
||||
|
||||
{#% for item, obj in node.variables.items() -%}
|
||||
- :py:data:`{{ item }}`
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item, obj in node.variables.items() %}
|
||||
.. autodata:: {{ item }}
|
||||
:annotation:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
{{ obj|pprint|indent(6) }}
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block functions -%}
|
||||
{%- if node.functions %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Functions**
|
||||
|
||||
{% for item in node.functions %}
|
||||
.. autofunction:: {{ item }}
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block exceptions -%}
|
||||
{%- if node.exceptions %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Exceptions**
|
||||
|
||||
{#% for item, obj in node.exceptions.items() -%}
|
||||
- :py:exc:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item in node.exceptions %}
|
||||
.. autoexception:: {{ item }}
|
||||
|
||||
.. rubric:: Inheritance
|
||||
.. inheritance-diagram:: {{ item }}
|
||||
:parts: 1
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
|
||||
{%- block classes -%}
|
||||
{%- if node.classes %}
|
||||
|
||||
---------------------
|
||||
|
||||
**Classes**
|
||||
|
||||
{#% for item, obj in node.classes.items() -%}
|
||||
- :py:class:`{{ item }}`:
|
||||
{{ obj|summary }}
|
||||
|
||||
{% endfor -%#}
|
||||
|
||||
{% for item in node.classes %}
|
||||
.. autoclass:: {{ item }}
|
||||
:members:
|
||||
:private-members:
|
||||
:special-members:
|
||||
:inherited-members:
|
||||
:exclude-members: __weakref__
|
||||
|
||||
.. rubric:: Inheritance
|
||||
.. inheritance-diagram:: {{ item }}
|
||||
:parts: 1
|
||||
{##}
|
||||
{%- endfor -%}
|
||||
{%- endif -%}
|
||||
{%- endblock -%}
|
||||
doc/_templates/autoapi/package.rst (new file, 14 lines, vendored)
@@ -0,0 +1,14 @@
|
||||
.. # Template created by Patrick Lehmann
|
||||
|
||||
Python Class Reference
|
||||
######################
|
||||
|
||||
Reference of all packages and modules:
|
||||
|
||||
.. automodule:: {{ node.name }}
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
{% for item in subnodes %}
|
||||
{{ item.name }}
|
||||
{%- endfor %}
|
||||
doc/conf.py (new file, 305 lines)
@@ -0,0 +1,305 @@
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
from importlib.util import find_spec
|
||||
from sys import path as sys_path
|
||||
from os.path import abspath
|
||||
from pathlib import Path
|
||||
from json import loads
|
||||
|
||||
from pyTooling.Packaging import extractVersionInformation
|
||||
|
||||
ROOT = Path(__file__).resolve().parent
|
||||
|
||||
sys_path.insert(0, abspath("."))
|
||||
sys_path.insert(0, abspath(".."))
|
||||
sys_path.insert(0, abspath("../pyDummy"))
|
||||
# sys_path.insert(0, abspath("_extensions"))
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Project information and versioning
|
||||
# ==============================================================================
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
githubNamespace = "pyTooling"
|
||||
githubProject = "Actions"
|
||||
project = "pyDummy"
|
||||
|
||||
packageInformationFile = Path(f"../{project}/__init__.py")
|
||||
versionInformation = extractVersionInformation(packageInformationFile)
|
||||
|
||||
author = versionInformation.Author
|
||||
copyright = versionInformation.Copyright
|
||||
version = ".".join(versionInformation.Version.split(".")[:2]) # e.g. 2.3 The short X.Y version.
|
||||
release = versionInformation.Version
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Miscellaneous settings
|
||||
# ==============================================================================
|
||||
# The master toctree document.
|
||||
master_doc = "index"
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = [
|
||||
"_build",
|
||||
"_theme",
|
||||
"Thumbs.db",
|
||||
".DS_Store"
|
||||
]
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = "manni"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Restructured Text settings
|
||||
# ==============================================================================
|
||||
prologPath = Path("prolog.inc")
|
||||
try:
|
||||
with prologPath.open("r", encoding="utf-8") as fileHandle:
|
||||
rst_prolog = fileHandle.read()
|
||||
except Exception as ex:
|
||||
print(f"[ERROR:] While reading '{prologPath}'.")
|
||||
print(ex)
|
||||
rst_prolog = ""
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Options for HTML output
|
||||
# ==============================================================================
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_options = {
|
||||
"logo_only": True,
|
||||
"vcs_pageview_mode": 'blob',
|
||||
"navigation_depth": 5,
|
||||
}
|
||||
html_css_files = [
|
||||
'css/override.css',
|
||||
]
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ["_static"]
|
||||
|
||||
html_logo = str(Path(html_static_path[0]) / "logo.png")
|
||||
html_favicon = str(Path(html_static_path[0]) / "icon.png")
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = f"{githubProject}Doc"
|
||||
|
||||
# If not None, a 'Last updated on:' timestamp is inserted at every page
|
||||
# bottom, using the given strftime format.
|
||||
# The empty string is equivalent to '%b %d, %Y'.
|
||||
html_last_updated_fmt = "%d.%m.%Y"
|
||||
|
||||
# ==============================================================================
|
||||
# Python settings
|
||||
# ==============================================================================
|
||||
modindex_common_prefix = [
|
||||
f"{project}."
|
||||
]
|
||||
|
||||
# ==============================================================================
|
||||
# Options for LaTeX / PDF output
|
||||
# ==============================================================================
|
||||
from textwrap import dedent
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
"papersize": "a4paper",
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
"preamble": dedent(r"""
|
||||
% ================================================================================
|
||||
% User defined additional preamble code
|
||||
% ================================================================================
|
||||
% Add more Unicode characters for pdfLaTeX.
|
||||
% - Alternatively, compile with XeLaTeX or LuaLaTeX.
|
||||
% - https://GitHub.com/sphinx-doc/sphinx/issues/3511
|
||||
%
|
||||
\ifdefined\DeclareUnicodeCharacter
|
||||
\DeclareUnicodeCharacter{2265}{$\geq$}
|
||||
\DeclareUnicodeCharacter{21D2}{$\Rightarrow$}
|
||||
\fi
|
||||
|
||||
|
||||
% ================================================================================
|
||||
"""),
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
( master_doc,
|
||||
f"{githubProject}.tex",
|
||||
f"The {githubProject} Documentation",
|
||||
f"Patrick Lehmann",
|
||||
f"manual"
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Extensions
|
||||
# ==============================================================================
|
||||
extensions = [
|
||||
# Standard Sphinx extensions
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.extlinks",
|
||||
"sphinx.ext.intersphinx",
|
||||
"sphinx.ext.inheritance_diagram",
|
||||
"sphinx.ext.todo",
|
||||
"sphinx.ext.graphviz",
|
||||
"sphinx.ext.mathjax",
|
||||
"sphinx.ext.ifconfig",
|
||||
"sphinx.ext.viewcode",
|
||||
# SphinxContrib extensions
|
||||
"sphinxcontrib.mermaid",
|
||||
# Other extensions
|
||||
"sphinx_design",
|
||||
"sphinx_copybutton",
|
||||
"sphinx_autodoc_typehints",
|
||||
"autoapi.sphinx",
|
||||
"sphinx_reports",
|
||||
# User defined extensions
|
||||
]
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.InterSphinx
|
||||
# ==============================================================================
|
||||
intersphinx_mapping = {
|
||||
"python": ("https://docs.python.org/3", None),
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.AutoDoc
|
||||
# ==============================================================================
|
||||
# see: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
|
||||
#autodoc_default_options = {
|
||||
# "private-members": True,
|
||||
# "special-members": True,
|
||||
# "inherited-members": True,
|
||||
# "exclude-members": "__weakref__"
|
||||
#}
|
||||
#autodoc_class_signature = "separated"
|
||||
autodoc_member_order = "bysource" # alphabetical, groupwise, bysource
|
||||
autodoc_typehints = "both"
|
||||
#autoclass_content = "both"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.ExtLinks
|
||||
# ==============================================================================
|
||||
extlinks = {
|
||||
"gh": (f"https://GitHub.com/%s", "gh:%s"),
|
||||
"ghissue": (f"https://GitHub.com/{githubNamespace}/{githubProject}/issues/%s", "issue #%s"),
|
||||
"ghpull": (f"https://GitHub.com/{githubNamespace}/{githubProject}/pull/%s", "pull request #%s"),
|
||||
"ghsrc": (f"https://GitHub.com/{githubNamespace}/{githubProject}/blob/main/%s", None),
|
||||
"wiki": (f"https://en.wikipedia.org/wiki/%s", None),
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.Graphviz
|
||||
# ==============================================================================
|
||||
graphviz_output_format = "svg"
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# SphinxContrib.Mermaid
|
||||
# ==============================================================================
|
||||
mermaid_params = [
|
||||
'--backgroundColor', 'transparent',
|
||||
]
|
||||
mermaid_verbose = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.Inheritance_Diagram
|
||||
# ==============================================================================
|
||||
inheritance_node_attrs = {
|
||||
# "shape": "ellipse",
|
||||
# "fontsize": 14,
|
||||
# "height": 0.75,
|
||||
"color": "dodgerblue1",
|
||||
"style": "filled"
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx.Ext.ToDo
|
||||
# ==============================================================================
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
todo_link_only = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# sphinx-reports
|
||||
# ==============================================================================
|
||||
# report_unittest_testsuites = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "xml_report": "../report/unit/unittest.xml",
|
||||
# }
|
||||
# }
|
||||
# report_codecov_packages = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "json_report": "../report/coverage/coverage.json",
|
||||
# "fail_below": 80,
|
||||
# "levels": "default"
|
||||
# }
|
||||
# }
|
||||
# report_doccov_packages = {
|
||||
# "src": {
|
||||
# "name": f"{project}",
|
||||
# "directory": f"../{project}",
|
||||
# "fail_below": 80,
|
||||
# "levels": "default"
|
||||
# }
|
||||
# }
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# Sphinx_Design
|
||||
# ==============================================================================
|
||||
# sd_fontawesome_latex = True
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# AutoAPI.Sphinx
|
||||
# ==============================================================================
|
||||
autoapi_modules = {
|
||||
f"{project}": {
|
||||
"template": "package",
|
||||
"output": project,
|
||||
"override": True
|
||||
}
|
||||
}
|
||||
|
||||
for directory in [mod for mod in Path(f"../{project}").iterdir() if mod.is_dir() and mod.name != "__pycache__"]:
|
||||
print(f"Adding module rule for '{project}.{directory.name}'")
|
||||
autoapi_modules[f"{project}.{directory.name}"] = {
|
||||
"template": "module",
|
||||
"output": project,
|
||||
"override": True
|
||||
}
|
||||
doc/coverage/index.rst (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
Code Coverage Report
|
||||
####################
|
||||
|
||||
Code coverage report generated with `pytest <https://github.com/pytest-dev/pytest>`__ and `Coverage.py <https://github.com/nedbat/coveragepy/tree/master>`__.
|
||||
|
||||
.. #report:code-coverage::
|
||||
:packageid: src
|
||||
doc/index.rst (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
.. include:: shields.inc
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Introduction}
|
||||
|
||||
.. only:: html
|
||||
|
||||
| |SHIELD:svg:pyTooling-github| |SHIELD:svg:pyTooling-src-license| |SHIELD:svg:pyTooling-ghp-doc| |SHIELD:svg:pyTooling-doc-license|
|
||||
| |SHIELD:svg:pyTooling-tag| |SHIELD:svg:pyTooling-date|
|
||||
|
||||
.. Disabled shields: |SHIELD:svg:pyTooling-gitter|
|
||||
|
||||
.. only:: latex
|
||||
|
||||
|SHIELD:png:pyTooling-github| |SHIELD:png:pyTooling-src-license| |SHIELD:png:pyTooling-ghp-doc| |SHIELD:png:pyTooling-doc-license|
|
||||
|SHIELD:png:pyTooling-tag| |SHIELD:png:pyTooling-date|
|
||||
|
||||
.. Disabled shields: |SHIELD:svg:pyTooling-gitter|
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
pyTooling Actions Documentation
|
||||
###############################
|
||||
|
||||
**pyTooling Actions** are reusable steps and workflows for GitHub Actions easing the creation and maintenance of
|
||||
workflows for Python projects on GitHub.
|
||||
|
||||
Introduction
|
||||
************
|
||||
|
||||
GitHub Actions workflows, actions and documentation are mostly focused on JavaScript/TypeScript as the scripting
language for writing reusable CI code.
However, since Python is equally popular and capable, JS/TS can be bypassed in favor of Python, with some caveats.
This repository gathers reusable CI tooling for testing, packaging and distributing Python projects and documentation.
|
||||
|
||||
|
||||
GitHub Action Job Templates
|
||||
***************************
|
||||
|
||||
The following list categorizes all pre-defined job templates, which can be instantiated in a pipeline (GitHub Action
|
||||
Workflow):
|
||||
|
||||
.. hlist::
|
||||
:columns: 2
|
||||
|
||||
* **Global Templates**
|
||||
|
||||
* :ref:`JOBTMPL/Parameters`
|
||||
|
||||
* **Unit Tests, Code Coverage, Code Quality, ...**
|
||||
|
||||
* :ref:`JOBTMPL/UnitTesting`
|
||||
* :ref:`JOBTMPL/CodeCoverage`
|
||||
* :ref:`JOBTMPL/StaticTypeChecking`
|
||||
* *code formatting (planned)*
|
||||
* *coding style (planned)*
|
||||
* *code linting (planned)*
|
||||
|
||||
* **Build and Packaging**
|
||||
|
||||
* :ref:`JOBTMPL/Package`
|
||||
|
||||
* **Documentation**
|
||||
|
||||
* :ref:`JOBTMPL/VerifyDocumentation`
|
||||
* :ref:`JOBTMPL/BuildTheDocs`
|
||||
|
||||
* **Releasing, Publishing**
|
||||
|
||||
* :ref:`JOBTMPL/GitHubReleasePage`
|
||||
* :ref:`JOBTMPL/PyPI`
|
||||
* :ref:`JOBTMPL/PublishTestResults`
|
||||
* :ref:`JOBTMPL/PublishToGitHubPages`
|
||||
|
||||
* **Cleanups**
|
||||
|
||||
* :ref:`JOBTMPL/ArtifactCleanup`
|
||||
|
||||
|
||||
Example Pipelines
|
||||
=================
|
||||
|
||||
``ExamplePipeline.yml`` is an example Workflow which uses all of the Reusable Workflows.
Python package/tool developers can copy it into their repositories in order to use all the reusable workflows straightaway.
The minimal required modifications are the following (a sketch is shown after the list):
|
||||
|
||||
- Set the ``name`` input of job ``Parameters``.
|
||||
- Specify the ``commands`` input of job ``StaticTypeCheck``.
|
||||
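
A minimal sketch of those two modifications is shown below. The job and input names follow the examples in this
documentation; the ``Parameters.yml`` file name and the ``pyDummy`` package name are assumptions standing in for the
target project.

.. code-block:: yaml

   Parameters:
     uses: pyTooling/Actions/.github/workflows/Parameters.yml@r0   # assumed template file name
     with:
       name: pyDummy                                               # 1. set the package/tool name

   StaticTypeCheck:
     # ...
     with:
       commands: |                                                 # 2. specify the type-checking commands
         mypy --html-report htmlmypy -p pyDummy
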
|
||||
|
||||
GitHub Actions
|
||||
**************
|
||||
|
||||
* :ref:`ACTION/Releaser`
|
||||
* :ref:`ACTION/WithPostStep`
|
||||
|
||||
References
|
||||
**********
|
||||
|
||||
- `hdl/containers#48 <https://github.com/hdl/containers/issues/48>`__
|
||||
|
||||
|
||||
.. _CONTRIBUTORS:
|
||||
|
||||
Contributors
|
||||
************
|
||||
|
||||
* `Patrick Lehmann <https://GitHub.com/Paebbels>`__
|
||||
* `Unai Martinez-Corral <https://GitHub.com/umarcor>`__ (Maintainer)
|
||||
* `and more... <https://GitHub.com/pyTooling/Actions/graphs/contributors>`__
|
||||
|
||||
|
||||
.. _LICENSE:
|
||||
|
||||
License
|
||||
*******
|
||||
|
||||
.. only:: html
|
||||
|
||||
This Python package (source code) is licensed under `Apache License 2.0 <Code-License.html>`__. |br|
|
||||
The accompanying documentation is licensed under `Creative Commons - Attribution 4.0 (CC-BY 4.0) <Doc-License.html>`__.
|
||||
|
||||
.. only:: latex
|
||||
|
||||
This Python package (source code) is licensed under **Apache License 2.0**. |br|
|
||||
The accompanying documentation is licensed under **Creative Commons - Attribution 4.0 (CC-BY 4.0)**.
|
||||
|
||||
|
||||
.. toctree::
|
||||
:caption: Introduction
|
||||
:hidden:
|
||||
|
||||
Background
|
||||
RepositoryStructure
|
||||
Instantiation
|
||||
Deveopment
|
||||
Dependency
|
||||
Releases
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Main Documentation}
|
||||
|
||||
.. toctree::
|
||||
:caption: Actions
|
||||
:hidden:
|
||||
|
||||
Action/index
|
||||
Action/Releaser
|
||||
Action/With-post-step
|
||||
|
||||
.. toctree::
|
||||
:caption: Job Templates
|
||||
:hidden:
|
||||
|
||||
JobTemplate/index
|
||||
JobTemplate/Parameters
|
||||
JobTemplate/CoverageCollection
|
||||
JobTemplate/UnitTesting
|
||||
JobTemplate/StaticTypeCheck
|
||||
JobTemplate/PublishTestResults
|
||||
JobTemplate/Package
|
||||
JobTemplate/PublishOnPyPI
|
||||
JobTemplate/VerifyDocs
|
||||
JobTemplate/BuildTheDocs
|
||||
JobTemplate/PublishToGitHubPages
|
||||
JobTemplate/Release
|
||||
JobTemplate/ArtifactCleanUp
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{pyDummy Example}
|
||||
|
||||
.. toctree::
|
||||
:caption: pyDummy Example
|
||||
:hidden:
|
||||
|
||||
pyDummy/pyDummy
|
||||
unittests/index
|
||||
coverage/index
|
||||
Doc. Coverage Report <DocCoverage>
|
||||
Static Type Check Report ➚ <typing/index>
|
||||
|
||||
.. raw:: latex
|
||||
|
||||
\part{Appendix}
|
||||
|
||||
.. toctree::
|
||||
:caption: Appendix
|
||||
:hidden:
|
||||
|
||||
License
|
||||
Doc-License
|
||||
TODO
|
||||
doc/make.bat (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=.
|
||||
set BUILDDIR=_build
|
||||
set SPHINXOPTS=-v
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
||||
doc/prolog.inc (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
.. # Load pre-defined aliases and graphical characters like © from docutils
|
||||
# <file> is used to denote the special path
|
||||
# <Python>\Lib\site-packages\docutils\parsers\rst\include
|
||||
.. include:: <isonum.txt>
|
||||
.. include:: <mmlalias.txt>
|
||||
|
||||
.. # define a hard line break for HTML
|
||||
.. |br| raw:: html
|
||||
|
||||
<br />
|
||||
|
||||
.. # define horizontal line for HTML
|
||||
.. |hr| raw:: html
|
||||
|
||||
<hr />
|
||||
|
||||
.. # define additional CSS based styles and ReST roles for HTML
|
||||
.. raw:: html
|
||||
|
||||
<style type="text/css">
|
||||
span.bolditalic {font-weight: bold; font-style: italic; }
|
||||
span.underline {text-decoration: underline; }
|
||||
span.strike {text-decoration: line-through; }
|
||||
span.xlarge {font-size: x-large; }
|
||||
span.colorred {color: #CC0000; }
|
||||
span.colorgreen {color: #009933; }
|
||||
span.colorblue {color: #0066FF; }
|
||||
span.colorpurple {color: #9900CC; }
|
||||
</style>
|
||||
|
||||
.. role:: bolditalic
|
||||
:class: bolditalic
|
||||
|
||||
.. role:: underline
|
||||
:class: underline
|
||||
|
||||
.. role:: strike
|
||||
:class: strike
|
||||
|
||||
.. role:: xlarge
|
||||
:class: xlarge
|
||||
|
||||
.. role:: red
|
||||
:class: colorred
|
||||
.. role:: green
|
||||
:class: colorgreen
|
||||
.. role:: blue
|
||||
:class: colorblue
|
||||
.. role:: purple
|
||||
:class: colorpurple
|
||||
|
||||
.. role:: deletion
|
||||
:class: colorred strike
|
||||
.. role:: addition
|
||||
:class: colorgreen
|
||||
|
||||
.. role:: pycode(code)
|
||||
:language: python
|
||||
:class: highlight
|
||||
doc/pyDummy/.gitempty (new empty file)
doc/requirements.txt (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
-r ../requirements.txt
|
||||
|
||||
pyTooling ~= 8.0
|
||||
|
||||
# Enforce latest version on ReadTheDocs
|
||||
sphinx ~= 8.1
|
||||
docutils ~= 0.21
|
||||
docutils_stubs ~= 0.0.22
|
||||
|
||||
# ReadTheDocs Theme
|
||||
sphinx_rtd_theme ~= 3.0
|
||||
|
||||
# Sphinx Extensions
|
||||
sphinxcontrib-mermaid ~= 1.0
|
||||
autoapi >= 2.0.1
|
||||
sphinx_design ~= 0.6.1
|
||||
sphinx-copybutton >= 0.5.2
|
||||
sphinx_autodoc_typehints ~= 3.0
|
||||
sphinx_reports ~= 0.7
|
||||
doc/shields.inc (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
.. # Use http://b64.io/ to encode any image to base64. Then replace `/` with
|
||||
# `%2F` and `+` with `%2B` (or use http://meyerweb.com/eric/tools/dencoder/).
|
||||
# Beware that `?logo=data:image/png;base64,` must also be converted to
|
||||
# percent encoding so that the URL is properly parsed.
|
||||
|
||||
.. # Sourcecode link to GitHub
|
||||
.. |SHIELD:svg:pyTooling-github| image:: https://img.shields.io/badge/pyTooling-Actions-63bf7f?longCache=true&style=flat-square&longCache=true&logo=GitHub
|
||||
:alt: Sourcecode on GitHub
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions
|
||||
.. |SHIELD:png:pyTooling-github| image:: https://raster.shields.io/badge/pyTooling-Actions-63bf7f?longCache=true&style=flat-square&longCache=true&logo=GitHub
|
||||
:alt: Sourcecode on GitHub
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions
|
||||
|
||||
.. # Sourcecode license
|
||||
.. |SHIELD:svg:pyTooling-src-license| image:: https://img.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code
|
||||
:alt: Code license
|
||||
:height: 22
|
||||
:target: Code-License.html
|
||||
.. |SHIELD:png:pyTooling-src-license| image:: https://raster.shields.io/pypi/l/pyTooling?longCache=true&style=flat-square&logo=Apache&label=code
|
||||
:alt: Code license
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/blob/main/LICENSE.md
|
||||
|
||||
.. # GitHub tag
|
||||
.. |SHIELD:svg:pyTooling-tag| image:: https://img.shields.io/github/v/tag/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub&include_prereleases
|
||||
:alt: GitHub tag (latest SemVer incl. pre-release)
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/tags
|
||||
.. |SHIELD:png:pyTooling-tag| image:: https://raster.shields.io/github/v/tag/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub&include_prereleases
|
||||
:alt: GitHub tag (latest SemVer incl. pre-release)
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/tags
|
||||
|
||||
.. # GitHub release date
|
||||
.. |SHIELD:svg:pyTooling-date| image:: https://img.shields.io/github/release-date/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub
|
||||
:alt: GitHub release date
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/releases
|
||||
.. |SHIELD:png:pyTooling-date| image:: https://raster.shields.io/github/release-date/pyTooling/Actions?longCache=true&style=flat-square&logo=GitHub
|
||||
:alt: GitHub release date
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/releases
|
||||
|
||||
.. # Documentation license
|
||||
.. |SHIELD:svg:pyTooling-doc-license| image:: https://img.shields.io/badge/doc-CC--BY%204.0-green?longCache=true&style=flat-square&logo=CreativeCommons&logoColor=fff
|
||||
:alt: Documentation License
|
||||
:height: 22
|
||||
:target: License.html
|
||||
.. |SHIELD:png:pyTooling-doc-license| image:: https://raster.shields.io/badge/doc-CC--BY%204.0-green?longCache=true&style=flat-square&logo=CreativeCommons&logoColor=fff
|
||||
:alt: Documentation License
|
||||
:height: 22
|
||||
:target: https://GitHub.com/pyTooling/Actions/blob/main/doc/License.rst
|
||||
|
||||
.. # GHPages - read now
|
||||
.. |SHIELD:svg:pyTooling-ghp-doc| image:: https://img.shields.io/website?longCache=true&style=flat-square&label=pyTooling.github.io%2FpyTooling&logo=GitHub&logoColor=fff&up_color=blueviolet&up_message=Read%20now%20%E2%9E%9A&url=https%3A%2F%2FpyTooling.github.io%2FpyTooling%2Findex.html
|
||||
:alt: Documentation - Read Now!
|
||||
:height: 22
|
||||
:target: https://pyTooling.github.io/pyTooling/
|
||||
.. |SHIELD:png:pyTooling-ghp-doc| image:: https://raster.shields.io/website?longCache=true&style=flat-square&label=pyTooling.github.io%2FpyTooling&logo=GitHub&logoColor=fff&up_color=blueviolet&up_message=Read%20now%20%E2%9E%9A&url=https%3A%2F%2FpyTooling.github.io%2FpyTooling%2Findex.html
|
||||
:alt: Documentation - Read Now!
|
||||
:height: 22
|
||||
:target: https://pyTooling.github.io/pyTooling/
|
||||
|
||||
.. # Gitter
|
||||
.. |SHIELD:svg:pyTooling-gitter| image:: https://img.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
|
||||
:alt: Chat on Gitter
|
||||
:height: 22
|
||||
:target: https://gitter.im/hdl/community
|
||||
.. |SHIELD:png:pyTooling-gitter| image:: https://raster.shields.io/badge/chat-on%20gitter-4db797.?longCache=true&style=flat-square&logo=gitter&logoColor=e8ecef
|
||||
:alt: Chat on Gitter
|
||||
:height: 22
|
||||
:target: https://gitter.im/hdl/community
|
||||
doc/typing/index.rst (new file)
@@ -0,0 +1,8 @@
Static Type Checking Report
###########################

*Placeholder for the Static Type Checking report generated with* ``mypy``.

.. #raw:: html

   <iframe src="../../../../report/typing/index.html" width="100%" height="500px" style="border:none;"/>
doc/unittests/index.rst (new file)
@@ -0,0 +1,7 @@
Unittest Summary Report
#######################

Unittest report generated with `pytest <https://github.com/pytest-dev/pytest>`__.

.. #report:unittest-summary::
   :reportid: src
pyDummy/__init__.py (new file)
@@ -0,0 +1,101 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""
|
||||
A module for a set of dummy classes.
|
||||
"""
|
||||
|
||||
__author__ = "Patrick Lehmann"
|
||||
__email__ = "Paebbels@gmail.com"
|
||||
__copyright__ = "2017-2024, Patrick Lehmann"
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__version__ = "0.4.4"
|
||||
__keywords__ = ["GitHub Actions"]
|
||||
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"
|
||||
|
||||
from pyTooling.Decorators import export, readonly
|
||||
from pyTooling.Platform import Platform
|
||||
|
||||
|
||||
@export
|
||||
class Base:
|
||||
"""
|
||||
A base-class for dummy applications.
|
||||
"""
|
||||
|
||||
_value: int #: An internal value.
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the base-class.
|
||||
"""
|
||||
self._value = 0
|
||||
|
||||
@readonly
|
||||
def Value(self) -> int:
|
||||
"""
|
||||
Read-only property to return the internal value.
|
||||
|
||||
:return: Internal value.
|
||||
"""
|
||||
return self._value
|
||||
|
||||
|
||||
@export
|
||||
class Application(Base):
|
||||
"""
|
||||
A dummy application for demonstration purposes.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the dummy application.
|
||||
"""
|
||||
super().__init__()
|
||||
|
||||
platform = Platform()
|
||||
if platform.IsNativeLinux:
|
||||
self._value += 1
|
||||
elif platform.IsNativeMacOS:
|
||||
self._value += 2
|
||||
elif platform.IsNativeWindows:
|
||||
self._value += 3
|
||||
elif platform.IsMSYSOnWindows:
|
||||
self._value += 11
|
||||
elif platform.IsMinGW32OnWindows:
|
||||
self._value += 12
|
||||
elif platform.IsMinGW64OnWindows:
|
||||
self._value += 13
|
||||
elif platform.IsUCRT64OnWindows:
|
||||
self._value += 14
|
||||
elif platform.IsClang32OnWindows:
|
||||
self._value += 15
|
||||
elif platform.IsClang64OnWindows:
|
||||
self._value += 16
|
||||
pyDummy/py.typed (new empty file)
pyExamples/Extensions/__init__.py (new file)
@@ -0,0 +1,101 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""
|
||||
A module for a set of dummy classes.
|
||||
"""
|
||||
|
||||
__author__ = "Patrick Lehmann"
|
||||
__email__ = "Paebbels@gmail.com"
|
||||
__copyright__ = "2017-2024, Patrick Lehmann"
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__version__ = "0.14.8"
|
||||
__keywords__ = ["GitHub Actions"]
|
||||
__issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues"
|
||||
|
||||
from pyTooling.Decorators import export, readonly
|
||||
from pyTooling.Platform import Platform
|
||||
|
||||
|
||||
@export
|
||||
class Base:
|
||||
"""
|
||||
A base-class for dummy applications.
|
||||
"""
|
||||
|
||||
_value: int #: An internal value.
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the base-class.
|
||||
"""
|
||||
self._value = 0
|
||||
|
||||
@readonly
|
||||
def Value(self) -> int:
|
||||
"""
|
||||
Read-only property to return the internal value.
|
||||
|
||||
:return: Internal value.
|
||||
"""
|
||||
return self._value
|
||||
|
||||
|
||||
@export
|
||||
class Application(Base):
|
||||
"""
|
||||
A dummy application for demonstration purposes.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""
|
||||
Initializes the dummy application.
|
||||
"""
|
||||
super().__init__()
|
||||
|
||||
platform = Platform()
|
||||
if platform.IsNativeLinux:
|
||||
self._value += 1
|
||||
elif platform.IsNativeMacOS:
|
||||
self._value += 2
|
||||
elif platform.IsNativeWindows:
|
||||
self._value += 3
|
||||
elif platform.IsMSYSOnWindows:
|
||||
self._value += 11
|
||||
elif platform.IsMinGW32OnWindows:
|
||||
self._value += 12
|
||||
elif platform.IsMinGW64OnWindows:
|
||||
self._value += 13
|
||||
elif platform.IsUCRT64OnWindows:
|
||||
self._value += 14
|
||||
elif platform.IsClang32OnWindows:
|
||||
self._value += 15
|
||||
elif platform.IsClang64OnWindows:
|
||||
self._value += 16
|
||||
pyExamples/Extensions/py.typed (new empty file)
pyproject.toml (new file)
@@ -0,0 +1,77 @@
|
||||
[build-system]
|
||||
requires = [
|
||||
"setuptools ~= 75.5",
|
||||
"wheel ~= 0.45",
|
||||
"pyTooling ~= 8.0"
|
||||
]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[tool.black]
|
||||
line-length = 120
|
||||
|
||||
[tool.mypy]
|
||||
files = ["pyDummy"]
|
||||
python_version = "3.12"
|
||||
#ignore_missing_imports = true
|
||||
strict = true
|
||||
pretty = true
|
||||
show_error_context = true
|
||||
show_error_codes = true
|
||||
namespace_packages = true
|
||||
html_report = "report/typing"
|
||||
|
||||
[tool.pytest]
|
||||
junit_xml = "report/unit/TestReportSummary.xml"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "--tb=native"
|
||||
# Don't set 'python_classes = *'; otherwise, pytest doesn't search for classes
# derived from unittest.TestCase
|
||||
python_files = "*"
|
||||
python_functions = "test_*"
|
||||
filterwarnings = [
|
||||
"error::DeprecationWarning",
|
||||
"error::PendingDeprecationWarning"
|
||||
]
|
||||
junit_logging = "all"
|
||||
|
||||
[tool.interrogate]
|
||||
color = true
|
||||
verbose = 1 # possible values: 0 (minimal output), 1 (-v), 2 (-vv)
|
||||
fail-under = 59
|
||||
generate-badge = "."
|
||||
badge-format = "png"
|
||||
ignore-setters = true
|
||||
|
||||
[tool.coverage.run]
|
||||
branch = true
|
||||
relative_files = true
|
||||
omit = [
|
||||
"*site-packages*",
|
||||
"setup.py",
|
||||
"tests/benchmark/*",
|
||||
"tests/performance/*",
|
||||
"tests/platform/*",
|
||||
"tests/unit/*"
|
||||
]
|
||||
|
||||
[tool.coverage.report]
|
||||
skip_covered = false
|
||||
skip_empty = true
|
||||
exclude_lines = [
|
||||
"pragma: no cover",
|
||||
"raise NotImplementedError"
|
||||
]
|
||||
omit = [
|
||||
"tests/*"
|
||||
]
|
||||
|
||||
[tool.coverage.xml]
|
||||
output = "report/coverage/coverage.xml"
|
||||
|
||||
[tool.coverage.json]
|
||||
output = "report/coverage/coverage.json"
|
||||
|
||||
[tool.coverage.html]
|
||||
directory = "report/coverage/html"
|
||||
title="Code Coverage of pyDummy"
|
||||
releaser/DEVELOPMENT.md (new file)
@@ -0,0 +1,8 @@
# Releaser Development

- [pyTooling/pyAttributes](https://github.com/pyTooling/pyAttributes) or
  [willmcgugan/rich](https://github.com/willmcgugan/rich) might be used to enhance the UX.

- It might be desirable to have pyTooling.Version.SemVersion handle the regular expression from
  [semver.org](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string), and to use
  proper Python classes in **Releaser** (see the sketch below).
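A minimal sketch of that idea, using only the Python standard library; the class name `SemVersion`, its fields and the `Parse` method are illustrative assumptions, not the actual pyTooling API:

```python
# Hypothetical sketch: parse a tag with the official semver.org regular expression
# (the same pattern used in releaser.py) and wrap it in a small value class.
import re
from dataclasses import dataclass
from typing import Optional

SEMVER_PATTERN = re.compile(
    r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
    r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
    r"(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
)

@dataclass(frozen=True)
class SemVersion:
    major: int
    minor: int
    patch: int
    prerelease: Optional[str] = None
    buildmetadata: Optional[str] = None

    @classmethod
    def Parse(cls, tag: str) -> "SemVersion":
        match = SEMVER_PATTERN.match(tag[1:] if tag.startswith("v") else tag)
        if match is None:
            raise ValueError(f"'{tag}' is not a SemVer-compliant tag.")
        groups = match.groupdict()
        return cls(int(groups["major"]), int(groups["minor"]), int(groups["patch"]),
                   groups["prerelease"], groups["buildmetadata"])

print(SemVersion.Parse("v1.2.3-rc.1"))  # a non-empty prerelease marks a snapshot in Releaser
```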
releaser/Dockerfile (new file)
@@ -0,0 +1,12 @@
FROM python:3.12-slim-bookworm
COPY releaser.py /releaser.py
RUN pip install PyGithub --progress-bar off \
 && apt update -qq \
 && apt install -y curl \
 && curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | \
    dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \
 && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | \
    tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
 && apt update -qq \
 && apt install -y gh
CMD ["/releaser.py"]
releaser/README.md (new file)
@@ -0,0 +1,181 @@
|
||||
# Releaser
|
||||
|
||||
**Releaser** is a Docker GitHub Action written in Python.
|
||||
|
||||
**Releaser** allows keeping a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
Combined with a workflow that is executed periodically, **Releaser** allows providing a fixed release name for users
who want to use daily/nightly artifacts of a project.

Furthermore, when any [SemVer](https://semver.org)-compliant tagged commit is pushed, **Releaser** can create a release
and upload assets.

## Context
|
||||
|
||||
GitHub provides official clients for the GitHub API through [github.com/octokit](https://github.com/octokit):
|
||||
|
||||
- [octokit.js](https://github.com/octokit/octokit.js) ([octokit.github.io/rest.js](https://octokit.github.io/rest.js))
|
||||
- [octokit.rb](https://github.com/octokit/octokit.rb) ([octokit.github.io/octokit.rb](http://octokit.github.io/octokit.rb))
|
||||
- [octokit.net](https://github.com/octokit/octokit.net) ([octokitnet.rtfd.io](https://octokitnet.rtfd.io))
|
||||
|
||||
When GitHub Actions was released in 2019, two Actions were made available through
|
||||
[github.com/actions](https://github.com/actions) for dealing with GitHub Releases:
|
||||
|
||||
- [actions/create-release](https://github.com/actions/create-release)
|
||||
- [actions/upload-release-asset](https://github.com/actions/upload-release-asset)
|
||||
|
||||
However, those Actions were contributed by an employee in their spare time and were not officially supported by GitHub.
Therefore, they were already unmaintained before GitHub Actions left its private beta
(see [actions/upload-release-asset#58](https://github.com/actions/upload-release-asset/issues/58))
and, a year later, they were archived.
|
||||
Those Actions are based on [actions/toolkit](https://github.com/actions/toolkit)'s hydrated version of octokit.js.
|
||||
|
||||
From a practical point of view, [actions/github-script](https://github.com/actions/github-script) is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
|
||||
Still, it requires writing plain JavaScript.
|
||||
|
||||
Alternatively, there are non-official GitHub API libraries available in other languages (see [docs.github.com: rest/overview/libraries](https://docs.github.com/en/rest/overview/libraries)).
|
||||
**Releaser** is based on [PyGithub/PyGithub](https://github.com/PyGithub/PyGithub), a Python client for the GitHub API.
|
||||
|
||||
**Releaser** was originally created in [eine/tip](https://github.com/eine/tip), as an enhanced alternative to using
|
||||
`actions/create-release` and `actions/upload-release-asset`, in order to cover certain use cases that were being
|
||||
migrated from Travis CI to GitHub Actions.
|
||||
The main limitation of GitHub's Actions was/is their verbosity and the inability to dynamically define the list of
assets to be uploaded.
|
||||
|
||||
On the other hand, GitHub Actions artifacts require a login in order to download them.
Conversely, assets of GitHub Releases can be downloaded without logging in.
|
||||
Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
|
||||
as assets.
|
||||
In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
|
||||
Thus, the name of the Action.
|
||||
|
||||
GitHub provides an official CLI tool, written in golang: [cli/cli](https://github.com/cli/cli).
|
||||
When the Python version of **Releaser** was written, `cli` was evaluated as an alternative to *PyGitHub*.
|
||||
`gh release` was (and still is) not flexible enough to update the reference of a release without deleting and
recreating it (see [cli.github.com: manual/gh_release_create](https://cli.github.com/manual/gh_release_create)).
|
||||
Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
|
||||
(see [eine/tip#111](https://github.com/eine/tip/issues/111)).
|
||||
However, [cli.github.com: manual/gh_release_upload](https://cli.github.com/manual/gh_release_upload) handles uploading
|
||||
artifacts as assets faster and with better stability for larger files than *PyGitHub*
|
||||
(see [msys2/msys2-installer#36](https://github.com/msys2/msys2-installer/pull/36)).
|
||||
Furthermore, the GitHub CLI is installed in GitHub Actions' default virtual environments.
Although `gh` does not support login through SSH (see [cli/cli#3715](https://github.com/cli/cli/issues/3715)), a token
is available on GitHub Actions via `${{ github.token }}`.
Therefore, **Releaser** uses `gh release upload` internally (see the sketch below).
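For illustration, the internal call boils down to something like the following sketch (condensed from `releaser.py`; the repository name, tag and file list are placeholders):

```python
# Condensed from releaser.py: upload assets with the GitHub CLI, overwriting same-named ones.
from os import environ
from subprocess import check_call

env = environ.copy()
env["GITHUB_TOKEN"] = "<token>"                  # Releaser forwards the 'token' input here

files = ["artifact.txt", "README.md"]            # resolved from the 'files' glob patterns
cmd = ["gh", "release", "upload", "--repo", "owner/repo", "--clobber", "tip"] + files
check_call(cmd, env=env)                         # '--clobber' replaces assets with the same name
```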
|
||||
|
||||
## Usage
|
||||
|
||||
The following block shows a minimal YAML workflow file:
|
||||
|
||||
```yml
|
||||
name: 'workflow'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * 5'
|
||||
|
||||
jobs:
|
||||
mwe:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
|
||||
# Clone repository
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# Build your application, tool, artifacts, etc.
|
||||
- name: Build
|
||||
run: |
|
||||
echo "Build some tool and generate some artifacts" > artifact.txt
|
||||
|
||||
# Update tag and pre-release
|
||||
# - Update (force-push) tag to the commit that is used in the workflow.
|
||||
# - Upload artifacts defined by the user.
|
||||
- uses: pyTooling/Actions/releaser@r0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
artifact.txt
|
||||
README.md
|
||||
```
|
||||
|
||||
### Composite Action
|
||||
|
||||
The default implementation of **Releaser** is a Container Action.
|
||||
Therefore, a pre-built container image is pulled before starting the job.
|
||||
Alternatively, a Composite Action version is available: `uses: pyTooling/Actions/releaser/composite@main`.
|
||||
The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
|
||||
Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
users to tweak the version of Python by using [actions/setup-python](https://github.com/actions/setup-python) beforehand.
|
||||
|
||||
## Options
|
||||
|
||||
All options can optionally be provided as environment variables: `INPUT_TOKEN`, `INPUT_FILES`, `INPUT_TAG`, `INPUT_RM`
and/or `INPUT_SNAPSHOTS` (see the local invocation sketch below).
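Because the options are read from environment variables, `releaser.py` can also be exercised directly; a minimal local invocation sketch, where every value is a placeholder and the `GITHUB_*` variables are normally supplied by the runner (note that this performs real API calls):

```python
# Hypothetical local invocation: feed Releaser its inputs through INPUT_* environment variables.
import os
import subprocess

env = dict(
    os.environ,
    INPUT_TOKEN="<personal access token>",
    INPUT_FILES="artifact.txt\nREADME.md",       # multi-line list of glob patterns
    INPUT_TAG="tip",
    INPUT_RM="false",
    INPUT_SNAPSHOTS="true",
    GITHUB_REPOSITORY="owner/repo",
    GITHUB_REF="refs/heads/main",
    GITHUB_SHA="<commit sha>",
)
subprocess.check_call(["python3", "releaser/releaser.py"], env=env)
```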
|
||||
|
||||
### token (required)
|
||||
|
||||
Token to make authenticated API calls; can be passed in using `{{ secrets.GITHUB_TOKEN }}`.
|
||||
|
||||
### files (required)
|
||||
|
||||
Either a single filename/pattern or a multi-line list of glob patterns can be provided. All matched artifacts are
uploaded regardless of their directory hierarchy (see the resolution sketch below).

For creating/updating a release without uploading assets, set `files: none`.
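How the patterns are resolved is sketched below, condensed from `GetListOfArtifacts()` in `releaser.py`; the two patterns are only examples:

```python
# Condensed from GetListOfArtifacts() in releaser.py: expand each glob pattern recursively,
# skip directories and empty files, and upload the sorted remainder as release assets.
from glob import glob
from pathlib import Path

patterns = ["artifact.txt", "dist/**/*.whl"]     # example values for the 'files' option
resolved = []
for pattern in patterns:
    for name in glob(pattern, recursive=True):
        path = Path(name)
        if path.is_dir() or path.stat().st_size == 0:
            continue                             # directories and empty files are skipped
        resolved.append(name)
print(sorted(resolved))
```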
|
||||
|
||||
### tag
|
||||
|
||||
The default tag name for the tip/nightly pre-release is `tip`, but it can be optionally overridden through option `tag`.
|
||||
|
||||
### rm
|
||||
|
||||
Set option `rm` to `true` for systematically removing previous artifacts (e.g. old versions).
|
||||
Otherwise (by default), all previous artifacts are preserved or overwritten (when a new asset has the same name).
|
||||
|
||||
Note:
|
||||
If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
|
||||
users until the workflow is successfully run.
|
||||
For instance, Action [setup-ghdl-ci](https://github.com/ghdl/setup-ghdl-ci) uses assets from [ghdl/ghdl: releases/tag/nightly](https://github.com/ghdl/ghdl/releases/tag/nightly).
|
||||
Hence, it is recommended to remove only the conflicting assets, in order to maximise availability.
|
||||
|
||||
### snapshots
|
||||
|
||||
Whether to create releases from any tag or to treat some as snapshots.
|
||||
By default, all the tags with non-empty `prerelease` field (see [semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string?](https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string))
|
||||
are considered snapshots: no release is created and no assets are uploaded for them.
|
||||
|
||||
## Advanced/complex use cases
|
||||
|
||||
**Releaser** is essentially a very thin wrapper to use the GitHub Actions context data along with the classes
|
||||
and methods of PyGithub.
|
||||
|
||||
Similarly to [actions/github-script](https://github.com/actions/github-script), users with advanced/complex requirements
|
||||
might find it desirable to write their own Python script, instead of using **Releaser**.
|
||||
In fact, since `shell: python` is supported in GitHub Actions, using Python does *not* require any Action.
|
||||
For prototyping purposes, the following job might be useful:
|
||||
|
||||
```yml
|
||||
Release:
|
||||
name: '📦 Release'
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- ...
|
||||
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
|
||||
steps:
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
|
||||
- shell: bash
|
||||
run: pip install PyGithub --progress-bar off
|
||||
|
||||
- name: Set list of files for uploading
|
||||
id: files
|
||||
shell: python
|
||||
run: |
|
||||
from github import Github
|
||||
print("· Get GitHub API handler (authenticate)")
|
||||
gh = Github('${{ github.token }}')
|
||||
print("· Get Repository handler")
|
||||
gh_repo = gh.get_repo('${{ github.repository }}')
|
||||
```
|
||||
|
||||
Find a non-trivial use case at [msys2/msys2-autobuild](https://github.com/msys2/msys2-autobuild).
|
||||
releaser/action.yml (new file)
@@ -0,0 +1,45 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: 'Releaser'
|
||||
description: 'Publish releases, upload assets and update tip/nightly tags'
|
||||
inputs:
|
||||
token:
|
||||
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
|
||||
required: true
|
||||
files:
|
||||
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
|
||||
required: true
|
||||
tag:
|
||||
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
|
||||
required: false
|
||||
default: tip
|
||||
rm:
|
||||
description: 'Whether to delete all the previous artifacts, or only replace the ones with the same name'
|
||||
required: false
|
||||
default: false
|
||||
snapshots:
|
||||
description: 'Whether to create releases from any tag or to treat some as snapshots'
|
||||
required: false
|
||||
default: true
|
||||
runs:
|
||||
using: 'docker'
|
||||
image: 'docker://ghcr.io/pytooling/releaser'
|
||||
releaser/composite/action.yml (new file)
@@ -0,0 +1,59 @@
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
name: 'Releaser'
|
||||
description: 'Publish releases, upload assets and update tip/nightly tags'
|
||||
inputs:
|
||||
token:
|
||||
description: 'Token to make authenticated API calls; can be passed in using {{ secrets.GITHUB_TOKEN }}'
|
||||
required: true
|
||||
files:
|
||||
description: 'Multi-line list of glob patterns describing the artifacts to be uploaded'
|
||||
required: true
|
||||
tag:
|
||||
description: 'Name of the tag that corresponds to the tip/nightly pre-release'
|
||||
required: false
|
||||
default: tip
|
||||
rm:
|
||||
description: 'Whether to delete all the previous artifacts, or only replace the ones with the same name'
|
||||
required: false
|
||||
default: false
|
||||
snapshots:
|
||||
description: 'Whether to create releases from any tag or to treat some as snapshots'
|
||||
required: false
|
||||
default: true
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
|
||||
- shell: bash
|
||||
run: |
|
||||
[ "$(source /etc/os-release && echo $VERSION_ID)" == "24.04" ] && UBUNTU_2404_ARGS='--break-system-packages' || unset UBUNTU_2404_ARGS
|
||||
pip install --disable-pip-version-check --progress-bar off $UBUNTU_2404_ARGS PyGithub
|
||||
|
||||
- shell: bash
|
||||
run: '''${{ github.action_path }}/../releaser.py'''
|
||||
env:
|
||||
INPUT_TOKEN: ${{ inputs.token }}
|
||||
INPUT_FILES: ${{ inputs.files }}
|
||||
INPUT_TAG: ${{ inputs.tag }}
|
||||
INPUT_RM: ${{ inputs.rm }}
|
||||
INPUT_SNAPSHOTS: ${{ inputs.snapshots }}
|
||||
releaser/pyproject.toml (new file)
@@ -0,0 +1,2 @@
[tool.black]
line-length = 120
releaser/releaser.py (new executable file)
@@ -0,0 +1,193 @@
|
||||
#!/usr/bin/env python3
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# Unai Martinez-Corral #
|
||||
# #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2020-2024 The pyTooling Authors #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
import re
|
||||
from sys import argv as sys_argv, stdout, exit as sys_exit
|
||||
from os import environ, getenv
|
||||
from glob import glob
|
||||
from pathlib import Path
|
||||
from github import Github, GithubException
|
||||
from subprocess import check_call
|
||||
|
||||
|
||||
paramTag = getenv("INPUT_TAG", "tip")
|
||||
paramFiles = getenv("INPUT_FILES", "").split()  # default to "" so a missing INPUT_FILES does not crash on .split()
|
||||
paramRM = getenv("INPUT_RM", "false") == "true"
|
||||
paramSnapshots = getenv("INPUT_SNAPSHOTS", "true").lower() == "true"
|
||||
paramToken = (
|
||||
environ["GITHUB_TOKEN"]
|
||||
if "GITHUB_TOKEN" in environ
|
||||
else environ["INPUT_TOKEN"]
|
||||
if "INPUT_TOKEN" in environ
|
||||
else None
|
||||
)
|
||||
paramRepo = getenv("GITHUB_REPOSITORY", None)
|
||||
paramRef = getenv("GITHUB_REF", None)
|
||||
paramSHA = getenv("GITHUB_SHA", None)
|
||||
|
||||
|
||||
def GetListOfArtifacts(argv, files):
|
||||
print("· Get list of artifacts to be uploaded")
|
||||
args = files if files is not None else []
|
||||
if len(argv) > 1:
|
||||
args += argv[1:]
|
||||
if len(args) == 1 and args[0].lower() == "none":
|
||||
print("! Skipping 'files' because it's set to 'none'.")
|
||||
return []
|
||||
elif len(args) == 0:
|
||||
stdout.flush()
|
||||
raise (Exception("Glob patterns need to be provided as positional arguments or through envvar 'INPUT_FILES'!"))
|
||||
else:
|
||||
flist = []
|
||||
for item in args:
|
||||
print(f" glob({item!s}):")
|
||||
for fname in [fname for fname in glob(item, recursive=True) if not Path(fname).is_dir()]:
|
||||
if Path(fname).stat().st_size == 0:
|
||||
print(f" - ! Skipping empty file {fname!s}.")
|
||||
continue
|
||||
print(f" - {fname!s}")
|
||||
flist.append(fname)
|
||||
if len(flist) < 1:
|
||||
stdout.flush()
|
||||
raise (Exception("Empty list of files to upload/update!"))
|
||||
return sorted(flist)
|
||||
|
||||
|
||||
def GetGitHubAPIHandler(token):
|
||||
print("· Get GitHub API handler (authenticate)")
|
||||
if token is not None:
|
||||
return Github(token)
|
||||
raise (Exception("Need credentials to authenticate! Please, provide 'GITHUB_TOKEN' or 'INPUT_TOKEN'"))
|
||||
|
||||
|
||||
def CheckRefSemVer(gh_ref, tag, snapshots):
|
||||
print("· Check SemVer compliance of the reference/tag")
|
||||
env_tag = None
|
||||
if gh_ref[0:10] == "refs/tags/":
|
||||
env_tag = gh_ref[10:]
|
||||
if env_tag != tag:
|
||||
rexp = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
|
||||
semver = re.search(rexp, env_tag)
|
||||
if semver is None and env_tag[0] == "v":
|
||||
semver = re.search(rexp, env_tag[1:])
|
||||
tag = env_tag
|
||||
if semver is None:
|
||||
print(f"! Could not get semver from {gh_ref!s}")
|
||||
print(f"! Treat tag '{tag!s}' as a release")
|
||||
return (tag, env_tag, False)
|
||||
else:
|
||||
if semver.group("prerelease") is None:
|
||||
# is a regular SemVer-compliant tag
|
||||
return (tag, env_tag, False)
|
||||
elif snapshots:
|
||||
# is a SemVer-compliant prerelease tag, thus a snapshot (we skip it)
|
||||
print("! Skipping snapshot prerelease.")
|
||||
sys_exit()
|
||||
|
||||
return (tag, env_tag, True)
|
||||
|
||||
|
||||
def GetRepositoryHandler(gh, repo):
|
||||
print("· Get Repository handler")
|
||||
if repo is None:
|
||||
stdout.flush()
|
||||
raise (Exception("Repository name not defined! Please set 'GITHUB_REPOSITORY"))
|
||||
return gh.get_repo(repo)
|
||||
|
||||
|
||||
def GetOrCreateRelease(gh_repo, tag, sha, is_prerelease):
|
||||
print("· Get Release handler")
|
||||
gh_tag = None
|
||||
try:
|
||||
gh_tag = gh_repo.get_git_ref(f"tags/{tag!s}")
|
||||
except Exception:
|
||||
stdout.flush()
|
||||
|
||||
if gh_tag:
|
||||
try:
|
||||
return (gh_repo.get_release(tag), False)
|
||||
except Exception:
|
||||
return (gh_repo.create_git_release(tag, tag, "", draft=True, prerelease=is_prerelease), True)
|
||||
else:
|
||||
err_msg = f"Tag/release '{tag!s}' does not exist and could not create it!"
|
||||
if sha is None:
|
||||
raise (Exception(err_msg))
|
||||
try:
|
||||
return (
|
||||
gh_repo.create_git_tag_and_release(
|
||||
tag, "", tag, "", sha, "commit", draft=True, prerelease=is_prerelease
|
||||
),
|
||||
True,
|
||||
)
|
||||
except Exception:
|
||||
raise (Exception(err_msg))
|
||||
|
||||
|
||||
def UpdateReference(gh_release, tag, sha, is_prerelease, is_draft):
|
||||
print("· Update Release reference (force-push tag)")
|
||||
|
||||
if is_draft:
|
||||
# Unfortunately, it seems not possible to update fields 'created_at' or 'published_at'.
|
||||
print(" > Update (pre-)release")
|
||||
gh_release.update_release(
|
||||
gh_release.title,
|
||||
"" if gh_release.body is None else gh_release.body,
|
||||
draft=False,
|
||||
prerelease=is_prerelease,
|
||||
tag_name=gh_release.tag_name,
|
||||
target_commitish=gh_release.target_commitish,
|
||||
)
|
||||
|
||||
if sha is not None:
|
||||
print(f" > Force-push '{tag!s}' to {sha!s}")
|
||||
gh_repo.get_git_ref(f"tags/{tag!s}").edit(sha)
|
||||
|
||||
|
||||
files = GetListOfArtifacts(sys_argv, paramFiles)
|
||||
stdout.flush()
|
||||
[tag, env_tag, is_prerelease] = CheckRefSemVer(paramRef, paramTag, paramSnapshots)
|
||||
stdout.flush()
|
||||
gh_repo = GetRepositoryHandler(GetGitHubAPIHandler(paramToken), paramRepo)
|
||||
stdout.flush()
|
||||
[gh_release, is_draft] = GetOrCreateRelease(gh_repo, tag, paramSHA, is_prerelease)
|
||||
stdout.flush()
|
||||
|
||||
if paramRM:
|
||||
print("· RM set. All previous assets are being cleared...")
|
||||
for asset in gh_release.get_assets():
|
||||
print(f" - {asset.name}")
|
||||
asset.delete_asset()
|
||||
stdout.flush()
|
||||
|
||||
if len(files) > 0:
|
||||
print("· Upload assets")
|
||||
env = environ.copy()
|
||||
env["GITHUB_TOKEN"] = paramToken
|
||||
cmd = ["gh", "release", "upload", "--repo", paramRepo, "--clobber", tag] + files
|
||||
print(f" > {' '.join(cmd)}")
|
||||
check_call(cmd, env=env)
|
||||
stdout.flush()
|
||||
else:
|
||||
print("! Skipping uploading assets because the file list is empty.")
|
||||
|
||||
UpdateReference(gh_release, tag, paramSHA if env_tag is None else None, is_prerelease, is_draft)
|
||||
requirements.txt (new file)
@@ -0,0 +1 @@
pyTooling ~= 8.0
run.ps1 (new file)
@@ -0,0 +1,316 @@
|
||||
[CmdletBinding()]
|
||||
Param(
|
||||
# Clean up all files and directories
|
||||
[switch]$clean,
|
||||
|
||||
# Commands
|
||||
[switch]$all,
|
||||
[switch]$copyall,
|
||||
|
||||
[switch]$doc,
|
||||
[switch]$livedoc,
|
||||
[switch]$doccov,
|
||||
|
||||
[switch]$unit,
|
||||
[switch]$liveunit,
|
||||
[switch]$copyunit,
|
||||
|
||||
[switch]$cov,
|
||||
[switch]$livecov,
|
||||
[switch]$copycov,
|
||||
|
||||
[switch]$type,
|
||||
[switch]$livetype,
|
||||
[switch]$copytype,
|
||||
|
||||
[switch]$nooutput,
|
||||
|
||||
[switch]$build,
|
||||
[switch]$install,
|
||||
|
||||
# Display this help
|
||||
[switch]$help
|
||||
)
|
||||
|
||||
$PackageName = "Actions"
|
||||
|
||||
# set default values
|
||||
$EnableDebug = [bool]$PSCmdlet.MyInvocation.BoundParameters["Debug"]
|
||||
$EnableVerbose = [bool]$PSCmdlet.MyInvocation.BoundParameters["Verbose"] -or $EnableDebug
|
||||
|
||||
# Display help if no command was selected
|
||||
$help = $help -or ( -not(
|
||||
$all -or $copyall -or
|
||||
$clean -or
|
||||
$doc -or $livedoc -or $doccov -or
|
||||
$unit -or $liveunit -or $copyunit -or
|
||||
$cov -or $livecov -or $copycov -or
|
||||
$type -or $livetype -or $copytype -or
|
||||
$build -or $install
|
||||
)
|
||||
)
|
||||
|
||||
Write-Host "================================================================================" -ForegroundColor Magenta
|
||||
Write-Host "$PackageName Documentation Compilation and Assembly Tool" -ForegroundColor Magenta
|
||||
Write-Host "================================================================================" -ForegroundColor Magenta
|
||||
|
||||
if ($help)
|
||||
{ Get-Help $MYINVOCATION.MyCommand.Path -Detailed
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($all)
|
||||
{ $doc = $true
|
||||
$unit = $true
|
||||
# $copyunit = $true
|
||||
$cov = $true
|
||||
# $copycov = $true
|
||||
$type = $true
|
||||
$copytype = $true
|
||||
}
|
||||
if ($copyall)
|
||||
{# $copyunit = $true
|
||||
# $copycov = $true
|
||||
$copytype = $true
|
||||
}
|
||||
|
||||
if ($clean)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Cleaning documentation directories ..."
|
||||
rm -Force .\doc\$PackageName\*
|
||||
.\doc\make.bat clean
|
||||
Write-Host -ForegroundColor DarkYellow "[live][BUILD] Cleaning build directories ..."
|
||||
rm -Force .\build\bdist.win-amd64
|
||||
rm -Force .\build\lib
|
||||
}
|
||||
|
||||
if ($build)
|
||||
{ Write-Host -ForegroundColor Yellow "[live][BUILD] Cleaning build directories ..."
|
||||
rm -Force .\build\bdist.win-amd64
|
||||
rm -Force .\build\lib
|
||||
Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..."
|
||||
py -3.13 -m build --wheel
|
||||
|
||||
Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished"
|
||||
}
|
||||
if ($install)
|
||||
{ if (!([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator"))
|
||||
{ Write-Host -ForegroundColor Yellow "[live][INSTALL] Installing $PackageName with administrator rights ..."
|
||||
$proc = Start-Process pwsh.exe "-NoProfile -ExecutionPolicy Bypass -WorkingDirectory `"$PSScriptRoot`" -File `"$PSCommandPath`" `"-install`"" -Verb RunAs -Wait
|
||||
|
||||
# Write-Host -ForegroundColor Yellow "[live][INSTALL] Wait on administrator console ..."
|
||||
# Wait-Process -Id $proc.Id
|
||||
}
|
||||
else
|
||||
{ Write-Host -ForegroundColor Cyan "[ADMIN][UNINSTALL] Uninstalling $PackageName ..."
|
||||
py -3.13 -m pip uninstall -y $PackageName
|
||||
Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Installing $PackageName from wheel ..."
|
||||
py -3.13 -m pip install .\dist\$PackageName-8.1.0-py3-none-any.whl
|
||||
|
||||
Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Closing window in 5 seconds ..."
|
||||
Start-Sleep -Seconds 5
|
||||
}
|
||||
}
|
||||
|
||||
$jobs = @()
|
||||
|
||||
if ($livedoc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][DOC] Building documentation using Sphinx ..."
|
||||
|
||||
.\doc\make.bat html --verbose
|
||||
|
||||
Write-Host -ForegroundColor DarkYellow "[live][DOC] Documentation finished"
|
||||
}
|
||||
elseif ($doc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Building documentation using Sphinx ..."
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting Documentation job ..."
|
||||
|
||||
# Compile documentation
|
||||
$compileDocFunc = {
|
||||
.\doc\make.bat html --verbose
|
||||
}
|
||||
$docJob = Start-Job -Name "Documentation" -ScriptBlock $compileDocFunc
|
||||
# $jobs += $docJob
|
||||
}
|
||||
|
||||
|
||||
if ($doccov)
|
||||
{
|
||||
.\doc\make.bat coverage
|
||||
}
|
||||
|
||||
if ($liveunit)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[live][UNIT] Running Unit Tests using pytest ..."
|
||||
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit
|
||||
|
||||
if ($copyunit)
|
||||
{ cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
|
||||
Write-Host -ForegroundColor DarkBlue "[live][UNIT] Copied unit testing report to 'unittests' directory in HTML directory"
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkYellow "[live][UNIT] Unit Tests finished"
|
||||
}
|
||||
elseif ($unit)
|
||||
{ Write-Host -ForegroundColor DarkYellow "[Job2][UNIT] Running Unit Tests using pytest ..."
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Starting UnitTests jobs ..."
|
||||
|
||||
# Run unit tests
|
||||
$runUnitFunc = {
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
pytest -raP --color=yes --junitxml=report/unit/unittest.xml --template=html1/index.html --report=report/unit/html/index.html --split-report tests/unit
|
||||
}
|
||||
$unitJob = Start-Job -Name "UnitTests" -ScriptBlock $runUnitFunc
|
||||
$jobs += $unitJob
|
||||
}
|
||||
|
||||
if ($livecov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "[live][COV] Running Unit Tests with coverage ..."
|
||||
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to HTML ..."
|
||||
coverage html
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to XML (Cobertura) ..."
|
||||
coverage xml
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Convert coverage report to JSON ..."
|
||||
coverage json
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Write coverage report to console ..."
|
||||
coverage report
|
||||
|
||||
if ($copycov)
|
||||
{ cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[live][COV] Coverage finished"
|
||||
}
|
||||
elseif ($cov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "[live][COV] Running Unit Tests with coverage ..."
|
||||
Write-Host -ForegroundColor DarkMagenta "[SCRIPT] Starting Coverage jobs ..."
|
||||
|
||||
# Collect coverage
|
||||
$collectCovFunc = {
|
||||
$env:ENVIRONMENT_NAME = "Windows (x86-64)"
|
||||
coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -ra --tb=line --color=yes tests/unit
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to HTML ..."
|
||||
coverage html
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to XML (Cobertura) ..."
|
||||
coverage xml
|
||||
|
||||
Write-Host -ForegroundColor DarkMagenta "[Job3][COV] Convert coverage report to JSON ..."
|
||||
coverage json
|
||||
}
|
||||
$covJob = Start-Job -Name "Coverage" -ScriptBlock $collectCovFunc
|
||||
$jobs += $covJob
|
||||
}
|
||||
|
||||
if ($livetype)
|
||||
{ Write-Host -ForegroundColor DarkCyan "[live][TYPE] Running static type analysis using mypy ..."
|
||||
|
||||
$env:MYPY_FORCE_COLOR = 1
|
||||
mypy.exe -p $PackageName
|
||||
|
||||
if ($copytype)
|
||||
{ cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
|
||||
Write-Host -ForegroundColor DarkCyan "[live][TYPE] Copied typing report to 'typing' directory in HTML directory."
|
||||
}
|
||||
|
||||
Write-Host -ForegroundColor DarkCyan "[live][TYPE] Static type analysis finished"
|
||||
}
|
||||
elseif ($type)
|
||||
{ Write-Host -ForegroundColor DarkCyan "[live][TYPE] Running static type analysis using mypy ..."
|
||||
Write-Host -ForegroundColor DarkCyan "[SCRIPT] Starting Typing jobs ..."
|
||||
|
||||
# Analyze types
|
||||
$analyzeTypesFunc = {
|
||||
$env:MYPY_FORCE_COLOR = 1
|
||||
mypy.exe -p $PackageName
|
||||
}
|
||||
$typeJob = Start-Job -Name "Typing" -ScriptBlock $analyzeTypesFunc
|
||||
$jobs += $typeJob
|
||||
}
|
||||
|
||||
|
||||
if ($doc)
|
||||
{ Write-Host -ForegroundColor DarkGreen "[SCRIPT] Waiting on Documentation job ..."
|
||||
Wait-Job -Job $docJob
|
||||
Write-Host -ForegroundColor DarkYellow "[Job1][DOC] Documentation finished"
|
||||
}
|
||||
if ($jobs.Count -ne 0)
|
||||
{
|
||||
Write-Host -ForegroundColor DarkGreen ( "[SCRIPT] Waiting on {0} jobs ({1}) ..." -f $jobs.Count, (($jobs | %{ $_.Name }) -join ", "))
|
||||
Wait-Job -Job $jobs
|
||||
}
|
||||
|
||||
|
||||
if (-not $liveunit -and $copyunit)
|
||||
{
|
||||
# if ($unit)
|
||||
# { Wait-Job -Job $unitJob
|
||||
# Write-Host -ForegroundColor DarkBlue "[Job2][UNIT] Unit tests finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\unit\html\* .\doc\_build\html\unittests
|
||||
Write-Host -ForegroundColor DarkBlue "[post][UNIT] Copied unit testing report to 'unittests' directory in HTML directory"
|
||||
}
|
||||
if (-not ($livecov -or $cov) -and $copycov)
|
||||
{
|
||||
# if ($cov)
|
||||
# { Wait-Job -Job $unitJob
|
||||
# Write-Host -ForegroundColor DarkMagenta "[Job3][UNIT] Coverage collection finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[post][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
if (-not $livetype -and $copytype)
|
||||
{
|
||||
# if ($type)
|
||||
# { Wait-Job -Job $typeJob
|
||||
# Write-Host -ForegroundColor DarkCyan "[Job4][UNIT] Static type analysis finished"
|
||||
# }
|
||||
cp -Recurse -Force .\report\typing\* .\doc\_build\html\typing
|
||||
Write-Host -ForegroundColor DarkCyan "[post][TYPE] Copied typing report to 'typing' directory in HTML directory."
|
||||
}
|
||||
|
||||
|
||||
if ($type)
|
||||
{ Write-Host -ForegroundColor DarkCyan "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $typeJob
|
||||
}
|
||||
Remove-Job -Job $typeJob
|
||||
}
|
||||
if ($doc)
|
||||
{ Write-Host -ForegroundColor DarkYellow "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $docJob
|
||||
}
|
||||
Remove-Job -Job $docJob
|
||||
}
|
||||
if ($unit)
|
||||
{ Write-Host -ForegroundColor DarkBlue "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $unitJob
|
||||
}
|
||||
Remove-Job -Job $unitJob
|
||||
}
|
||||
if ($cov)
|
||||
{ Write-Host -ForegroundColor DarkMagenta "================================================================================"
|
||||
if (-not $nooutput)
|
||||
{ Receive-Job -Job $covJob
|
||||
}
|
||||
Remove-Job -Job $covJob
|
||||
|
||||
if ($copycov)
|
||||
{ cp -Recurse -Force .\report\coverage\html\* .\doc\_build\html\coverage
|
||||
Write-Host -ForegroundColor DarkMagenta "[post][COV] Copied code coverage report to 'coverage' directory in HTML directory"
|
||||
}
|
||||
}
|
||||
Write-Host -ForegroundColor DarkGreen "================================================================================"
|
||||
Write-Host -ForegroundColor DarkGreen "[SCRIPT] Finished"
|
||||
setup.py (new file)
@@ -0,0 +1,51 @@
|
||||
# ==================================================================================================================== #
|
||||
# _____ _ _ _ _ _ #
|
||||
# _ __ _ |_ _|__ ___ | (_)_ __ __ _ / \ ___| |_(_) ___ _ __ ___ #
|
||||
# | '_ \| | | || |/ _ \ / _ \| | | '_ \ / _` | / _ \ / __| __| |/ _ \| '_ \/ __| #
|
||||
# | |_) | |_| || | (_) | (_) | | | | | | (_| |_ / ___ \ (__| |_| | (_) | | | \__ \ #
|
||||
# | .__/ \__, ||_|\___/ \___/|_|_|_| |_|\__, (_)_/ \_\___|\__|_|\___/|_| |_|___/ #
|
||||
# |_| |___/ |___/ #
|
||||
# ==================================================================================================================== #
|
||||
# Authors: #
|
||||
# Patrick Lehmann #
|
||||
# #
|
||||
# License: #
|
||||
# ==================================================================================================================== #
|
||||
# Copyright 2017-2024 Patrick Lehmann - Bötzingen, Germany #
|
||||
# #
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); #
|
||||
# you may not use this file except in compliance with the License. #
|
||||
# You may obtain a copy of the License at #
|
||||
# #
|
||||
# http://www.apache.org/licenses/LICENSE-2.0 #
|
||||
# #
|
||||
# Unless required by applicable law or agreed to in writing, software #
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, #
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
|
||||
# See the License for the specific language governing permissions and #
|
||||
# limitations under the License. #
|
||||
# #
|
||||
# SPDX-License-Identifier: Apache-2.0 #
|
||||
# ==================================================================================================================== #
|
||||
#
|
||||
"""Package installer for 'pyDummy'."""
|
||||
from setuptools import setup
|
||||
|
||||
from pathlib import Path
|
||||
from pyTooling.Packaging import DescribePythonPackageHostedOnGitHub
|
||||
|
||||
gitHubNamespace = "pyTooling"
|
||||
packageName = "pyDummy"
|
||||
packageDirectory = packageName
|
||||
packageInformationFile = Path(f"{packageDirectory}/__init__.py")
|
||||
|
||||
setup(**DescribePythonPackageHostedOnGitHub(
|
||||
packageName=packageName,
|
||||
description="pyDummy is a test package to verify GitHub actions for Python projects.",
|
||||
gitHubNamespace=gitHubNamespace,
|
||||
unittestRequirementsFile=Path("tests/requirements.txt"),
|
||||
sourceFileWithVersion=packageInformationFile,
|
||||
dataFiles={
|
||||
packageName: ["py.typed"]
|
||||
}
|
||||
))
|
||||
Some files were not shown because too many files have changed in this diff.