diff --git a/.btd.yml b/.btd.yml new file mode 100644 index 0000000..296c029 --- /dev/null +++ b/.btd.yml @@ -0,0 +1,9 @@ +input: doc +output: _build +requirements: requirements.txt +target: gh-pages +formats: [ html ] +images: + base: btdi/sphinx:pytooling + latex: btdi/latex +theme: https://codeload.GitHub.com/buildthedocs/sphinx.theme/tar.gz/v1 diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..3c7043c --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +root = true + +[*] +charset = utf-8 +# end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +indent_style = tab +indent_size = 2 +tab_width = 2 + + +[*.py] +indent_style = tab +indent_size = 2 + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[*.{json,ini}] +indent_style = tab +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false + +[*.rst] +indent_style = space +indent_size = 3 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 11c8a30..67b1a2a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,9 +10,9 @@ updates: - Dependencies assignees: - Paebbels - - Umarcor + - umarcor reviewers: - Paebbels - - Umarcor + - umarcor schedule: interval: "daily" # Checks on Monday trough Friday. diff --git a/.github/workflows/ApplicationTesting.yml b/.github/workflows/ApplicationTesting.yml new file mode 100644 index 0000000..c327cb9 --- /dev/null +++ b/.github/workflows/ApplicationTesting.yml @@ -0,0 +1,255 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# Unai Martinez-Corral # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Application Testing + +on: + workflow_call: + inputs: + jobs: + description: 'JSON list with environment fields, telling the system and Python versions to run tests with.' + required: true + type: string + wheel: + description: "Wheel package as input artifact." + required: false + default: '' + type: string + requirements: + description: 'Python dependencies to be installed through pip.' + required: false + default: '-r tests/requirements.txt' + type: string + pacboy: + description: 'MSYS2 dependencies to be installed through pacboy (pacman).' + required: false + default: "" + type: string + mingw_requirements: + description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.' + required: false + default: '' + type: string + root_directory: + description: 'Working directory for running tests.' 
+        required: false
+        default: ''
+        type: string
+      tests_directory:
+        description: 'Path to the directory containing tests (relative to root_directory).'
+        required: false
+        default: 'tests'
+        type: string
+      apptest_directory:
+        description: 'Path to the directory containing application tests (relative to tests_directory).'
+        required: false
+        default: 'app'
+        type: string
+      apptest_xml_artifact:
+        description: "Generate application test report with junitxml and upload results as an artifact."
+        required: false
+        default: ''
+        type: string
+
+jobs:
+  ApplicationTesting:
+    name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Application Tests using Python ${{ matrix.python }}
+    runs-on: ${{ matrix.runs-on }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJson(inputs.jobs) }}
+
+    defaults:
+      run:
+        shell: ${{ matrix.shell }}
+
+    steps:
+      - name: ⏬ Checkout repository
+        uses: actions/checkout@v4
+
+      - name: πŸ“₯ Download artifacts '${{ inputs.wheel }}' from 'Package' job
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.wheel }}
+          path: install
+
+      - name: Compute pacman/pacboy packages
+        id: pacboy
+        if: matrix.system == 'msys2'
+        shell: python
+        run: |
+          from os import getenv
+          from pathlib import Path
+          from re import compile
+          from sys import version
+
+          print(f"Python: {version}")
+
+          def loadRequirementsFile(requirementsFile: Path):
+            requirements = []
+            with requirementsFile.open("r") as file:
+              for line in file.readlines():
+                line = line.strip()
+                if line.startswith("#") or line.startswith("https") or line == "":
+                  continue
+                elif line.startswith("-r"):
+                  # Remove the first word/argument (-r)
+                  requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip())
+                else:
+                  requirements.append(line)
+
+            return requirements
+
+          requirements = "${{ inputs.requirements }}"
+          if requirements.startswith("-r"):
+            requirementsFile = Path(requirements[2:].lstrip())
+            dependencies = loadRequirementsFile(requirementsFile)
+          else:
+            dependencies = [req.strip() for req in requirements.split(" ")]
+
+          packages = {
+            "coverage": "python-coverage:p",
+            "igraph": "igraph:p",
+            "jinja2": "python-markupsafe:p",
+            "lxml": "python-lxml:p",
+            "numpy": "python-numpy:p",
+            "markupsafe": "python-markupsafe:p",
+            "pip": "python-pip:p",
+            "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
+            "sphinx": "python-markupsafe:p",
+            "tomli": "python-tomli:p",
+            "wheel": "python-wheel:p",
+          }
+          subPackages = {
+            "pytooling": {
+              "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p",
+            }
+          }
+
+          regExp = compile(r"(?P<PackageName>[\w_\-\.]+)(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P<Operator>[<>~=]+)\s*)(?P<Version>\d+(?:\.\d+)*)(?:-(?P<Extension>\w+))?")
+
+          pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p"))
+          print(f"Processing dependencies ({len(dependencies)}):")
+          for dependency in dependencies:
+            print(f" {dependency}")
+
+            match = regExp.match(dependency.lower())
+            if not match:
+              print(f" Wrong format: {dependency}")
+              print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'")
+              continue
+
+            package = match["PackageName"]
+            if package in packages:
+              rewrite = packages[package]
+              print(f" Found rewrite rule for '{package}': {rewrite}")
+              pacboyPackages.add(rewrite)
+
+            if match["SubPackages"] and package in subPackages:
+              for subPackage in match["SubPackages"].split(","):
+                if subPackage in subPackages[package]:
+                  rewrite = subPackages[package][subPackage]
+                  print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}")
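As a point of reference, the dependency parser above can be exercised on its own. In this sketch the group names other than PackageName and SubPackages (the only two the step reads back) are illustrative, and the requirement string is an invented example:

    from re import compile as re_compile

    # Same pattern structure as in the step above, split up for readability.
    regExp = re_compile(
        r"(?P<PackageName>[\w_\-\.]+)"                       # e.g. 'pytooling'
        r"(?:\[(?P<SubPackages>(?:\w+)(?:\s*,\s*\w+)*)\])?"  # optional extras, e.g. '[yaml]'
        r"(?:\s*(?P<Operator>[<>~=]+)\s*)"                   # version comparator, e.g. '~='
        r"(?P<Version>\d+(?:\.\d+)*)"                        # version number, e.g. '6.1'
        r"(?:-(?P<Extension>\w+))?"                          # optional suffix, e.g. '-alpha1'
    )

    match = regExp.match("pytooling[yaml] ~= 6.1".lower())
    if match is not None:
        print(match["PackageName"])                 # pytooling
        print(match["SubPackages"])                 # yaml
        print(match["Operator"], match["Version"])  # ~= 6.1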
pacboyPackages.add(rewrite) + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+") as f: + f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n") + + - name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}' + if: matrix.system == 'msys2' + uses: msys2/setup-msys2@v2 + with: + msystem: ${{ matrix.runtime }} + update: true + pacboy: >- + ${{ steps.pacboy.outputs.pacboy_packages }} + ${{ inputs.pacboy }} + + - name: 🐍 Setup Python ${{ matrix.python }} + if: matrix.system != 'msys2' + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + + - name: πŸ”§ Install wheel and pip dependencies (native) + if: matrix.system != 'msys2' + run: | + python -m pip install --disable-pip-version-check -U wheel + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} + + - name: πŸ”§ Install pip dependencies (MSYS2) + if: matrix.system == 'msys2' + run: | + if [ -n '${{ inputs.mingw_requirements }}' ]; then + python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }} + else + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} + fi + + - name: πŸ”§ Install wheel from artifact + run: | + ls -l install + python -m pip install --disable-pip-version-check -U install/*.whl + + - name: β˜‘ Run application tests (Ubuntu/macOS) + if: matrix.system != 'windows' + run: | + export ENVIRONMENT_NAME="${{ matrix.envname }}" + + cd "${{ inputs.root_directory || '.' }}" + [ -n '${{ inputs.apptest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS + if [ -n '${{ inputs.coverage_config }}' ]; then + echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }} + else + echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }} + fi + + - name: β˜‘ Run application tests (Windows) + if: matrix.system == 'windows' + run: | + $env:ENVIRONMENT_NAME = "${{ matrix.envname }}" + + cd "${{ inputs.root_directory || '.' }}" + $PYTEST_ARGS = if ("${{ inputs.apptest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" } + if ("${{ inputs.coverage_config }}") { + Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }} + } else { + Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.apptest_directory }}" + python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' 
}}/${{ inputs.apptest_directory }} + } + + - name: πŸ“€ Upload 'TestReportSummary.xml' artifact + if: inputs.apptest_xml_artifact != '' + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.apptest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: report/unit/TestReportSummary.xml + if-no-files-found: error + retention-days: 1 diff --git a/.github/workflows/ArtifactCleanUp.yml b/.github/workflows/ArtifactCleanUp.yml index 3b4dd9d..2480577 100644 --- a/.github/workflows/ArtifactCleanUp.yml +++ b/.github/workflows/ArtifactCleanUp.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -45,12 +45,14 @@ jobs: - name: πŸ—‘οΈ Delete package Artifacts if: ${{ ! startsWith(github.ref, 'refs/tags') }} - uses: geekyeggo/delete-artifact@v2 + uses: geekyeggo/delete-artifact@v4 with: name: ${{ inputs.package }} + token: ${{ secrets.GITHUB_TOKEN }} - name: πŸ—‘οΈ Delete remaining Artifacts if: ${{ inputs.remaining != '' }} - uses: geekyeggo/delete-artifact@v2 + uses: geekyeggo/delete-artifact@v4 with: name: ${{ inputs.remaining }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/BuildTheDocs.yml b/.github/workflows/BuildTheDocs.yml index d223e99..44b13ea 100644 --- a/.github/workflows/BuildTheDocs.yml +++ b/.github/workflows/BuildTheDocs.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -27,18 +27,18 @@ on: inputs: artifact: description: 'Name of the documentation artifact.' - required: true + required: false + default: '' type: string jobs: - BuildTheDocs: name: πŸ““ Run BuildTheDocs runs-on: ubuntu-latest steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: πŸ›³οΈ Build documentation uses: buildthedocs/btd@v0 @@ -46,8 +46,23 @@ jobs: skip-deploy: true - name: πŸ“€ Upload 'documentation' artifacts - uses: actions/upload-artifact@v3 + if: inputs.artifact != '' + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifact }} path: doc/_build/html retention-days: 1 + + - name: 'πŸ““ Publish site to GitHub Pages' + if: inputs.artifact == '' && github.event_name != 'pull_request' + run: | + cp --recursive -T doc/_build/html public + cd public + touch .nojekyll + git init + cp ../.git/config ./.git/config + git add . 
+ git config --local user.email "BuildTheDocs@GitHubActions" + git config --local user.name "GitHub Actions" + git commit -a -m "update ${{ github.sha }}" + git push -u origin +HEAD:gh-pages diff --git a/.github/workflows/CheckDocumentation.yml b/.github/workflows/CheckDocumentation.yml new file mode 100644 index 0000000..285516a --- /dev/null +++ b/.github/workflows/CheckDocumentation.yml @@ -0,0 +1,67 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Check Documentation + +on: + workflow_call: + inputs: + python_version: + description: 'Python version.' + required: false + default: '3.12' + type: string + directory: + description: 'Source code directory to check.' + required: true + type: string +# fail_below: +# description: 'Minimum required documentation coverage level' +# required: false +# default: 75 +# type: string + +jobs: + DocCoverage: + name: πŸ‘€ Check documentation coverage + runs-on: ubuntu-latest + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + + - name: 🐍 Setup Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: πŸ”§ Install wheel,tomli and pip dependencies (native) + run: | + python -m pip install --disable-pip-version-check -U docstr_coverage interrogate + + - name: Run 'interrogate' Documentation Coverage Check + continue-on-error: true + run: | + interrogate -c pyproject.toml + + - name: Run 'docstr_coverage' Documentation Coverage Check + continue-on-error: true + run: | + docstr_coverage -v ${{ inputs.directory }} diff --git a/.github/workflows/CoverageCollection.yml b/.github/workflows/CoverageCollection.yml index d5c3757..2922e25 100644 --- a/.github/workflows/CoverageCollection.yml +++ b/.github/workflows/CoverageCollection.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -28,17 +28,22 @@ on: python_version: description: 'Python version.' required: false - default: '3.10' + default: '3.11' type: string requirements: description: 'Python dependencies to be installed through pip.' required: false default: '-r tests/requirements.txt' type: string - unittest_directory: - description: 'Path to the directory containing unit tests.' 
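The 'Extract configurations from pyproject.toml' steps used below and in several of the new workflows resolve the coverage output locations with tomli. A standalone sketch of that lookup, assuming the usual [tool.coverage.html] and [tool.coverage.xml] tables exist:

    from pathlib import Path
    from tomli import load as tomli_load

    # Defaults used by the workflow steps when no configuration is found.
    htmlDirectory = "htmlcov"
    xmlFile = "./coverage.xml"

    pyProjectFile = Path("pyproject.toml")
    if pyProjectFile.exists():
        with pyProjectFile.open("rb") as file:    # tomli expects a binary file handle
            pyProjectSettings = tomli_load(file)

        htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"]
        xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"]

    print(f"html={htmlDirectory} xml={xmlFile}")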
+ tests_directory: + description: 'Path to the directory containing tests (test working directory).' required: false - default: 'tests/unit' + default: 'tests' + type: string + unittest_directory: + description: 'Path to the directory containing unit tests (relative to tests_directory).' + required: false + default: 'unit' type: string coverage_config: description: 'Path to the .coveragerc file. Use pyproject.toml by default.' @@ -62,26 +67,26 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 🐍 Setup Python ${{ inputs.python_version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} - name: πŸ—‚ Install dependencies run: | - python -m pip install -U pip - python -m pip install tomli - python -m pip install ${{ inputs.requirements }} + python -m pip install --disable-pip-version-check tomli + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} - name: πŸ” Extract configurations from pyproject.toml id: getVariables shell: python run: | - from os import environ - from pathlib import Path - from tomli import load as tomli_load + from os import getenv + from pathlib import Path + from tomli import load as tomli_load + from textwrap import dedent htmlDirectory = 'htmlcov' xmlFile = './coverage.xml' @@ -89,52 +94,61 @@ jobs: # Read output paths from 'pyproject.toml' file if coverageRC == "pyproject.toml": - pyProjectFile = Path("pyproject.toml") - if pyProjectFile.exists(): - with pyProjectFile.open("rb") as file: - pyProjectSettings = tomli_load(file) + pyProjectFile = Path("pyproject.toml") + if pyProjectFile.exists(): + with pyProjectFile.open("rb") as file: + pyProjectSettings = tomli_load(file) - htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"] - xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"] - else: - print(f"File '{pyProjectFile}' not found and no ' .coveragerc' file specified.") + htmlDirectory = pyProjectSettings["tool"]["coverage"]["html"]["directory"] + xmlFile = pyProjectSettings["tool"]["coverage"]["xml"]["output"] + else: + print(f"File '{pyProjectFile}' not found and no ' .coveragerc' file specified.") # Read output paths from '.coveragerc' file elif len(coverageRC) > 0: - coverageRCFile = Path(coverageRC) - if coverageRCFile.exists(): - with coverageRCFile.open("rb") as file: - coverageRCSettings = tomli_load(file) + coverageRCFile = Path(coverageRC) + if coverageRCFile.exists(): + with coverageRCFile.open("rb") as file: + coverageRCSettings = tomli_load(file) - htmlDirectory = coverageRCSettings["html"]["directory"] - xmlFile = coverageRCSettings["xml"]["output"] - else: - print(f"File '{coverageRCFile}' not found.") + htmlDirectory = coverageRCSettings["html"]["directory"] + xmlFile = coverageRCSettings["xml"]["output"] + else: + print(f"File '{coverageRCFile}' not found.") + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+", encoding="utf-8") as f: + f.write(dedent(f"""\ + coverage_report_html_directory={htmlDirectory} + coverage_report_xml={xmlFile} + """)) - with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho: - gho.write(f"""\ - coverage_report_html_directory={htmlDirectory} - coverage_report_xml={xmlFile} - """) print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}") - name: Collect coverage continue-on-error: true run: | - [ 'x${{ inputs.coverage_config }}' != 'x' 
] && PYCOV_ARGS='--cov-config=${{ inputs.coverage_config }}' || unset PYCOV_ARGS - python -m pytest -rA --cov=. $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes + export ENVIRONMENT_NAME="Linux (x86-64)" + export PYTHONPATH=$(pwd) + ABSDIR=$(pwd) + cd "${{ inputs.tests_directory || '.' }}" + [ -n '${{ inputs.coverage_config }}' ] && PYCOV_ARGS="--cov-config=${ABSDIR}/${{ inputs.coverage_config }}" || unset PYCOV_ARGS + echo "python -m pytest -rA --cov=${ABSDIR} ${PYCOV_ARGS} ${{ inputs.unittest_directory }} --color=yes" + python -m pytest -rA --cov=${ABSDIR} $PYCOV_ARGS ${{ inputs.unittest_directory }} --color=yes - name: Convert to cobertura format - run: coverage xml + run: coverage xml --data-file=${{ inputs.tests_directory || '.' }}/.coverage - name: Convert to HTML format run: | - coverage html -d ${{ steps.getVariables.outputs.coverage_report_html_directory }} + coverage html --data-file=${{ inputs.tests_directory || '.' }}/.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }} rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore - name: πŸ“€ Upload 'Coverage Report' artifact continue-on-error: true - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifact }} path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} @@ -151,7 +165,7 @@ jobs: - name: πŸ“‰ Publish coverage at Codacy continue-on-error: true - uses: codacy/codacy-coverage-reporter-action@master + uses: codacy/codacy-coverage-reporter-action@v1 with: project-token: ${{ secrets.codacy_token }} coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }} diff --git a/.github/workflows/IntermediateCleanUp.yml b/.github/workflows/IntermediateCleanUp.yml new file mode 100644 index 0000000..4699446 --- /dev/null +++ b/.github/workflows/IntermediateCleanUp.yml @@ -0,0 +1,55 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. 
# +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Intermediate Cleanup + +on: + workflow_call: + inputs: + sqlite_coverage_artifacts_prefix: + description: 'Prefix for SQLite coverage artifacts' + required: false + type: string + xml_unittest_artifacts_prefix: + description: 'Prefix for XML unittest artifacts' + required: false + type: string + +jobs: + IntermediateCleanUp: + name: πŸ—‘οΈ Intermediate Artifact Cleanup + runs-on: ubuntu-latest + steps: + - name: πŸ—‘οΈ Delete SQLite coverage artifacts from matrix jobs + uses: geekyeggo/delete-artifact@v4 + if: inputs.sqlite_coverage_artifacts_prefix != '' + continue-on-error: true + with: + name: ${{ inputs.sqlite_coverage_artifacts_prefix }}* + token: ${{ secrets.GITHUB_TOKEN }} + + - name: πŸ—‘οΈ Delete XML coverage artifacts from matrix jobs + uses: geekyeggo/delete-artifact@v4 + if: inputs.xml_unittest_artifacts_prefix != '' + continue-on-error: true + with: + name: ${{ inputs.xml_unittest_artifacts_prefix }}* + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/LaTeXDocumentation.yml b/.github/workflows/LaTeXDocumentation.yml new file mode 100644 index 0000000..0c4d375 --- /dev/null +++ b/.github/workflows/LaTeXDocumentation.yml @@ -0,0 +1,66 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: LaTeX Documentation + +on: + workflow_call: + inputs: + document: + description: 'LaTeX root document without *.tex extension.' + required: true + type: string + latex_artifact: + description: 'Name of the LaTeX documentation artifact.' + required: false + default: '' + type: string + pdf_artifact: + description: 'Name of the PDF documentation artifact.' 
+ required: false + default: '' + type: string + +jobs: + PDFDocumentation: + name: πŸ““ Converting LaTeX Documentation to PDF + runs-on: ubuntu-latest + steps: + - name: πŸ“₯ Download artifacts '${{ inputs.latex_artifact }}' from 'SphinxDocumentation' job + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.latex_artifact }} + path: latex + + - name: Compile LaTeX document + uses: xu-cheng/latex-action@master + with: + working_directory: latex + root_file: ${{ inputs.document }}.tex + + - name: πŸ“€ Upload 'PDF Documentation' artifact + uses: actions/upload-artifact@v4 + if: inputs.pdf_artifact != '' + with: + name: ${{ inputs.pdf_artifact }} + path: ${{ inputs.document }}.pdf + if-no-files-found: error + retention-days: 1 diff --git a/.github/workflows/Package.yml b/.github/workflows/Package.yml index 1edc84b..5378fbc 100644 --- a/.github/workflows/Package.yml +++ b/.github/workflows/Package.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -28,7 +28,7 @@ on: python_version: description: 'Python version.' required: false - default: '3.10' + default: '3.12' type: string requirements: description: 'Python dependencies to be installed through pip; if empty, use pyproject.toml through build.' @@ -48,21 +48,18 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 🐍 Setup Python ${{ inputs.python_version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} - - name: 🐍 Update pip - run: python -m pip install -U pip - # build - name: πŸ”§ [build] Install dependencies for packaging and release if: inputs.requirements == '' - run: python -m pip install build + run: python -m pip install --disable-pip-version-check build - name: πŸ”¨ [build] Build Python package (source distribution) if: inputs.requirements == '' @@ -76,7 +73,7 @@ jobs: - name: πŸ”§ [build] Install dependencies for packaging and release if: inputs.requirements == 'no-isolation' - run: python -m pip install build + run: python -m pip install --disable-pip-version-check build - name: πŸ”¨ [build] Build Python package (source distribution) if: inputs.requirements == 'no-isolation' @@ -90,7 +87,7 @@ jobs: - name: πŸ”§ [setuptools] Install dependencies for packaging and release if: inputs.requirements != '' && inputs.requirements != 'no-isolation' - run: python -m pip install ${{ inputs.requirements }} + run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }} - name: πŸ”¨ [setuptools] Build Python package (source distribution) if: inputs.requirements != '' && inputs.requirements != 'no-isolation' @@ -100,9 +97,8 @@ jobs: if: inputs.requirements != '' && inputs.requirements != 'no-isolation' run: python setup.py bdist_wheel - - name: πŸ“€ Upload wheel artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifact }} path: dist/ diff --git a/.github/workflows/Parameters.yml b/.github/workflows/Parameters.yml index 2db2c26..bc18f83 100644 --- a/.github/workflows/Parameters.yml +++ b/.github/workflows/Parameters.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # 
==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -25,100 +25,267 @@ name: Parameters on: workflow_call: inputs: - python_version: - description: 'Python version.' - required: false - default: '3.10' - type: string - python_version_list: - description: 'Space separated list of Python versions to run tests with.' - required: false - default: '3.7 3.8 3.9 3.10' - type: string - system_list: - description: 'Space separated list of systems to run tests on.' - required: false - default: 'ubuntu windows msys2 macos' - type: string name: description: 'Name of the tool.' required: true type: string + python_version: + description: 'Python version.' + required: false + default: '3.12' + type: string + python_version_list: + description: 'Space separated list of Python versions to run tests with.' + required: false + default: '3.8 3.9 3.10 3.11 3.12' + type: string + system_list: + description: 'Space separated list of systems to run tests on.' + required: false + default: 'ubuntu windows macos mingw64 ucrt64' + type: string + include_list: + description: 'Space separated list of system:python items to be included into the list of test.' + required: false + default: '' + type: string + exclude_list: + description: 'Space separated list of system:python items to be excluded from the list of test.' + required: false + default: '' + type: string + disable_list: + description: 'Space separated list of system:python items to be disabled from the list of test.' + required: false + default: '' + type: string + outputs: + python_version: + description: "Default Python version for other jobs." + value: ${{ jobs.Parameters.outputs.python_version }} + python_jobs: + description: "List of Python versions (and system combinations) to be used in the matrix of other jobs." + value: ${{ jobs.Parameters.outputs.python_jobs }} + artifact_names: + description: "Pre-defined artifact names for other jobs." + value: ${{ jobs.Parameters.outputs.artifact_names }} params: description: "Parameters to be used in other jobs." value: ${{ jobs.Parameters.outputs.params }} - python_jobs: - description: "List of Python versions to be used in the matrix of other jobs." 
- value: ${{ jobs.Parameters.outputs.python_jobs }} jobs: - Parameters: runs-on: ubuntu-latest outputs: - params: ${{ steps.params.outputs.params }} - python_jobs: ${{ steps.params.outputs.python_jobs }} - steps: + python_version: ${{ steps.params.outputs.python_version }} + python_jobs: ${{ steps.params.outputs.python_jobs }} + artifact_names: ${{ steps.params.outputs.artifact_names }} + params: ${{ steps.params.outputs.params }} + steps: - name: Generate 'params' and 'python_jobs' id: params shell: python run: | - from os import environ + from json import dumps as json_dumps + from os import getenv + from pathlib import Path + from pprint import pprint + from textwrap import dedent - name = '${{ inputs.name }}' - params = { - 'python_version': '${{ inputs.python_version }}', - 'artifacts': { - 'unittesting': f'{name}-TestReport', - 'coverage': f'{name}-coverage', - 'typing': f'{name}-typing', - 'package': f'{name}-package', - 'doc': f'{name}-doc', + name = "${{ inputs.name }}".strip() + python_version = "${{ inputs.python_version }}".strip() + systems = "${{ inputs.system_list }}".strip() + versions = "${{ inputs.python_version_list }}".strip() + include_list = "${{ inputs.include_list }}".strip() + exclude_list = "${{ inputs.exclude_list }}".strip() + disable_list = "${{ inputs.disable_list }}".strip() + + currentMSYS2Version = "3.11" + currentAlphaVersion = "3.13" + currentAlphaRelease = "3.13.0-alpha.1" + + if systems == "": + print("::error title=Parameter::system_list is empty.") + else: + systems = [sys.strip() for sys in systems.split(" ")] + + if versions == "": + versions = [ python_version ] + else: + versions = [ver.strip() for ver in versions.split(" ")] + + if include_list == "": + includes = [] + else: + includes = [tuple(include.strip().split(":")) for include in include_list.split(" ")] + + if exclude_list == "": + excludes = [] + else: + excludes = [exclude.strip() for exclude in exclude_list.split(" ")] + + if disable_list == "": + disabled = [] + else: + disabled = [disable.strip() for disable in disable_list.split(" ")] + + if "3.7" in versions: + print("::warning title=Deprecated::Support for Python 3.7 ended in 2023.06.27.") + if "msys2" in systems: + print("::warning title=Deprecated::System 'msys2' will be replaced by 'mingw64'.") + if currentAlphaVersion in versions: + print(f"::notice title=Experimental::Python {currentAlphaVersion} ({currentAlphaRelease}) is a pre-release.") + for disable in disabled: + print(f"::warning title=Disabled Python Job::System '{disable}' temporary disabled.") + + data = { + # Python and PyPy versions supported by "setup-python" action + "python": { + "3.7": { "icon": "⚫", "until": "2023.06.27" }, + "3.8": { "icon": "πŸ”΄", "until": "2024.10" }, + "3.9": { "icon": "🟠", "until": "2025.10" }, + "3.10": { "icon": "🟑", "until": "2026.10" }, + "3.11": { "icon": "🟒", "until": "2027.10" }, + "3.12": { "icon": "🟒", "until": "2028.10" }, + # "3.13": { "icon": "🟣", "until": "2028.10" }, + "pypy-3.7": { "icon": "⟲⚫", "until": "????.??" }, + "pypy-3.8": { "icon": "βŸ²πŸ”΄", "until": "????.??" }, + "pypy-3.9": { "icon": "⟲🟠", "until": "????.??" }, + "pypy-3.10": { "icon": "⟲🟑", "until": "????.??" 
}, + }, + # Runner systems (runner images) supported by GitHub Actions + "sys": { + "ubuntu": { "icon": "🐧", "runs-on": "ubuntu-latest", "shell": "bash", "name": "Linux (x86-64)" }, + "windows": { "icon": "πŸͺŸ", "runs-on": "windows-latest", "shell": "pwsh", "name": "Windows (x86-64)" }, + "macos": { "icon": "🍎", "runs-on": "macos-latest", "shell": "bash", "name": "MacOS (x86-64)" }, + }, + # Runtimes provided by MSYS2 + "runtime": { + "msys": { "icon": "πŸͺŸπŸŸͺ", "name": "Windows+MSYS2 (x86-64) - MSYS" }, + "mingw32": { "icon": "πŸͺŸβ¬›", "name": "Windows+MSYS2 (x86-64) - MinGW32" }, + "mingw64": { "icon": "πŸͺŸπŸŸ¦", "name": "Windows+MSYS2 (x86-64) - MinGW64" }, + "clang32": { "icon": "πŸͺŸπŸŸ«", "name": "Windows+MSYS2 (x86-64) - Clang32" }, + "clang64": { "icon": "πŸͺŸπŸŸ§", "name": "Windows+MSYS2 (x86-64) - Clang64" }, + "ucrt64": { "icon": "πŸͺŸπŸŸ¨", "name": "Windows+MSYS2 (x86-64) - UCRT64" }, } } - with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho: - gho.write(f"params={params!s}\n") - print("Parameters:") - print(params) + print(f"includes ({len(includes)}):") + for system,version in includes: + print(f"- {system}:{version}") + print(f"excludes ({len(excludes)}):") + for exclude in excludes: + print(f"- {exclude}") + print(f"disabled ({len(disabled)}):") + for disable in disabled: + print(f"- {disable}") - systems = '${{ inputs.system_list }}'.split(' ') - versions = '${{ inputs.python_version_list }}'.split(' ') - if '3.6' in versions: - print("::warning title=Deprecated::Support for Python 3.6 ended in 2021.12.23.") - if '3.11' in versions: - print(f"::notice title=Experimental::Python 3.11 (3.11.0-alpha3) is a pre-release.") - data = { - 'python': { - '3.6': { 'icon': '⚫', 'until': '2021.12.23' }, - '3.7': { 'icon': 'πŸ”΄', 'until': '2023.06.27' }, - '3.8': { 'icon': '🟠', 'until': '2024.10' }, - '3.9': { 'icon': '🟑', 'until': '2025.10' }, - '3.10': { 'icon': '🟒', 'until': '2026.10' }, - '3.11': { 'icon': '🟣', 'until': '2027.10' }, - }, - 'sys': { - 'ubuntu': { 'icon': '🐧', 'runs-on': 'ubuntu-latest', 'shell': 'bash' }, - 'windows': { 'icon': '🧊', 'runs-on': 'windows-latest', 'shell': 'pwsh' }, - 'msys2': { 'icon': '🟦', 'runs-on': 'windows-latest', 'shell': 'msys2 {0}' }, - 'macos': { 'icon': '🍎', 'runs-on': 'macos-latest', 'shell': 'bash' } - } - } - jobs = [ - { - 'sysicon': data['sys'][system]['icon'], - 'system': system, - 'runs-on': data['sys'][system]['runs-on'], - 'shell': data['sys'][system]['shell'], - 'pyicon': data['python'][version]['icon'], - 'python': '3.11.0-alpha.3' if version == '3.11' else version - } - for system in systems - for version in (versions if system != 'msys2' else ['3.10']) + combinations = [ + (system, version) + for system in systems + if system in data["sys"] + for version in versions + if version in data["python"] + and f"{system}:{version}" not in excludes + and f"{system}:{version}" not in disabled + ] + [ + (system, currentMSYS2Version) + for system in systems + if system in data["runtime"] + and f"{system}:{currentMSYS2Version}" not in excludes + and f"{system}:{currentMSYS2Version}" not in disabled + ] + [ + (system, version) + for system, version in includes + if system in data["sys"] + and version in data["python"] + and f"{system}:{version}" not in disabled ] - with open(environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as gho: - gho.write(f"python_jobs={jobs!s}\n") - print("Python jobs:") - print(jobs) + print(f"Combinations ({len(combinations)}):") + for system, version in combinations: + print(f"- {system}:{version}") 
+ + jobs = [ + { + "sysicon": data["sys"][system]["icon"], + "system": system, + "runs-on": data["sys"][system]["runs-on"], + "runtime": "native", + "shell": data["sys"][system]["shell"], + "pyicon": data["python"][version]["icon"], + "python": currentAlphaRelease if version == currentAlphaVersion else version, + "envname": data["sys"][system]["name"], + } + for system, version in combinations if system in data["sys"] + ] + [ + { + "sysicon": data["runtime"][runtime]["icon"], + "system": "msys2", + "runs-on": "windows-latest", + "runtime": runtime.upper(), + "shell": "msys2 {0}", + "pyicon": data["python"][currentMSYS2Version]["icon"], + "python": version, + "envname": data["runtime"][runtime]["name"], + } + for runtime, version in combinations if runtime not in data["sys"] + ] + + artifact_names = { + "unittesting_xml": f"{name}-UnitTestReportSummary-XML", + "unittesting_html": f"{name}-UnitTestReportSummary-HTML", + "perftesting_xml": f"{name}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{name}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{name}-ApplicationTestReportSummary-XML", + "codecoverage_sqlite": f"{name}-CodeCoverage-SQLite", + "codecoverage_xml": f"{name}-CodeCoverage-XML", + "codecoverage_json": f"{name}-CodeCoverage-JSON", + "codecoverage_html": f"{name}-CodeCoverage-HTML", + "statictyping_html": f"{name}-StaticTyping-HTML", + "package_all": f"{name}-Packages", + "documentation_html": f"{name}-Documentation-HTML", + "documentation_latex": f"{name}-Documentation-LaTeX", + "documentation_pdf": f"{name}-Documentation-PDF", + } + + # Deprecated structure + params = { + "python_version": python_version, + "artifacts": { + "unittesting": f"{artifact_names['unittesting_xml']}", + "coverage": f"{artifact_names['codecoverage_html']}", + "typing": f"{artifact_names['statictyping_html']}", + "package": f"{artifact_names['package_all']}", + "doc": f"{artifact_names['documentation_html']}", + } + } + + print("Parameters:") + print(f" python_version: {python_version}") + print(f" python_jobs ({len(jobs)}):\n" + + "".join([f" {{ " + ", ".join([f"\"{key}\": \"{value}\"" for key, value in job.items()]) + f" }},\n" for job in jobs]) + ) + print(f" artifact_names ({len(artifact_names)}):") + for id, name in artifact_names.items(): + print(f" {id:>20}: {name}") + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+", encoding="utf-8") as f: + f.write(dedent(f"""\ + python_version={python_version} + python_jobs={json_dumps(jobs)} + artifact_names={json_dumps(artifact_names)} + params={json_dumps(params)} + """)) + + - name: Verify out parameters + id: verify + run: | + echo 'python_version: ${{ steps.params.outputs.python_version }}' + echo 'python_jobs: ${{ steps.params.outputs.python_jobs }}' + echo 'artifact_names: ${{ steps.params.outputs.artifact_names }}' + echo 'params: ${{ steps.params.outputs.params }}' diff --git a/.github/workflows/PublishCoverageResults.yml b/.github/workflows/PublishCoverageResults.yml new file mode 100644 index 0000000..30031ff --- /dev/null +++ b/.github/workflows/PublishCoverageResults.yml @@ -0,0 +1,214 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# 
Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Publish Code Coverage Results + +on: + workflow_call: + inputs: + coverage_config: + description: 'Path to the .coveragerc file. Use pyproject.toml by default.' + required: false + default: 'pyproject.toml' + type: string + coverage_sqlite_artifact: + description: 'Name of the SQLite coverage artifact.' + required: false + default: '' + type: string + coverage_xml_artifact: + description: 'Name of the XML coverage artifact.' + required: false + default: '' + type: string + coverage_json_artifact: + description: 'Name of the JSON coverage artifact.' + required: false + default: '' + type: string + coverage_html_artifact: + description: 'Name of the HTML coverage artifact.' + required: false + default: '' + type: string + secrets: + codacy_token: + description: 'Token to push result to codacy.' + required: true + +jobs: + PublishCoverageResults: + name: πŸ“Š Publish Code Coverage Results + runs-on: ubuntu-latest + if: always() + + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + + - name: Download Artifacts + uses: actions/download-artifact@v4 + with: + path: artifacts + + - name: πŸ”§ Install coverage and tomli + run: | + python -m pip install --disable-pip-version-check -U coverage[toml] tomli + + - name: πŸ” Extract configurations from pyproject.toml + id: getVariables + shell: python + run: | + from os import getenv + from pathlib import Path + from sys import version + from textwrap import dedent + + print(f"Python: {version}") + + from tomli import load as tomli_load + + htmlDirectory = Path("htmlcov") + xmlFile = Path("./coverage.xml") + jsonFile = Path("./coverage.json") + coverageRC = "${{ inputs.coverage_config }}".strip() + + # Read output paths from 'pyproject.toml' file + if coverageRC == "pyproject.toml": + pyProjectFile = Path("pyproject.toml") + if pyProjectFile.exists(): + with pyProjectFile.open("rb") as file: + pyProjectSettings = tomli_load(file) + + htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) + xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) + jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) + else: + print(f"File '{pyProjectFile}' not found and no '.coveragerc' file specified.") + + # Read output paths from '.coveragerc' file + elif len(coverageRC) > 0: + coverageRCFile = Path(coverageRC) + if coverageRCFile.exists(): + with coverageRCFile.open("rb") as file: + coverageRCSettings = tomli_load(file) + + htmlDirectory = Path(coverageRCSettings["html"]["directory"]) + xmlFile = Path(coverageRCSettings["xml"]["output"]) + jsonFile = Path(coverageRCSettings["json"]["output"]) + else: + print(f"File '{coverageRCFile}' not found.") + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: 
{github_output}") + with github_output.open("a+", encoding="utf-8") as f: + f.write(dedent(f"""\ + coverage_report_html_directory={htmlDirectory.as_posix()} + coverage_report_xml={xmlFile} + coverage_report_json={jsonFile} + """)) + + print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") + + - name: Rename .coverage files and collect them all to coverage/ + run: | + mkdir -p coverage + find . -type f -path "*artifacts*SQLite*.coverage" -exec sh -c 'cp -v $0 "coverage/$(basename $0).$(basename $(dirname $0))"' {} ';' + tree -a coverage + + - name: Combine SQLite files (using Coverage.py) + run: coverage combine --data-file=.coverage coverage/ + + - name: Report code coverage + run: coverage report --rcfile=pyproject.toml --data-file=.coverage + + - name: Convert to XML format (Cobertura) + if: inputs.coverage_xml_artifact != '' + run: coverage xml --data-file=.coverage + + - name: Convert to JSON format + if: inputs.coverage_json_artifact != '' + run: coverage json --data-file=.coverage + + - name: Convert to HTML format + if: inputs.coverage_html_artifact != '' + run: | + coverage html --data-file=.coverage -d report/coverage/html + rm report/coverage/html/.gitignore + tree -a report/coverage/html + + - name: πŸ“€ Upload 'Coverage SQLite Database' artifact + if: inputs.coverage_sqlite_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_sqlite_artifact }} + path: .coverage + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage XML Report' artifact + if: inputs.coverage_xml_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_xml_artifact }} + path: ${{ steps.getVariables.outputs.coverage_report_xml }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage JSON Report' artifact + if: inputs.coverage_json_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_json_artifact }} + path: ${{ steps.getVariables.outputs.coverage_report_json }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage HTML Report' artifact + if: inputs.coverage_html_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_html_artifact }} + path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“Š Publish code coverage at CodeCov + if: inputs.CodeCov == true + continue-on-error: true + uses: codecov/codecov-action@v3 + with: + files: ${{ steps.getVariables.outputs.coverage_report_xml }} + flags: unittests + env_vars: PYTHON + + - name: πŸ“‰ Publish code coverage at Codacy + if: inputs.Codacy == true + continue-on-error: true + uses: codacy/codacy-coverage-reporter-action@v1 + with: + project-token: ${{ secrets.codacy_token }} + coverage-reports: ${{ steps.getVariables.outputs.coverage_report_xml }} diff --git a/.github/workflows/PublishOnPyPI.yml b/.github/workflows/PublishOnPyPI.yml index 96a6cfa..4ec8ba8 100644 --- a/.github/workflows/PublishOnPyPI.yml +++ b/.github/workflows/PublishOnPyPI.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # 
# you may not use this file except in compliance with the License. # @@ -28,7 +28,7 @@ on: python_version: description: 'Python version.' required: false - default: '3.10' + default: '3.12' type: string requirements: description: 'Python dependencies to be installed through pip.' @@ -52,20 +52,18 @@ jobs: steps: - name: πŸ“₯ Download artifacts '${{ inputs.artifact }}' from 'Package' job - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.artifact }} path: dist/ - name: 🐍 Setup Python ${{ inputs.python_version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} - name: βš™ Install dependencies for packaging and release - run: | - python -m pip install -U pip - python -m pip install ${{ inputs.requirements }} + run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }} - name: ‴ Release Python source package to PyPI env: @@ -80,6 +78,6 @@ jobs: run: twine upload dist/*.whl - name: πŸ—‘οΈ Delete packaging Artifacts - uses: geekyeggo/delete-artifact@v1 + uses: geekyeggo/delete-artifact@v4 with: name: ${{ inputs.artifact }} diff --git a/.github/workflows/PublishTestResults.yml b/.github/workflows/PublishTestResults.yml index 1d5bdb6..fd9391a 100644 --- a/.github/workflows/PublishTestResults.yml +++ b/.github/workflows/PublishTestResults.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -25,10 +25,10 @@ name: Publish Unit Test Results on: workflow_call: inputs: - report_files: - description: 'Pattern of report files to upload. Can be a comma separated list.' + merged_junit_artifact: + description: 'Name of the merged JUnit Test Summary artifact.' required: false - default: 'artifacts/**/*.xml' + default: '' type: string jobs: @@ -39,16 +39,51 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download Artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: artifacts + - name: πŸ”§ Install junitparser + run: | + python -m pip install --disable-pip-version-check -U junitparser + + - name: Move JUnit files and collect them all to junit/ + run: | + mkdir -p junit + find . 
-type f -path "*artifacts*UnitTestReportSummary*.xml" -exec sh -c 'cp -v $0 "junit/$(basename $(dirname $0)).$(basename $0)"' {} ';' + tree -a junit + + - name: πŸ” Merge JUnit Unit Test Summaries + shell: python + run: | + from pathlib import Path + from junitparser import JUnitXml + + junitDirectory = Path("junit") + junitXml = None + for file in junitDirectory.iterdir(): + if junitXml is None: + junitXml = JUnitXml.fromfile(file) + else: + junitXml += JUnitXml.fromfile(file) + + junitXml.write(junitDirectory / "merged.xml") + - name: πŸ“Š Publish Unit Test Results uses: dorny/test-reporter@v1 with: name: Unit Test Results - path: ${{ inputs.report_files }} + path: junit/merged.xml reporter: java-junit + + - name: πŸ“€ Upload merged 'JUnit Test Summary' artifact + if: inputs.merged_junit_artifact != '' + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.merged_junit_artifact }} + path: junit/merged.xml + if-no-files-found: error + retention-days: 1 diff --git a/.github/workflows/PublishToGitHubPages.yml b/.github/workflows/PublishToGitHubPages.yml index e323193..351eb02 100644 --- a/.github/workflows/PublishToGitHubPages.yml +++ b/.github/workflows/PublishToGitHubPages.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -48,24 +48,24 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: πŸ“₯ Download artifacts '${{ inputs.doc }}' from 'BuildTheDocs' job - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.doc }} path: public - name: πŸ“₯ Download artifacts '${{ inputs.coverage }}' from 'Coverage' job if: ${{ inputs.coverage != '' }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.coverage }} path: public/coverage - name: πŸ“₯ Download artifacts '${{ inputs.typing }}' from 'StaticTypeCheck' job if: ${{ inputs.typing != '' }} - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: ${{ inputs.typing }} path: public/typing diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml index 6d82182..cc3d493 100644 --- a/.github/workflows/Release.yml +++ b/.github/workflows/Release.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -39,11 +39,9 @@ jobs: RELEASE_VERSION=${GIT_TAG#v} RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')" # write to step outputs - cat >> "$GITHUB_OUTPUT" << EOF - gitTag=${GIT_TAG} - version=${RELEASE_VERSION} - datetime=${RELEASE_DATETIME} - EOF + echo "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT + echo "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT + echo "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT - name: πŸ“‘ Create Release Page id: createReleasePage diff --git a/.github/workflows/SphinxDocumentation.yml b/.github/workflows/SphinxDocumentation.yml new file mode 100644 index 0000000..34f9d48 --- /dev/null +++ b/.github/workflows/SphinxDocumentation.yml @@ -0,0 +1,203 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2024 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Documentation + +on: + workflow_call: + inputs: + python_version: + description: 'Python version.' + required: false + default: '3.12' + type: string + requirements: + description: 'Python dependencies to be installed through pip.' + required: false + default: '-r doc/requirements.txt' + type: string + coverage_config: + description: 'Path to the .coveragerc file. Use pyproject.toml by default.' + required: false + default: 'pyproject.toml' + type: string + doc_directory: + description: 'Path to the directory containing documentation (Sphinx working directory).' + required: false + default: 'doc' + type: string + coverage_json_artifact: + description: 'Name of the coverage JSON artifact.' + required: false + default: '' + type: string + unittest_xml_artifact: + description: 'Name of the unittest XML artifact.' + required: false + default: '' + type: string + unittest_xml_directory: + description: 'Directory where unittest XML artifact is extracted.' + required: false + default: 'report/unit' + type: string + html_artifact: + description: 'Name of the HTML documentation artifact.' + required: false + default: '' + type: string + latex_artifact: + description: 'Name of the LaTeX documentation artifact.' 
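Several steps in these workflows pass values to later steps by appending key=value lines to the file named by the GITHUB_OUTPUT environment variable, either with echo as in the Release workflow above or from Python as in the various 'Extract configurations' steps. A minimal sketch of the Python variant, reusing the keys written by the Release workflow (the values are samples):

    from os import getenv
    from pathlib import Path
    from textwrap import dedent

    # GITHUB_OUTPUT names a file provided by the runner; every 'key=value' line
    # appended to it becomes an output of the current step.
    github_output = Path(getenv("GITHUB_OUTPUT"))
    with github_output.open("a+", encoding="utf-8") as f:
        f.write(dedent("""\
            gitTag=v1.2.3
            version=1.2.3
            datetime=01.01.2024 - 00:00:00
            """))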
+ required: false + default: '' + type: string + +jobs: + Sphinx: + name: πŸ““ Documentation generation using Sphinx and Python ${{ inputs.python_version }} + runs-on: ubuntu-latest + + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + + - name: πŸ”§ Install graphviz + run: sudo apt-get install -y --no-install-recommends graphviz + + - name: 🐍 Setup Python ${{ inputs.python_version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python_version }} + + - name: πŸ”§ Install wheel,tomli and pip dependencies (native) + run: | + python -m pip install --disable-pip-version-check -U wheel tomli + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} + + - name: πŸ” Extract configurations from pyproject.toml + id: getVariables + shell: python + run: | + from os import getenv + from pathlib import Path + from sys import version + from textwrap import dedent + + print(f"Python: {version}") + + from tomli import load as tomli_load + + htmlDirectory = Path("htmlcov") + xmlFile = Path("./coverage.xml") + jsonFile = Path("./coverage.json") + coverageRC = "${{ inputs.coverage_config }}".strip() + + # Read output paths from 'pyproject.toml' file + if coverageRC == "pyproject.toml": + pyProjectFile = Path("pyproject.toml") + if pyProjectFile.exists(): + with pyProjectFile.open("rb") as file: + pyProjectSettings = tomli_load(file) + + htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) + xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) + jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) + else: + print(f"File '{pyProjectFile}' not found and no '.coveragerc' file specified.") + + # Read output paths from '.coveragerc' file + elif len(coverageRC) > 0: + coverageRCFile = Path(coverageRC) + if coverageRCFile.exists(): + with coverageRCFile.open("rb") as file: + coverageRCSettings = tomli_load(file) + + htmlDirectory = Path(coverageRCSettings["html"]["directory"]) + xmlFile = Path(coverageRCSettings["xml"]["output"]) + jsonFile = Path(coverageRCSettings["json"]["output"]) + else: + print(f"File '{coverageRCFile}' not found.") + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+", encoding="utf-8") as f: + f.write(dedent(f"""\ + coverage_report_html_directory={htmlDirectory.as_posix()} + coverage_report_xml_directory={xmlFile.parent.as_posix()} + coverage_report_xml={xmlFile.as_posix()} + coverage_report_json_directory={jsonFile.parent.as_posix()} + coverage_report_json={jsonFile.as_posix()} + """)) + + print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") + + - name: πŸ“₯ Download artifacts '${{ inputs.unittest_xml_artifact }}' from 'Unittesting' job + if: inputs.unittest_xml_artifact != '' + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.unittest_xml_artifact }} + path: ${{ inputs.unittest_xml_directory }} + + - name: πŸ“₯ Download artifacts '${{ inputs.coverage_json_artifact }}' from 'PublishCoverageResults' job + if: inputs.coverage_json_artifact != '' + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.coverage_json_artifact }} + path: ${{ steps.getVariables.outputs.coverage_report_json_directory }} + + - name: β˜‘ Generate HTML documentation + if: inputs.html_artifact != '' + run: | + export PYTHONPATH=$(pwd) + + cd "${{ inputs.doc_directory || '.' 
}}" + sphinx-build -v -n -b html -d _build/doctrees -j $(nproc) -w _build/html.log . _build/html + + - name: β˜‘ Generate LaTeX documentation + if: inputs.latex_artifact != '' +# continue-on-error: true + run: | + export PYTHONPATH=$(pwd) + + cd "${{ inputs.doc_directory || '.' }}" + sphinx-build -v -n -b latex -d _build/doctrees -j $(nproc) -w _build/latex.log . _build/latex +# --builder html --doctree-dir _build/doctrees --verbose --fresh-env --write-all --nitpicky --warning-file _build/html.log . _build/html + + - name: πŸ“€ Upload 'HTML Documentation' artifact + if: inputs.html_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.html_artifact }} + path: ${{ inputs.doc_directory }}/_build/html + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'LaTeX Documentation' artifact + if: inputs.latex_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.latex_artifact }} + path: ${{ inputs.doc_directory }}/_build/latex + if-no-files-found: error + retention-days: 1 diff --git a/.github/workflows/StaticTypeCheck.yml b/.github/workflows/StaticTypeCheck.yml index c18f7a0..eef16f3 100644 --- a/.github/workflows/StaticTypeCheck.yml +++ b/.github/workflows/StaticTypeCheck.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -28,26 +28,36 @@ on: python_version: description: 'Python version.' required: false - default: '3.10' + default: '3.12' type: string requirements: description: 'Python dependencies to be installed through pip.' required: false default: '-r tests/requirements.txt' type: string - report: - description: 'Directory to upload as an artifact.' - required: false - default: 'htmlmypy' - type: string commands: description: 'Commands to run the static type checks.' required: true type: string - artifact: - description: 'Name of the typing artifact.' + html_report: + description: 'Directory to upload as an artifact.' + required: false + default: 'htmlmypy' + type: string + junit_report: + description: 'junit file to upload as an artifact.' + required: false + default: 'StaticTypingSummary.xml' + type: string + html_artifact: + description: 'Name of the typing artifact (HTML report).' required: true type: string + junit_artifact: + description: 'Name of the typing junit artifact (junit XML).' 
+ required: false + default: '' + type: string jobs: @@ -57,28 +67,36 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 🐍 Setup Python ${{ inputs.python_version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} - name: πŸ—‚ Install dependencies - run: | - python -m pip install -U pip - python -m pip install ${{ inputs.requirements }} + run: python -m pip install --disable-pip-version-check ${{ inputs.requirements }} - name: Check Static Typing continue-on-error: true run: ${{ inputs.commands }} - - name: πŸ“€ Upload 'Static Typing Report' artifact - if: ${{ inputs.artifact != '' }} + - name: πŸ“€ Upload 'Static Typing Report' HTML artifact + if: ${{ inputs.html_artifact != '' }} continue-on-error: true - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: ${{ inputs.artifact }} - path: ${{ inputs.report }} + name: ${{ inputs.html_artifact }} + path: ${{ inputs.html_report }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Static Typing Report' JUnit artifact + if: ${{ inputs.junit_artifact != '' }} + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.junit_artifact }} + path: ${{ inputs.junit_report }} if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/TestReleaser.yml b/.github/workflows/TestReleaser.yml index 4311548..5ee5787 100644 --- a/.github/workflows/TestReleaser.yml +++ b/.github/workflows/TestReleaser.yml @@ -3,7 +3,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -45,7 +45,7 @@ jobs: env: DOCKER_BUILDKIT: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Build container image run: docker build -t ghcr.io/pytooling/releaser -f releaser/Dockerfile releaser @@ -62,7 +62,7 @@ jobs: Composite: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt @@ -122,7 +122,7 @@ jobs: - Composite runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: echo "Build some tool and generate some (versioned) artifacts" > artifact-$(date -u +"%Y-%m-%dT%H-%M-%SZ").txt diff --git a/.github/workflows/UnitTesting.yml b/.github/workflows/UnitTesting.yml index 06a86bf..81e8657 100644 --- a/.github/workflows/UnitTesting.yml +++ b/.github/workflows/UnitTesting.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -20,7 +20,7 @@ # # # SPDX-License-Identifier: Apache-2.0 # # ==================================================================================================================== # -name: Unit Testing +name: Unit Testing (Matrix) on: workflow_call: @@ -37,32 +37,67 @@ on: pacboy: description: 'MSYS2 dependencies to be installed through pacboy (pacman).' required: false - default: >- - python-pip:p - python-wheel:p - python-coverage:p - python-lxml:p + default: "" type: string mingw_requirements: description: 'Override Python dependencies to be installed through pip on MSYS2 (MINGW64) only.' required: false default: '' type: string - unittest_directory: - description: 'Path to the directory containing unit tests.' + root_directory: + description: 'Working directory for running tests.' required: false - default: 'tests/unit' + default: '' type: string - artifact: + tests_directory: + description: 'Path to the directory containing tests (relative to root_directory).' + required: false + default: 'tests' + type: string + unittest_directory: + description: 'Path to the directory containing unit tests (relative to tests_directory).' + required: false + default: 'unit' + type: string + coverage_config: + description: 'Path to the .coveragerc file. Use pyproject.toml by default.' + required: false + default: 'pyproject.toml' + type: string + unittest_xml_artifact: description: "Generate unit test report with junitxml and upload results as an artifact." required: false default: '' type: string + unittest_html_artifact: + description: "Generate unit test report with junitxml and upload results as an artifact." + required: false + default: '' + type: string + coverage_sqlite_artifact: + description: 'Name of the SQLite coverage artifact.' + required: false + default: '' + type: string + coverage_xml_artifact: + description: 'Name of the XML coverage artifact.' + required: false + default: '' + type: string + coverage_json_artifact: + description: 'Name of the JSON coverage artifact.' + required: false + default: '' + type: string + coverage_html_artifact: + description: 'Name of the HTML coverage artifact.' 
+ required: false + default: '' + type: string jobs: - UnitTesting: - name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests using Python ${{ matrix.python }} + name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Unit Tests - Python ${{ matrix.python }} runs-on: ${{ matrix.runs-on }} strategy: @@ -76,57 +111,288 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - - name: '🟦 Setup MSYS2' + - name: πŸ”§ Install dependencies (system Python for Python shell) + if: matrix.system == 'msys2' + shell: pwsh + run: | + py -3.9 -m pip install --disable-pip-version-check -U tomli + + - name: Compute pacman/pacboy packages + id: pacboy + if: matrix.system == 'msys2' + shell: python + run: | + from os import getenv + from pathlib import Path + from re import compile + from sys import version + + print(f"Python: {version}") + + def loadRequirementsFile(requirementsFile: Path): + requirements = [] + with requirementsFile.open("r") as file: + for line in file.readlines(): + line = line.strip() + if line.startswith("#") or line.startswith("https") or line == "": + continue + elif line.startswith("-r"): + # Remove the first word/argument (-r) + requirements += loadRequirementsFile(requirementsFile.parent / line[2:].lstrip()) + else: + requirements.append(line) + + return requirements + + requirements = "${{ inputs.requirements }}" + if requirements.startswith("-r"): + requirementsFile = Path(requirements[2:].lstrip()) + dependencies = loadRequirementsFile(requirementsFile) + else: + dependencies = [req.strip() for req in requirements.split(" ")] + + packages = { + "coverage": "python-coverage:p", + "igraph": "igraph:p", + "jinja2": "python-markupsafe:p", + "lxml": "python-lxml:p", + "numpy": "python-numpy:p", + "markupsafe": "python-markupsafe:p", + "pip": "python-pip:p", + "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p", + "sphinx": "python-markupsafe:p", + "tomli": "python-tomli:p", + "wheel": "python-wheel:p", + } + subPackages = { + "pytooling": { + "yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p", + } + } + + regExp = compile(r"(?P[\w_\-\.]+)(?:\[(?P(?:\w+)(?:\s*,\s*\w+)*)\])?(?:\s*(?P[<>~=]+)\s*)(?P\d+(?:\.\d+)*)(?:-(?P\w+))?") + + pacboyPackages = set(("python-pip:p", "python-wheel:p", "python-tomli:p")) + print(f"Processing dependencies ({len(dependencies)}):") + for dependency in dependencies: + print(f" {dependency}") + + match = regExp.match(dependency.lower()) + if not match: + print(f" Wrong format: {dependency}") + print(f"::error title=Identifying Pacboy Packages::Unrecognized dependency format '{dependency}'") + continue + + package = match["PackageName"] + if package in packages: + rewrite = packages[package] + print(f" Found rewrite rule for '{package}': {rewrite}") + pacboyPackages.add(rewrite) + + if match["SubPackages"] and package in subPackages: + for subPackage in match["SubPackages"].split(","): + if subPackage in subPackages[package]: + rewrite = subPackages[package][subPackage] + print(f" Found rewrite rule for '{package}[..., {subPackage}, ...]': {rewrite}") + pacboyPackages.add(rewrite) + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+") as f: + f.write(f"pacboy_packages={' '.join(pacboyPackages)}\n") + + - name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}' if: matrix.system == 'msys2' uses: msys2/setup-msys2@v2 with: - msystem: MINGW64 + msystem: ${{ matrix.runtime }} update: true - pacboy: ${{ 
inputs.pacboy }} + pacboy: >- + ${{ steps.pacboy.outputs.pacboy_packages }} + ${{ inputs.pacboy }} - name: 🐍 Setup Python ${{ matrix.python }} if: matrix.system != 'msys2' - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - - name: βš™οΈ Update pip - run: python -m pip install -U pip - - - name: πŸ”§ Install wheel and pip dependencies + - name: πŸ”§ Install wheel,tomli and pip dependencies (native) if: matrix.system != 'msys2' run: | - python -m pip install -U wheel - python -m pip install ${{ inputs.requirements }} + python -m pip install --disable-pip-version-check -U wheel tomli + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} - - name: πŸ”§ Install pip dependencies + - name: πŸ”§ Install pip dependencies (MSYS2) if: matrix.system == 'msys2' run: | - if [ 'x${{ inputs.mingw_requirements }}' != 'x' ]; then - python -m pip install ${{ inputs.mingw_requirements }} + if [ -n '${{ inputs.mingw_requirements }}' ]; then + python -m pip install --disable-pip-version-check ${{ inputs.mingw_requirements }} else - python -m pip install ${{ inputs.requirements }} + python -m pip install --disable-pip-version-check ${{ inputs.requirements }} fi - - name: β˜‘ Run unit tests - if: matrix.system == 'windows' + - name: πŸ” Extract configurations from pyproject.toml + id: getVariables + shell: python run: | - $PYTEST_ARGS = if ("${{ inputs.artifact }}".length -gt 0) { "--junitxml=TestReport.xml" } else { "" } - python -m pytest -rA ${{ inputs.unittest_directory }} $PYTEST_ARGS --color=yes + from os import getenv + from pathlib import Path + from sys import version + from textwrap import dedent - - name: β˜‘ Run unit tests + print(f"Python: {version}") + + from tomli import load as tomli_load + + htmlDirectory = Path("htmlcov") + xmlFile = Path("./coverage.xml") + jsonFile = Path("./coverage.json") + coverageRC = "${{ inputs.coverage_config }}".strip() + + # Read output paths from 'pyproject.toml' file + if coverageRC == "pyproject.toml": + pyProjectFile = Path("pyproject.toml") + if pyProjectFile.exists(): + with pyProjectFile.open("rb") as file: + pyProjectSettings = tomli_load(file) + + htmlDirectory = Path(pyProjectSettings["tool"]["coverage"]["html"]["directory"]) + xmlFile = Path(pyProjectSettings["tool"]["coverage"]["xml"]["output"]) + jsonFile = Path(pyProjectSettings["tool"]["coverage"]["json"]["output"]) + else: + print(f"File '{pyProjectFile}' not found and no '.coveragerc' file specified.") + + # Read output paths from '.coveragerc' file + elif len(coverageRC) > 0: + coverageRCFile = Path(coverageRC) + if coverageRCFile.exists(): + with coverageRCFile.open("rb") as file: + coverageRCSettings = tomli_load(file) + + htmlDirectory = Path(coverageRCSettings["html"]["directory"]) + xmlFile = Path(coverageRCSettings["xml"]["output"]) + jsonFile = Path(coverageRCSettings["json"]["output"]) + else: + print(f"File '{coverageRCFile}' not found.") + + # Write jobs to special file + github_output = Path(getenv("GITHUB_OUTPUT")) + print(f"GITHUB_OUTPUT: {github_output}") + with github_output.open("a+", encoding="utf-8") as f: + f.write(dedent(f"""\ + unittest_report_html_directory={htmlDirectory} + coverage_report_html_directory={htmlDirectory.as_posix()} + coverage_report_xml={xmlFile} + coverage_report_json={jsonFile} + """)) + + print(f"DEBUG:\n html={htmlDirectory}\n xml={xmlFile}\n json={jsonFile}") + + - name: β˜‘ Run unit tests (Ubuntu/macOS) if: matrix.system != 'windows' run: | - [ 'x${{ inputs.artifact }}' != 
'x' ] && PYTEST_ARGS='--junitxml=TestReport.xml' || unset PYTEST_ARGS - python -m pytest -rA ${{ inputs.unittest_directory }} $PYTEST_ARGS --color=yes + export ENVIRONMENT_NAME="${{ matrix.envname }}" + export PYTHONPATH=$(pwd) - - name: πŸ“€ Upload 'TestReport.xml' artifact - if: inputs.artifact != '' - uses: actions/upload-artifact@v3 + cd "${{ inputs.root_directory || '.' }}" + [ -n '${{ inputs.unittest_xml_artifact }}' ] && PYTEST_ARGS='--junitxml=report/unit/TestReportSummary.xml' || unset PYTEST_ARGS + if [ -n '${{ inputs.coverage_config }}' ]; then + echo "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }} + else + echo "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }} + fi + + - name: β˜‘ Run unit tests (Windows) + if: matrix.system == 'windows' + run: | + $env:ENVIRONMENT_NAME = "${{ matrix.envname }}" + $env:PYTHONPATH = (Get-Location).ToString() + + cd "${{ inputs.root_directory || '.' }}" + $PYTEST_ARGS = if ("${{ inputs.unittest_xml_artifact }}") { "--junitxml=report/unit/TestReportSummary.xml" } else { "" } + if ("${{ inputs.coverage_config }}") { + Write-Host "coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + coverage run --data-file=.coverage --rcfile=pyproject.toml -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }} + } else { + Write-Host "python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' }}/${{ inputs.unittest_directory }}" + python -m pytest -raP $PYTEST_ARGS --color=yes ${{ inputs.tests_directory || '.' 
}}/${{ inputs.unittest_directory }} + } + + - name: Convert coverage to XML format (Cobertura) + if: inputs.coverage_xml_artifact != '' + run: coverage xml --data-file=.coverage + + - name: Convert coverage to JSON format + if: inputs.coverage_json_artifact != '' + run: coverage json --data-file=.coverage + + - name: Convert coverage to HTML format + if: inputs.coverage_html_artifact != '' + run: | + coverage html --data-file=.coverage -d ${{ steps.getVariables.outputs.coverage_report_html_directory }} + rm ${{ steps.getVariables.outputs.coverage_report_html_directory }}/.gitignore + + - name: πŸ“€ Upload 'TestReportSummary.xml' artifact + if: inputs.unittest_xml_artifact != '' + uses: actions/upload-artifact@v4 with: - name: ${{ inputs.artifact }}-${{ matrix.system }}-${{ matrix.python }} - path: TestReport.xml + name: ${{ inputs.unittest_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: report/unit/TestReportSummary.xml + if-no-files-found: error + retention-days: 1 + +# - name: πŸ“€ Upload 'Unit Tests HTML Report' artifact +# if: inputs.unittest_html_artifact != '' +# continue-on-error: true +# uses: actions/upload-artifact@v4 +# with: +# name: ${{ inputs.unittest_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} +# path: ${{ steps.getVariables.outputs.unittest_report_html_directory }} +# if-no-files-found: error +# retention-days: 1 + + - name: πŸ“€ Upload 'Coverage SQLite Database' artifact + if: inputs.coverage_sqlite_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_sqlite_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: .coverage + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage XML Report' artifact + if: inputs.coverage_xml_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_xml_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: ${{ steps.getVariables.outputs.coverage_report_xml }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage JSON Report' artifact + if: inputs.coverage_json_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_json_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: ${{ steps.getVariables.outputs.coverage_report_json }} + if-no-files-found: error + retention-days: 1 + + - name: πŸ“€ Upload 'Coverage HTML Report' artifact + if: inputs.coverage_html_artifact != '' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.coverage_html_artifact }}-${{ matrix.system }}-${{ matrix.runtime }}-${{ matrix.python }} + path: ${{ steps.getVariables.outputs.coverage_report_html_directory }} if-no-files-found: error retention-days: 1 diff --git a/.github/workflows/VerifyDocs.yml b/.github/workflows/VerifyDocs.yml index 18c0ee4..5866d84 100644 --- a/.github/workflows/VerifyDocs.yml +++ b/.github/workflows/VerifyDocs.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -28,7 +28,7 @@ on: python_version: description: 'Python version.' required: false - default: '3.10' + default: '3.12' type: string jobs: @@ -39,10 +39,10 @@ jobs: steps: - name: ⏬ Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: 🐍 Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} diff --git a/.github/workflows/_Checking_ArtifactCleanup.yml b/.github/workflows/_Checking_ArtifactCleanup.yml new file mode 100644 index 0000000..f9b58ce --- /dev/null +++ b/.github/workflows/_Checking_ArtifactCleanup.yml @@ -0,0 +1,61 @@ +name: Verification Pipeline for ArtifactCleanup + +on: + push: + workflow_dispatch: + +jobs: + Params: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.10 3.11" + system_list: "ubuntu windows" + + Testing: + name: Artifact generation ${{ matrix.system }}-${{ matrix.python }} + needs: + - Params + runs-on: ${{ matrix.runs-on }} + strategy: + matrix: + include: ${{ fromJson(needs.Params.outputs.python_jobs) }} + steps: + - name: Content creation for ${{ matrix.system }}-${{ matrix.python }} + run: echo "${{ matrix.runs-on }}-${{ matrix.python }}" >> artifact.txt + + - name: πŸ“€ Upload artifact for ${{ matrix.system }}-${{ matrix.python }} + uses: actions/upload-artifact@v4 + with: + name: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-${{ matrix.system }}-${{ matrix.python }} + path: artifact.txt + if-no-files-found: error + retention-days: 1 + + Package: + name: Package generation + needs: + - Params + runs-on: ubuntu-latest + steps: + - name: Package creation + run: echo "Package" >> package.txt + + - name: πŸ“€ Upload artifact for ${{ matrix.system }}-${{ matrix.python }} + uses: actions/upload-artifact@v4 + with: + name: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} + path: package.txt + if-no-files-found: error + retention-days: 1 + + ArtifactCleanUp: + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@dev + needs: + - Params + - Testing + - Package + with: + package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} + remaining: | + ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-* diff --git a/.github/workflows/_Checking_Parameters.yml b/.github/workflows/_Checking_Parameters.yml new file mode 100644 index 0000000..03ea12c --- /dev/null +++ b/.github/workflows/_Checking_Parameters.yml @@ -0,0 +1,437 @@ +name: Verification Pipeline for Parameters + +on: + push: + workflow_dispatch: + +jobs: + Params_Default: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + + Params_PythonVersions: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.9 3.10 pypy-3.8 pypy-3.9" + + Params_Systems: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + system_list: "windows mingw32 mingw64" + + Params_Include: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.10" + system_list: "ubuntu windows macos" + include_list: "ubuntu:3.11 ubuntu:3.12" + + Params_Exclude: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.10" + system_list: "ubuntu windows macos" + exclude_list: "windows:3.10 windows:3.11" + + Params_Disable: + uses: 
pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.10" + system_list: "ubuntu windows macos" + disable_list: "windows:3.10 windows:3.11" + + Params_All: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Example + python_version_list: "3.10 3.11" + system_list: "ubuntu windows macos" + include_list: "windows:3.8 windows:3.9 windows:3.12" + exclude_list: "macos:3.10 macos:3.11" + + Params_Check: + needs: + - Params_Default + - Params_PythonVersions + - Params_Systems + - Params_Include + - Params_Exclude + - Params_Disable + - Params_All + runs-on: ubuntu-latest + defaults: + run: + shell: python + steps: + - name: Install dependencies + shell: bash + run: pip install pyTooling + # Params_Default + - name: Checking results from 'Params_Default' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.8", "3.9", "3.10", "3.11"] + expectedSystems = ["ubuntu", "windows", "macos"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.10"] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_Default.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_Default.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_Default.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_PythonVersions + - name: Checking results from 'Params_PythonVersions' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.9", "3.10", "pypy-3.8", "pypy-3.9"] + expectedSystems = ["ubuntu", "windows", "macos"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw64:3.10"] + expectedName = "Example" + 
expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_PythonVersions.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_PythonVersions.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_PythonVersions.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_Systems + - name: Checking results from 'Params_Systems' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.8", "3.9", "3.10", "3.11"] + expectedSystems = ["windows"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["mingw32:3.10", "mingw64:3.10"] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_Systems.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_Systems.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_Systems.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: 
{len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_Include + - name: Checking results from 'Params_Include' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.10"] + expectedSystems = ["ubuntu", "windows", "macos"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["ubuntu:3.11", "ubuntu:3.12"] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_Include.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_Include.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_Include.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_Exclude + - name: Checking results from 'Params_Exclude' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.10"] + expectedSystems = ["ubuntu", "macos"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": 
f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_Disable + - name: Checking results from 'Params_Disable' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.10"] + expectedSystems = ["ubuntu", "macos"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_Exclude.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_Exclude.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_Exclude.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for 
key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) + + # Params_All + - name: Checking results from 'Params_All' + run: | + from json import loads as json_loads + from sys import exit + + from pyTooling.Common import zipdicts + + expectedPythonVersion = "3.11" + expectedPythons = ["3.10", "3.11"] + expectedSystems = ["ubuntu", "windows"] + expectedJobs = [f"{system}:{python}" for system in expectedSystems for python in expectedPythons] + ["windows:3.8", "windows:3.9", "windows:3.12"] + expectedName = "Example" + expectedArtifacts = { + "unittesting_xml": f"{expectedName}-UnitTestReportSummary-XML", + "perftesting_xml": f"{expectedName}-PerformanceTestReportSummary-XML", + "benchtesting_xml": f"{expectedName}-BenchmarkTestReportSummary-XML", + "apptesting_xml": f"{expectedName}-ApplicationTestReportSummary-XML", + "codecoverage_xml": f"{expectedName}-CodeCoverage-XML", + "codecoverage_html": f"{expectedName}-CodeCoverage-HTML", + "statictyping_html": f"{expectedName}-StaticTyping-HTML", + "package_all": f"{expectedName}-Packages", + "documentation_pdf": f"{expectedName}-Documentation-PDF", + "documentation_html": f"{expectedName}-Documentation-HTML", + } + + actualPythonVersion = """${{ needs.Params_All.outputs.python_version }}""" + actualPythonJobs = json_loads("""${{ needs.Params_All.outputs.python_jobs }}""".replace("'", '"')) + actualArtifactNames = json_loads("""${{ needs.Params_All.outputs.artifact_names }}""".replace("'", '"')) + errors = 0 + + if actualPythonVersion != expectedPythonVersion: + print(f"'python_version' does not match: '{actualPythonVersion}' != '{expectedPythonVersion}'.") + errors += 1 + if len(actualPythonJobs) != len(expectedJobs): + print(f"Number of 'python_jobs' does not match: {len(actualPythonJobs)} != {len(expectedJobs)}.") + for job in actualPythonJobs: + print(f" {job['system']}:{job['python']}") + errors += 1 + if len(actualArtifactNames) != len(expectedArtifacts): + print(f"Number of 'artifact_names' does not match: {len(actualArtifactNames)} != {len(expectedArtifacts)}.") + errors += 1 + else: + for key, actual, expected in zipdicts(actualArtifactNames, expectedArtifacts): + if actual != expected: + print(f"Artifact name '{key}' does not match: {actual} != {expected}.") + errors += 1 + + if errors == 0: + print(f"All checks PASSED.") + exit(errors) diff --git a/.github/workflows/_Checking_Pipeline.yml b/.github/workflows/_Checking_Pipeline.yml new file mode 100644 index 0000000..caaf3ad --- /dev/null +++ b/.github/workflows/_Checking_Pipeline.yml @@ -0,0 +1,191 @@ +name: Verification of Complete Pipeline + +on: + push: + workflow_dispatch: + +jobs: + UnitTestingParams: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: pyDummy + python_version_list: "3.8 3.9 3.10 3.11 3.12 pypy-3.8 pypy-3.9 pypy-3.10" + disable_list: "windows:pypy-3.10" + + PlatformTestingParams: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + name: Platform + python_version_list: "" + system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64" + + UnitTesting: + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev + needs: + - UnitTestingParams + with: + jobs: ${{ needs.UnitTestingParams.outputs.python_jobs }} + unittest_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} + 
unittest_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }} +# coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} +# coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} +# coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} +# coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} + + PlatformTesting: + uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev + needs: + - PlatformTestingParams + with: + jobs: ${{ needs.PlatformTestingParams.outputs.python_jobs }} +# tests_directory: "" + unittest_directory: platform + unittest_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }} + unittest_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }} + coverage_sqlite_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }} + coverage_xml_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }} + coverage_json_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }} + coverage_html_artifact: ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }} + +# Coverage: +# uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@dev +# needs: +# - UnitTestingParams +# with: +# python_version: ${{ needs.UnitTestingParams.outputs.python_version }} +# artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} +# secrets: +# codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + + StaticTypeCheck: + uses: pyTooling/Actions/.github/workflows/StaticTypeCheck.yml@dev + needs: + - UnitTestingParams + with: + python_version: ${{ needs.UnitTestingParams.outputs.python_version }} + commands: | + mypy --html-report htmlmypy -p pyDummy + html_report: 'htmlmypy' + html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} + + PublishCoverageResults: + uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@dev + needs: + - UnitTestingParams + - UnitTesting + - PlatformTesting +# - Coverage + with: + coverage_sqlite_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} + coverage_xml_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} + coverage_json_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} + coverage_html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} + secrets: + codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + + PublishTestResults: + uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@dev + needs: + - UnitTesting + - PlatformTesting + + Package: + uses: pyTooling/Actions/.github/workflows/Package.yml@dev + needs: + - UnitTestingParams + - UnitTesting +# - Coverage + - PlatformTesting + with: + python_version: ${{ needs.UnitTestingParams.outputs.python_version }} + artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + +# VerifyDocs: +# uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@dev +# needs: +# - UnitTestingParams +# with: +# python_version: ${{ needs.UnitTestingParams.outputs.python_version }} + + 
BuildTheDocs: + uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@dev + needs: + - UnitTestingParams +# - VerifyDocs + with: + artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + + PublishToGitHubPages: + uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@dev + needs: + - UnitTestingParams + - BuildTheDocs +# - Coverage + - PublishCoverageResults + - StaticTypeCheck + with: + doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} + typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} + + ReleasePage: + uses: pyTooling/Actions/.github/workflows/Release.yml@dev + if: startsWith(github.ref, 'refs/tags') + needs: + - UnitTesting + - PlatformTesting +# - Coverage +# - StaticTypeCheck + - Package + - PublishToGitHubPages + + PublishOnPyPI: + uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@dev + if: startsWith(github.ref, 'refs/tags') + needs: + - UnitTestingParams + - ReleasePage +# - Package + with: + python_version: ${{ needs.UnitTestingParams.outputs.python_version }} + requirements: -r dist/requirements.txt + artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + secrets: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + + ArtifactCleanUp: + uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@dev + needs: + - UnitTestingParams + - PlatformTestingParams + - UnitTesting + - PlatformTesting +# - Coverage + - StaticTypeCheck +# - BuildTheDocs + - PublishToGitHubPages + - PublishCoverageResults + - PublishTestResults + with: + package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + remaining: | + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }}-* + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_html }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_xml }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_json }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} + ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_xml }}-* + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).unittesting_html }}-* + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_sqlite }}-* + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_xml }}-* + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_json }}-* + ${{ fromJson(needs.PlatformTestingParams.outputs.artifact_names).codecoverage_html }}-* diff 
--git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7ab3de6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,37 @@ +# Python cache and object files +__pycache__/ +*.py[cod] + +# Coverage.py +.coverage +.cov +coverage.xml +/report/coverage + +# mypy +/report/typing + +# pytest +/report/unit + +# setuptools +/build/**/*.* +/dist/**/*.* +/*.egg-info + +# Dependencies +!requirements.txt + +# Sphinx +doc/_build/ +doc/pyDummy/**/*.* +!doc/pyDummy/index.rst + +# BuildTheDocs +doc/_theme/**/*.* + +# IntelliJ project files +/.idea/workspace.xml + +# Git files +!.git* diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..bc22050 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ +"files.trimTrailingWhitespace": false, +} diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md deleted file mode 100644 index 067df7f..0000000 --- a/DEVELOPMENT.md +++ /dev/null @@ -1,22 +0,0 @@ -# Development - -## Tagging/versioning - -See context in [#5](https://github.com/pyTooling/Actions/issues/5). - -Tag new releases in the `main` branch using a semver compatible value, starting with `v`: - -```sh -git checkout main -git tag v0.0.0 -git push upstream v0.0.0 -``` - -Move the corresponding release branch (starting with `r`) forward by creating a merge commit, and using the merged tag -as the commit message: - -```sh -git checkout r0 -git merge --no-ff -m 'v0.0.0' v0.0.0 -git push upstream r0 -``` diff --git a/ExamplePipeline.yml b/ExamplePipeline.yml index 39db620..15c08c0 100644 --- a/ExamplePipeline.yml +++ b/ExamplePipeline.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2022 The pyTooling Authors # +# Copyright 2020-2024 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -52,18 +52,21 @@ jobs: python-coverage:p python-lxml:p mingw_requirements: '-r tests/requirements.mingw.txt' - unittest_directory: 'tests/unit' - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }} + tests_directory: 'tests' + unittest_directory: 'unit' + artifact: ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }} Coverage: uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@main needs: - Params with: - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }} + artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }} # Optional - python_version: ${{ fromJson(needs.Params.outputs.params).python_version }} + python_version: ${{ needs.Params.outputs.python_version }} requirements: '-r tests/requirements.txt' + tests_directory: 'tests' + unittest_directory: 'unit' secrets: codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} @@ -72,18 +75,22 @@ jobs: needs: - Params with: - commands: mypy --html-report htmlmypy -p ToolName - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.typing }} + commands: | + mypy --junit-xml StaticTypingSummary.xml --html-report htmlmypy -p ToolName + html_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }} + junit_artifact: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }} # Optional - python_version: ${{ fromJson(needs.Params.outputs.params).python_version }} + python_version: ${{ needs.Params.outputs.python_version }} requirements: '-r tests/requirements.txt' - report: 'htmlmypy' + html_report: 'htmlmypy' + junit_report: 'StaticTypingSummary.xml' allow_failure: true PublishTestResults: uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@main needs: - UnitTesting + - StaticTypeCheck with: # Optional report_files: artifacts/**/*.xml @@ -94,9 +101,9 @@ jobs: - Params - Coverage with: - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }} + artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} # Optional - python_version: ${{ fromJson(needs.Params.outputs.params).python_version }} + python_version: ${{ needs.Params.outputs.python_version }} requirements: 'wheel' Release: @@ -116,9 +123,9 @@ jobs: - Release - Package with: - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.package }} + artifact: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} # Optional - python_version: ${{ fromJson(needs.Params.outputs.params).python_version }} + python_version: ${{ needs.Params.outputs.python_version }} requirements: 'wheel twine' secrets: PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} @@ -129,7 +136,7 @@ jobs: - Params with: # Optional - python_version: ${{ fromJson(needs.Params.outputs.params).python_version }} + python_version: ${{ needs.Params.outputs.python_version }} BuildTheDocs: uses: pyTooling/Actions/.github/workflows/BuildTheDocs.yml@main @@ -137,7 +144,7 @@ jobs: - Params - VerifyDocs with: - artifact: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }} + artifact: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }} PublishToGitHubPages: uses: pyTooling/Actions/.github/workflows/PublishToGitHubPages.yml@main @@ -147,10 +154,10 @@ jobs: - Coverage - StaticTypeCheck with: - doc: ${{ fromJson(needs.Params.outputs.params).artifacts.doc }} + doc: ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }} # Optional - coverage: ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }} - typing: 
${{ fromJson(needs.Params.outputs.params).artifacts.typing }} + coverage: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }} + typing: ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }} ArtifactCleanUp: uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@main @@ -162,9 +169,10 @@ jobs: - BuildTheDocs - PublishToGitHubPages with: - package: ${{ fromJson(needs.Params.outputs.params).artifacts.package }} + package: ${{ fromJson(needs.Params.outputs.artifact_names).package_all }} remaining: | - ${{ fromJson(needs.Params.outputs.params).artifacts.unittesting }}-* - ${{ fromJson(needs.Params.outputs.params).artifacts.coverage }} - ${{ fromJson(needs.Params.outputs.params).artifacts.typing }} - ${{ fromJson(needs.Params.outputs.params).artifacts.doc }} + ${{ fromJson(needs.Params.outputs.artifact_names).unittesting_xml }}-* + ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }} + ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_html }} + ${{ fromJson(needs.Params.outputs.artifact_names).statictyping_junit }} + ${{ fromJson(needs.Params.outputs.artifact_names).documentation_html }} diff --git a/README.md b/README.md index 846e76a..ef17387 100644 --- a/README.md +++ b/README.md @@ -7,86 +7,15 @@ language for writing reusable CI code. However, Python being equally popular and capable, usage of JS/TS might be bypassed, with some caveats. This repository gathers reusable CI tooling for testing, packaging and distributing Python projects and documentation. - -## Context - -GitHub Actions supports five procedures to reuse code: - -- JavaScript Action: - - [docs.github.com: actions/creating-actions/creating-a-javascript-action](https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action) -- Container Action: - - [docs.github.com: actions/creating-actions/creating-a-docker-container-action](https://docs.github.com/en/actions/creating-actions/creating-a-docker-container-action) -- Container Step: - - [docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action](https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action) - - [docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs](https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs) -- Composite Action: - - [docs.github.com: actions/creating-actions/creating-a-composite-action](https://docs.github.com/en/actions/creating-actions/creating-a-composite-action) - - [github.blog/changelog: 2020-08-07-github-actions-composite-run-steps](https://github.blog/changelog/2020-08-07-github-actions-composite-run-steps/) - - [github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-compositio](https://github.blog/changelog/2021-08-25-github-actions-reduce-duplication-with-action-composition/) -- Reusable Workflow: - - [docs.github.com: actions/learn-github-actions/reusing-workflows](https://docs.github.com/en/actions/learn-github-actions/reusing-workflows) - - [github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows](https://github.blog/changelog/2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows/) - -Container Actions and Container Steps are almost equivalent: Actions use a configuration file (`action.yml`), 
while -Steps do not. -Leaving JavaScript and Container Actions and Steps aside, the main differences between Composite Actions and Reusable -Workflows are the following: - -- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location. - However, Reusable Workflows can only be used through a remote/external path (`{owner}/{repo}/{path}/{filename}@{ref}`), - where `{path}` must be `.github/workflows`, and `@{ref}` is required. - See [actions/runner#1493](https://github.com/actions/runner/issues/1493). - As a result: - - Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without - checkout. - - Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick the modifications by - default. -- Composite Actions can include multiple steps, but not multiple jobs. - Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job. -- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository. - Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default. - -### Callable vs dispatchable workflows - -Reusable Workflows are defined through the `workflow_call` event kind. -Similarly, any "regular" Workflow can be triggered through a `workflow_dispatch` event. -Both event kinds support `input` options, which are usable within the Workflow. -Therefore, one might intuitively try to write a workflow which is both callable and dispatchable. -In other words, which can be either reused from another workflow, or triggered through the API. -Unfortunately, that is not the case. -Although `input` options can be duplicated for both events, GitHub's backend exposes them through different objects. -In dispatchable Workflows, the object is `${{ github.event.inputs }}`, while callable workflows receive `${{ inputs }}`. - -As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required. -See, for instance, [hdl/containers: .github/workflows/common.yml](https://github.com/hdl/containers/blob/main/.github/workflows/common.yml) and [hdl/containers: .github/workflows/dispatch.yml](https://github.com/hdl/containers/blob/main/.github/workflows/dispatch.yml). -Alternatively, a normalisation job might be used, similar to the `Parameters` in this repo. - -### Call hierarchy - -Reusable Workflows cannot call other Reusable Workflows, however, they can use Composite Actions and Composite Actions -can call other Actions. -Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along -with multiple layers of composite actions. - -### Script with post step - -JavaScript Actions support defining `pre`, `pre-if`, `post` and `post-if` steps, which allow executing steps at the -beginning or the end of a job, regardless of intermediate steps failing. -Unfortunately, those are not available for any other Action type. - -Action [with-post-step](with-post-step) is a generic JS Action to execute a main command and to set a command as a post -step. -It allows using the `post` feature with scripts written in bash, python or any other interpreted language available on -the environment. -See: [actions/runner#1478](https://github.com/actions/runner/issues/1478). 
-
+See [GitHub Actions and GitHub Reusable Workflows](https://pytooling.github.io/Actions/Background.html) for more
+background information.
## Reusable workflows
-This repository provides 10+ Reusable Workflows based on the CI pipelines of the repos in this organisation,
-[EDAΒ²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others.
-By combining them, Python packages can be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
-Optionally, coverage and static type check reports can be gathered.
+This repository provides 10+ *Reusable Workflows* based on the CI pipelines of the repos in this GitHub organisation,
+[EDAΒ²](https://github.com/edaa-org), [VHDL](https://github.com/vhdl), and others. By combining them, Python packages can
+be continuously tested and released along with Sphinx documentation sites, to GitHub Releases, GitHub Pages and PyPI.
+Optionally, coverage and static type check reports can be gathered and integrated into the online documentation.
[![](ExamplePipeline_dark.png)](ExamplePipeline_dark.png)
@@ -111,28 +40,6 @@ As shown in the screenshots above, the expected order is:
optionally upload results as an HTML report.
Example `commands`:
- 1. Regular package
-
- ```yml
- commands: mypy --html-report htmlmypy -p ToolName
- ```
-
- 2. Parent namespace package
-
- ```yml
- commands: |
- touch Parent/__init__.py
- mypy --html-report htmlmypy -p ToolName
- ```
-
- 3. Child namespace package
-
- ```yml
- commands: |
- cd Parent
- mypy --html-report ../htmlmypy -p ToolName
- ```
-
- [VerifyDocs](.github/workflows/VerifyDocs.yml): extract code examples from the README and test these code snippets.
- Packaging and releasing:
- [Release](.github/workflows/Release.yml): publish GitHub Release.
@@ -162,11 +69,6 @@ Find further usage cases in the following list of projects:
- [VHDL/pyVHDLModel](https://github.com/VHDL/pyVHDLModel/tree/main/.github/workflows)
-## References
-
-- [hdl/containers#48](https://github.com/hdl/containers/issues/48)
-
-
## Contributors
* [Patrick Lehmann](https://GitHub.com/Paebbels)
diff --git a/doc/Action/Releaser.rst b/doc/Action/Releaser.rst
new file mode 100644
index 0000000..d91b04f
--- /dev/null
+++ b/doc/Action/Releaser.rst
@@ -0,0 +1,195 @@
+.. _ACTION/Releaser:
+
+Releaser
+########
+
+**Releaser** is a Docker GitHub Action written in Python.
+
+**Releaser** keeps a GitHub Release of type pre-release and its artifacts up to date with the latest builds.
+Combined with a workflow that is executed periodically, **Releaser** provides a fixed release name for users
+who want to use daily/nightly artifacts of a project.
+
+Furthermore, when any `semver `__ compliant tagged commit is pushed, **Releaser** can create a
+release and upload assets.
+
+Context
+*******
+
+GitHub provides official clients for the GitHub API through `github.com/octokit `__:
+
+- `octokit.js `__ (`octokit.github.io/rest.js `__)
+- `octokit.rb `__ (`octokit.github.io/octokit.rb `__)
+- `octokit.net `__ (`octokitnet.rtfd.io `__)
+
+When GitHub Actions was released in 2019, two Actions were made available through
+`github.com/actions `__ for dealing with GitHub Releases:
+
+- `actions/create-release `__
+- `actions/upload-release-asset `__
+
+However, those Actions were contributed by an employee in their spare time and were not officially supported by GitHub.
+Therefore, they were unmaintained before GitHub Actions was out of the private beta
+(see `actions/upload-release-asset#58 `__)
+and, a year later, archived.
+Those Actions are based on `actions/toolkit `__'s hydrated version of octokit.js.
+
+From a practical point of view, `actions/github-script `__ is the natural replacement for those Actions, since it allows using a pre-authenticated *octokit.js* client along with the workflow run context.
+Still, it requires writing plain JavaScript.
+
+Alternatively, there are non-official GitHub API libraries available in other languages (see `docs.github.com: rest/overview/libraries `__).
+**Releaser** is based on `PyGithub/PyGithub `__, a Python client for the GitHub API.
+
+**Releaser** was originally created in `eine/tip `__, as an enhanced alternative to using
+``actions/create-release`` and ``actions/upload-release-asset``, in order to cover certain use cases that were being
+migrated from Travis CI to GitHub Actions.
+The main limitation of GitHub's Actions was (and is) their verbosity and the inability to dynamically define the list of assets
+to be uploaded.
+
+On the other hand, GitHub Actions artifacts do require login in order to download them.
+Conversely, assets of GitHub Releases can be downloaded without login.
+Therefore, in order to make CI results available to the widest audience, some projects prefer having tarballs available
+as assets.
+In this context, one of the main use cases of **Releaser** is pushing artifacts as release assets.
+Thus, the name of the Action.
+
+GitHub provides an official CLI tool, written in golang: `cli/cli `__.
+When the Python version of **Releaser** was written, ``cli`` was evaluated as an alternative to *PyGitHub*.
+``gh release`` was (and still is) not flexible enough to update the reference of a release without deleting and
+recreating it (see `cli.github.com: manual/gh_release_create `__).
+Deletion and recreation is unfortunate, because it notifies all the watchers of a repository
+(see `eine/tip#111 `__).
+However, `cli.github.com: manual/gh_release_upload `__ handles uploading
+artifacts as assets faster and with better stability for larger files than *PyGitHub*
+(see `msys2/msys2-installer#36 `__).
+Furthermore, the GitHub CLI is installed on GitHub Actions' default virtual environments.
+Although ``gh`` does not support login through SSH (see `cli/cli#3715 `__), on GitHub
+Actions a token is available as ``${{ github.token }}``.
+Therefore, **Releaser** uses ``gh release upload`` internally.
+
+Usage
+*****
+
+The following block shows a minimal YAML workflow file:
+
+.. code-block:: yaml
+
+   name: 'workflow'
+
+   on:
+     schedule:
+       - cron: '0 0 * * 5'
+
+   jobs:
+     mwe:
+       runs-on: ubuntu-latest
+       steps:
+
+         # Clone repository
+         - uses: actions/checkout@v4
+
+         # Build your application, tool, artifacts, etc.
+         - name: Build
+           run: |
+             echo "Build some tool and generate some artifacts" > artifact.txt
+
+         # Update tag and pre-release
+         # - Update (force-push) tag to the commit that is used in the workflow.
+         # - Upload artifacts defined by the user.
+         - uses: pyTooling/Actions/releaser@r0
+           with:
+             token: ${{ secrets.GITHUB_TOKEN }}
+             files: |
+               artifact.txt
+               README.md
+
+
+Composite Action
+================
+
+The default implementation of **Releaser** is a Container Action.
+Therefore, a pre-built container image is pulled before starting the job.
+Alternatively, a Composite Action version is available: ``uses: pyTooling/Actions/releaser/composite@main``.
+The Composite version installs the dependencies on the host (the runner environment), instead of using a container.
+Both implementations are functionally equivalent from **Releaser**'s point of view; however, the Composite Action allows
+users to tweak the version of Python by using `actions/setup-python `__ beforehand.
+
+Options
+*******
+
+All options can alternatively be provided as environment variables: ``INPUT_TOKEN``, ``INPUT_FILES``, ``INPUT_TAG``, ``INPUT_RM``
+and/or ``INPUT_SNAPSHOTS``.
+
+token (required)
+================
+
+Token to make authenticated API calls; can be passed in using ``${{ secrets.GITHUB_TOKEN }}``.
+
+files (required)
+================
+
+Either a single filename/pattern or a multi-line list can be provided. All the artifacts are uploaded regardless of the
+directory hierarchy.
+
+For creating/updating a release without uploading assets, set ``files: none``.
+
+tag
+===
+
+The default tag name for the tip/nightly pre-release is ``tip``, but it can be optionally overridden through option ``tag``.
+
+rm
+==
+
+Set option ``rm`` to ``true`` for systematically removing previous artifacts (e.g. old versions).
+Otherwise (by default), all previous artifacts are preserved or overwritten.
+
+Note:
+  If all the assets are removed, or if the release itself is removed, tip/nightly assets won't be available for
+  users until the workflow is successfully run.
+  For instance, Action `setup-ghdl-ci `__ uses assets from `ghdl/ghdl: releases/tag/nightly `__.
+  Hence, it is recommended to try removing only the conflicting assets, in order to maximise availability.
+
+snapshots
+=========
+
+Whether to create releases from any tag or to treat some as snapshots.
+By default, all the tags with a non-empty ``prerelease`` field (see `semver.org: Is there a suggested regular expression (RegEx) to check a SemVer string? `__)
+are considered snapshots; neither a release is created nor assets are uploaded.
+
+Advanced/complex use cases
+**************************
+
+**Releaser** is essentially a very thin wrapper that uses the GitHub Actions context data along with the classes
+and methods of PyGithub.
+
+Similarly to `actions/github-script `__, users with advanced/complex requirements
+might find it desirable to write their own Python script, instead of using **Releaser**.
+In fact, since ``shell: python`` is supported in GitHub Actions, using Python does *not* require any Action.
+For prototyping purposes, the following job might be useful:
+
+.. code-block:: yaml
+
+   Release:
+     name: 'πŸ“¦ Release'
+     runs-on: ubuntu-latest
+     needs:
+       - ...
+     if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
+     steps:
+
+       - uses: actions/download-artifact@v3
+
+       - shell: bash
+         run: pip install PyGithub --progress-bar off
+
+       - name: Set list of files for uploading
+         id: files
+         shell: python
+         run: |
+           from github import Github
+           print("Β· Get GitHub API handler (authenticate)")
+           gh = Github('${{ github.token }}')
+           print("Β· Get Repository handler")
+           gh_repo = gh.get_repo('${{ github.repository }}')
+
+Find a non-trivial use case at `msys2/msys2-autobuild `__.
diff --git a/doc/Action/With-post-step.rst b/doc/Action/With-post-step.rst
new file mode 100644
index 0000000..f384445
--- /dev/null
+++ b/doc/Action/With-post-step.rst
@@ -0,0 +1,33 @@
+..
_ACTION/WithPostStep: + +with-post-step +############## + +JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at +the beginning or the end of a job, regardless of intermediate steps failing. Unfortunately, those are not available for +any other Action type. + +Action **with-post-step** is a generic JavaScript Action to execute a main command and to set a further command as a +post step. It allows using the ``post`` feature with scripts written in Bash, Python or any other interpreted language +available on the environment. + +**Example Usage:** + +.. code-block:: yaml + + jobs: + Image: + steps: + - ... + + - name: Push container image + uses: ./with-post-step + with: + main: | + echo '${{ github.token }}' | docker login ghcr.io -u GitHub-Actions --password-stdin + docker push ghcr.io/pytooling/releaser + post: docker logout ghcr.io + +.. seealso:: + + * `actions/runner#1478 `__. diff --git a/doc/Action/index.rst b/doc/Action/index.rst new file mode 100644 index 0000000..44eef5f --- /dev/null +++ b/doc/Action/index.rst @@ -0,0 +1,7 @@ +Overview +######## + +The following 2 actions are provided by **Actions**: + +* :ref:`ACTION/Releaser` +* :ref:`ACTION/WithPostStep` diff --git a/doc/Background.rst b/doc/Background.rst new file mode 100644 index 0000000..6e8da2d --- /dev/null +++ b/doc/Background.rst @@ -0,0 +1,87 @@ +Background +########## + +GitHub Actions supports five procedures to reuse code: + +- JavaScript Action: + + - `docs.github.com: actions/creating-actions/creating-a-javascript-action `__ + +- Container Action: + + - `docs.github.com: actions/creating-actions/creating-a-docker-container-action `__ + +- Container Step: + + - `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#example-using-a-docker-public-registry-action `__ + - `docs.github.com: actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idstepswithargs `__ + +- Composite Action: + + - `docs.github.com: actions/creating-actions/creating-a-composite-action `__ + - `github.blog/changelog: 2020-08-07-github-actions-composite-run-steps `__ + - `github.blog/changelog: 2021-08-25-github-actions-reduce-duplication-with-action-composition `__ + +- Reusable Workflow: + + - `docs.github.com: actions/learn-github-actions/reusing-workflows `__ + - `github.blog/changelog: 2021-10-05-github-actions-dry-your-github-actions-configuration-by-reusing-workflows `__ + +Container Actions and Container Steps are almost equivalent: Actions use a configuration file (``action.yml``), while +Steps do not. +Leaving JavaScript and Container Actions and Steps aside, the main differences between Composite Actions and Reusable +Workflows are the following: + +- Composite Actions can be executed from a remote/external path or from the checked out branch, and from any location. + However, Reusable Workflows can only be used through a remote/external path (``{owner}/{repo}/{path}/{filename}@{ref}``), + where ``{path}`` must be ``.github/workflows``, and ``@{ref}`` is required. + See `actions/runner#1493 `__. + As a result: + + - Local Composite Actions cannot be used without a prior repo checkout, but Reusable Workflows can be used without + checkout. + - Testing development versions of local Reusable Workflows is cumbersome, because PRs do not pick the modifications by + default. + +- Composite Actions can include multiple steps, but not multiple jobs. 
+ Conversely, Reusable Workflows can include multiple jobs, and multiple steps in each job.
+- Composite Actions can include multiple files, so it's possible to use files from the Action or from the user's repository.
+ Conversely, Reusable Workflows are a single YAML file, with no additional files retrieved by default.
+
+Callable vs dispatchable workflows
+**********************************
+
+Reusable Workflows are defined through the ``workflow_call`` event kind.
+Similarly, any "regular" Workflow can be triggered through a ``workflow_dispatch`` event.
+Both event kinds support ``input`` options, which are usable within the Workflow.
+Therefore, one might intuitively try to write a workflow which is both callable and dispatchable.
+In other words, one which can be either reused from another workflow or triggered through the API.
+Unfortunately, that is not the case.
+Although ``input`` options can be duplicated for both events, GitHub's backend exposes them through different objects.
+In dispatchable Workflows, the object is ``${{ github.event.inputs }}``, while callable workflows receive ``${{ inputs }}``.
+
+As a result, in order to make a reusable workflow dispatchable, a wrapper workflow is required.
+See, for instance, `hdl/containers: .github/workflows/common.yml `__
+and `hdl/containers: .github/workflows/dispatch.yml `__.
+Alternatively, a normalisation job might be used, similar to the ``Parameters`` job in this repo.
+
+Call hierarchy
+**************
+
+Reusable Workflows cannot call other Reusable Workflows; however, they can use Composite Actions, and Composite Actions
+can call other Actions.
+Therefore, in some use cases it is sensible to combine one layer of reusable workflows for orchestrating the jobs, along
+with multiple layers of composite actions.
+
+Script with post step
+*********************
+
+JavaScript Actions support defining ``pre``, ``pre-if``, ``post`` and ``post-if`` steps, which allow executing steps at
+the beginning or the end of a job, regardless of intermediate steps failing.
+Unfortunately, those are not available for any other Action type.
+
+Action :ref:`with-post-step <ACTION/WithPostStep>` is a generic JS Action to execute a main command and to set a command as a post
+step.
+It allows using the ``post`` feature with scripts written in Bash, Python or any other interpreted language available on
+the environment.
+See: `actions/runner#1478 `__.
diff --git a/doc/Dependency.rst b/doc/Dependency.rst
new file mode 100644
index 0000000..aeb90ca
--- /dev/null
+++ b/doc/Dependency.rst
@@ -0,0 +1,30 @@
+Dependencies
+############
+
+This is a summary of dependencies used by the provided job templates. For more details, see each job template.
+
+* Actions provided by GitHub
+
+  * :gh:`actions/checkout`
+  * :gh:`actions/upload-artifact`
+  * :gh:`actions/download-artifact`
+  * :gh:`actions/create-release` (unmaintained)
+  * :gh:`actions/setup-python`
+
+* BuildTheDocs
+
+  * :gh:`buildthedocs/btd`
+
+* Code Quality Services
+
+  * :gh:`codecov/codecov-action`
+  * :gh:`codacy/codacy-coverage-reporter-action`
+
+* Reporting
+
+  * :gh:`dorny/test-reporter`
+
+* Miscellaneous
+
+  * :gh:`msys2/setup-msys2`
+  * :gh:`geekyeggo/delete-artifact`
diff --git a/doc/Deveopment.rst b/doc/Deveopment.rst
new file mode 100644
index 0000000..c7c2734
--- /dev/null
+++ b/doc/Deveopment.rst
@@ -0,0 +1,4 @@
+Development
+###########
+
+.. todo:: Development - Explain how to write new job templates.
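To illustrate the wrapper pattern described in ``doc/Background.rst`` above (callable workflows read ``${{ inputs }}``, while dispatched workflows read ``${{ github.event.inputs }}``), the following is a minimal sketch of a dispatch wrapper around a Reusable Workflow. The owner/repo path ``MyOrg/MyRepo``, the workflow file ``Pipeline.yml`` and the ``python_version`` input are hypothetical placeholders for illustration only; they are not files or options introduced by this changeset.

.. code-block:: yaml

   name: Dispatch

   on:
     workflow_dispatch:
       inputs:
         python_version:
           description: 'Python version forwarded to the callable workflow.'
           required: false
           default: '3.12'

   jobs:
     Pipeline:
       # Reusable Workflows are referenced through the remote path form
       # '{owner}/{repo}/.github/workflows/{file}@{ref}'; 'Pipeline.yml' is assumed
       # to declare a matching 'python_version' input under 'workflow_call'.
       uses: MyOrg/MyRepo/.github/workflows/Pipeline.yml@main
       with:
         # The dispatch event exposes inputs via 'github.event.inputs';
         # the callable workflow reads them as 'inputs'.
         python_version: ${{ github.event.inputs.python_version }}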
diff --git a/doc/Doc-License.rst b/doc/Doc-License.rst
index 1258fbc..ca0c256 100644
--- a/doc/Doc-License.rst
+++ b/doc/Doc-License.rst
@@ -1,8 +1,8 @@
.. _DOCLICENSE:
-.. Note:: This is a local copy of the `Creative Commons - Attribution 4.0 International (CC BY 4.0) `__.
+.. note:: This is a local copy of the `Creative Commons - Attribution 4.0 International (CC BY 4.0) `__.
-.. Attention:: This **CC BY 4.0** license applies only to the **documentation** of this project.
+.. attention:: This **CC BY 4.0** license applies only to the **documentation** of this project.
Creative Commons Attribution 4.0 International
diff --git a/doc/Instantiation.rst b/doc/Instantiation.rst
new file mode 100644
index 0000000..b606f6a
--- /dev/null
+++ b/doc/Instantiation.rst
@@ -0,0 +1,110 @@
+Instantiation
+##############
+
+The job templates (GitHub Action *Reusable Workflows*) need to be stored in the same directory where normal pipelines
+(GitHub Action *Workflows*) are located: ``.github/workflows/