Mirror of https://github.com/pyTooling/Actions.git (synced 2026-02-12 02:56:56 +08:00)

Commit: Adding support for a JSON inventory.
.github/dependabot.yml (vendored): 2 changed lines
@@ -10,9 +10,7 @@ updates:
       - Dependencies
     assignees:
       - Paebbels
-      - umarcor
     reviewers:
       - Paebbels
-      - umarcor
     schedule:
       interval: "daily" # Checks on Monday trough Friday.
.github/workflows/NightlyRelease.yml (vendored): 138 changed lines
@@ -68,6 +68,14 @@ on:
         description: 'Multi-line string containing artifact:file:title asset descriptions.'
         required: true
         type: string
+      inventory-json:
+        type: string
+        required: false
+        default: ''
+      inventory-version:
+        type: string
+        required: false
+        default: ''
       tarball-name:
         type: string
         required: false
@@ -185,6 +193,7 @@ jobs:
           ANSI_LIGHT_RED=$'\x1b[91m'
           ANSI_LIGHT_GREEN=$'\x1b[92m'
           ANSI_LIGHT_YELLOW=$'\x1b[93m'
+          ANSI_LIGHT_BLUE="\e[94m"
           ANSI_NOCOLOR=$'\x1b[0m'

           export GH_TOKEN=${{ github.token }}
@@ -202,6 +211,30 @@ jobs:
             printf "%s\n" "$line"
           }

+          # Create JSON inventory
+          if [[ "${{ inputs.inventory-json }}" != "" ]]; then
+            VERSION="1.0"
+            ORDER=("os-name" "os-version" "os-arch" "runtime" "ghdl-backend")
+
+            jsonInventory=$(jq -c -n \
+              --arg version "${VERSION}" \
+              --arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \
+              --argjson jsonMeta "$(jq -c -n \
+                --arg tag "${{ inputs.nightly_name }}" \
+                --arg version "${{ inputs.inventory-version }}" \
+                --arg hash "${{ github.sha }}" \
+                --arg repo "${{ github.server_url }}/${{ github.repository }}" \
+                '{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo}' \
+              )" \
+              --argjson jsonInfo "$(jq -c -n \
+                --arg url "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.nightly_name }}" \
+                --argjson order "$(jq -c -n '$ARGS.positional' --args "${ORDER[@]}")" \
+                '{"release-url": $url, "categories": $order}'
+              )" \
+              '{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "info": $jsonInfo, "files": {}}'
+            )
+          fi
+
           ERRORS=0
           # A dictionary of 0/1 to avoid duplicate downloads
           declare -A downloadedArtifacts
@@ -214,12 +247,20 @@ jobs:

             # split assetLine colon separated triple: artifact:asset:title
             artifact="${assetLine%%:*}"
-            remaining="${assetLine#*:}"
-            asset="${remaining%%:*}"
-            title="${remaining##*:}"
+            assetLine="${assetLine#*:}"
+            asset="${assetLine%%:*}"
+            assetLine="${assetLine#*:}"
+            if [[ "${{ inputs.inventory-json }}" == "" ]]; then
+              categories=""
+              title="${assetLine##*:}"
+            else
+              categories="${assetLine%%:*}"
+              title="${assetLine##*:}"
+            fi

             # remove leading whitespace
             asset="${asset#"${asset%%[![:space:]]*}"}"
+            categories="${categories#"${categories%%[![:space:]]*}"}"
             title="${title#"${title%%[![:space:]]*}"}"

             # apply replacements
@@ -227,7 +268,7 @@ jobs:
             title="$(Replace "${title}")"

             printf "%s\n" "Publish asset '${asset}' from artifact '${artifact}' with title '${title}'"
-            printf "%s" " Checked asset for duplicates ... "
+            printf " %s" "Checked asset for duplicates ... "
             if [[ -n "${assetFilenames[$asset]}" ]]; then
               printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
               printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'."
@@ -240,10 +281,10 @@ jobs:

             # Download artifact by artifact name
             if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then
-              printf "%s\n" " downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
+              printf " %s\n" "downloading '${artifact}' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
             else
               echo " downloading '${artifact}' ... "
-              printf "%s" " gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
+              printf " %s" "gh run download $GITHUB_RUN_ID --dir \"${artifact}\" --name \"${artifact}\" "
               gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}"
               if [[ $? -eq 0 ]]; then
                 printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
@@ -256,13 +297,13 @@ jobs:
               fi
               downloadedArtifacts[$artifact]=1

-              printf "%s" " Checking for embedded tarball ... "
+              printf " %s" "Checking for embedded tarball ... "
               if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then
                 printf "%s\n" "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}"

                 pushd "${artifact}" > /dev/null

-                printf "%s" " Extracting embedded tarball ... "
+                printf " %s" "Extracting embedded tarball ... "
                 tar -xf "${{ inputs.tarball-name }}"
                 if [[ $? -ne 0 ]]; then
                   printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
@@ -270,7 +311,7 @@ jobs:
                   printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                 fi

-                printf "%s" " Removing temporary tarball ... "
+                printf " %s" "Removing temporary tarball ... "
                 rm -f "${{ inputs.tarball-name }}"
                 if [[ $? -ne 0 ]]; then
                   printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}"
@@ -285,20 +326,20 @@ jobs:
             fi

             # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact.
-            printf "%s" " checking asset '${artifact}/${asset}' ... "
+            printf " %s" "checking asset '${artifact}/${asset}' ... "
             if [[ "${asset}" == !*.zip ]]; then
               printf "%s\n" "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}"
               asset="${asset##*!}"
-              printf "%s\n" " Compressing artifact '${artifact}' to '${asset}' ..."
+              printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
               (
                 cd "${artifact}" && \
                 zip -r "../${asset}" *
               )
               if [[ $? -eq 0 ]]; then
-                printf "%s\n" " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
+                printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                 uploadFile="${asset}"
               else
-                printf "%s\n" " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
+                printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                 printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}"
                 printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'."
                 ERRORS=1
@@ -310,12 +351,12 @@ jobs:
              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
-               printf "%s\n" " Compressing artifact '${artifact}' to '${asset}' ..."
+               printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
-               printf "%s\n" " Compressing artifact '${artifact}' to '${asset}' ..."
+               printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --gzip --owner=0 --group=0 --file="../${asset}" *
@@ -324,10 +365,10 @@ jobs:
              fi

              if [[ $retCode -eq 0 ]]; then
-               printf "%s\n" " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
+               printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
-               printf "%s\n" " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
+               printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}"
                printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'."
                ERRORS=1
@@ -339,12 +380,12 @@ jobs:
              if [[ "${asset:0:1}" == "\$" ]]; then
                asset="${asset##*$}"
                dirName="${asset%.*}"
-               printf "%s\n" " Compressing artifact '${artifact}' to '${asset}' ..."
+               printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" .
                retCode=$?
              else
                asset="${asset##*!}"
-               printf "%s\n" " Compressing artifact '${artifact}' to '${asset}' ..."
+               printf " %s\n" "Compressing artifact '${artifact}' to '${asset}' ..."
                (
                  cd "${artifact}" && \
                  tar -c --zstd --owner=0 --group=0 --file="../${asset}" *
@@ -353,10 +394,10 @@ jobs:
              fi

              if [[ $retCode -eq 0 ]]; then
-               printf "%s\n" " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
+               printf " %s\n" "Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
                uploadFile="${asset}"
              else
-               printf "%s\n" " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
+               printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
                printf "%s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}"
                printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'."
                ERRORS=1
@@ -373,8 +414,35 @@ jobs:
               continue
             fi

+            # Add asset to JSON inventory
+            if [[ "${{ inputs.inventory-json }}" != "" ]]; then
+              if [[ "${categories}" != "${title}" ]]; then
+                printf " %s\n" "adding file '${uploadFile}' with '${categories//;/ → }' to JSON inventory ..."
+                category=""
+                jsonEntry=$(jq -c -n \
+                  --arg title "${title}" \
+                  --arg file "${uploadFile}" \
+                  '{"file": $file, "title": $title}' \
+                )
+
+                while [[ "${categories}" != "${category}" ]]; do
+                  category="${categories##*;}"
+                  categories="${categories%;*}"
+                  jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}')
+                done
+
+                jsonInventory=$(jq -c -n \
+                  --argjson inventory "${jsonInventory}" \
+                  --argjson file "${jsonEntry}" \
+                  '$inventory * {"files": $file}' \
+                )
+              else
+                printf " %s\n" "adding file '${uploadFile}' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}"
+              fi
+            fi
+
             # Upload asset to existing release page
-            printf "%s" " uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
+            printf " %s" "uploading asset '${asset}' from '${uploadFile}' with title '${title}' ... "
             gh release upload ${{ inputs.nightly_name }} "${uploadFile}#${title}" --clobber
             if [[ $? -eq 0 ]]; then
               printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
@@ -387,8 +455,32 @@ jobs:
             fi
           done <<<'${{ inputs.assets }}'

-          printf "%s\n" "Inspecting downloaded artifacts ..."
+          if [[ "${{ inputs.inventory-json }}" != "" ]]; then
+            inventoryTitle="Release Inventory (JSON)"
+
+            printf "%s\n" "Publish asset '${{ inputs.inventory-json }}' with title '${inventoryTitle}'"
+            printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...."
+            printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}"
+            cat "${{ inputs.inventory-json }}"
+            printf "::endgroup::\n"
+
+            # Upload inventory asset to existing release page
+            printf " %s" "uploading asset '${{ inputs.inventory-json }}' title '${inventoryTitle}' ... "
+            gh release upload ${{ inputs.nightly_name }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber
+            if [[ $? -eq 0 ]]; then
+              printf "%s\n" "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}"
+            else
+              printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}"
+              printf "%s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}"
+              printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'."
+              ERRORS=1
+              continue
+            fi
+          fi
+
+          printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..."
           tree -pash -L 3 .
+          printf "::endgroup::\n"

           if [[ $ERROR -ne 0 ]]; then
             printf "%s\n" "${ANSI_LIGHT_RED}Errors detected in previous steps.${ANSI_NOCOLOR}"
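Note: the category handling introduced above can be reproduced in isolation. The snippet below is a minimal sketch, not part of the workflow; the file name report.html, the title, and the category chain doc;html are made-up illustration values, and it uses jq's parenthesized computed-key form instead of the workflow's '{$cat: $value}' shorthand. It shows how one asset entry is wrapped in one object per ';'-separated category and then merged into the inventory's "files" map, mirroring the while-loop and the '$inventory * {"files": $file}' merge in the hunks above.

#!/usr/bin/env bash
# Minimal sketch of the category nesting (illustrative values, assumes jq is installed).
set -euo pipefail

categories="doc;html"       # hypothetical category chain from an asset line
title="Documentation"       # hypothetical asset title
uploadFile="report.html"    # hypothetical asset file name

# innermost object: the file entry itself
jsonEntry=$(jq -c -n --arg title "${title}" --arg file "${uploadFile}" \
  '{"file": $file, "title": $title}')

# wrap the entry in one object per category, last category innermost
category=""
while [[ "${categories}" != "${category}" ]]; do
  category="${categories##*;}"     # take the last ';'-separated element
  categories="${categories%;*}"    # drop it from the remaining list
  jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" \
    '{($cat): $value}')            # computed object key
done

# merge into an (empty) inventory skeleton, as '$inventory * {"files": $file}' does
jq -n --argjson file "${jsonEntry}" '{"files": {}} * {"files": $file}'
# prints: {"files":{"doc":{"html":{"file":"report.html","title":"Documentation"}}}}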
.github/workflows/_Checking_JobTemplates.yml (vendored): 2 changed lines
@@ -22,7 +22,7 @@ jobs:
     with:
       name: Platform
       python_version_list: ""
-      system_list: "ubuntu windows macos mingw32 mingw64 clang64 ucrt64"
+      system_list: "ubuntu windows macos mingw64 clang64 ucrt64"

   UnitTesting:
     uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev
.github/workflows/_Checking_Nightly.yml (vendored): 71 changed lines
@@ -56,35 +56,13 @@ jobs:
         version=4.2.0
         tool=myTool
         prog=program
-      nightly_title: "Nightly Release"
+      nightly_title: "Nightly Test Release"
       nightly_description: |
-        This *nightly* release contains all latest and important artifacts created by GHDL's CI pipeline.
+        This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.

-        # GHDL %version%
+        # %tool% %version%

-        GHDL offers the simulator and synthesis tool for VHDL. GHDL can be build for various backends:
-        * `gcc` - using the GCC compiler framework
-        * `mcode` - in memory code generation
-        * `llvm` - using the LLVM compiler framework
-        * `llvm-jit` - using the LLVM compiler framework, but in memory
-
-        The following asset categories are provided for GHDL:
-        * macOS x64-64 builds as TAR/GZ file
-        * macOS aarch64 builds as TAR/GZ file
-        * Ubuntu 24.04 LTS builds as TAR/GZ file
-        * Windows builds for standalone usage (without MSYS2) as ZIP file
-        * MSYS2 packages as TAR/ZST file
-
-        # pyGHDL %version%
-
-        The Python package `pyGHDL` offers Python binding (`pyGHDL.libghdl`) to a `libghdl` shared library (`*.so`/`*.dll`).
-        In addition to the low-level binding layer, pyGHDL offers:
-        * a Language Server Protocol (LSP) instance for e.g. live code checking by editors
-        * a Code Document Object Model (CodeDOM) based on [pyVHDLModel](https://github.com/VHDL/pyVHDLModel)
-
-        The following asset categories are provided for pyGHDL:
-        * Platform specific Python wheel package for Ubuntu incl. `pyGHDL...so`
-        * Platform specific Python wheel package for Windows incl. `pyGHDL...dll`
+        * %prog%
       assets: |
         document: document1.txt: Documentation
         document: build.log: Logfile - %tool% - %tool%
@@ -99,3 +77,44 @@ jobs:
         document:$archive7.tar.gz: Archive 7 - tar.gz + dir
         document:$archive8.tzst: Archive 8 - tzst + dir
         document:$archive9.tar.zst:Archive 9 - tar.zst + dir
+
+  NightlyPageWithInventory:
+    uses: ./.github/workflows/NightlyRelease.yml
+    needs:
+      - Build
+    secrets: inherit
+    permissions:
+      contents: write
+      actions: write
+      # attestations: write
+    with:
+      replacements: |
+        version=4.2.0
+        tool=myTool
+        prog=program
+      nightly_name: inventory
+      nightly_title: "Nightly Test Release with Inventory"
+      nightly_description: |
+        This *nightly* release contains all latest and important artifacts created by %tool%'s CI pipeline.
+
+        # %tool% %version%
+
+        * %prog%
+        * iventory.json
+      inventory-json: "inventory.json"
+      inventory-version: 4.2.5
+      # inventory-categories:
+      assets: |
+        document: document1.txt: doc;html: Documentation
+        document: build.log: build;log: Logfile - %tool% - %tool%
+        other: document1.txt: build;SBOM:SBOM - %version%
+        other: %prog%.py: app;binary:Application - %tool% - %version%
+        document:!archive1.zip: Archive 1 - zip
+        document:!archive2.tgz: Archive 2 - tgz
+        document:!archive3.tar.gz: Archive 3 - tar.gz
+        document:!archive4.tzst: Archive 4 - tzst
+        document:!archive5.tar.zst: Archive 5 - tar.zst
+        document:$archive6.tgz: Archive 6 - tgz + dir
+        document:$archive7.tar.gz: Archive 7 - tar.gz + dir
+        document:$archive8.tzst: Archive 8 - tzst + dir
+        document:$archive9.tar.zst: Archive 9 - tar.zst + dir
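Note: the asset lines in the job above carry four colon-separated fields (artifact, file, ';'-separated categories, title) instead of the original three. As a standalone illustration, not part of either workflow, the sketch below splits one such line, taken verbatim from the test job above, with the same parameter expansions that NightlyRelease.yml applies when 'inventory-json' is set.

#!/usr/bin/env bash
# Minimal sketch: split one four-field asset line (line taken from the test job above).
assetLine="document: document1.txt: doc;html: Documentation"

artifact="${assetLine%%:*}"          # -> document
assetLine="${assetLine#*:}"
asset="${assetLine%%:*}"             # -> ' document1.txt'
assetLine="${assetLine#*:}"
categories="${assetLine%%:*}"        # -> ' doc;html'
title="${assetLine##*:}"             # -> ' Documentation'

# remove leading whitespace, as the workflow does
asset="${asset#"${asset%%[![:space:]]*}"}"
categories="${categories#"${categories%%[![:space:]]*}"}"
title="${title#"${title%%[![:space:]]*}"}"

printf 'artifact=%s asset=%s categories=%s title=%s\n' \
  "${artifact}" "${asset}" "${categories}" "${title}"
# prints: artifact=document asset=document1.txt categories=doc;html title=Documentation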
run.ps1: 6 changed lines
@@ -88,7 +88,7 @@ if ($build)
 rm -Force .\build\bdist.win-amd64
 rm -Force .\build\lib
 Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..."
-py -3.12 -m build --wheel
+py -3.13 -m build --wheel

 Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished"
 }
@@ -102,9 +102,9 @@ if ($install)
 }
 else
 { Write-Host -ForegroundColor Cyan "[ADMIN][UNINSTALL] Uninstalling $PackageName ..."
-py -3.12 -m pip uninstall -y $PackageName
+py -3.13 -m pip uninstall -y $PackageName
 Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Installing $PackageName from wheel ..."
-py -3.12 -m pip install .\dist\$PackageName-6.7.0-py3-none-any.whl
+py -3.13 -m pip install .\dist\$PackageName-8.1.0-py3-none-any.whl

 Write-Host -ForegroundColor Cyan "[ADMIN][INSTALL] Closing window in 5 seconds ..."
 Start-Sleep -Seconds 5