Bump bump-my-version from 1.1.0 to 1.2.4 #6806
---
name: Build & release
# Read https://github.com/actions/runner/issues/491 for insights on complex workflow execution logic.
"on":
  workflow_call:
    secrets:
      PYPI_TOKEN:
        required: false
    inputs:
      unstable-targets:
        description: List of unstable targets on which the build is attempted but whose failure is not fatal
        required: false
        # XXX This has to be a string because workflow inputs are not allowed to be complex types.
        # Still, thanks to gha-utils' resilience, we can pass a list of targets as a string, separated
        # by arbitrary separators.
        type: string
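        # A caller could therefore pass several targets in one string, e.g. (hypothetical values):
        #   unstable-targets: "windows-arm64, macos-x64"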
      timeout:
        description: Timeout in seconds for each binary test
        required: false
        type: number
      test-plan-file:
        description: YAML file containing the full test plan for binaries
        required: false
        type: string
        default: ./tests/cli-test-plan.yaml
      test-plan:
        description: Test plan for binaries
        required: false
        # XXX This has to be a string because workflow inputs are not allowed to be complex types.
        type: string
    outputs:
      nuitka_matrix:
        description: Nuitka build matrix
        value: ${{ jobs.project-metadata.outputs.nuitka_matrix }}
  # Targets are chosen so that all commits get a chance to have their build tested.
  push:
    branches:
      - main
  pull_request:
# Defaults set in workflow_call.inputs or workflow_dispatch.inputs are not propagated to other events.
# We have to manually manage them: https://github.com/orgs/community/discussions/39357#discussioncomment-7500641
env:
  test-plan-file: >-
    ${{ inputs.test-plan-file == null && './tests/cli-test-plan.yaml' || inputs.test-plan-file }}
  test-plan: ${{ inputs.test-plan }}
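  # Downstream steps therefore read ${{ env.test-plan-file }} and ${{ env.test-plan }} instead of the raw inputs
  # (see the "Run test plan for binary" step below).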
concurrency:
  # Group workflow jobs so new commits cancel in-progress executions triggered by previous commits. Source:
  # https://mail.python.org/archives/list/pypa-committers@python.org/thread/PCBCQMJF64JGRBOX7E2EE4YLKHT4DI55/
  # https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/control-the-concurrency-of-workflows-and-jobs
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
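  # e.g. "Build & release-1234" for a pull request, or "Build & release-<sha>" for a push (illustrative values).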
  cancel-in-progress: true
jobs:
  project-metadata:
    name: Project metadata
    runs-on: ubuntu-24.04
    outputs:
      # There's a design issue with GitHub actions: matrix outputs are not cumulative. The last job wins
      # (see: https://github.community/t/bug-jobs-output-should-return-a-list-for-a-matrix-job/128626).
      # This means that in a graph of jobs, a matrix-based one is terminal, and cannot be depended on. The same goes
      # for (reusable) workflows. We use this preliminary job to produce all the matrices we need to trigger
      # dependent jobs across these dimensions.
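      # Downstream jobs consume these as, e.g., matrix: ${{ fromJSON(needs.project-metadata.outputs.nuitka_matrix) }}
      # (see the compile-binaries job below).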
      new_commits_matrix: ${{ steps.project-metadata.outputs.new_commits_matrix }}
      release_commits_matrix: ${{ steps.project-metadata.outputs.release_commits_matrix }}
      # Export Python project metadata.
      build_targets: ${{ steps.project-metadata.outputs.build_targets }}
      nuitka_matrix: ${{ steps.project-metadata.outputs.nuitka_matrix }}
      is_python_project: ${{ steps.project-metadata.outputs.is_python_project }}
      package_name: ${{ steps.project-metadata.outputs.package_name }}
      release_notes: ${{ steps.project-metadata.outputs.release_notes }}
    steps:
      - uses: actions/checkout@v5.0.0
        with:
          # Checkout pull request HEAD commit to ignore actions/checkout's merge commit. Fallback to push SHA.
          ref: ${{ github.event.pull_request.head.sha || github.sha }}
          # We're going to browse all new commits.
          fetch-depth: 0
      - name: List all branches
        run: |
          git branch --all
      - name: List all commits
        run: |
          git log --decorate=full --oneline
      - uses: astral-sh/setup-uv@v7.0.0
      - name: Run gha-utils metadata
        id: project-metadata
        env:
          GITHUB_CONTEXT: ${{ toJSON(github) }}
        # Declare macOS 26 as unstable target by default, while we're searching for a solution upstream:
        # https://github.com/Nuitka/Nuitka/issues/3637
        # yamllint disable rule:line-length
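        # With inputs.unstable-targets set to, say, "windows-arm64" (hypothetical value), the expression below
        # expands to: --unstable-targets "windows-arm64,macos-arm64"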
        run: >
          uvx
          --with-requirements https://raw.githubusercontent.com/kdeldycke/workflows/main/requirements/gha-utils.txt
          --
          gha-utils metadata
          ${{ inputs.unstable-targets != null && format('--unstable-targets "{0},macos-arm64"', inputs.unstable-targets) || '--unstable-targets "macos-arm64"' }}
          --overwrite "$GITHUB_OUTPUT"
        # yamllint enable
  package-build:
    name: Build package
    needs:
      - project-metadata
    if: fromJSON(needs.project-metadata.outputs.is_python_project)
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.new_commits_matrix) }}
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v5.0.0
        with:
          ref: ${{ matrix.commit }}
      - uses: astral-sh/setup-uv@v7.0.0
      - name: Build package
        run: |
          uv --no-progress build
      - name: Upload artifacts
        uses: actions/upload-artifact@v4.6.2
        with:
          name: ${{ github.event.repository.name }}-${{ matrix.short_sha }}
          path: ./dist/*
          # TODO: Should we also attest the archive created here? See:
          # https://github.com/actions/attest-build-provenance?tab=readme-ov-file#integration-with-actionsupload-artifact
  validate-arch:
    # Check architecture matches the one expected from the runner image. This is to ensure that the OS does not rely
    # on emulation to run the build. See:
    # https://docs.astral.sh/uv/concepts/python-versions/#transparent-x86_64-emulation-on-aarch64
    name: Validate ${{ matrix.os }} / ${{ matrix.arch }}
    needs:
      - project-metadata
    if: needs.project-metadata.outputs.build_targets
    strategy:
      matrix:
        include: ${{ fromJSON(needs.project-metadata.outputs.build_targets) }}
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check Python version
        run: |
          python --version
          python -m pip --version
      - name: Check architecture is ${{ matrix.arch }}
        shell: python
        run: |
          import platform
          arch = platform.machine()
          print(f"Detected architecture: {arch}")
          matrix_arch = "${{ matrix.arch }}"
          if matrix_arch == "x64":
              assert arch.lower() in ("x86_64", "amd64")
          elif matrix_arch == "arm64":
              assert arch.lower() in ("aarch64", "arm64")
          else:
              raise ValueError(f"Unrecognized architecture in the matrix: {matrix_arch}")
  compile-binaries:
    name: "${{ matrix.state == 'stable' && '✅' || '⁉️' }} Build on ${{ matrix.os }}, ${{ matrix.short_sha }}"
    needs:
      - project-metadata
    if: needs.project-metadata.outputs.nuitka_matrix
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.nuitka_matrix) }}
    runs-on: ${{ matrix.os }}
    # We keep going when a job flagged as not stable fails.
    continue-on-error: ${{ matrix.state == 'unstable' }}
    steps:
      - uses: actions/checkout@v5.0.0
        with:
          ref: ${{ matrix.commit }}
      - uses: astral-sh/setup-uv@v7.0.0
      - name: Setup venv
        run: |
          uv --no-progress venv --python 3.13
      - name: Install Nuitka
        run: >
          uv --no-progress pip install
          --requirement https://raw.githubusercontent.com/kdeldycke/workflows/main/requirements/nuitka.txt
      - name: Nuitka + compilers versions
        run: |
          uv --no-progress run --frozen -- nuitka --version
      - name: Build binary
        id: build-binary
        continue-on-error: true
        run: >
          uv --no-progress run --frozen -- nuitka
          --onefile --assume-yes-for-downloads --output-filename=${{ matrix.bin_name }} ${{ matrix.module_path }}
      - name: Upload Nuitka crash report
        uses: actions/upload-artifact@v4.6.2
        with:
          name: nuitka-crash-report-${{ matrix.os }}-${{ matrix.short_sha }}.xml
          if-no-files-found: ignore
          path: nuitka-crash-report.xml
      - if: steps.build-binary.outcome == 'failure'
        run: |
          echo "Nuitka build failed, skipping the rest of the steps."
          exit 1
      - name: Install exiftool - Linux
        if: runner.os == 'Linux'
        run: |
          sudo apt --quiet --yes install exiftool
      - name: Install exiftool - macOS
        if: runner.os == 'macOS'
        run: |
          brew install exiftool
      - name: Install exiftool - Windows
        if: runner.os == 'Windows'
        run: |
          choco install exiftool --no-progress --yes
      - name: Binary metadata
        shell: python
        run: |
          import json
          import subprocess
          from pathlib import Path
          # Get matrix architecture.
          target = "${{ matrix.target }}"
          platform_id = "${{ matrix.platform_id }}"
          arch = "${{ matrix.arch }}"
          bin_name = Path("${{ matrix.bin_name }}").resolve()
          # Run "exiftool -json -MachineType -CPUType <binary_file>".
          result = subprocess.run(
              [
                  "exiftool.exe" if platform_id == "windows" else "exiftool",
                  "-json",
                  "-CPUType",
                  "-MachineType",
                  str(bin_name),
              ],
              capture_output=True,
              text=True,
              check=True,
          )
          print("ExifTool output:")
          print(result.stdout)
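          # exiftool -json prints a JSON array with one object per file, along the lines of (illustrative values):
          #   [{"SourceFile": "gha-utils-abc1234", "CPUType": "AMD x86-64"}]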
          # Parse output.
          output = json.loads(result.stdout)
          # Map each target to its exiftool architecture string.
          # Ubuntu:
          #   CPU Type     : Arm 64-bits (Armv8/AArch64)
          #   CPU Type     : AMD x86-64
          # macOS:
          #   CPU Type     : ARM 64-bit
          #   CPU Type     : x86 64-bit
          # Windows:
          #   Machine Type : ARM64 little endian
          #   Machine Type : AMD AMD64
          arch_mappings = {
              "linux-arm64": ("CPUType", "Arm 64-bits"),
              "linux-x64": ("CPUType", "AMD x86-64"),
              "macos-arm64": ("CPUType", "ARM 64-bit"),
              "macos-x64": ("CPUType", "x86 64-bit"),
              "windows-arm64": ("MachineType", "ARM64"),
              "windows-x64": ("MachineType", "AMD64"),
          }
          if target not in arch_mappings:
              raise ValueError(f"Unknown target: {target}")
          field, expected_substring = arch_mappings[target]
          reported_arch = output[0][field]
          if expected_substring not in reported_arch:
              raise AssertionError(
                  "Binary architecture mismatch!\n"
                  f"Expected: {expected_substring!r} in field {field}\n"
                  f"Got: {reported_arch!r}\n"
              )
          print(
              f"Binary architecture matches: {expected_substring!r} found in {field!r} "
              f"for {target} target."
          )
      - name: Upload binaries
        uses: actions/upload-artifact@v4.6.2
        with:
          name: ${{ matrix.bin_name }}
          if-no-files-found: warn
          path: ${{ matrix.bin_name }}
  test-binaries:
    name: "${{ matrix.state == 'stable' && '✅' || '⁉️' }} Test on ${{ matrix.os }}, ${{ matrix.short_sha }}"
    needs:
      - project-metadata
      - compile-binaries
    if: needs.project-metadata.outputs.nuitka_matrix
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.nuitka_matrix) }}
    runs-on: ${{ matrix.os }}
    # We keep going when a job flagged as not stable fails.
    continue-on-error: ${{ matrix.state == 'unstable' }}
    steps:
      - uses: actions/checkout@v5.0.0
        with:
          ref: ${{ matrix.commit }}
      - name: Download artifact
        uses: actions/download-artifact@v5.0.0
        id: artifacts
        with:
          name: ${{ matrix.bin_name }}
      - name: Set binary permissions
        if: runner.os != 'Windows'
        run: |
          chmod +x ${{ steps.artifacts.outputs.download-path }}/${{ matrix.bin_name }}
      - uses: astral-sh/setup-uv@v7.0.0
      - name: Run test plan for binary
        run: >
          uvx
          --with-requirements https://raw.githubusercontent.com/kdeldycke/workflows/main/requirements/gha-utils.txt
          --
          gha-utils test-plan
          ${{ inputs.timeout != null && format('--timeout {0}', inputs.timeout) || '' }}
          --binary "${{ steps.artifacts.outputs.download-path }}/${{ matrix.bin_name }}"
          --plan-file "${{ env.test-plan-file }}"
          ${{ env.test-plan != null && '--plan-envvar test-plan' || '' }}
  git-tag:
    name: Tag release
    needs:
      - project-metadata
    # Only consider pushes to the main branch as triggers for releases.
    if: github.ref == 'refs/heads/main' && needs.project-metadata.outputs.release_commits_matrix
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.release_commits_matrix) }}
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v5.0.0
        with:
          ref: ${{ matrix.commit }}
          # XXX We need a custom PAT with workflows permissions because tag generation will work, but it will not
          # trigger any other workflows that use `on.push.tags` triggers. See:
          # https://stackoverflow.com/questions/60963759/use-github-actions-to-create-a-tag-but-not-a-release#comment135891921_64479344
          # https://github.com/orgs/community/discussions/27028
          token: ${{ secrets.WORKFLOW_UPDATE_GITHUB_PAT || secrets.GITHUB_TOKEN }}
      - name: Check if tag exists
        id: tag_exists
        run: |
echo "tag_exists=$(git show-ref --tags "v${{ matrix.current_version }}" --quiet )" | tee -a "$GITHUB_OUTPUT" | |
      - name: Push tag
        # Skip the tag creation if it already exists instead of failing outright. This allows us to re-run the
        # workflow if it was interrupted the first time, which is really useful if the tagging fails during a
        # release: we can simply push the new tag by hand and re-launch the workflow run.
        if: ${{ ! steps.tag_exists.outputs.tag_exists }}
        run: |
          git tag "v${{ matrix.current_version }}" "${{ matrix.commit }}"
          git push origin "v${{ matrix.current_version }}"
  pypi-publish:
    name: Publish to PyPI
    needs:
      - project-metadata
      - package-build
      - git-tag
    if: needs.project-metadata.outputs.package_name
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.release_commits_matrix) }}
    runs-on: ubuntu-24.04
    permissions:
      # Allow GitHub's OIDC provider to create a JSON Web Token:
      # https://github.blog/changelog/2023-06-15-github-actions-securing-openid-connect-oidc-token-permissions-in-reusable-workflows/
      # https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings
      id-token: write
      attestations: write
    steps:
      - uses: astral-sh/setup-uv@v7.0.0
      - name: Download build artifacts
        uses: actions/download-artifact@v5.0.0
        id: download
        with:
          name: ${{ github.event.repository.name }}-${{ matrix.short_sha }}
      - name: Generate attestations
        uses: actions/attest-build-provenance@v3.0.0
        with:
          subject-path: "${{ steps.download.outputs.download-path }}/*"
      - name: Push to PyPI
        run: |
          uv --no-progress publish --token "${{ secrets.PYPI_TOKEN }}" "${{ steps.download.outputs.download-path }}/*"
  github-release:
    name: Publish GitHub release
    needs:
      - project-metadata
      - package-build
      - compile-binaries
      - git-tag
    # Make sure this job always starts if git-tag ran and succeeded.
    if: always() && needs.git-tag.result == 'success'
    strategy:
      matrix: ${{ fromJSON(needs.project-metadata.outputs.release_commits_matrix) }}
    runs-on: ubuntu-24.04
    permissions:
      # Allow GitHub's OIDC provider to create a JSON Web Token:
      # https://github.blog/changelog/2023-06-15-github-actions-securing-openid-connect-oidc-token-permissions-in-reusable-workflows/
      # https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings
      id-token: write
      attestations: write
      # Allow projects without WORKFLOW_UPDATE_GITHUB_PAT to create a GitHub release.
      contents: write
    steps:
      - name: Download all artifacts
        # Do not fetch build artifacts if all jobs producing them were skipped.
        if: needs.package-build.result != 'skipped' || needs.compile-binaries.result != 'skipped'
        uses: actions/download-artifact@v5.0.0
        id: artifacts
        with:
          path: release_artifact
          # Only consider artifacts produced by the release commit.
          pattern: "*-${{ matrix.short_sha }}*"
          merge-multiple: true
      - name: Collect all artifacts, rename binaries
        # Do not try to rename artifacts if none have been downloaded.
        if: steps.artifacts.outputs.download-path
        id: collect_artifacts
        shell: python
        run: |
          import json
          import os
          from pathlib import Path
          from random import randint
          download_folder = Path("""${{ steps.artifacts.outputs.download-path }}""")
          nuitka_matrix_json = """${{ needs.project-metadata.outputs.nuitka_matrix }}"""
          binaries = set()
          if nuitka_matrix_json:
              nuitka_matrix = json.loads(nuitka_matrix_json)
              binaries = {entry["bin_name"] for entry in nuitka_matrix["include"] if "bin_name" in entry}
          artifacts_path = []
          for artifact in download_folder.glob("*"):
              print(f"Processing {artifact} ...")
              assert artifact.is_file()
              # Rename binary artifacts to remove the build ID.
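              # E.g. "gha-utils-abc1234.exe" becomes "gha-utils.exe" (hypothetical file name).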
              if artifact.name in binaries:
                  new_name = f'{artifact.stem.split("""-${{ matrix.short_sha }}""", 1)[0]}{artifact.suffix}'
                  new_path = artifact.with_name(new_name)
                  print(f"Renaming {artifact} to {new_path} ...")
                  assert not new_path.exists()
                  artifact.rename(new_path)
                  artifacts_path.append(new_path)
              # Collect other artifacts as-is.
              else:
                  print(f"Collecting {artifact} ...")
                  artifacts_path.append(artifact)
          # Produce a unique delimiter to feed multiline content to GITHUB_OUTPUT:
          # https://github.com/orgs/community/discussions/26288#discussioncomment-3876281
          delimiter = f"ghadelimiter_{randint(10**8, (10**9) - 1)}"
          output = f"artifacts_path<<{delimiter}\n"
          output += "\n".join(str(p) for p in artifacts_path)
          output += f"\n{delimiter}"
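          # The resulting GITHUB_OUTPUT entry looks like (illustrative values):
          #   artifacts_path<<ghadelimiter_123456789
          #   release_artifact/gha-utils.exe
          #   ghadelimiter_123456789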
          env_file = Path(os.getenv("GITHUB_OUTPUT"))
          env_file.write_text(output)
      - name: Generate attestations
        # Do not try to attest artifacts if none have been produced.
        if: steps.collect_artifacts.outputs.artifacts_path
        uses: actions/attest-build-provenance@v3.0.0
        with:
          subject-path: ${{ steps.collect_artifacts.outputs.artifacts_path }}
      - name: Create GitHub release
        uses: softprops/action-gh-release@v2.4.0
        # XXX We need a custom PAT with workflows permissions because tag generation will work, but it will not
        # trigger any other workflows that use `on.push.tags` triggers. See:
        # https://stackoverflow.com/questions/60963759/use-github-actions-to-create-a-tag-but-not-a-release#comment135891921_64479344
        # https://github.com/orgs/community/discussions/27028
        env:
          GITHUB_TOKEN: ${{ secrets.WORKFLOW_UPDATE_GITHUB_PAT || secrets.GITHUB_TOKEN }}
        with:
          tag_name: v${{ matrix.current_version }}
          target_commitish: ${{ matrix.commit }}
          files: ${{ steps.collect_artifacts.outputs.artifacts_path }}
          body: ${{ needs.project-metadata.outputs.release_notes }}