Synchronize CI/CD, Flatbuffers vendoring, and the wamp-ai and wamp-cicd submodules between autobahn-python and zlmdb #322
Workflow file for this run
| name: wheels | |
| on: | |
| # Build wheels on feature branches and PRs (test only) | |
| push: | |
| branches: ["**"] | |
| tags: | |
| - 'v*' | |
| pull_request: | |
| branches: [master] | |
| # Publish to GitHub Releases when merged to master | |
| # Publish to PyPI when tagged | |
| workflow_dispatch: | |
| env: | |
| # Shared uv cache directory, restored/saved by the actions/cache steps below | |
| UV_CACHE_DIR: ${{ github.workspace }}/.uv-cache | |
| jobs: | |
| identifiers: | |
| # GitHub needs to know where .cicd/workflows/identifiers.yml lives at parse time, | |
| # and submodules aren't included in that context! thus the following does NOT work: | |
| # uses: ./.cicd/workflows/identifiers.yml | |
| # we MUST reference the remote repo directly: | |
| uses: wamp-proto/wamp-cicd/.github/workflows/identifiers.yml@main | |
| # IMPORTANT: we still need .cicd as a Git submodule in the using repo though! | |
| # because e.g. identifiers.yml wants to access scripts/sanitize.sh ! | |
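| # For reference, the matching Git submodule declaration looks roughly like the | |
| # following .gitmodules entry (a sketch; path and branch are taken from the | |
| # comments above and should be adjusted to the actual repo layout): | |
| # | |
| # [submodule ".cicd"] | |
| # path = .cicd | |
| # url = https://github.com/wamp-proto/wamp-cicd.git | |
| # branch = main | |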
| build-wheels: | |
| name: Build wheels on ${{ matrix.os }} (${{ matrix.arch }}) | |
| needs: identifiers | |
| runs-on: ${{ matrix.os }} | |
| env: | |
| BASE_REPO: ${{ needs.identifiers.outputs.base_repo }} | |
| BASE_BRANCH: ${{ needs.identifiers.outputs.base_branch }} | |
| PR_NUMBER: ${{ needs.identifiers.outputs.pr_number }} | |
| PR_REPO: ${{ needs.identifiers.outputs.pr_repo }} | |
| PR_BRANCH: ${{ needs.identifiers.outputs.pr_branch }} | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: | |
| # =========================================================== | |
| # ⚠️ IMPORTANT NOTES ABOUT "arch" IN GITHUB ACTIONS ⚠️ | |
| # | |
| # - GitHub Actions DOES NOT respect `arch:` for runner selection. | |
| # - The *only* thing that decides which CPU architecture you get | |
| # is the `runs-on:` label (e.g. ubuntu-24.04 vs ubuntu-24.04-arm64). | |
| # - Any `arch:` key you see in a matrix is just *your own metadata*. | |
| # It has NO effect on which runner is provisioned. ZERO. ZILCH. | |
| # - This is confusing as hell, because many people expect `arch:` | |
| # to actually request AMD64 vs ARM64, but GitHub silently ignores it. | |
| # | |
| # So: we put `arch:` in here purely for naming artifacts or using | |
| # conditional logic in steps. But the "real" architecture is | |
| # locked in by the value of `runs-on: ${{ matrix.os }}` below. | |
| # | |
| # =========================================================== | |
| # =========================================================== | |
| # 🤦‍♂️ IMPORTANT NOTES ABOUT GITHUB ACTIONS RUNNER AVAILABILITY 🤦‍♂️ | |
| # | |
| # I. GitHub's runner availability is... "special": | |
| # | |
| # ✅ ALWAYS AVAILABLE (Fast, < 30 seconds): | |
| # - ubuntu-* (x86_64) → Abundant, instant | |
| # - windows-* (x86_64) → Reliable, quick | |
| # - macos-15 (ARM64) → Apple Silicon, readily available | |
| # | |
| # 🕐 "PLEASE WAIT FOREVER" ZONE (Often > 1 hour waits): | |
| # - ubuntu-*-arm64 → Limited pool, beta status | |
| # - macos-12/13 (Intel) → Legacy hardware, being phased out | |
| # | |
| # WHY THIS HAPPENS: | |
| # 1. GitHub prioritizes current hardware (ARM64 macOS > Intel macOS) | |
| # 2. ARM64 Linux runners are still beta/limited capacity | |
| # 3. Intel Macs are being phased out of GitHub's fleet | |
| # | |
| # II. There is no built-in way in Actions to auto-cancel a job if it | |
| # stays queued too long waiting for a specific runner label. | |
| # | |
| # “Cancel this job if no runner has picked it up after 2 minutes.” | |
| # | |
| # This is another of those “WTF” gaps in Actions. | |
| # =========================================================== | |
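| # One possible workaround for II. (a sketch, NOT enabled here): add a sibling | |
| # job at the top level of "jobs:" that sleeps, then cancels the whole run if | |
| # any job of this run is still queued. The 10-minute budget is an arbitrary | |
| # assumption; it only uses the "gh api" and "gh run cancel" commands that are | |
| # preinstalled on hosted runners, plus "permissions: actions: write". | |
| # | |
| # queue-watchdog: | |
| # runs-on: ubuntu-24.04 | |
| # permissions: | |
| # actions: write | |
| # steps: | |
| # - name: Cancel run if jobs are still queued after 10 minutes | |
| # env: | |
| # GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| # run: | | |
| # sleep 600 | |
| # if gh api "repos/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}/jobs" --jq '.jobs[].status' | grep -q queued; then | |
| # gh run cancel "${GITHUB_RUN_ID}" --repo "${GITHUB_REPOSITORY}" | |
| # fi | |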
| # --- Linux --- | |
| - os: ubuntu-24.04 # ✅ GitHub-hosted Linux x86_64 (most common, always fast) | |
| platform: linux | |
| arch: x86_64 | |
| # --- macOS --- | |
| - os: macos-15 # ✅ GitHub-hosted macOS Apple Silicon (current Macs, fast) | |
| platform: macos | |
| arch: arm64 | |
| # --- Windows --- | |
| - os: windows-2022 # ✅ GitHub-hosted Windows x86_64 (mostly fast) | |
| platform: windows | |
| arch: x86_64 | |
| # --- Linux --- | |
| # - os: ubuntu-24.04-arm64 # 🕐 Linux ARM64 (servers/edge, often waits forever) | |
| # platform: linux | |
| # arch: arm64 | |
| # --- macOS --- | |
| # - os: macos-12 # 🕐 Intel macOS (legacy users, increasingly scarce, often waits forever) | |
| # platform: macos | |
| # arch: x86_64 | |
| # --- Windows --- | |
| # ⚠️ GitHub does NOT provide Windows ARM64 hosted runners. | |
| # If you want Windows ARM64 builds, you must either: | |
| # - run a self-hosted Windows ARM64 runner, OR | |
| # - cross-compile from AMD64 to ARM64 inside the workflow. | |
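| # For completeness, a hypothetical self-hosted entry would look like this (the | |
| # runner label "win11-arm64-selfhosted" is made up; it must match whatever | |
| # label the self-hosted runner was registered with): | |
| # - os: win11-arm64-selfhosted | |
| # platform: windows | |
| # arch: arm64 | |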
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: recursive | |
| # we use the standard upstream installation on non-broken platforms. | |
| - name: Install Just (Linux/macOS) | |
| if: runner.os != 'Windows' | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- --to ~/bin | |
| echo "$HOME/bin" >> $GITHUB_PATH | |
| # we need to use this install wrapper on inherently broken platforms (windows/powershell). | |
| - name: Install Just (Windows) | |
| if: runner.os == 'Windows' | |
| uses: extractions/setup-just@v3 | |
| with: | |
| just-version: "1.42.3" | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| # we use the standard upstream installation on non-broken platforms. | |
| - name: Install uv (Linux/macOS) | |
| if: runner.os != 'Windows' | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| curl -LsSf https://astral.sh/uv/install.sh | sh | |
| source $HOME/.cargo/env | |
| echo "$HOME/.cargo/bin" >> $GITHUB_PATH | |
| # we need to use this install wrapper on inherently broken platforms (windows/powershell). | |
| - name: Install uv (Windows) | |
| if: runner.os == 'Windows' | |
| uses: astral-sh/setup-uv@v6 | |
| with: | |
| version: "0.7.19" | |
| enable-cache: true | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Verify toolchain installation (Linux/macOS) | |
| if: runner.os != 'Windows' | |
| run: | | |
| just --version | |
| uv --version | |
| shell: bash | |
| - name: Verify toolchain installation (Windows) | |
| if: runner.os == 'Windows' | |
| run: | | |
| just --version | |
| uv --version | |
| shell: pwsh | |
| - name: Setup uv cache | |
| # skip broken platform ("Error: The template is not valid. .github/workflows/wheels.yml (Line: 182, Col: 13): | |
| # hashFiles('pyproject.toml') failed. Fail to hash files under directory '/Users/runner/work/autobahn-python/autobahn-python'") | |
| if: matrix.platform != 'macos' | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ env.UV_CACHE_DIR }} | |
| key: | |
| uv-cache-${{ matrix.platform }}-${{ matrix.arch }}-${{ hashFiles('pyproject.toml') }} | |
| restore-keys: | | |
| uv-cache-${{ matrix.platform }}-${{ matrix.arch }}- | |
| uv-cache-${{ matrix.platform }}- | |
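| # A possible macOS workaround (a sketch, not enabled): compute the content hash | |
| # in a shell step instead of hashFiles(), then feed it into actions/cache. The | |
| # step id and output name below are made up for illustration: | |
| # | |
| # - name: Compute uv cache key (macOS) | |
| # if: matrix.platform == 'macos' | |
| # id: uv-cache-key | |
| # run: echo "hash=$(shasum -a 256 pyproject.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT" | |
| # shell: bash | |
| # - name: Setup uv cache (macOS) | |
| # if: matrix.platform == 'macos' | |
| # uses: actions/cache@v4 | |
| # with: | |
| # path: ${{ env.UV_CACHE_DIR }} | |
| # key: uv-cache-${{ matrix.platform }}-${{ matrix.arch }}-${{ steps.uv-cache-key.outputs.hash }} | |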
| - name: Build pure Python wheels (Linux only) | |
| if: matrix.platform == 'linux' | |
| run: | | |
| # Build pure Python wheels WITHOUT NVX acceleration | |
| # This provides maximum compatibility across Linux distributions | |
| export AUTOBAHN_USE_NVX=0 | |
| just build-all | |
| shell: bash | |
| - name: Build source distribution (Linux x86_64 only) | |
| if: matrix.platform == 'linux' && matrix.arch == 'x86_64' | |
| run: | | |
| # Build source distribution (only once, on Linux x86_64) | |
| # Source distributions are platform-independent | |
| just build-sourcedist | |
| shell: bash | |
| - name: Force file system sync (post-build, pre-validation) - Linux | |
| if: matrix.platform == 'linux' | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Forcing File System Sync (Post-Build)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Flushing all file system buffers to disk to ensure wheels are fully" | |
| echo "written before validation and checksumming." | |
| echo "" | |
| sync | |
| echo "✅ All buffers flushed to disk" | |
| echo "" | |
| shell: bash | |
| - name: Validate wheels integrity (Linux only) | |
| if: matrix.platform == 'linux' | |
| run: | | |
| set -o pipefail | |
| echo "======================================================================" | |
| echo "==> Validating Wheel Integrity (Fail Fast)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Installing twine for validation..." | |
| # Use --break-system-packages for consistency (safe in CI) | |
| python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git | |
| python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git | |
| echo "" | |
| echo "==> Validation environment:" | |
| echo "Python: $(python3 --version)" | |
| echo "twine: $(twine --version)" | |
| echo "" | |
| # Initialize validation output file | |
| VALIDATION_FILE="dist/VALIDATION.txt" | |
| echo "Wheel Validation Results - Build Time (wheels workflow)" > "$VALIDATION_FILE" | |
| echo "=========================================================" >> "$VALIDATION_FILE" | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "Validation Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> "$VALIDATION_FILE" | |
| echo "Platform: Linux (pure Python, no NVX)" >> "$VALIDATION_FILE" | |
| echo "Python: $(python3 --version)" >> "$VALIDATION_FILE" | |
| echo "twine: $(twine --version)" >> "$VALIDATION_FILE" | |
| echo "" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=0 | |
| for wheel in dist/*.whl; do | |
| if [ ! -f "$wheel" ]; then | |
| echo "⚠️ No wheels found in dist/" | |
| continue | |
| fi | |
| WHEEL_NAME=$(basename "$wheel") | |
| echo "==> Validating: $WHEEL_NAME" | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "Wheel: $WHEEL_NAME" >> "$VALIDATION_FILE" | |
| echo "---" >> "$VALIDATION_FILE" | |
| # Test 1: ZIP integrity | |
| echo " [1/3] ZIP integrity test..." | |
| if unzip -t "$wheel" > /dev/null 2>&1; then | |
| echo " ✅ ZIP test PASS" | |
| echo " ZIP test: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ ZIP test FAIL - wheel is corrupted!" | |
| echo " ZIP test: FAIL - wheel is corrupted!" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| # Test 2: Python zipfile module | |
| echo " [2/3] Python zipfile test..." | |
| if python3 -m zipfile -t "$wheel" > /dev/null 2>&1; then | |
| echo " ✅ Python zipfile test PASS" | |
| echo " Python zipfile test: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ Python zipfile test FAIL - wheel is corrupted!" | |
| echo " Python zipfile test: FAIL - wheel is corrupted!" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| # Test 3: twine check | |
| echo " [3/3] Twine validation..." | |
| twine check "$wheel" 2>&1 | tee /tmp/twine_output.txt | |
| TWINE_EXIT=${PIPESTATUS[0]} | |
| if [ "$TWINE_EXIT" -eq 0 ] && ! grep -Eqi "ERROR|FAILED|InvalidDistribution" /tmp/twine_output.txt; then | |
| echo " ✅ Twine check PASS" | |
| echo " Twine check: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ Twine check FAIL" | |
| cat /tmp/twine_output.txt | |
| echo " Twine check: FAIL" >> "$VALIDATION_FILE" | |
| cat /tmp/twine_output.txt >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| rm -f /tmp/twine_output.txt | |
| echo "" | |
| done | |
| if [ $HAS_ERRORS -eq 1 ]; then | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "RESULT: VALIDATION FAILED" >> "$VALIDATION_FILE" | |
| echo "======================================================================" | |
| echo "❌ WHEEL VALIDATION FAILED" | |
| echo "======================================================================" | |
| echo "" | |
| echo "DO NOT PROCEED - corrupted wheels must NOT become artifacts!" | |
| exit 1 | |
| else | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "RESULT: ALL VALIDATIONS PASSED" >> "$VALIDATION_FILE" | |
| echo "======================================================================" | |
| echo "✅ All wheels validated successfully" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Validation results written to: $VALIDATION_FILE" | |
| fi | |
| shell: bash | |
| - name: Generate SHA256 checksums (Linux only) | |
| if: matrix.platform == 'linux' | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Generating SHA256 Checksums for Chain of Custody" | |
| echo "======================================================================" | |
| echo "" | |
| echo "OpenSSL version:" | |
| openssl version | |
| echo "" | |
| # Force sync before checksumming | |
| echo "Forcing sync before checksumming..." | |
| sync | |
| echo "✅ Buffers flushed" | |
| echo "" | |
| # Change to dist directory to generate relative paths | |
| cd dist | |
| CHECKSUM_FILE="CHECKSUMS.sha256" | |
| # Generate checksums for all wheels | |
| echo "Generating checksums for wheels..." | |
| for wheel in *.whl; do | |
| if [ -f "$wheel" ]; then | |
| sync | |
| openssl sha256 "$wheel" | tee -a "$CHECKSUM_FILE" | |
| fi | |
| done | |
| echo "" | |
| echo "==> Generated checksum file:" | |
| cat "$CHECKSUM_FILE" | |
| echo "" | |
| cd .. | |
| shell: bash | |
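| # Downstream consumers can verify the OpenSSL-format CHECKSUMS.sha256 produced | |
| # above with a small loop like this (a sketch, run from inside the downloaded | |
| # dist/ directory; it assumes filenames contain no spaces): | |
| # | |
| # while IFS= read -r line; do | |
| # file=$(echo "$line" | sed -E 's/^SHA256\((.*)\)=.*/\1/') | |
| # expected=$(echo "$line" | awk '{print $NF}') | |
| # actual=$(openssl sha256 "$file" | awk '{print $NF}') | |
| # [ "$expected" = "$actual" ] || { echo "MISMATCH: $file"; exit 1; } | |
| # done < CHECKSUMS.sha256 | |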
| - name: Force file system sync (post-checksum, pre-upload) - Linux | |
| if: matrix.platform == 'linux' | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Forcing File System Sync (Post-Checksum)" | |
| echo "======================================================================" | |
| echo "" | |
| sync | |
| echo "✅ All buffers flushed to disk" | |
| echo "" | |
| shell: bash | |
| - name: Build binary wheels with NVX (macOS) | |
| if: matrix.platform == 'macos' | |
| run: | | |
| # Build binary wheels WITH NVX acceleration for macOS | |
| export AUTOBAHN_USE_NVX=1 | |
| just build-all | |
| shell: bash | |
| - name: Force file system sync (post-build, pre-validation) - macOS | |
| if: matrix.platform == 'macos' | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Forcing File System Sync (Post-Build)" | |
| echo "======================================================================" | |
| echo "" | |
| sync | |
| echo "✅ All buffers flushed to disk" | |
| echo "" | |
| shell: bash | |
| - name: Validate wheels integrity (macOS only) | |
| if: matrix.platform == 'macos' | |
| run: | | |
| set -o pipefail | |
| echo "======================================================================" | |
| echo "==> Validating Wheel Integrity (macOS)" | |
| echo "======================================================================" | |
| echo "" | |
| # Use --break-system-packages since this is an ephemeral CI runner | |
| python3 -m pip install --break-system-packages git+https://github.com/pypa/packaging.git | |
| python3 -m pip install --break-system-packages git+https://github.com/pypa/twine.git | |
| echo "" | |
| VALIDATION_FILE="dist/VALIDATION.txt" | |
| echo "Wheel Validation Results - Build Time (wheels workflow)" > "$VALIDATION_FILE" | |
| echo "=========================================================" >> "$VALIDATION_FILE" | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "Validation Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> "$VALIDATION_FILE" | |
| echo "Platform: macOS ARM64 (binary with NVX)" >> "$VALIDATION_FILE" | |
| echo "Python: $(python3 --version)" >> "$VALIDATION_FILE" | |
| echo "twine: $(twine --version)" >> "$VALIDATION_FILE" | |
| echo "" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=0 | |
| for wheel in dist/*.whl; do | |
| [ -f "$wheel" ] || continue | |
| WHEEL_NAME=$(basename "$wheel") | |
| echo "==> Validating: $WHEEL_NAME" | |
| echo "" >> "$VALIDATION_FILE" | |
| echo "Wheel: $WHEEL_NAME" >> "$VALIDATION_FILE" | |
| echo "---" >> "$VALIDATION_FILE" | |
| if unzip -t "$wheel" > /dev/null 2>&1; then | |
| echo " ✅ ZIP test PASS" | |
| echo " ZIP test: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ ZIP test FAIL" | |
| echo " ZIP test: FAIL" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| if python3 -m zipfile -t "$wheel" > /dev/null 2>&1; then | |
| echo " ✅ Python zipfile test PASS" | |
| echo " Python zipfile test: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ Python zipfile test FAIL" | |
| echo " Python zipfile test: FAIL" >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| twine check "$wheel" 2>&1 | tee /tmp/twine_output.txt | |
| if [ ${PIPESTATUS[0]} -eq 0 ] && ! grep -Eqi "ERROR|FAILED" /tmp/twine_output.txt; then | |
| echo " ✅ Twine check PASS" | |
| echo " Twine check: PASS" >> "$VALIDATION_FILE" | |
| else | |
| echo " ❌ Twine check FAIL" | |
| echo " Twine check: FAIL" >> "$VALIDATION_FILE" | |
| cat /tmp/twine_output.txt >> "$VALIDATION_FILE" | |
| HAS_ERRORS=1 | |
| fi | |
| rm -f /tmp/twine_output.txt | |
| done | |
| if [ $HAS_ERRORS -eq 1 ]; then | |
| echo "RESULT: VALIDATION FAILED" >> "$VALIDATION_FILE" | |
| echo "❌ WHEEL VALIDATION FAILED" | |
| exit 1 | |
| else | |
| echo "RESULT: ALL VALIDATIONS PASSED" >> "$VALIDATION_FILE" | |
| echo "✅ All wheels validated successfully" | |
| fi | |
| shell: bash | |
| - name: Generate SHA256 checksums (macOS only) | |
| if: matrix.platform == 'macos' | |
| run: | | |
| echo "==> Generating SHA256 checksums..." | |
| sync | |
| cd dist | |
| for wheel in *.whl; do | |
| [ -f "$wheel" ] && openssl sha256 "$wheel" | tee -a CHECKSUMS.sha256 | |
| done | |
| cat CHECKSUMS.sha256 | |
| cd .. | |
| shell: bash | |
| - name: Force file system sync (post-checksum) - macOS | |
| if: matrix.platform == 'macos' | |
| run: sync | |
| shell: bash | |
| - name: Build binary wheels with NVX (Windows) | |
| if: matrix.platform == 'windows' | |
| run: | | |
| # Build binary wheels WITH NVX acceleration for Windows | |
| $env:AUTOBAHN_USE_NVX = "1" | |
| just build-all | |
| shell: pwsh | |
| - name: Validate wheels integrity (Windows only) | |
| if: matrix.platform == 'windows' | |
| run: | | |
| Write-Host "======================================================================" | |
| Write-Host "==> Validating Wheel Integrity (Windows)" | |
| Write-Host "======================================================================" | |
| Write-Host "" | |
| # Use --break-system-packages for consistency (safe in CI) | |
| python -m pip install --break-system-packages git+https://github.com/pypa/packaging.git | |
| python -m pip install --break-system-packages git+https://github.com/pypa/twine.git | |
| Write-Host "" | |
| $validationFile = "dist\VALIDATION.txt" | |
| "Wheel Validation Results - Build Time (wheels workflow)" | Out-File -FilePath $validationFile -Encoding UTF8 | |
| "=========================================================" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "Validation Date: $((Get-Date).ToUniversalTime().ToString('yyyy-MM-dd HH:mm:ss UTC'))" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "Platform: Windows x86_64 (binary with NVX)" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "Python: $(python --version)" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "twine: $(twine --version)" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| $hasErrors = $false | |
| Get-ChildItem dist\*.whl | ForEach-Object { | |
| $wheelName = $_.Name | |
| Write-Host "==> Validating: $wheelName" | |
| "" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "Wheel: $wheelName" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| "---" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| # Test 1: ZIP integrity (using Expand-Archive test) | |
| try { | |
| [System.IO.Compression.ZipFile]::OpenRead($_.FullName).Dispose() | |
| Write-Host " ✅ ZIP test PASS" | |
| " ZIP test: PASS" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| } catch { | |
| Write-Host " ❌ ZIP test FAIL" | |
| " ZIP test: FAIL" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| $hasErrors = $true | |
| } | |
| # Test 2: Python zipfile module | |
| $zipfileTest = python -m zipfile -t $_.FullName 2>&1 | |
| if ($LASTEXITCODE -eq 0) { | |
| Write-Host " ✅ Python zipfile test PASS" | |
| " Python zipfile test: PASS" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| } else { | |
| Write-Host " ❌ Python zipfile test FAIL" | |
| " Python zipfile test: FAIL" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| $hasErrors = $true | |
| } | |
| # Test 3: twine check | |
| $twineOutput = twine check $_.FullName 2>&1 | Out-String | |
| if ($LASTEXITCODE -eq 0 -and $twineOutput -notmatch "ERROR|FAILED") { | |
| Write-Host " ✅ Twine check PASS" | |
| " Twine check: PASS" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| } else { | |
| Write-Host " ❌ Twine check FAIL" | |
| " Twine check: FAIL" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| $twineOutput | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| $hasErrors = $true | |
| } | |
| } | |
| if ($hasErrors) { | |
| "RESULT: VALIDATION FAILED" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| Write-Host "❌ WHEEL VALIDATION FAILED" | |
| exit 1 | |
| } else { | |
| "RESULT: ALL VALIDATIONS PASSED" | Out-File -FilePath $validationFile -Append -Encoding UTF8 | |
| Write-Host "✅ All wheels validated successfully" | |
| } | |
| shell: pwsh | |
| - name: Generate SHA256 checksums (Windows only) | |
| if: matrix.platform == 'windows' | |
| run: | | |
| Write-Host "==> Generating SHA256 checksums..." | |
| Set-Location dist | |
| $checksumFile = "CHECKSUMS.sha256" | |
| Get-ChildItem *.whl | ForEach-Object { | |
| $hash = (Get-FileHash -Algorithm SHA256 $_.Name).Hash.ToLower() | |
| $line = "SHA256($($_.Name))= $hash" | |
| Write-Host $line | |
| $line | Out-File -FilePath $checksumFile -Append -Encoding UTF8 | |
| } | |
| Get-Content $checksumFile | |
| Set-Location .. | |
| shell: pwsh | |
| - name: List built artifacts (Linux/macOS) | |
| if: runner.os != 'Windows' | |
| run: | | |
| echo "Built wheels:" | |
| ls -la dist/ | |
| shell: bash | |
| - name: List built artifacts (Windows) | |
| if: runner.os == 'Windows' | |
| run: | | |
| Write-Host "Built wheels:" | |
| Get-ChildItem dist | |
| shell: pwsh | |
| - name: Upload wheel artifacts with cryptographic verification | |
| uses: wamp-proto/wamp-cicd/actions/upload-artifact-verified@main | |
| with: | |
| name: wheels-${{ matrix.platform }}-${{ matrix.arch }} | |
| path: ${{ github.workspace }}/dist/ | |
| retention-days: 30 | |
| - name: Verify wheels built without NVX (Linux x86_64 only) | |
| if: matrix.platform == 'linux' && matrix.arch == 'x86_64' | |
| run: | | |
| # Verify that wheels were built without NVX acceleration | |
| echo "==> Wheels built without NVX (should be pure Python or at least NVX-free):" | |
| ls -la dist/*.whl || echo "No wheels found" | |
| echo "" | |
| echo "==> Source distribution:" | |
| ls -la dist/*.tar.gz || echo "No source dist found" | |
| echo "" | |
| echo "==> Wheel count: $(ls dist/*.whl 2>/dev/null | wc -l)" | |
| shell: bash | |
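| # The step above only lists files; a stricter check would assert that no | |
| # compiled extension modules ended up in the wheels (a sketch, assuming NVX is | |
| # the only C extension that could have been built): | |
| # | |
| # for wheel in dist/*.whl; do | |
| # if unzip -l "$wheel" | grep -Eq '\.(so|pyd)$'; then | |
| # echo "❌ $wheel contains compiled extensions but should be pure Python"; exit 1 | |
| # fi | |
| # done | |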
| - name: Verify source distribution integrity (Linux x86_64 only) | |
| if: matrix.platform == 'linux' && matrix.arch == 'x86_64' | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Source Distribution Integrity Verification (Issue #1716)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "OpenSSL version:" | |
| openssl version | |
| echo "" | |
| echo "Creating cryptographic fingerprints and verification logs for" | |
| echo "supply chain integrity - ensures no corruption from build → artifact → release." | |
| echo "" | |
| for tarball in dist/*.tar.gz; do | |
| if [ ! -f "$tarball" ]; then | |
| echo "⚠️ No source distribution found in dist/" | |
| continue | |
| fi | |
| BASENAME=$(basename "$tarball") | |
| VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt" | |
| echo "==> Verifying: $BASENAME" | |
| echo "" | |
| # Create verification report | |
| { | |
| echo "========================================================================" | |
| echo "Source Distribution Integrity Verification Report" | |
| echo "========================================================================" | |
| echo "" | |
| echo "Verification performed at origin (wheels workflow)" | |
| echo "This report ensures artifact integrity through the entire pipeline." | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Metadata" | |
| echo "------------------------------------------------------------------------" | |
| echo "Filename: $BASENAME" | |
| echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" | |
| echo "Workflow: ${{ github.workflow }}" | |
| echo "Run ID: ${{ github.run_id }}" | |
| echo "Runner OS: ${{ runner.os }}" | |
| echo "Runner Arch: ${{ runner.arch }}" | |
| echo "Commit SHA: ${{ github.sha }}" | |
| echo "Ref: ${{ github.ref }}" | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "File Properties" | |
| echo "------------------------------------------------------------------------" | |
| ls -lh "$tarball" | |
| echo "" | |
| echo "File size (bytes): $(stat -c%s "$tarball" 2>/dev/null || stat -f%z "$tarball")" | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Cryptographic Fingerprint (SHA256)" | |
| echo "------------------------------------------------------------------------" | |
| openssl sha256 "$tarball" | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Gzip Integrity Test" | |
| echo "------------------------------------------------------------------------" | |
| if gzip -tv "$tarball"; then | |
| echo "" | |
| echo "Gzip exit code: 0" | |
| echo "Result: ✅ PASS (no trailing garbage)" | |
| else | |
| GZIP_EXIT=$? | |
| echo "" | |
| echo "Gzip exit code: $GZIP_EXIT" | |
| echo "Result: ❌ FAIL" | |
| exit 1 | |
| fi | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Tar Extraction Test" | |
| echo "------------------------------------------------------------------------" | |
| echo "Testing with GNU tar (lists contents without extracting):" | |
| if tar -tzf "$tarball" > /tmp/tar_contents.txt 2>&1; then | |
| echo "Result: ✅ PASS (tar can read archive)" | |
| echo "File count: $(wc -l < /tmp/tar_contents.txt)" | |
| echo "" | |
| echo "First 10 files:" | |
| head -10 /tmp/tar_contents.txt | |
| rm -f /tmp/tar_contents.txt | |
| else | |
| TAR_EXIT=$? | |
| echo "Result: ❌ FAIL (tar exit code $TAR_EXIT)" | |
| cat /tmp/tar_contents.txt || true | |
| rm -f /tmp/tar_contents.txt | |
| exit 1 | |
| fi | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Binary Analysis" | |
| echo "------------------------------------------------------------------------" | |
| echo "Last 100 bytes (hex dump) - checking for trailing garbage:" | |
| tail -c 100 "$tarball" | hexdump -C | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Verification Summary" | |
| echo "------------------------------------------------------------------------" | |
| echo "Overall Result: ✅ PASS - Archive is valid and extractable" | |
| echo "" | |
| echo "========================================================================" | |
| echo "End of Verification Report" | |
| echo "========================================================================" | |
| } | tee "$VERIFY_FILE" | |
| # Run actual validation tests and fail immediately on error | |
| echo "" | |
| echo "==> Running integrity checks..." | |
| # Check for trailing garbage in gzip | |
| GZIP_OUTPUT=$(gzip -tv "$tarball" 2>&1) | |
| GZIP_EXIT_CODE=$? | |
| if echo "$GZIP_OUTPUT" | grep -qi "trailing garbage"; then | |
| echo "" | |
| echo "❌ ERROR: Source distribution has trailing garbage!" | |
| echo "This will cause tar extraction failures for users." | |
| echo "See $VERIFY_FILE for full details." | |
| echo "" | |
| exit 1 | |
| fi | |
| if [ $GZIP_EXIT_CODE -ne 0 ]; then | |
| echo "" | |
| echo "❌ ERROR: gzip test failed with exit code $GZIP_EXIT_CODE" | |
| echo "See $VERIFY_FILE for full details." | |
| echo "" | |
| exit 1 | |
| fi | |
| # Check tar extraction | |
| tar -tzf "$tarball" > /dev/null 2>&1 | |
| TAR_EXIT=$? | |
| if [ $TAR_EXIT -ne 0 ]; then | |
| echo "" | |
| echo "❌ ERROR: tar extraction test failed with exit code $TAR_EXIT" | |
| echo "See $VERIFY_FILE for full details." | |
| echo "" | |
| exit 1 | |
| fi | |
| echo "✅ $BASENAME verified successfully" | |
| echo " Verification report: $VERIFY_FILE" | |
| echo "" | |
| done | |
| echo "======================================================================" | |
| echo "✅ All source distributions verified successfully" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Verification reports created:" | |
| ls -lh dist/*.verify.txt | |
| shell: bash | |
| - name: Upload source distribution with cryptographic verification (Linux x86_64 only) | |
| if: matrix.platform == 'linux' && matrix.arch == 'x86_64' | |
| uses: wamp-proto/wamp-cicd/actions/upload-artifact-verified@main | |
| with: | |
| name: source-distribution | |
| path: ${{ github.workspace }}/dist/ | |
| retention-days: 30 | |
| - name: Upload Linux wheels without NVX with cryptographic verification (Linux x86_64 only) | |
| if: matrix.platform == 'linux' && matrix.arch == 'x86_64' | |
| uses: wamp-proto/wamp-cicd/actions/upload-artifact-verified@main | |
| with: | |
| name: linux-wheels-no-nvx | |
| path: ${{ github.workspace }}/dist/ | |
| retention-days: 30 | |
| # GitHub Releases, PyPI, and RTD publishing are now handled by the centralized 'release' workflow |