From f58c94dfd5c83bd2e5c46a4218dc4d9a7699cc72 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Fri, 24 Oct 2025 16:13:43 +0200 Subject: [PATCH 01/32] Squash: combined commits 9c8ebec..3591d1d0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary of included commits: 3591d1d03 Denis Jelovina 2025-10-24 Squash: Combined changes from 9f634c7..HEAD — merge all temp commit 9f634c722 Denis Jelovina 2025-10-22 chore(pyalp): remove vendored pybind11 and unused CMake; add cibuildwheel workflow and config 0f9840c01 Denis Jelovina 2025-10-22 pyalp packaging and CI: bundle prebuilt bindings, setup pinned pybind11, and add wheel smoke-test 1e4cd6eb8 Denis Jelovina 2025-10-22 Remove deprecated Python bindings and related files a7c9900b4 Denis Jelovina 2025-10-22 Convert pybind11 to submodule pinned at 8d503e30be400ad431d3d140707803e87e75fad7 3b9e5b587 Petros Anastasiadis 2025-09-30 Removed numpy2alp older stuff - left .cpp ffr 1403a2a11 Petros Anastasiadis 2025-09-30 Added installation instructions for python part 95d51a889 Denis Jelovina 2025-06-02 Add doccumentation for test.py 58aae6aae Denis Jelovina 2025-05-26 compile messages 6ba4c3ee0 Denis Jelovina 2025-05-26 add submodule 03a00d453 Denis Jelovina 2025-05-26 disable pyalp by defualt a471cddd6 Denis Jelovina 2025-05-26 add pybind11 submodule 8a4ade952 Denis Jelovina 2025-05-26 refactor code add support for multiple types 68dea3daa Denis Jelovina 2025-05-23 add conjugate gradient 427594b5c Denis Jelovina 2025-05-23 add numpy to c++ examples Diffstat: .github/workflows/pyalp-ci.yml | 169 +++++++++++++++++++++++++ .github/workflows/pyalp-publish.yml | 175 ++++++++++++++++++++++++++ .gitignore | 3 +- .gitmodules | 3 + CMakeLists.txt | 22 +++- cmake/CompileFlags.cmake | 20 ++- pyalp/CMakeLists.txt | 1 + pyalp/PINNED_PYBIND11 | 1 + pyalp/extern/pybind11 | 1 + pyalp/pyproject.toml | 30 +++++ pyalp/setup.py | 114 +++++++++++++++++ pyalp/src/CMakeLists.txt | 111 ++++++++++++++++ 
pyalp/src/conjugate_gradient.hpp | 109 ++++++++++++++++ pyalp/src/matrix_wrappers.hpp | 115 +++++++++++++++++ pyalp/src/numpy2alp.cpp | 57 +++++++++ pyalp/src/pyalp.cpp | 84 +++++++++++++ pyalp/src/pyalp.egg-info/PKG-INFO | 65 ++++++++++ pyalp/src/pyalp.egg-info/SOURCES.txt | 12 ++ pyalp/src/pyalp.egg-info/dependency_links.txt | 1 + pyalp/src/pyalp.egg-info/requires.txt | 1 + pyalp/src/pyalp.egg-info/top_level.txt | 1 + pyalp/src/pyalp/CMakeLists.txt | 16 +++ pyalp/src/pyalp/__init__.py | 41 ++++++ pyalp/src/pyalp/bindings.cpp | 56 +++++++++ pyalp/src/pyalp/your_module.py | 4 + pyalp/src/python2alp.cpp | 9 ++ pyalp/src/utils.hpp | 20 +++ pyalp/src/vector_wrappers.hpp | 65 ++++++++++ pyproject.toml | 4 + setup.py | 10 ++ tests/CMakeLists.txt | 4 + tests/python/CMakeLists.txt | 23 ++++ tests/python/numpy_array_print.py | 5 + tests/python/test.py | 65 ++++++++++ tools/make_wheel_from_so.py | 109 ++++++++++++++++ tools/smoke_test_pyalp.py | 66 ++++++++++ 36 files changed, 1589 insertions(+), 3 deletions(-) --- .github/workflows/pyalp-ci.yml | 169 +++++++++++++++++ .github/workflows/pyalp-publish.yml | 175 ++++++++++++++++++ .gitignore | 3 +- .gitmodules | 3 + CMakeLists.txt | 22 ++- cmake/CompileFlags.cmake | 20 +- pyalp/CMakeLists.txt | 1 + pyalp/PINNED_PYBIND11 | 1 + pyalp/extern/pybind11 | 1 + pyalp/pyproject.toml | 30 +++ pyalp/setup.py | 114 ++++++++++++ pyalp/src/CMakeLists.txt | 111 +++++++++++ pyalp/src/conjugate_gradient.hpp | 109 +++++++++++ pyalp/src/matrix_wrappers.hpp | 115 ++++++++++++ pyalp/src/numpy2alp.cpp | 57 ++++++ pyalp/src/pyalp.cpp | 84 +++++++++ pyalp/src/pyalp.egg-info/PKG-INFO | 65 +++++++ pyalp/src/pyalp.egg-info/SOURCES.txt | 12 ++ pyalp/src/pyalp.egg-info/dependency_links.txt | 1 + pyalp/src/pyalp.egg-info/requires.txt | 1 + pyalp/src/pyalp.egg-info/top_level.txt | 1 + pyalp/src/pyalp/CMakeLists.txt | 16 ++ pyalp/src/pyalp/__init__.py | 41 ++++ pyalp/src/pyalp/bindings.cpp | 56 ++++++ pyalp/src/pyalp/your_module.py | 4 + 
pyalp/src/python2alp.cpp | 9 + pyalp/src/utils.hpp | 20 ++ pyalp/src/vector_wrappers.hpp | 65 +++++++ pyproject.toml | 4 + setup.py | 10 + tests/CMakeLists.txt | 4 + tests/python/CMakeLists.txt | 23 +++ tests/python/numpy_array_print.py | 5 + tests/python/test.py | 65 +++++++ tools/make_wheel_from_so.py | 109 +++++++++++ tools/smoke_test_pyalp.py | 66 +++++++ 36 files changed, 1589 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/pyalp-ci.yml create mode 100644 .github/workflows/pyalp-publish.yml create mode 100644 .gitmodules create mode 100644 pyalp/CMakeLists.txt create mode 100644 pyalp/PINNED_PYBIND11 create mode 160000 pyalp/extern/pybind11 create mode 100644 pyalp/pyproject.toml create mode 100644 pyalp/setup.py create mode 100644 pyalp/src/CMakeLists.txt create mode 100644 pyalp/src/conjugate_gradient.hpp create mode 100644 pyalp/src/matrix_wrappers.hpp create mode 100644 pyalp/src/numpy2alp.cpp create mode 100644 pyalp/src/pyalp.cpp create mode 100644 pyalp/src/pyalp.egg-info/PKG-INFO create mode 100644 pyalp/src/pyalp.egg-info/SOURCES.txt create mode 100644 pyalp/src/pyalp.egg-info/dependency_links.txt create mode 100644 pyalp/src/pyalp.egg-info/requires.txt create mode 100644 pyalp/src/pyalp.egg-info/top_level.txt create mode 100644 pyalp/src/pyalp/CMakeLists.txt create mode 100644 pyalp/src/pyalp/__init__.py create mode 100644 pyalp/src/pyalp/bindings.cpp create mode 100644 pyalp/src/pyalp/your_module.py create mode 100644 pyalp/src/python2alp.cpp create mode 100644 pyalp/src/utils.hpp create mode 100644 pyalp/src/vector_wrappers.hpp create mode 100644 pyproject.toml create mode 100644 setup.py create mode 100644 tests/python/CMakeLists.txt create mode 100644 tests/python/numpy_array_print.py create mode 100644 tests/python/test.py create mode 100755 tools/make_wheel_from_so.py create mode 100755 tools/smoke_test_pyalp.py diff --git a/.github/workflows/pyalp-ci.yml b/.github/workflows/pyalp-ci.yml new file mode 100644 index 
000000000..6c2407833 --- /dev/null +++ b/.github/workflows/pyalp-ci.yml @@ -0,0 +1,169 @@ +name: pyalp CI + +# Run only on pushes that create tags starting with 'pyalp' +on: + push: + tags: [ 'pyalp*' ] + +jobs: + build-bindings: + name: Build C++ bindings + runs-on: ubuntu-latest + steps: + - name: Checkout (with submodules) + uses: actions/checkout@v4 + with: + submodules: 'recursive' + fetch-depth: 0 + + - name: Verify pinned pybind11 submodule commit + # Fail early if the checked-out pybind11 is not the pinned commit + run: | + set -euo pipefail + # Prefer top-level pinned file so it survives moves; fallback to submodule path + if [ -f pyalp/PINNED_PYBIND11 ]; + then + PINNED_SHA=$(cat pyalp/PINNED_PYBIND11 | tr -d '\n') + elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; + then + PINNED_SHA=$(cat pyalp/extern/pybind11/PINNED_COMMIT | tr -d '\n') + else + echo "No pinned commit file found (tried pyalp/PINNED_PYBIND11 and pyalp/extern/pybind11/PINNED_COMMIT)" >&2 + exit 2 + fi + echo "Expected pybind11 commit: $PINNED_SHA" + ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || true) + echo "Found pybind11 commit: $ACTUAL" + if [ "$ACTUAL" != "$PINNED_SHA" ]; + then + echo "ERROR: pybind11 submodule commit does not match pinned commit" >&2 + exit 2 + fi + + - name: Install build dependencies + run: | + set -euo pipefail + sudo apt-get update + # libnuma-dev provides NUMA headers/libraries needed by FindNuma.cmake + sudo apt-get install -y build-essential cmake ninja-build pkg-config python3-venv python3-dev python3-pip libnuma-dev + + - name: Configure and build pyalp bindings + run: | + set -euo pipefail + mkdir -p build_alp + cmake -S . 
-B build_alp -DENABLE_PYALP=ON + # Only attempt to build pyalp targets if the pyalp CMake directory exists + if [ -f pyalp/CMakeLists.txt ]; + then + echo "pyalp CMakeLists found — building pyalp targets" + cmake --build build_alp --target pyalp_ref -- -j || true + cmake --build build_alp --target pyalp_omp -- -j || true + else + echo "pyalp directory or CMakeLists not present — skipping pyalp targets" + fi + + - name: Find and list built shared objects + run: | + set -euo pipefail + echo "Searching for shared objects under build_alp and pyalp" + find build_alp -name "*.so" -maxdepth 8 -print || true + find pyalp -name "*.so" -maxdepth 8 -print || true + + - name: Collect built shared objects into artifacts/ + run: | + set -euo pipefail + mkdir -p artifacts + # copy any discovered .so files into a flat artifacts directory so upload-artifact can find them + find build_alp -name "*.so" -print0 | xargs -0 -I{} bash -lc 'cp -v "{}" artifacts/ || true' || true + find pyalp -name "*.so" -print0 | xargs -0 -I{} bash -lc 'cp -v "{}" artifacts/ || true' || true + echo "Artifacts now contains:" && ls -la artifacts || true + + - name: Upload built bindings + uses: actions/upload-artifact@v4 + with: + name: pyalp-so + path: | + build_alp/**/*.so + artifacts/**/*.so + pyalp/**/pyalp*.so + pyalp/**/_pyalp*.so + pyalp/**/libpyalp*.so + pyalp/**/*.so + + build-wheel-and-test: + name: Build wheel from prebuilt .so and smoke-test + runs-on: ubuntu-latest + needs: build-bindings + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download built bindings + uses: actions/download-artifact@v4 + with: + name: pyalp-so + path: artifacts + + - name: Show downloaded artifacts + run: ls -la artifacts || true + + - name: Prepare wheel inputs + id: prep + run: | + set -euo pipefail + # List candidate shared-object files for debugging + echo "Candidate .so files in artifacts:" && find artifacts -type f -name "*.so" -print || true + # Find likely candidates (prefer _pyalp, pyalp, 
libpyalp) + SO_PATH=$(find artifacts \( -name "_pyalp*.so" -o -name "pyalp*.so" -o -name "libpyalp*.so" -o -name "*.so" \) | head -n1) + if [ -z "$SO_PATH" ]; + then + echo "ERROR: no built .so artifact found to package" >&2 + echo "Artifacts listing:" && ls -la artifacts || true + exit 2 + fi + echo "so_path=$SO_PATH" >> "$GITHUB_OUTPUT" + # Prefer helper located inside pyalp/ but fall back to top-level tools/ + if [ -f pyalp/tools/make_wheel_from_so.py ]; then + echo "builder=pyalp/tools/make_wheel_from_so.py" >> "$GITHUB_OUTPUT" + else + echo "builder=tools/make_wheel_from_so.py" >> "$GITHUB_OUTPUT" + fi + # Derive Python version from the .so filename (e.g., cpython-311 -> 3.11, cp312 -> 3.12) + PY_VER="" + if [[ "$SO_PATH" =~ cpython-([0-9]{3}) ]]; + then + n=${BASH_REMATCH[1]} + PY_VER="${n:0:1}.${n:1}" + elif [[ "$SO_PATH" =~ cp([0-9]{2,3}) ]]; + then + n=${BASH_REMATCH[1]} + PY_VER="${n:0:1}.${n:1}" + fi + echo "python_version=$PY_VER" >> "$GITHUB_OUTPUT" + + - name: Run wheel builder + run: | + set -euo pipefail + echo "builder=${{ steps.prep.outputs.builder }}" + echo "so=${{ steps.prep.outputs.so_path }}" + python3 "${{ steps.prep.outputs.builder }}" "${{ steps.prep.outputs.so_path }}" --out-dir dist_wheel + + - name: Show wheel + run: ls -la dist_wheel || true + + - name: Set up Python matching built extension + if: ${{ steps.prep.outputs.python_version != '' }} + uses: actions/setup-python@v5 + with: + python-version: ${{ steps.prep.outputs.python_version }} + + - name: Smoke test wheel in venv + run: | + set -euo pipefail + python3 -V + which python3 + python3 -m venv venv + . 
venv/bin/activate + pip install --upgrade pip wheel + pip install dist_wheel/*.whl + tools/smoke_test_pyalp.py diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml new file mode 100644 index 000000000..2c9748945 --- /dev/null +++ b/.github/workflows/pyalp-publish.yml @@ -0,0 +1,175 @@ +name: pyalp wheels (cibuildwheel) + +on: + push: + tags: [ 'pyalp.v*' ] + workflow_dispatch: {} + +jobs: + build-wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout (with submodules) + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + + - name: Verify pinned pybind11 submodule commit + if: runner.os == 'Linux' || runner.os == 'macOS' + shell: bash + run: | + set -euo pipefail + if [ -f pyalp/PINNED_PYBIND11 ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/PINNED_PYBIND11) + elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/extern/pybind11/PINNED_COMMIT) + else + echo "No pinned commit file found (pyalp/PINNED_PYBIND11 or pyalp/extern/pybind11/PINNED_COMMIT)" >&2 + exit 2 + fi + ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || true) + echo "Expected pybind11 commit: $PINNED_SHA" + echo "Found pybind11 commit: $ACTUAL" + test "$ACTUAL" = "$PINNED_SHA" + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==2.21.3 + + - name: Build wheels + env: + CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-*" + CIBW_SKIP: "*-musllinux* pp*" + CIBW_ARCHS_LINUX: "x86_64" + CIBW_ARCHS_MACOS: "arm64" + CIBW_BUILD_VERBOSITY: "1" + # Ensure submodule headers are used by setup.py + CIBW_ENVIRONMENT: > + PYTHONUTF8=1 + CIBW_ENVIRONMENT_MACOS: > + PYTHONUTF8=1 + MACOSX_DEPLOYMENT_TARGET=15.0 + # Prebuild the CMake-based extension via 
top-level CMake so all variables/options are defined. + CIBW_BEFORE_BUILD: | + python -m pip install --upgrade pip + python -m pip install cmake ninja + echo "[cibw] Working directory and contents:"; pwd; ls -la + echo "[cibw] Checking for pyalp CMakeLists:"; ls -la pyalp || true; if [ -f pyalp/CMakeLists.txt ]; + then echo "found pyalp/CMakeLists.txt"; else echo "pyalp/CMakeLists.txt NOT found"; fi + # If the wrapper CMakeLists.txt wasn't copied (e.g., untracked file when cibuildwheel uses git ls-files), create a minimal shim + if [ ! -f pyalp/CMakeLists.txt ]; + then + echo "[cibw] Creating pyalp/CMakeLists.txt shim (add_subdirectory(src)) for wheel build" + printf '%s\n' 'add_subdirectory(src)' > pyalp/CMakeLists.txt + fi + # Ensure no stale extension from a previous ABI remains in the source tree + rm -f pyalp/src/pyalp/_pyalp*.so || true + # Overwrite root setup.py inside the container to delegate packaging to pyalp/setup.py (keep git root clean) + printf '%s\n' "import os, runpy; ROOT=os.path.dirname(os.path.abspath(__file__)); PKG=os.path.join(ROOT, 'pyalp'); os.chdir(PKG); runpy.run_path(os.path.join(PKG, 'setup.py'), run_name='__main__')" > setup.py + # Configure from repository root; enable pyalp and choose NUMA setting per-platform + PYEXEC=$(python -c 'import sys; print(sys.executable)') + # Use a per-ABI build directory to avoid cross-ABI contamination + ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') + BUILD_DIR="build/${ABI_TAG}" + # Enable NUMA on Linux runners (for linux wheels), keep disabled elsewhere. + if [ "$(uname -s)" = "Linux" ]; + then + echo "[cibw] Linux build container detected — attempting to install NUMA dev libs" + # Try package managers commonly present in manylinux containers. 
Ignore failures + if command -v yum >/dev/null 2>&1; + then + yum -y install numactl-devel || true + elif command -v apt-get >/dev/null 2>&1; + then + apt-get update || true + apt-get install -y libnuma-dev || true + fi + NUMA_FLAG="-DWITH_NUMA=ON" + else + # On macOS install Homebrew libomp but do NOT export CPPFLAGS/LDFLAGS. + # Exporting CPPFLAGS was the cause of incorrect header ordering; instead + # pass a CMake prefix hint so FindOpenMP can locate libomp without + # prepending include paths to the global compiler invocation. + if command -v brew >/dev/null 2>&1; + then + echo "[cibw] Homebrew detected — ensuring libomp is available" + # Only install if not already present to avoid reinstall warnings + if ! brew list libomp >/dev/null 2>&1; then + brew install libomp + fi + + # Locate libomp installation + if [ -d "/opt/homebrew/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/opt/homebrew/opt/libomp" + elif [ -d "/usr/local/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/usr/local/opt/libomp" + else + HOMEBREW_LIBOMP_DIR="" + fi + + if [ -n "${HOMEBREW_LIBOMP_DIR}" ]; then + CMAKE_PREFIX_HINT="-DCMAKE_PREFIX_PATH=${HOMEBREW_LIBOMP_DIR}" + echo "[cibw] Using libomp from ${HOMEBREW_LIBOMP_DIR}" + else + CMAKE_PREFIX_HINT="" + fi + fi + NUMA_FLAG="-DWITH_NUMA=OFF" + # Set macOS deployment target for arm64 to match libomp requirement + export MACOSX_DEPLOYMENT_TARGET=15.0 + OSX_DEPLOY_FLAG="-DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" + fi + # Clean build directory to prevent CMake caching issues + rm -rf "${BUILD_DIR}" + # On macOS, add flag to downgrade template keyword warning from error to warning + if [ "$(uname -s)" = "Darwin" ]; + then + MACOS_FLAGS="-DCMAKE_CXX_FLAGS=-Wno-error=missing-template-arg-list-after-template-kw" + else + MACOS_FLAGS="" + fi + # For wheel builds, request portable flags (avoid -march=native) and disable + # interprocedural optimization (LTO) to improve portability of the produced wheels. 
+ PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" + LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" + cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" + cmake --build "${BUILD_DIR}" --target pyalp_ref --parallel + run: | + # Build from repository root so the full CMake project is available in the container + python -m cibuildwheel --output-dir wheelhouse . + + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: pyalp-wheels-${{ matrix.os }} + path: wheelhouse/*.whl + + # Placeholder for publish job; enable when versioning is PEP 440 compliant + # publish: + # needs: build-wheels + # runs-on: ubuntu-latest + # permissions: + # id-token: write + # steps: + # - name: Download wheels + # uses: actions/download-artifact@v4 + # with: + # path: dist + # - name: Publish to PyPI + # uses: pypa/gh-action-pypi-publish@release/v1 + # with: + # packages-dir: dist diff --git a/.gitignore b/.gitignore index bbb0d673e..d144abd31 100644 --- a/.gitignore +++ b/.gitignore @@ -8,4 +8,5 @@ paths.mk [Bb]uild*/ [Oo]bj*/ [Ii]nstall*/ -cmake-build-*/ \ No newline at end of file +cmake-build-*/ +.venv/ \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..f0b84739c --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "pyalp/extern/pybind11"] + path = pyalp/extern/pybind11 + url = https://github.com/pybind/pybind11 diff --git a/CMakeLists.txt b/CMakeLists.txt index 02c49eb37..969c56b6e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -243,7 +243,13 @@ endif() # always look for math and rt libraries find_package( LibM REQUIRED ) -find_library( LIBRT rt REQUIRED ) +# librt exists on Linux but not on macOS; only require it on non-Apple UNIX +if(UNIX AND NOT APPLE) + find_library( LIBRT rt 
REQUIRED ) +else() + # On macOS librt is not provided/needed; define an empty variable for compatibility + set(LIBRT "" CACHE FILEPATH "rt library (not present on Apple platforms)") +endif() # pthreads is needed for hpparser find_package( Threads REQUIRED ) @@ -321,11 +327,13 @@ if( WITH_OMP_BACKEND OR WITH_HYBRID_BACKEND ) set( WITH_OMP_BACKEND_HEADERS ON ) endif() + add_subdirectory( include ) ### BACKEND IMPLEMENTATIONS add_subdirectory( src ) + ### TESTS and EXAMPLES # specify test categories and the directory where ALL tests are stored @@ -335,6 +343,14 @@ include( AddGRBTests ) add_subdirectory( tests ) +if( ENABLE_PYALP ) + # Only add the pyalp subdirectory if it contains a CMakeLists.txt in the source tree. + if(EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt") + add_subdirectory(pyalp) + else() + message(STATUS "pyalp subdirectory not present in source tree; skipping add_subdirectory(pyalp)") + endif() +endif() add_subdirectory( examples ) @@ -377,3 +393,7 @@ add_dependencies( docs userdocs devdocs ) message( "Tests enabled for backends: ${AVAILABLE_TEST_BACKENDS}" ) message( "Enabled backend targets: ${AVAILABLE_BACKENDS}\n" ) + + + + diff --git a/cmake/CompileFlags.cmake b/cmake/CompileFlags.cmake index 4c6c1d862..41cf90646 100644 --- a/cmake/CompileFlags.cmake +++ b/cmake/CompileFlags.cmake @@ -92,7 +92,25 @@ target_link_libraries( common_flags INTERFACE ## defaults performance options for all targets (backends and tests) set( COMMON_PERF_DEFS_Release "NDEBUG" ) -set( COMMON_PERF_OPTS_Release "-O3" "-march=native" "-mtune=native" "-funroll-loops" ) + +# Option to produce portable builds (for wheels): avoid per-host microarch +# flags like -march=native/-mtune=native and aggressive unrolling. When +# building wheels in CI set -DALP_PORTABLE_BUILD=ON to get portable artifacts. 
+option( ALP_PORTABLE_BUILD "Build portable binaries (disable host-specific optimizations)" OFF ) + +# Avoid GCC/GNU-specific microarchitecture flags on Apple/Clang toolchains +if(APPLE) + # On macOS with AppleClang, -march/-mtune and aggressive unrolling can + # cause header search/order issues and unsupported-flag errors. Keep -O3 only. + set( COMMON_PERF_OPTS_Release "-O3" ) +else() + if( ALP_PORTABLE_BUILD ) + # Portable: avoid host-specific tuning + set( COMMON_PERF_OPTS_Release "-O3" ) + else() + set( COMMON_PERF_OPTS_Release "-O3" "-march=native" "-mtune=native" "-funroll-loops" ) + endif() +endif() set( COMMON_PERF_DEFS_Debug "" ) set( COMMON_PERF_OPTS_Debug "-O0" ) set( COMMON_PERF_DEFS_Coverage "" ) diff --git a/pyalp/CMakeLists.txt b/pyalp/CMakeLists.txt new file mode 100644 index 000000000..febd4f0ab --- /dev/null +++ b/pyalp/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(src) diff --git a/pyalp/PINNED_PYBIND11 b/pyalp/PINNED_PYBIND11 new file mode 100644 index 000000000..1cb281060 --- /dev/null +++ b/pyalp/PINNED_PYBIND11 @@ -0,0 +1 @@ +8d503e30be400ad431d3d140707803e87e75fad7 diff --git a/pyalp/extern/pybind11 b/pyalp/extern/pybind11 new file mode 160000 index 000000000..8d503e30b --- /dev/null +++ b/pyalp/extern/pybind11 @@ -0,0 +1 @@ +Subproject commit 8d503e30be400ad431d3d140707803e87e75fad7 diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml new file mode 100644 index 000000000..6a8af41fd --- /dev/null +++ b/pyalp/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel", "pybind11>=2.6"] +build-backend = "setuptools.build_meta" + +[project] +name = "pyalp" +version = "0.0.0" +description = "Python bindings for ALP GraphBLAS (minimal package layout)" +authors = [ { name = "ALP" } ] +readme = "README.md" +license = { text = "Apache-2.0" } +requires-python = ">=3.8" + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.cibuildwheel] +# Build CPython 3.9–3.12 wheels; skip PyPy and musllinux for now +build = 
"cp39-* cp310-* cp311-* cp312-*" +skip = "pp* *-musllinux* *-manylinux_i686 *-win32" +build-verbosity = 1 + +[tool.cibuildwheel.linux] +archs = ["x86_64"] + +[tool.cibuildwheel.macos] +archs = ["x86_64", "arm64"] + +[tool.cibuildwheel.windows] +archs = ["AMD64"] diff --git a/pyalp/setup.py b/pyalp/setup.py new file mode 100644 index 000000000..24b770731 --- /dev/null +++ b/pyalp/setup.py @@ -0,0 +1,114 @@ +from setuptools import setup, Extension +from setuptools import find_packages +from setuptools.command.build_ext import build_ext as _build_ext +import sys +import os +import glob +import shutil +import sysconfig +bdist_wheel_cmd = None +try: + # Used to mark wheel as non-pure when bundling a prebuilt .so + from wheel.bdist_wheel import bdist_wheel as _bdist_wheel + + class bdist_wheel(_bdist_wheel): + def finalize_options(self): + super().finalize_options() + # wheel contains a native shared object; mark as platform-specific + self.root_is_pure = False + + bdist_wheel_cmd = bdist_wheel +except Exception: + bdist_wheel_cmd = None +_have_pybind11 = False +try: + # import lazily — only needed when we build from sources + from pybind11.setup_helpers import Pybind11Extension, build_ext + _have_pybind11 = True +except Exception: + Pybind11Extension = None + build_ext = None + +here = os.path.abspath(os.path.dirname(__file__)) + +prebuilt_so = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") +# Prefer a prebuilt extension compiled by CMake if present in the tree +def find_prebuilt(): + candidates = [] + # Top-level CMake build tree locations (preferred flow) + candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', 'pyalp_ref*.so'), recursive=True)) + candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', 'pyalp_ref*.pyd'), recursive=True)) + candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', '_pyalp*.so'), recursive=True)) + candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', 
'_pyalp*.pyd'), recursive=True)) + # Prefer the candidate matching the current Python tag + py_tag = f"cpython-{sys.version_info[0]}{sys.version_info[1]}" + matching = [c for c in candidates if py_tag in os.path.basename(c)] or candidates + return matching[0] if matching else None + +if not prebuilt_so: + prebuilt_so = find_prebuilt() +package_data = {} +ext_modules = [] + +class build_ext_copy_prebuilt(_build_ext): + """Custom build_ext that copies a prebuilt shared object into the build dir. + + This ensures the extension is installed into platlib and the wheel is valid + for auditwheel repair. + """ + + def build_extension(self, ext): + # Determine target path for the extension + target_path = self.get_ext_fullpath(ext.name) + os.makedirs(os.path.dirname(target_path), exist_ok=True) + src = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") or find_prebuilt() + if not src or not os.path.exists(src): + raise RuntimeError("Prebuilt pyalp shared object not found during build_ext") + shutil.copyfile(src, target_path) + +if prebuilt_so: + if not os.path.exists(prebuilt_so): + raise FileNotFoundError(f"PREBUILT_PYALP_SO set but file not found: {prebuilt_so}") + # Declare a binary extension so files go to platlib; actual build just copies the prebuilt .so + ext_modules = [Extension("pyalp._pyalp", sources=[])] +else: + if not _have_pybind11: + raise RuntimeError("pybind11 is required to build the extension from sources. 
Install pybind11 or provide PREBUILT_PYALP_SO to bundle a prebuilt .so.") + assert Pybind11Extension is not None + ext_modules = [ + Pybind11Extension( + "pyalp._pyalp", + ["src/pyalp/bindings.cpp"], + include_dirs=[ + os.path.join(here, "src"), + os.path.join(here, "src", "pyalp"), + os.path.join(here, "extern", "pybind11", "include"), + os.path.normpath(os.path.join(here, "..", "include")), + ], + define_macros=[("PYALP_MODULE_NAME", "_pyalp")], + cxx_std=14, + ) + ] + +setup_kwargs = { + "name": "pyalp", + "version": "0.0.0", + "description": "pyalp package (C++ bindings)", + "packages": find_packages(where="src"), + "package_dir": {"": "src"}, + "ext_modules": ext_modules, + "include_package_data": True, +} + +# Supply cmdclass entries for build_ext (copy-prebuilt or pybind11) and bdist_wheel +cmdclass = {} +if prebuilt_so: + cmdclass["build_ext"] = build_ext_copy_prebuilt +elif build_ext is not None: + cmdclass["build_ext"] = build_ext +if bdist_wheel_cmd is not None: + cmdclass["bdist_wheel"] = bdist_wheel_cmd +if cmdclass: + setup_kwargs["cmdclass"] = cmdclass + +setup(**setup_kwargs) diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt new file mode 100644 index 000000000..4dc7fd587 --- /dev/null +++ b/pyalp/src/CMakeLists.txt @@ -0,0 +1,111 @@ +cmake_minimum_required(VERSION 3.14) +project(python_module LANGUAGES CXX) +# Include pybind11 relative to this CMakeLists.txt directory +get_filename_component(PYBIND11_SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/../extern/pybind11" ABSOLUTE) +add_subdirectory(${PYBIND11_SRC_DIR} ${CMAKE_BINARY_DIR}/pyalp/extern/pybind11) + +# Try to find OpenMP - on macOS this may require Homebrew libomp to be installed. +# We do this quietly here and link the imported target to the module targets that +# require OpenMP so CMake's FindOpenMP is used consistently instead of relying on +# environment CPPFLAGS/LDFLAGS. +find_package(OpenMP QUIET) + +# When configuring from the pyalp package directory (e.g. 
in cibuildwheel +# containers) the top-level CMake that usually defines these options is not +# executed. Provide sensible cached defaults so this CMakeLists can be used +# standalone. The top-level configuration will override these cached values +# when present. +if(NOT DEFINED WITH_REFERENCE_BACKEND) + set(WITH_REFERENCE_BACKEND ON CACHE BOOL "Build Reference backend (default for pyalp package)") +endif() +if(NOT DEFINED WITH_OMP_BACKEND) + set(WITH_OMP_BACKEND ON CACHE BOOL "Build OMP backend (default for pyalp package)") +endif() + +assert_defined_variables( WITH_REFERENCE_BACKEND WITH_OMP_BACKEND ) + +# target listing all examples, to build them at once with 'make examples' + + +if( WITH_REFERENCE_BACKEND ) + set(PYALP_MODULE_NAME pyalp_ref) + + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # Link your required libraries + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference common_flags) + # If OpenMP support is available, ensure the module links the OpenMP imported target + if(OpenMP_CXX_FOUND) + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX) + endif() + # (Optional) If you need extra compile flags or C++ standard: + set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) + # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra) + + # On Apple platforms with newer Clang, downgrade the template keyword warning from error to warning + if(APPLE) + target_compile_options(${PYALP_MODULE_NAME} PRIVATE -Wno-error=missing-template-arg-list-after-template-kw) + endif() + + add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + 
COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" + COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + ) + +endif() + + +if( WITH_OMP_BACKEND ) + set(PYALP_MODULE_NAME pyalp_omp) + + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # Link your required libraries + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags) + # If OpenMP support is available, ensure the module links the OpenMP imported target + if(OpenMP_CXX_FOUND) + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX) + endif() + # (Optional) If you need extra compile flags or C++ standard: + set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) + # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra) + + add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" 
+ COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + ) + +endif() + + +if( WITH_NONBLOCKING_BACKEND ) + set(PYALP_MODULE_NAME pyalp_nonblocking) + + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # Link your required libraries + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags) + # If OpenMP support is available, ensure the module links the OpenMP imported target + if(OpenMP_CXX_FOUND) + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX) + endif() + # (Optional) If you need extra compile flags or C++ standard: + set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) + # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra) + + add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" 
+ COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + ) + +endif() + diff --git a/pyalp/src/conjugate_gradient.hpp b/pyalp/src/conjugate_gradient.hpp new file mode 100644 index 000000000..dee9cf154 --- /dev/null +++ b/pyalp/src/conjugate_gradient.hpp @@ -0,0 +1,109 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef _CG_COMPLEX + #include +#endif + +#include + +#include + +#include + +#include + +#include +#include +#include + +#include + +namespace py = pybind11; + +using BaseScalarType = double; +#ifdef _CG_COMPLEX + using ScalarType = std::complex< BaseScalarType >; +#else + using ScalarType = BaseScalarType; +#endif + + +constexpr const BaseScalarType tol = 0.000001; + +/** The default number of maximum iterations. */ +constexpr const size_t max_iters = 10000; + +constexpr const double c1 = 0.0001; +constexpr const double c2 = 0.0001; + +std::tuple +conjugate_gradient( + grb::Matrix< ScalarType > & L, + grb::Vector< ScalarType > & x, + grb::Vector< ScalarType > & b, + grb::Vector< ScalarType > & r, + grb::Vector< ScalarType > & u, + grb::Vector< ScalarType > & temp, + size_t solver_iterations = 1000, + size_t verbose = 0 + //const struct input &data_in, struct output &out + ) { + size_t iterations = 0; + BaseScalarType residual; + + if( !verbose ) + std::cout << "conjugate_gradient: start \n"; + // get user process ID + const size_t s = grb::spmd<>::pid(); + (void)s; + assert( s < grb::spmd<>::nprocs() ); + + // get input n + grb::utils::Timer timer; + timer.reset(); + + grb::RC rc = grb::SUCCESS; + rc = grb::algorithms::conjugate_gradient( + x, L, b, + solver_iterations, tol, + iterations, residual, + r, u, temp + ); + double single_time = timer.time(); + if( !(rc == grb::SUCCESS || rc == grb::FAILED) ) { + std::cerr << "Failure: call to conjugate_gradient 
did not succeed (" + << grb::toString( rc ) << ")." << std::endl; + } + if( rc == grb::FAILED ) { + if( !verbose ) { + std::cout << "Warning: call to conjugate_gradient did not converge\n"; + } + } + if( rc == grb::SUCCESS ) { + rc = grb::collectives<>::reduce( single_time, 0, grb::operators::max< double >() ); + } + + if( !verbose ) { + // output + std::cout << " solver_iterations = " << solver_iterations << "\n"; + std::cout << " tol = " << tol << "\n"; + std::cout << " iterations = " << iterations << "\n"; + std::cout << " residual = " << residual << "\n"; + } + + if( !verbose ) { + std::cout << "conjugate_gradient: end \n"; + } + + // Return as a tuple: (int, float) + return std::make_tuple(iterations, residual); +} + + diff --git a/pyalp/src/matrix_wrappers.hpp b/pyalp/src/matrix_wrappers.hpp new file mode 100644 index 000000000..b1644b429 --- /dev/null +++ b/pyalp/src/matrix_wrappers.hpp @@ -0,0 +1,115 @@ +#include +#include +#include + +#include + +#include +#include +#include +#include + +namespace py = pybind11; + +// using BaseScalarType = double; +// #ifdef _CG_COMPLEX +// using ScalarType = std::complex< BaseScalarType >; +// #else +// using ScalarType = BaseScalarType; +// #endif + +// /** Parser type */ +// typedef grb::utils::MatrixFileReader< +// ScalarType, +// std::conditional< +// (sizeof(grb::config::RowIndexType) > sizeof(grb::config::ColIndexType)), +// grb::config::RowIndexType, +// grb::config::ColIndexType +// >::type +// > Parser; + +// /** Nonzero type */ +// typedef grb::internal::NonzeroStorage< +// grb::config::RowIndexType, +// grb::config::ColIndexType, +// ScalarType +// > NonzeroT; + +// /** In-memory storage type */ +// typedef grb::utils::Singleton< +// std::pair< +// // stores n and nz (according to parser) +// std::pair< size_t, size_t >, +// // stores the actual nonzeroes +// std::vector< NonzeroT > +// > +// > Storage; + +template< + typename IntType + , typename ScalarType + > +void buildMatrix( + grb::Matrix< ScalarType >& 
M, + py::array_t arri, + py::array_t arrj, + py::array_t arrv + ) { + // Check array is 1D + py::buffer_info info_i = arri.request(); + if (info_i.ndim != 1) throw std::runtime_error("Array must be 1D"); + IntType* data_ptr_i = static_cast(info_i.ptr); + auto nnz = info_i.size; + + // Check array is 1D + py::buffer_info info_j = arrj.request(); + if (info_j.ndim != 1) throw std::runtime_error("Array must be 1D"); + IntType* data_ptr_j = static_cast(info_j.ptr); + assert( nnz == info_j.size ); + + // Check array is 1D + py::buffer_info info_v = arrv.request(); + if (info_v.ndim != 1) throw std::runtime_error("Array must be 1D"); + ScalarType* data_ptr_v = static_cast(info_v.ptr); + assert( nnz == info_v.size ); + + grb::RC io_rc; + (void)io_rc; + io_rc = grb::buildMatrixUnique( M, data_ptr_i, data_ptr_j , data_ptr_v, nnz, grb::SEQUENTIAL ); + assert( io_rc == grb::SUCCESS ); +} + +// helper for template specialisation +template +grb::Matrix matrix_factory( + size_t m, size_t n, + py::array data1, + py::array data2, + py::array_t data3) +{ + grb::Matrix mat(m, n); + + // Helper for dispatch + bool handled = false; + auto try_type = [&](auto dummy) { + using IntType = decltype(dummy); + if (py::dtype::of().is(data1.dtype()) && py::dtype::of().is(data2.dtype())) { + buildMatrix( + mat, + data1.cast>(), + data2.cast>(), + data3 + ); + handled = true; + } + }; + + // List of supported integer types + (try_type(int8_t{}), try_type(int16_t{}), try_type(int32_t{}), try_type(int64_t{}), + try_type(uint8_t{}), try_type(uint16_t{}), try_type(uint32_t{}), try_type(uint64_t{})); + + if (!handled) + throw std::runtime_error("Unsupported integer dtype for data1/data2 or nonmatching types of data1 and data2 "); + + return mat; +} diff --git a/pyalp/src/numpy2alp.cpp b/pyalp/src/numpy2alp.cpp new file mode 100644 index 000000000..3407090c0 --- /dev/null +++ b/pyalp/src/numpy2alp.cpp @@ -0,0 +1,57 @@ +#include +#include +#include +#include +#include + +namespace py = pybind11; + +// 
Print a NumPy array as a std::vector (flattened) +void print_numpy_array(py::array_t input) { + py::buffer_info buf = input.request(); + double* ptr = static_cast(buf.ptr); + std::vector vec(ptr, ptr + buf.size); + + std::cout << "Vector contents (flattened): "; + for (double v : vec) { + std::cout << v << " "; + } + std::cout << std::endl; +} + +// Add two NumPy arrays (supports multi-dimensional, as long as shapes match) +py::array_t add_numpy_arrays(py::array_t a, py::array_t b) { + py::buffer_info buf_a = a.request(); + py::buffer_info buf_b = b.request(); + + // Check that shapes match + if (buf_a.ndim != buf_b.ndim) + throw std::runtime_error("Input arrays must have the same number of dimensions"); + for (ssize_t i = 0; i < buf_a.ndim; ++i) { + if (buf_a.shape[i] != buf_b.shape[i]) + throw std::runtime_error("Input array shapes must match"); + } + + // Prepare output array with the same shape + auto result = py::array_t(buf_a.size); + py::buffer_info buf_result = result.request(); + + double* ptr_a = static_cast(buf_a.ptr); + double* ptr_b = static_cast(buf_b.ptr); + double* ptr_result = static_cast(buf_result.ptr); + + // Element-wise addition (flat) + for (ssize_t i = 0; i < buf_a.size; ++i) { + ptr_result[i] = ptr_a[i] + ptr_b[i]; + } + + // Reshape result to match input shape + result.resize(buf_a.shape); + + return result; +} + +PYBIND11_MODULE(numpy2alp, m) { + m.def("print_numpy_array", &print_numpy_array, "Print a numpy array as a flattened std::vector"); + m.def("add_numpy_arrays", &add_numpy_arrays, "Add two numpy arrays element-wise (supports multi-dimensional arrays)"); +} diff --git a/pyalp/src/pyalp.cpp b/pyalp/src/pyalp.cpp new file mode 100644 index 000000000..a84cc3410 --- /dev/null +++ b/pyalp/src/pyalp.cpp @@ -0,0 +1,84 @@ +#include +#include + +#include + +#include "utils.hpp" +#include "matrix_wrappers.hpp" +#include "vector_wrappers.hpp" +#include "conjugate_gradient.hpp" + +namespace py = pybind11; + + +//PYBIND11_MODULE(pyalp, m) { +// 
Use a macro for the module name +#ifndef PYALP_MODULE_NAME +#define PYALP_MODULE_NAME pyalp +#endif + +PYBIND11_MODULE(PYALP_MODULE_NAME, m) { + // Common bindings for all backends + m.def("backend_name", [](){ return "backend"; }); + py::class_>(m, "Matrix") + .def(py::init([](size_t m, size_t n, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m, n, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + // simple constructor + // .def(py::init([](size_t m, size_t n, + // py::array_t data1, + // py::array_t data2, + // py::array_t data3) { + // grb::Matrix< ScalarType > mat(m, n); // call the basic constructor + // buildMatrix(mat, data1, data2, data3); // initialize with data + // return mat; + // }), + // py::arg("m"), py::arg("n"), + // py::arg("i_array"), py::arg("j_array"), py::arg("k_array") + // ) + + // add some existing things + // .def("get", &Matrix::get) + // .def("set", &Matrix::set) + // .def("rows", &Matrix::rows) + // .def("cols", &Matrix::cols) + ; // + py::class_>(m, "Vector") + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + + .def("to_numpy", &to_numpy, "Convert to numpy array"); + ; // + + + +//m.def("buildMatrix", &buildMatrix, "Fill Matrix from 3 NumPy arrays"); // + + m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); + m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); + m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG sover", + py::arg("L"), + py::arg("x"), + py::arg("b"), + py::arg("r"), + py::arg("u"), + py::arg("temp"), + py::arg("solver_iterations") = 1000, + py::arg("verbose") = 0 + ); +} + + diff --git a/pyalp/src/pyalp.egg-info/PKG-INFO 
b/pyalp/src/pyalp.egg-info/PKG-INFO new file mode 100644 index 000000000..6fd9e5890 --- /dev/null +++ b/pyalp/src/pyalp.egg-info/PKG-INFO @@ -0,0 +1,65 @@ +Metadata-Version: 2.4 +Name: pyalp +Version: 0.0.0 +Summary: Python bindings for ALP GraphBLAS (minimal package layout) +Author: ALP +License: Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + ... (full license text omitted here - original file retained at project root) + +Classifier: Programming Language :: Python :: 3 +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: numpy +Dynamic: license-file + +# pyalp (packaged) + +This directory contains a minimal Python package layout for the existing `pyalp` C++ bindings using pybind11. + +Quick usage: + +1. Install in editable mode (from `pyalp/`): + + python -m pip install -e . + +2. Build with CMake (optional) from repository root or `pyalp/` CMakeLists can be used when integrating with the full project. + +The C++ extension is exposed as `pyalp._pyalp` and the package `pyalp` imports it lazily from `__init__.py`. + +Building a wheel from a prebuilt .so (no install) +------------------------------------------------ + +If you already build the extension with the project's CMake and have a `.so` file, you can produce a wheel without re-compiling by pointing `setup.py` at the shared object and then building a wheel: + +1. 
Build with CMake (example from your message): + + cd /tmp/build_alp && cmake ~/Repos/graphblas -DENABLE_PYALP=ON && make pyalp_ref + + This produces a file like `/tmp/build_alp/pyalp/src/pyalp_ref.cpython-311-x86_64-linux-gnu.so`. + +2. Build a wheel that contains that `.so` (no install): + + PREBUILT_PYALP_SO=/tmp/build_alp/pyalp/src/pyalp_ref.cpython-311-x86_64-linux-gnu.so python3 -m pip wheel . -w dist/ + + The `setup.py` will copy the provided .so into the package as the `_pyalp` extension and produce a wheel in `dist/`. + +Notes: +- The prebuilt .so will be bundled into the wheel as package data. Make sure the python ABI tag in the filename matches your target interpreter (cpython-311 => Python 3.11 etc.). +- If you don't provide `PREBUILT_PYALP_SO`, `pip wheel` will attempt to compile the extension via pybind11. + diff --git a/pyalp/src/pyalp.egg-info/SOURCES.txt b/pyalp/src/pyalp.egg-info/SOURCES.txt new file mode 100644 index 000000000..d19b172be --- /dev/null +++ b/pyalp/src/pyalp.egg-info/SOURCES.txt @@ -0,0 +1,12 @@ +LICENSE +README.md +pyproject.toml +setup.py +src/pyalp/__init__.py +src/pyalp/_pyalp.cpython-311-x86_64-linux-gnu.so +src/pyalp/your_module.py +src/pyalp.egg-info/PKG-INFO +src/pyalp.egg-info/SOURCES.txt +src/pyalp.egg-info/dependency_links.txt +src/pyalp.egg-info/requires.txt +src/pyalp.egg-info/top_level.txt \ No newline at end of file diff --git a/pyalp/src/pyalp.egg-info/dependency_links.txt b/pyalp/src/pyalp.egg-info/dependency_links.txt new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/pyalp/src/pyalp.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/pyalp/src/pyalp.egg-info/requires.txt b/pyalp/src/pyalp.egg-info/requires.txt new file mode 100644 index 000000000..24ce15ab7 --- /dev/null +++ b/pyalp/src/pyalp.egg-info/requires.txt @@ -0,0 +1 @@ +numpy diff --git a/pyalp/src/pyalp.egg-info/top_level.txt b/pyalp/src/pyalp.egg-info/top_level.txt new file mode 100644 index 000000000..076d42d13 --- /dev/null +++ 
b/pyalp/src/pyalp.egg-info/top_level.txt @@ -0,0 +1 @@ +pyalp diff --git a/pyalp/src/pyalp/CMakeLists.txt b/pyalp/src/pyalp/CMakeLists.txt new file mode 100644 index 000000000..2d86c07dd --- /dev/null +++ b/pyalp/src/pyalp/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 3.14) +project(pyalp_bindings LANGUAGES CXX) + +pybind11_add_module(_pyalp bindings.cpp ../matrix_wrappers.hpp ../utils.hpp ../vector_wrappers.hpp ../conjugate_gradient.hpp) +target_compile_definitions(_pyalp PRIVATE PYALP_MODULE_NAME=_pyalp) +set_target_properties(_pyalp PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) +target_include_directories(_pyalp PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/..) + +# Honor top-level NUMA switch: add NO_NUMA_DEF when WITH_NUMA=OFF +if(DEFINED WITH_NUMA AND NOT WITH_NUMA) + if(DEFINED NO_NUMA_DEF) + target_compile_definitions(_pyalp PRIVATE ${NO_NUMA_DEF}) + else() + target_compile_definitions(_pyalp PRIVATE _GRB_NO_LIBNUMA) + endif() +endif() diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py new file mode 100644 index 000000000..4b8ca7f6b --- /dev/null +++ b/pyalp/src/pyalp/__init__.py @@ -0,0 +1,41 @@ +"""pyalp Python package init. + +Expose a small Python surface and import compiled extension if available. +""" +from importlib import metadata +import importlib +import pathlib +import sys + +# compiled extension will be available after installation or build +try: + from . import _pyalp # type: ignore +except Exception: # pragma: no cover - fallback for source tree + # Fallback: try to discover any compiled extension in the package directory + _pyalp = None + try: + pkgdir = pathlib.Path(__file__).parent + for p in pkgdir.iterdir(): + if p.suffix == ".so": + # PEP 3149 allows ABI tags in the filename (e.g. _pyalp.cpython-311-x86_64-linux-gnu.so) + # The module name is the part before the first dot. 
+ modname = p.name.split(".", 1)[0] + try: + # Use absolute import to avoid import-time package-relative issues + m = importlib.import_module(f"{__package__}.{modname}") + _pyalp = m + break + except Exception: + # ignore and try next candidate + continue + except Exception: + _pyalp = None + +__all__ = ["_pyalp"] + + +def version(): + try: + return metadata.version("pyalp") + except Exception: + return "0.0.0" diff --git a/pyalp/src/pyalp/bindings.cpp b/pyalp/src/pyalp/bindings.cpp new file mode 100644 index 000000000..6dcb95cf3 --- /dev/null +++ b/pyalp/src/pyalp/bindings.cpp @@ -0,0 +1,56 @@ +#include +#include + +#include + +#include "utils.hpp" +#include "matrix_wrappers.hpp" +#include "vector_wrappers.hpp" +#include "conjugate_gradient.hpp" + +namespace py = pybind11; + +// Build extension named _pyalp (private extension module) so package can expose it +#ifndef PYALP_MODULE_NAME +#define PYALP_MODULE_NAME _pyalp +#endif + +PYBIND11_MODULE(PYALP_MODULE_NAME, m) { + // Common bindings for all backends (kept minimal here) + m.def("backend_name", [](){ return "backend"; }); + py::class_>(m, "Matrix") + .def(py::init([](size_t m_, size_t n_, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m_, n_, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + + py::class_>(m, "Vector") + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + .def("to_numpy", &to_numpy, "Convert to numpy array"); + + m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); + m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); + m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG sover", + py::arg("L"), + 
py::arg("x"), + py::arg("b"), + py::arg("r"), + py::arg("u"), + py::arg("temp"), + py::arg("solver_iterations") = 1000, + py::arg("verbose") = 0 + ); +} diff --git a/pyalp/src/pyalp/your_module.py b/pyalp/src/pyalp/your_module.py new file mode 100644 index 000000000..0acf81b9c --- /dev/null +++ b/pyalp/src/pyalp/your_module.py @@ -0,0 +1,4 @@ +"""Small pure-Python utilities for pyalp package.""" + +def hello_py(): + return "Hello from pure Python module" diff --git a/pyalp/src/python2alp.cpp b/pyalp/src/python2alp.cpp new file mode 100644 index 000000000..0d70140f0 --- /dev/null +++ b/pyalp/src/python2alp.cpp @@ -0,0 +1,9 @@ +#include + +std::string say_hello() { + return "Hello, world from C++!"; +} + +PYBIND11_MODULE(python2alp, m) { + m.def("say_hello", &say_hello, "A function that returns a greeting"); +} diff --git a/pyalp/src/utils.hpp b/pyalp/src/utils.hpp new file mode 100644 index 000000000..037eda952 --- /dev/null +++ b/pyalp/src/utils.hpp @@ -0,0 +1,20 @@ +#include +#include +#include +#include +#include + +namespace py = pybind11; + +// Print a NumPy array as a std::vector (flattened) +void print_my_numpy_array(py::array_t input) { + py::buffer_info buf = input.request(); + double* ptr = static_cast(buf.ptr); + std::vector vec(ptr, ptr + buf.size); + + std::cout << "Vector contents (flattened): "; + for (double v : vec) { + std::cout << v << " "; + } + std::cout << std::endl; +} diff --git a/pyalp/src/vector_wrappers.hpp b/pyalp/src/vector_wrappers.hpp new file mode 100644 index 000000000..13e2de05f --- /dev/null +++ b/pyalp/src/vector_wrappers.hpp @@ -0,0 +1,65 @@ +#include +#include +#include +#include +#include +#include + +#include + +#include + +#include +#include +#include +#include + +#include + + +namespace py = pybind11; + + +using BaseScalarType = double; +#ifdef _CG_COMPLEX + using ScalarType = std::complex< BaseScalarType >; +#else + using ScalarType = BaseScalarType; +#endif + +void buildVector(grb::Vector< ScalarType >& V, py::array_t 
arrv) { + + // Check array is 1D + py::buffer_info info_v = arrv.request(); + if (info_v.ndim != 1) throw std::runtime_error("Array must be 1D"); + ScalarType* data_ptr_v = static_cast(info_v.ptr); + + grb::RC io_rc; + (void)io_rc; + io_rc = grb::buildVector( V, data_ptr_v, data_ptr_v + info_v.size, grb::SEQUENTIAL ); + assert( io_rc == grb::SUCCESS ); +} + +py::array_t +to_numpy(grb::Vector< ScalarType >& x) { + grb::PinnedVector< ScalarType > pinnedVector; + pinnedVector = grb::PinnedVector< ScalarType >( x, grb::SEQUENTIAL ); + + std::cout << "create numpy array from grb::vector\n"; + + ScalarType* data = new ScalarType[grb::size(x)]; + for( size_t k = 0; k < grb::size(x); ++k ) { + const auto &value = pinnedVector.getNonzeroValue( k ); + data[k]=value; + } + + // Capsule to manage memory (will delete[] when array is destroyed in Python) + py::capsule free_when_done(data, [](void *f) { + delete[] reinterpret_cast(f); + }); + + // Create NumPy array that shares memory with C++ + py::array_t arr({grb::size(x)}, {sizeof(ScalarType)}, data, free_when_done); + return arr; + +} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..db8750212 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,4 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel"] +# Use the legacy backend so setup.py is executed, which delegates to pyalp/setup.py. +build-backend = "setuptools.build_meta:__legacy__" diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..0f07871b4 --- /dev/null +++ b/setup.py @@ -0,0 +1,10 @@ +"""This repository is not meant to be built as a Python package at the root. + +Please build wheels from the 'pyalp' subdirectory. +""" + +from setuptools import setup + +raise SystemExit( + "Use 'pip wheel ./pyalp' (cibuildwheel points to the 'pyalp' subdirectory)." 
+) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index d90ca5cdb..d12a0fb6d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -163,6 +163,10 @@ if( GNN_DATASET_PATH ) ) endif() +if( ENABLE_PYALP ) + add_subdirectory( python ) +endif() + add_subdirectory( unit ) add_subdirectory( smoke ) diff --git a/tests/python/CMakeLists.txt b/tests/python/CMakeLists.txt new file mode 100644 index 000000000..0bf2ac485 --- /dev/null +++ b/tests/python/CMakeLists.txt @@ -0,0 +1,23 @@ +# +# Copyright 2021 Huawei Technologies Co., Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +find_package(Python3 COMPONENTS Interpreter REQUIRED) + +set( TEST_CATEGORY "python" ) + +enable_testing() + + diff --git a/tests/python/numpy_array_print.py b/tests/python/numpy_array_print.py new file mode 100644 index 000000000..283449ced --- /dev/null +++ b/tests/python/numpy_array_print.py @@ -0,0 +1,5 @@ +import pyalp_ref as pyalp +import numpy as np + +arr = np.array([1.1, 2.2, 3.3]) +pyalp.print_my_numpy_array(arr) diff --git a/tests/python/test.py b/tests/python/test.py new file mode 100644 index 000000000..577917b42 --- /dev/null +++ b/tests/python/test.py @@ -0,0 +1,65 @@ +""" +Test script for the pyalp_ref (GraphBLAS-like) Python module. + +This script sets up a small sparse linear system and solves it using the +conjugate gradient method implemented in pyalp_ref. It verifies the solution +against an expected result using numpy's allclose. 
+ +Steps performed: +- Defines a 5x5 sparse matrix in coordinate (COO) format. +- Initializes vectors for the right-hand side (b), initial guess (x), and workspace. +- Constructs pyalp_ref Matrix and Vector objects. +- Runs the conjugate gradient solver. +- Prints the number of iterations, residual, and resulting solution vector. +- Asserts that the computed solution is close to the expected values. + +Usage: + python test.py + +Dependencies: + - numpy + - pyalp_ref (should be available in the Python path) +""" + +import pyalp_ref as pyalp +import numpy as np + +# Generate a small sparse linear system using numpy arrays +N, M = 5 , 5 +idata = np.array([ 0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4 ],dtype=np.int32) +jdata = np.array([ 0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4 ],dtype=np.int32) +vdata = np.array([ 1, 1, 1, 1, .5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1 ], dtype=np.float64) +b = np.array([ 1., 1., 1., 1., 1. ], dtype=np.float64) +x = np.array([ 1, 1., 0., 0.3, -1. ], dtype=np.float64) +r = np.zeros(5) +u = np.zeros(5) +tmp = np.zeros(5) + +A=np.zeros((M,N)) +for i,j,v in zip(idata,jdata,vdata): + A[i,j]=v + +pyalp.print_my_numpy_array(b) + +maxiterations = 2000 +verbose = 1 + +######################### +# Create the pyalp_ref Matrix and Vector objects +alpmatrixA = pyalp.Matrix(5,5,idata,jdata,vdata) +alpvectorx = pyalp.Vector(5,x) +alpvectorb = pyalp.Vector(5,b) +alpvectorr = pyalp.Vector(5,r) +alpvectoru = pyalp.Vector(5,u) +alpvectortmp = pyalp.Vector(5,tmp) + +#solve the linear system using conjugate gradient method in pyalp_ref +iterations,residual = pyalp.conjugate_gradient( alpmatrixA, alpvectorx, alpvectorb, alpvectorr, alpvectoru, alpvectortmp, maxiterations, verbose ) +print(" iterations = ", iterations ) +print(" residual = ", residual ) + +# Convert the result vector to a numpy array and print it +x_result=alpvectorx.to_numpy() +print(x_result) +# Check if the result is close to the expected solution +assert(np.allclose(x_result,np.array([ 1.,
1., 0., 0.13598679, -0.88396565]))) diff --git a/tools/make_wheel_from_so.py b/tools/make_wheel_from_so.py new file mode 100755 index 000000000..f3a901198 --- /dev/null +++ b/tools/make_wheel_from_so.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 +"""Simple helper: build a wheel that packages a prebuilt .so into the pyalp package. + +Usage: make_wheel_from_so.py --out-dir + +The script will create /--.whl containing: + - pyalp/__init__.py (minimal stub) + - pyalp/ + - pyalp-.dist-info/{METADATA,WHEEL,RECORD} + +This is intentionally minimal and meant for CI-snapshots where the compiled .so +is produced by your CMake job and we only need to bundle it into a wheel. +""" + +import argparse +import re +import sys +import zipfile +import sysconfig +from pathlib import Path + +NAME = "pyalp" +VERSION = "0.0.0" + + +def infer_cp_tag_from_filename(name: str) -> str | None: + # try to find cp311/cp312 style + m = re.search(r"cp(\d{2,3})", name) + if m: + return f"cp{m.group(1)}" + # try to find cpython-311 style + m = re.search(r"cpython-(\d{3})", name) + if m: + return f"cp{m.group(1)}" + return None + + +def make_wheel(so_path: Path, out_dir: Path) -> Path: + if not so_path.exists(): + raise FileNotFoundError(f".so not found: {so_path}") + so_name = so_path.name + cp_tag = infer_cp_tag_from_filename(so_name) + if not cp_tag: + # Fallback to current interpreter if tag cannot be inferred + cp_tag = f"cp{sys.version_info.major}{sys.version_info.minor}" + py_tag = cp_tag + abi_tag = cp_tag + plat = sysconfig.get_platform().replace("-", "_").replace(".", "_") + wheel_fname = f"{NAME}-{VERSION}-{py_tag}-{abi_tag}-{plat}.whl" + out_dir.mkdir(parents=True, exist_ok=True) + wheel_path = out_dir / wheel_fname + + init_py = ( + "try:\n" + " from . 
import _pyalp\n" + "except Exception:\n" + " _pyalp = None\n" + "__all__ = [\"_pyalp\"]\n" + ) + + dist_info = f"{NAME}-{VERSION}.dist-info" + metadata = ( + "Metadata-Version: 2.1\n" + f"Name: {NAME}\n" + f"Version: {VERSION}\n" + "Summary: pyalp packaged wheel (prebuilt .so)\n" + ) + wheel_meta = ( + "Wheel-Version: 1.0\n" + "Generator: make_wheel_from_so.py\n" + "Root-Is-Purelib: false\n" + f"Tag: {py_tag}-{abi_tag}-{plat}\n" + ) + + with zipfile.ZipFile(wheel_path, "w", compression=zipfile.ZIP_DEFLATED) as z: + z.writestr(f"{NAME}/__init__.py", init_py) + # Normalize the extension module name to _pyalp.so so the package can import it as pyalp._pyalp + so_target_name = "_pyalp.so" + z.write(so_path, f"{NAME}/{so_target_name}") + z.writestr(f"{dist_info}/METADATA", metadata) + z.writestr(f"{dist_info}/WHEEL", wheel_meta) + # RECORD should list files; for minimal CI, leave entries empty (tools may complain but pip accepts) + z.writestr(f"{dist_info}/RECORD", "") + + return wheel_path + + +def parse_args(argv): + p = argparse.ArgumentParser(description="Make simple wheel from prebuilt .so") + p.add_argument("so", help="Path to prebuilt .so file") + p.add_argument("--out-dir", default="dist_wheel", help="Output directory") + return p.parse_args(argv) + + +def main(argv): + args = parse_args(argv) + so_path = Path(args.so) + out_dir = Path(args.out_dir) + try: + wheel = make_wheel(so_path, out_dir) + print("Wheel written to", wheel) + except Exception as e: + print("ERROR:", e, file=sys.stderr) + return 2 + return 0 + + +if __name__ == "__main__": + raise SystemExit(main(sys.argv[1:])) diff --git a/tools/smoke_test_pyalp.py b/tools/smoke_test_pyalp.py new file mode 100755 index 000000000..7cc6bf65e --- /dev/null +++ b/tools/smoke_test_pyalp.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +"""Smoke-test installer for the pyalp wheel. 
+ +Usage: python tools/smoke_test_pyalp.py + +This script imports `pyalp`, checks for presence of `_pyalp` extension and +tries to call `backend_name()` if present. It exits non-zero on failure and +prints helpful tracebacks. +""" +import sys +import traceback + + +def main(): + try: + import pyalp + except Exception as e: + print("ERROR: importing pyalp failed:", e, file=sys.stderr) + traceback.print_exc() + return 2 + ext = getattr(pyalp, "_pyalp", None) + ok_ext = ext is not None + print("pyalp import OK, compiled ext loaded:", ok_ext) + if not ok_ext: + # Try to import the extension directly and print diagnostics + try: + import importlib + ext = importlib.import_module("pyalp._pyalp") + print("Direct import succeeded after fallback.") + ok_ext = True + except Exception as e: + print("Extension import failed:", e, file=sys.stderr) + traceback.print_exc() + try: + import importlib.util + import pathlib + spec = importlib.util.find_spec("pyalp") + pkgdir = None + if spec and spec.submodule_search_locations: + pkgdir = pathlib.Path(list(spec.submodule_search_locations)[0]) + else: + pkgdir = pathlib.Path(__import__("pyalp").__file__).parent # type: ignore[attr-defined] + print("pyalp dir:", pkgdir) + print(".so files:") + for p in pkgdir.iterdir(): + if p.suffix == ".so": + print(" -", p) + except Exception: + pass + if ok_ext: + try: + if ext is not None and hasattr(ext, "backend_name"): + name = ext.backend_name() + print("backend_name:", name) + else: + print("Extension module loaded but missing backend_name()", file=sys.stderr) + return 3 + except Exception as e: + print("calling backend failed:", e, file=sys.stderr) + traceback.print_exc() + return 3 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) From cda713ab30fb728ccf4ef4d8562c0cd67e57d41e Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Mon, 27 Oct 2025 14:27:23 +0100 Subject: [PATCH 02/32] Merge branch '358-python-api-metadata' --- .github/workflows/pyalp-publish.yml | 14 ++++ .gitignore 
| 3 +- pyalp/pyproject.toml | 2 +- pyalp/setup.py | 38 ++++++++++- pyalp/src/CMakeLists.txt | 40 ++++++++++++ pyalp/src/pyalp.egg-info/PKG-INFO | 65 ------------------- pyalp/src/pyalp.egg-info/SOURCES.txt | 12 ---- pyalp/src/pyalp.egg-info/dependency_links.txt | 1 - pyalp/src/pyalp.egg-info/requires.txt | 1 - pyalp/src/pyalp.egg-info/top_level.txt | 1 - pyalp/src/pyalp/__init__.py | 16 ++++- pyalp/src/pyalp/_metadata.py.in | 36 ++++++++++ 12 files changed, 145 insertions(+), 84 deletions(-) delete mode 100644 pyalp/src/pyalp.egg-info/PKG-INFO delete mode 100644 pyalp/src/pyalp.egg-info/SOURCES.txt delete mode 100644 pyalp/src/pyalp.egg-info/dependency_links.txt delete mode 100644 pyalp/src/pyalp.egg-info/requires.txt delete mode 100644 pyalp/src/pyalp.egg-info/top_level.txt create mode 100644 pyalp/src/pyalp/_metadata.py.in diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 2c9748945..725661ff4 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -85,6 +85,12 @@ jobs: # Use a per-ABI build directory to avoid cross-ABI contamination ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') BUILD_DIR="build/${ABI_TAG}" + # Export the per-ABI build dir so setup.py (inside the wheel build) can find + # the CMake-generated metadata file. cibuildwheel runs this before_build + # inside the container and environment variables exported here are visible + # to the subsequent packaging steps in that container. + export CMAKE_BUILD_DIR="${BUILD_DIR}" + echo "[cibw] Exported CMAKE_BUILD_DIR=${CMAKE_BUILD_DIR}" # Enable NUMA on Linux runners (for linux wheels), keep disabled elsewhere. if [ "$(uname -s)" = "Linux" ]; then @@ -148,6 +154,14 @@ jobs: LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" cmake -S . 
-B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" cmake --build "${BUILD_DIR}" --target pyalp_ref --parallel + # Debug: show the generated metadata file (if present) to the CI logs + echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then + echo "[cibw] Found metadata file:"; ls -l "${CMAKE_BUILD_DIR}/pyalp_metadata.py" + echo "[cibw] First 100 lines of metadata:"; sed -n '1,100p' "${CMAKE_BUILD_DIR}/pyalp_metadata.py" || true + else + echo "[cibw] Metadata file not found at ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + fi run: | # Build from repository root so the full CMake project is available in the container python -m cibuildwheel --output-dir wheelhouse . diff --git a/.gitignore b/.gitignore index d144abd31..95a567357 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,5 @@ paths.mk [Oo]bj*/ [Ii]nstall*/ cmake-build-*/ -.venv/ \ No newline at end of file +.venv/ +pyalp/src/pyalp.egg-info/ diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 6a8af41fd..6d127c2c9 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.0.0" +version = "0.8.1" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/setup.py b/pyalp/setup.py index 24b770731..7ca4690d1 100644 --- a/pyalp/setup.py +++ b/pyalp/setup.py @@ -6,6 +6,7 @@ import glob import shutil import sysconfig +import pathlib bdist_wheel_cmd = None try: # Used to mark wheel as non-pure when bundling a prebuilt .so @@ -66,6 +67,38 @@ def build_extension(self, ext): raise RuntimeError("Prebuilt pyalp shared object not 
found during build_ext") shutil.copyfile(src, target_path) + # The _metadata.py file is generated by CMake in the build directory. + # We need to find it and copy it to the same directory as the extension. + ext_build_dir = os.path.dirname(target_path) + # CMAKE_BUILD_DIR is set by the cibuildwheel before_build script to the per-ABI build directory + cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") + if cmake_build_dir: + metadata_src_path = os.path.join(cmake_build_dir, "pyalp_metadata.py") + metadata_dest_path = os.path.join(ext_build_dir, "_metadata.py") + if os.path.exists(metadata_src_path): + print(f"Copying generated metadata from {metadata_src_path} to {metadata_dest_path}") + shutil.copyfile(metadata_src_path, metadata_dest_path) + else: + print(f"Warning: Generated metadata file not found at {metadata_src_path}. Skipping copy.") + else: + # Fall back: try to locate the generated metadata under any per-ABI + # build directory (e.g. ../build/cp310, ../build/cp39, ...). + # This avoids relying strictly on the CMAKE_BUILD_DIR env var which + # may not always be propagated into the isolated build environment. + search_pattern = os.path.join(here, '..', 'build', '**', 'pyalp_metadata.py') + candidates = glob.glob(search_pattern, recursive=True) + # Prefer candidate matching the current Python ABI tag if present + py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" + matching = [c for c in candidates if py_tag in os.path.basename(os.path.dirname(c)) or py_tag in os.path.basename(c)] + chosen = (matching or candidates)[:1] + if chosen: + metadata_src_path = os.path.abspath(chosen[0]) + metadata_dest_path = os.path.join(ext_build_dir, "_metadata.py") + print(f"Copying generated metadata from {metadata_src_path} to {metadata_dest_path} (discovered by glob search)") + shutil.copyfile(metadata_src_path, metadata_dest_path) + else: + print("Warning: CMAKE_BUILD_DIR not set and no generated metadata found under ../build. 
Skipping metadata file copy.") + if prebuilt_so: if not os.path.exists(prebuilt_so): raise FileNotFoundError(f"PREBUILT_PYALP_SO set but file not found: {prebuilt_so}") @@ -92,10 +125,13 @@ def build_extension(self, ext): setup_kwargs = { "name": "pyalp", - "version": "0.0.0", + "version": "0.8.1", "description": "pyalp package (C++ bindings)", "packages": find_packages(where="src"), "package_dir": {"": "src"}, + # Ensure generated metadata is included in the wheel. The build process + # will copy the generated file to the package build dir as `_metadata.py`. + "package_data": {"pyalp": ["_metadata.py"]}, "ext_modules": ext_modules, "include_package_data": True, } diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt index 4dc7fd587..737e36261 100644 --- a/pyalp/src/CMakeLists.txt +++ b/pyalp/src/CMakeLists.txt @@ -109,3 +109,43 @@ if( WITH_NONBLOCKING_BACKEND ) endif() +# --- Metadata generation --- +# These variables are expected to be set by the top-level CMakeLists.txt, +# but we provide defaults for standalone builds. +if(NOT DEFINED pyalp_VERSION) + set(pyalp_VERSION "0.0.0") +endif() +if(NOT DEFINED ALP_VERSION) + set(ALP_VERSION "unknown") +endif() +if(NOT DEFINED ALP_BUILD_TYPE) + set(ALP_BUILD_TYPE "unknown") +endif() +if(NOT DEFINED ALP_GIT_COMMIT_SHA) + set(ALP_GIT_COMMIT_SHA "unknown") +endif() +if(NOT DEFINED ALP_GIT_BRANCH) + set(ALP_GIT_BRANCH "unknown") +endif() + +# Get Python and pybind11 versions +find_package(PythonInterp REQUIRED) +set(PYTHON_VERSION ${PYTHON_VERSION_STRING}) +set(pybind11_VERSION ${pybind11_VERSION}) + +# This is a simplified list. A more robust solution would inspect the build targets. 
+set(pyalp_ALGORITHMS "conjugate_gradient") +set(pyalp_BACKENDS "reference, reference_omp") + +# Configure the metadata file from the template +set(METADATA_TEMPLATE "${CMAKE_CURRENT_SOURCE_DIR}/pyalp/_metadata.py.in") +set(METADATA_OUTPUT "${CMAKE_BINARY_DIR}/pyalp_metadata.py") +configure_file(${METADATA_TEMPLATE} ${METADATA_OUTPUT} @ONLY) + +# This command is useful for debugging inside a cibuildwheel container +# to verify that the file is being generated correctly. +# add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD +# COMMAND ${CMAKE_COMMAND} -E echo "Generated metadata file content:" +# COMMAND ${CMAKE_COMMAND} -E cat ${METADATA_OUTPUT} +# ) + diff --git a/pyalp/src/pyalp.egg-info/PKG-INFO b/pyalp/src/pyalp.egg-info/PKG-INFO deleted file mode 100644 index 6fd9e5890..000000000 --- a/pyalp/src/pyalp.egg-info/PKG-INFO +++ /dev/null @@ -1,65 +0,0 @@ -Metadata-Version: 2.4 -Name: pyalp -Version: 0.0.0 -Summary: Python bindings for ALP GraphBLAS (minimal package layout) -Author: ALP -License: Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - ... 
(full license text omitted here - original file retained at project root) - -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: OS Independent -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: numpy -Dynamic: license-file - -# pyalp (packaged) - -This directory contains a minimal Python package layout for the existing `pyalp` C++ bindings using pybind11. - -Quick usage: - -1. Install in editable mode (from `pyalp/`): - - python -m pip install -e . - -2. Build with CMake (optional) from repository root or `pyalp/` CMakeLists can be used when integrating with the full project. - -The C++ extension is exposed as `pyalp._pyalp` and the package `pyalp` imports it lazily from `__init__.py`. - -Building a wheel from a prebuilt .so (no install) ------------------------------------------------- - -If you already build the extension with the project's CMake and have a `.so` file, you can produce a wheel without re-compiling by pointing `setup.py` at the shared object and then building a wheel: - -1. Build with CMake (example from your message): - - cd /tmp/build_alp && cmake ~/Repos/graphblas -DENABLE_PYALP=ON && make pyalp_ref - - This produces a file like `/tmp/build_alp/pyalp/src/pyalp_ref.cpython-311-x86_64-linux-gnu.so`. - -2. Build a wheel that contains that `.so` (no install): - - PREBUILT_PYALP_SO=/tmp/build_alp/pyalp/src/pyalp_ref.cpython-311-x86_64-linux-gnu.so python3 -m pip wheel . -w dist/ - - The `setup.py` will copy the provided .so into the package as the `_pyalp` extension and produce a wheel in `dist/`. - -Notes: -- The prebuilt .so will be bundled into the wheel as package data. 
Make sure the python ABI tag in the filename matches your target interpreter (cpython-311 => Python 3.11 etc.). -- If you don't provide `PREBUILT_PYALP_SO`, `pip wheel` will attempt to compile the extension via pybind11. - diff --git a/pyalp/src/pyalp.egg-info/SOURCES.txt b/pyalp/src/pyalp.egg-info/SOURCES.txt deleted file mode 100644 index d19b172be..000000000 --- a/pyalp/src/pyalp.egg-info/SOURCES.txt +++ /dev/null @@ -1,12 +0,0 @@ -LICENSE -README.md -pyproject.toml -setup.py -src/pyalp/__init__.py -src/pyalp/_pyalp.cpython-311-x86_64-linux-gnu.so -src/pyalp/your_module.py -src/pyalp.egg-info/PKG-INFO -src/pyalp.egg-info/SOURCES.txt -src/pyalp.egg-info/dependency_links.txt -src/pyalp.egg-info/requires.txt -src/pyalp.egg-info/top_level.txt \ No newline at end of file diff --git a/pyalp/src/pyalp.egg-info/dependency_links.txt b/pyalp/src/pyalp.egg-info/dependency_links.txt deleted file mode 100644 index 8b1378917..000000000 --- a/pyalp/src/pyalp.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pyalp/src/pyalp.egg-info/requires.txt b/pyalp/src/pyalp.egg-info/requires.txt deleted file mode 100644 index 24ce15ab7..000000000 --- a/pyalp/src/pyalp.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -numpy diff --git a/pyalp/src/pyalp.egg-info/top_level.txt b/pyalp/src/pyalp.egg-info/top_level.txt deleted file mode 100644 index 076d42d13..000000000 --- a/pyalp/src/pyalp.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pyalp diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py index 4b8ca7f6b..21c236488 100644 --- a/pyalp/src/pyalp/__init__.py +++ b/pyalp/src/pyalp/__init__.py @@ -31,7 +31,21 @@ except Exception: _pyalp = None -__all__ = ["_pyalp"] +# compiled metadata will be available after installation or build +try: + from ._metadata import get_build_metadata, get_algorithm_metadata +except ImportError: # pragma: no cover - fallback for source tree + + def get_build_metadata(): + """Return an empty dictionary if metadata is 
not available.""" + return {} + + def get_algorithm_metadata(): + """Return an empty dictionary if metadata is not available.""" + return {} + + +__all__ = ["_pyalp", "version", "get_build_metadata", "get_algorithm_metadata"] def version(): diff --git a/pyalp/src/pyalp/_metadata.py.in b/pyalp/src/pyalp/_metadata.py.in new file mode 100644 index 000000000..f3a763308 --- /dev/null +++ b/pyalp/src/pyalp/_metadata.py.in @@ -0,0 +1,36 @@ +# pyalp/_metadata.py.in +""" +Runtime metadata for the pyalp package. + +This file is generated by CMake from _metadata.py.in. +""" + +__all__ = ["get_build_metadata", "get_algorithm_metadata"] + +_build_metadata = { + "version": "@pyalp_VERSION@", + "build_type": "@CMAKE_BUILD_TYPE@", + "alp_version": "@ALP_VERSION@", + "alp_build_type": "@ALP_BUILD_TYPE@", + "alp_git_commit": "@ALP_GIT_COMMIT_SHA@", + "alp_git_branch": "@ALP_GIT_BRANCH@", + "python_version": "@PYTHON_VERSION@", + "pybind11_version": "@pybind11_VERSION@", + "license": "BSD-3-Clause", + "homepage": "https://github.com/Algebraic-Programming/graphblas", +} + +_algorithm_metadata = { + "algorithms": "@pyalp_ALGORITHMS@", + "backends": "@pyalp_BACKENDS@", +} + + +def get_build_metadata(): + """Return a dictionary of build-time metadata.""" + return _build_metadata + + +def get_algorithm_metadata(): + """Return a dictionary of available algorithms and backends.""" + return _algorithm_metadata From 966689e0b3315d0a59c112a89ec84578adc8a0b3 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Mon, 27 Oct 2025 15:07:03 +0100 Subject: [PATCH 03/32] Merge branch '358-python-api-metadata2' --- .github/workflows/pyalp-publish.yml | 36 +++++++++++++++++------------ CMakeLists.txt | 21 +++++++++++++++++ 2 files changed, 42 insertions(+), 15 deletions(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 725661ff4..9af78c3b8 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -172,18 +172,24 @@ jobs: 
name: pyalp-wheels-${{ matrix.os }} path: wheelhouse/*.whl - # Placeholder for publish job; enable when versioning is PEP 440 compliant - # publish: - # needs: build-wheels - # runs-on: ubuntu-latest - # permissions: - # id-token: write - # steps: - # - name: Download wheels - # uses: actions/download-artifact@v4 - # with: - # path: dist - # - name: Publish to PyPI - # uses: pypa/gh-action-pypi-publish@release/v1 - # with: - # packages-dir: dist + publish: + needs: build-wheels + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/pyalp + permissions: + id-token: write + steps: + - name: Download all wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: pyalp-wheels-* + merge-multiple: true + - name: Publish to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + packages-dir: dist/ + verbose: true diff --git a/CMakeLists.txt b/CMakeLists.txt index 969c56b6e..0c6790ded 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -37,6 +37,27 @@ project( GraphBLAS DESCRIPTION "The ultimate engine for sparse computation" LANGUAGES CXX C ) + +# Find Git and get repository information for metadata +find_package(Git QUIET) +if(GIT_FOUND) + execute_process( + COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE ALP_GIT_COMMIT + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + execute_process( + COMMAND ${GIT_EXECUTABLE} rev-parse --abbrev-ref HEAD + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE ALP_GIT_BRANCH + OUTPUT_STRIP_TRAILING_WHITESPACE + ) +else() + set(ALP_GIT_COMMIT "unknown") + set(ALP_GIT_BRANCH "unknown") +endif() + set( CMAKE_CXX_STANDARD 11 ) set( CMAKE_CXX_STANDARD_REQUIRED ON ) From 159e818644b704eed537b92fa849eff869a9893b Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Mon, 27 Oct 2025 16:07:48 +0100 Subject: [PATCH 04/32] bump version to 0.8.3 in pyproject.toml --- pyalp/pyproject.toml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 6d127c2c9..7c928d1bc 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.1" +version = "0.8.3" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From d33cc7a3d7219d2ef64fa863127c702f32554411 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Mon, 27 Oct 2025 16:26:48 +0100 Subject: [PATCH 05/32] Update version to 0.8.4 and enhance metadata gathering in CMake - Bump version in pyproject.toml to 0.8.4 - Add Git metadata and README content handling in CMake for improved package metadata --- .github/workflows/pyalp-publish.yml | 18 +++++++++++++++++- pyalp/pyproject.toml | 2 +- pyalp/src/CMakeLists.txt | 13 +++++++++++++ pyalp/src/pyalp/_metadata.py.in | 3 +++ 4 files changed, 34 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 9af78c3b8..2dc2c3866 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -82,6 +82,22 @@ jobs: printf '%s\n' "import os, runpy; ROOT=os.path.dirname(os.path.abspath(__file__)); PKG=os.path.join(ROOT, 'pyalp'); os.chdir(PKG); runpy.run_path(os.path.join(PKG, 'setup.py'), run_name='__main__')" > setup.py # Configure from repository root; enable pyalp and choose NUMA setting per-platform PYEXEC=$(python -c 'import sys; print(sys.executable)') + # Gather Git metadata and package version to pass into CMake so the + # generated runtime metadata contains accurate values even in CI. 
+ # Prefer environment-provided values when available (GITHUB_SHA/REF_NAME) + ALP_GIT_COMMIT="${GITHUB_SHA:-$(git rev-parse --short HEAD)}" + # GITHUB_REF_NAME is available in Actions; fallback to git branch + ALP_GIT_BRANCH="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}" + # Try to pick a semantic/alp version from tags (prefer nearest tag) + ALP_VERSION=$(git describe --tags --match "v*" --abbrev=0 2>/dev/null || true) + if [ -z "${ALP_VERSION}" ]; then + # Fall back to a describe-style value + ALP_VERSION=$(git describe --tags --match "v*" --always 2>/dev/null || echo "unknown") + fi + # Read the pyalp package version from pyalp/pyproject.toml (simple grep) + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') + PYALP_VERSION=${PYALP_VERSION:-0.0.0} + echo "[cibw] Derived ALP_VERSION=${ALP_VERSION}, ALP_GIT_COMMIT=${ALP_GIT_COMMIT}, ALP_GIT_BRANCH=${ALP_GIT_BRANCH}, PYALP_VERSION=${PYALP_VERSION}" # Use a per-ABI build directory to avoid cross-ABI contamination ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') BUILD_DIR="build/${ABI_TAG}" @@ -152,7 +168,7 @@ jobs: # interprocedural optimization (LTO) to improve portability of the produced wheels. PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" - cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" + cmake -S . 
-B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" cmake --build "${BUILD_DIR}" --target pyalp_ref --parallel # Debug: show the generated metadata file (if present) to the CI logs echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 7c928d1bc..64a66763a 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.3" +version = "0.8.4" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt index 737e36261..5765a857d 100644 --- a/pyalp/src/CMakeLists.txt +++ b/pyalp/src/CMakeLists.txt @@ -128,6 +128,17 @@ if(NOT DEFINED ALP_GIT_BRANCH) set(ALP_GIT_BRANCH "unknown") endif() +# If a top-level README.md exists, read it and make it available to the +# metadata template. We escape triple quotes so the Python triple-quoted +# string in the template remains valid. 
+if(EXISTS "${CMAKE_SOURCE_DIR}/README.md") + file(READ "${CMAKE_SOURCE_DIR}/README.md" PYALP_README_RAW) + # Escape triple-quotes to avoid breaking the Python triple-quoted string + string(REPLACE "\"\"\"" "\\\"\\\"\\\"" PYALP_README_ESCAPED "${PYALP_README_RAW}") +else() + set(PYALP_README_ESCAPED "") +endif() + # Get Python and pybind11 versions find_package(PythonInterp REQUIRED) set(PYTHON_VERSION ${PYTHON_VERSION_STRING}) @@ -140,6 +151,8 @@ set(pyalp_BACKENDS "reference, reference_omp") # Configure the metadata file from the template set(METADATA_TEMPLATE "${CMAKE_CURRENT_SOURCE_DIR}/pyalp/_metadata.py.in") set(METADATA_OUTPUT "${CMAKE_BINARY_DIR}/pyalp_metadata.py") +# Make the README content available as @PYALP_README_ESCAPED@ to the +# template (CMake configure_file will perform the substitution). configure_file(${METADATA_TEMPLATE} ${METADATA_OUTPUT} @ONLY) # This command is useful for debugging inside a cibuildwheel container diff --git a/pyalp/src/pyalp/_metadata.py.in b/pyalp/src/pyalp/_metadata.py.in index f3a763308..96f1fb4e6 100644 --- a/pyalp/src/pyalp/_metadata.py.in +++ b/pyalp/src/pyalp/_metadata.py.in @@ -25,6 +25,9 @@ _algorithm_metadata = { "backends": "@pyalp_BACKENDS@", } +# README content (may be large); inserted as a Python triple-quoted string. 
+_algorithm_metadata["readme"] = """@PYALP_README_ESCAPED@""" + def get_build_metadata(): """Return a dictionary of build-time metadata.""" From 6f88c39fd4f4a8a829d2610ffe427b87a3770770 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Mon, 27 Oct 2025 16:56:29 +0100 Subject: [PATCH 06/32] Update version to 0.8.5, add ALP version handling in CMake, and enhance metadata in _metadata.py --- CMakeLists.txt | 10 ++++++ pyalp/README.md | 60 +++++++++++++++++++++++++++++++++ pyalp/pyproject.toml | 2 +- pyalp/src/CMakeLists.txt | 47 ++++++++++++++++++++++++++ pyalp/src/pyalp/_metadata.py.in | 2 +- 5 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 pyalp/README.md diff --git a/CMakeLists.txt b/CMakeLists.txt index 0c6790ded..33235939d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,6 +31,16 @@ set( MINORVERSION 7 ) set( BUGVERSION 0 ) set( VERSION "${MAJORVERSION}.${MINORVERSION}.${BUGVERSION}" ) +# Export a canonical ALP version string for subprojects and packaging. This +# defaults to the VERSION defined above but can be overridden by -DALP_VERSION +# on the cmake command line (CI may pass this explicitly). +if(NOT DEFINED ALP_VERSION) + set(ALP_VERSION "${VERSION}" CACHE STRING "ALP project version (for packaging)") +else() + # Keep user-provided ALP_VERSION in cache so subprojects see it + set(ALP_VERSION "${ALP_VERSION}" CACHE STRING "ALP project version (for packaging)" FORCE) +endif() + # set the project name project( GraphBLAS VERSION ${VERSION} diff --git a/pyalp/README.md b/pyalp/README.md new file mode 100644 index 000000000..c975439b3 --- /dev/null +++ b/pyalp/README.md @@ -0,0 +1,60 @@ + +# pyalp (packaged) + +This directory contains the Python package layout for the `pyalp` bindings +that expose parts of the ALP GraphBLAS project via pybind11. 
+ +Quick start +----------- + +Create and activate a virtual environment, then install the package (example +using TestPyPI): + +```bash +python -m venv venv +source venv/bin/activate +pip install --index-url https://test.pypi.org/simple/ --no-deps pyalp +``` + +Basic usage +----------- + +```python +import pyalp +print('pyalp version:', pyalp.version()) +print('build metadata:', pyalp.get_build_metadata()) +print('algorithm readme (first 200 chars):') +print(pyalp.get_algorithm_metadata().get('readme','')[:200]) +``` + +Runtime metadata +---------------- + +The package provides a small runtime metadata module generated at build time +from CMake. Useful keys in `pyalp.get_build_metadata()` include: + +- `version` — pyalp package version +- `build_type` — CMake build type used (e.g., Release) +- `alp_version` — ALP repository version or tag used to build +- `alp_git_commit` / `alp_git_branch` — Git information captured by CI +- `license` — detected repository license (e.g. Apache-2.0) + +`pyalp.get_algorithm_metadata()` contains available algorithm/backends and +also includes a `readme` key with the package README contents. + +Packaging notes (for maintainers) +-------------------------------- + +- The CI uses a top-level CMake configure/build to produce the native shared + object and a CMake-configured `_metadata.py`. The packaging `setup.py` then + copies the built `.so` and `_metadata.py` into the wheel. +- The CI passes Git/version information into CMake so the generated metadata + is populated even in detached/CI environments. + +If you modify the metadata template, update `pyalp/src/pyalp/_metadata.py.in`. + +License +------- + +See the repository `LICENSE` at the project root; the packaging pipeline +attempts to detect and embed the license string in runtime metadata. 
diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 64a66763a..3dd867035 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.4" +version = "0.8.5" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt index 5765a857d..65d5d77f4 100644 --- a/pyalp/src/CMakeLists.txt +++ b/pyalp/src/CMakeLists.txt @@ -117,9 +117,23 @@ if(NOT DEFINED pyalp_VERSION) endif() if(NOT DEFINED ALP_VERSION) set(ALP_VERSION "unknown") + # If the top-level project provided ALP_VERSION (cached) this will be used. + # Otherwise keep 'unknown'. When building from the top-level CMakeLists + # ALP_VERSION should already be set via cache. + if(DEFINED ALP_VERSION) + # already defined (likely cached) — keep it + else() + set(ALP_VERSION "unknown") + endif() endif() if(NOT DEFINED ALP_BUILD_TYPE) set(ALP_BUILD_TYPE "unknown") + # Use CMAKE_BUILD_TYPE as the ALP build type unless explicitly supplied + if(DEFINED CMAKE_BUILD_TYPE) + set(ALP_BUILD_TYPE "${CMAKE_BUILD_TYPE}") + else() + set(ALP_BUILD_TYPE "unknown") + endif() endif() if(NOT DEFINED ALP_GIT_COMMIT_SHA) set(ALP_GIT_COMMIT_SHA "unknown") @@ -128,6 +142,26 @@ if(NOT DEFINED ALP_GIT_BRANCH) set(ALP_GIT_BRANCH "unknown") endif() +# Determine repository license (simple heuristic from LICENSE file) if not provided +if(NOT DEFINED ALP_LICENSE) + if(EXISTS "${CMAKE_SOURCE_DIR}/LICENSE") + file(READ "${CMAKE_SOURCE_DIR}/LICENSE" ALP_LICENSE_RAW) + string(FIND "${ALP_LICENSE_RAW}" "Apache" _has_apache) + if(_has_apache GREATER -1) + set(ALP_LICENSE "Apache-2.0") + else() + string(FIND "${ALP_LICENSE_RAW}" "BSD" _has_bsd) + if(_has_bsd GREATER -1) + set(ALP_LICENSE "BSD-3-Clause") + else() + set(ALP_LICENSE "unknown") + endif() + endif() + else() + set(ALP_LICENSE "unknown") + endif() +endif() + # If 
a top-level README.md exists, read it and make it available to the # metadata template. We escape triple quotes so the Python triple-quoted # string in the template remains valid. @@ -138,6 +172,19 @@ if(EXISTS "${CMAKE_SOURCE_DIR}/README.md") else() set(PYALP_README_ESCAPED "") endif() +if(EXISTS "${CMAKE_SOURCE_DIR}/pyalp/README.md") + file(READ "${CMAKE_SOURCE_DIR}/pyalp/README.md" PYALP_README_RAW) +elseif(EXISTS "${CMAKE_SOURCE_DIR}/README.md") + file(READ "${CMAKE_SOURCE_DIR}/README.md" PYALP_README_RAW) +else() + set(PYALP_README_ESCAPED "") +endif() +if(DEFINED PYALP_README_RAW) + # Escape triple-quotes to avoid breaking the Python triple-quoted string + string(REPLACE "\"\"\"" "\\\"\\\"\\\"" PYALP_README_ESCAPED "${PYALP_README_RAW}") +else() + set(PYALP_README_ESCAPED "") +endif() # Get Python and pybind11 versions find_package(PythonInterp REQUIRED) diff --git a/pyalp/src/pyalp/_metadata.py.in b/pyalp/src/pyalp/_metadata.py.in index 96f1fb4e6..d40c3aeda 100644 --- a/pyalp/src/pyalp/_metadata.py.in +++ b/pyalp/src/pyalp/_metadata.py.in @@ -16,7 +16,7 @@ _build_metadata = { "alp_git_branch": "@ALP_GIT_BRANCH@", "python_version": "@PYTHON_VERSION@", "pybind11_version": "@pybind11_VERSION@", - "license": "BSD-3-Clause", + "license": "@ALP_LICENSE@", "homepage": "https://github.com/Algebraic-Programming/graphblas", } From 040ee5748ace3d1e41e4d0ad3c5470b83a82c654 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 10:01:31 +0100 Subject: [PATCH 07/32] Enhance setup.py to support multiple prebuilt backend modules and update __init__.py to expose them for user access --- .github/workflows/pyalp-publish.yml | 4 +- pyalp/setup.py | 85 +++++++++++++++++++++-------- pyalp/src/pyalp/__init__.py | 25 +++++++++ 3 files changed, 88 insertions(+), 26 deletions(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 2dc2c3866..155433cc3 100644 --- a/.github/workflows/pyalp-publish.yml +++ 
b/.github/workflows/pyalp-publish.yml @@ -168,8 +168,8 @@ jobs: # interprocedural optimization (LTO) to improve portability of the produced wheels. PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" - cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" - cmake --build "${BUILD_DIR}" --target pyalp_ref --parallel + cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" -DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON + cmake --build "${BUILD_DIR}" --target pyalp_ref pyalp_omp pyalp_nonblocking --parallel # Debug: show the generated metadata file (if present) to the CI logs echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then diff --git a/pyalp/setup.py b/pyalp/setup.py index 7ca4690d1..4856c5427 100644 --- a/pyalp/setup.py +++ b/pyalp/setup.py @@ -32,22 +32,40 @@ def finalize_options(self): here = os.path.abspath(os.path.dirname(__file__)) -prebuilt_so = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") -# Prefer a prebuilt extension compiled by CMake if present in the tree -def find_prebuilt(): - candidates = [] - # Top-level CMake build tree locations (preferred flow) - 
candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', 'pyalp_ref*.so'), recursive=True)) - candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', 'pyalp_ref*.pyd'), recursive=True)) - candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', '_pyalp*.so'), recursive=True)) - candidates.extend(glob.glob(os.path.join(here, '..', 'build', '**', '_pyalp*.pyd'), recursive=True)) - # Prefer the candidate matching the current Python tag - py_tag = f"cpython-{sys.version_info[0]}{sys.version_info[1]}" - matching = [c for c in candidates if py_tag in os.path.basename(c)] or candidates - return matching[0] if matching else None - -if not prebuilt_so: - prebuilt_so = find_prebuilt() +prebuilt_env = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") + +# Discover prebuilt backend shared objects in the CMake build tree. +def find_all_prebuilt(): + supported = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] + py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" + mapping = {} + for mod in supported: + patterns = [ + os.path.join(here, '..', 'build', '**', f'{mod}*.so'), + os.path.join(here, '..', 'build', '**', f'{mod}*.pyd'), + ] + found = [] + for pat in patterns: + found.extend(glob.glob(pat, recursive=True)) + if not found: + continue + # Prefer candidate matching current ABI tag in filename or parent dir + matching = [c for c in found if py_tag in os.path.basename(c) or py_tag in os.path.basename(os.path.dirname(c))] + chosen = (matching or found)[0] + mapping[mod] = os.path.abspath(chosen) + return mapping + +# Determine prebuilt modules mapping. If user specified a single PREBUILT env var, +# map it to its basename (module name) where possible; otherwise search the build tree. 
+prebuilt_modules = {} +if prebuilt_env: + # map provided path to module name by deriving filename stem + bn = os.path.basename(prebuilt_env) + modname = bn.split('.', 1)[0] + prebuilt_modules[modname] = os.path.abspath(prebuilt_env) +else: + prebuilt_modules = find_all_prebuilt() + package_data = {} ext_modules = [] @@ -62,9 +80,26 @@ def build_extension(self, ext): # Determine target path for the extension target_path = self.get_ext_fullpath(ext.name) os.makedirs(os.path.dirname(target_path), exist_ok=True) - src = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") or find_prebuilt() + # Choose the source prebuilt file corresponding to this extension + # ext.name is like 'pyalp.' + mod_fullname = ext.name + modname = mod_fullname.split('.', 1)[1] if '.' in mod_fullname else mod_fullname + + # Priority: explicit env var -> mapping discovered earlier -> glob search + src = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") + if not src: + src = prebuilt_modules.get(modname) + if not src: + # Try a targeted glob for this module as a last resort + candidates = glob.glob(os.path.join(here, '..', 'build', '**', f'{modname}*.so'), recursive=True) + candidates += glob.glob(os.path.join(here, '..', 'build', '**', f'{modname}*.pyd'), recursive=True) + py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" + matching = [c for c in candidates if py_tag in os.path.basename(c) or py_tag in os.path.basename(os.path.dirname(c))] + src = (matching or candidates)[:1] + src = os.path.abspath(src[0]) if src else None + if not src or not os.path.exists(src): - raise RuntimeError("Prebuilt pyalp shared object not found during build_ext") + raise RuntimeError(f"Prebuilt pyalp shared object not found for module '{modname}' during build_ext") shutil.copyfile(src, target_path) # The _metadata.py file is generated by CMake in the build directory. 
@@ -99,11 +134,11 @@ def build_extension(self, ext): else: print("Warning: CMAKE_BUILD_DIR not set and no generated metadata found under ../build. Skipping metadata file copy.") -if prebuilt_so: - if not os.path.exists(prebuilt_so): - raise FileNotFoundError(f"PREBUILT_PYALP_SO set but file not found: {prebuilt_so}") - # Declare a binary extension so files go to platlib; actual build just copies the prebuilt .so - ext_modules = [Extension("pyalp._pyalp", sources=[])] +if prebuilt_modules: + # Create an Extension for each discovered prebuilt module so setuptools will + # place the shared object into the package (platlib). + for modname in prebuilt_modules.keys(): + ext_modules.append(Extension(f"pyalp.{modname}", sources=[])) else: if not _have_pybind11: raise RuntimeError("pybind11 is required to build the extension from sources. Install pybind11 or provide PREBUILT_PYALP_SO to bundle a prebuilt .so.") @@ -138,7 +173,9 @@ def build_extension(self, ext): # Supply cmdclass entries for build_ext (copy-prebuilt or pybind11) and bdist_wheel cmdclass = {} -if prebuilt_so: +# If we detected prebuilt modules, use the copy-prebuilt build_ext which copies +# each discovered shared object into the package build directory. +if prebuilt_modules: cmdclass["build_ext"] = build_ext_copy_prebuilt elif build_ext is not None: cmdclass["build_ext"] = build_ext diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py index 21c236488..11dca43eb 100644 --- a/pyalp/src/pyalp/__init__.py +++ b/pyalp/src/pyalp/__init__.py @@ -53,3 +53,28 @@ def version(): return metadata.version("pyalp") except Exception: return "0.0.0" + + +# Expose available backend submodules (if present in the installed wheel) so users +# can import them as `from pyalp import pyalp_ref` or access `pyalp.pyalp_ref`. 
+_backend_candidates = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] +for _b in _backend_candidates: + try: + _m = importlib.import_module(f"{__package__}.{_b}") + globals()[_b] = _m + if _b not in __all__: + __all__.append(_b) + except Exception: + # ignore missing backends + continue + else: + # if imported successfully, also register a top-level alias so + # `import pyalp_ref` can work for users expecting the former layout. + try: + # ensure the module object is in globals + _mod = globals().get(_b) + if _mod is not None: + # register top-level module name to point to the submodule + sys.modules[_b] = _mod + except Exception: + pass From 7ed457412b4a94f75674f36f6a250402ae9daabd Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 10:21:08 +0100 Subject: [PATCH 08/32] Enable OMP and nonblocking backends conditionally for Linux runners in CMake build --- .github/workflows/pyalp-publish.yml | 15 +++++++++++++-- pyalp/pyproject.toml | 2 +- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 155433cc3..6f3bc8613 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -168,8 +168,19 @@ jobs: # interprocedural optimization (LTO) to improve portability of the produced wheels. PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" - cmake -S . 
-B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" -DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON - cmake --build "${BUILD_DIR}" --target pyalp_ref pyalp_omp pyalp_nonblocking --parallel + # Only enable OMP and nonblocking backends on Linux runners where libomp + # and required build support are available. macOS wheels will build the + # stable reference backend only to avoid SDK/ABI compile issues. + if [ "$(uname -s)" = "Linux" ]; then + BACKEND_FLAGS="-DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON" + BUILD_TARGETS="pyalp_ref pyalp_omp pyalp_nonblocking" + else + BACKEND_FLAGS="-DWITH_OMP_BACKEND=OFF -DWITH_NONBLOCKING_BACKEND=OFF" + BUILD_TARGETS="pyalp_ref" + fi + + cmake -S . 
-B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} ${BACKEND_FLAGS} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" + cmake --build "${BUILD_DIR}" --target ${BUILD_TARGETS} --parallel # Debug: show the generated metadata file (if present) to the CI logs echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 3dd867035..cda293ad1 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.5" +version = "0.8.8" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 9ca4457f22262bed0cc274c63d6438abac6956e4 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 10:56:56 +0100 Subject: [PATCH 09/32] Add py::module_local() to Matrix and Vector class bindings for improved encapsulation --- pyalp/pyproject.toml | 2 +- pyalp/src/pyalp/bindings.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index cda293ad1..18e6521c6 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.8" +version = "0.8.9" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/src/pyalp/bindings.cpp b/pyalp/src/pyalp/bindings.cpp index 6dcb95cf3..ef545ccd9 100644 --- a/pyalp/src/pyalp/bindings.cpp 
+++ b/pyalp/src/pyalp/bindings.cpp @@ -18,7 +18,7 @@ namespace py = pybind11; PYBIND11_MODULE(PYALP_MODULE_NAME, m) { // Common bindings for all backends (kept minimal here) m.def("backend_name", [](){ return "backend"; }); - py::class_>(m, "Matrix") + py::class_>(m, "Matrix", py::module_local()) .def(py::init([](size_t m_, size_t n_, py::array data1, py::array data2, @@ -28,7 +28,7 @@ PYBIND11_MODULE(PYALP_MODULE_NAME, m) { py::arg("m"), py::arg("n"), py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); - py::class_>(m, "Vector") + py::class_>(m, "Vector", py::module_local()) .def(py::init()) .def(py::init([](size_t m, py::array_t data3) { From 598843ec56c4c70cef3466b7cceb1f369835ef6a Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 11:52:36 +0100 Subject: [PATCH 10/32] Enhance backend discovery and selection in __init__.py; add smoke tests for backend modules --- pyalp/pyproject.toml | 2 +- pyalp/src/pyalp/__init__.py | 86 +++++++++++++++++++++------- tests/python/backend_smoke_runner.py | 64 +++++++++++++++++++++ tests/python/test_backends.py | 50 ++++++++++++++++ 4 files changed, 180 insertions(+), 22 deletions(-) create mode 100644 tests/python/backend_smoke_runner.py create mode 100644 tests/python/test_backends.py diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 18e6521c6..9e9ae70f5 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.9" +version = "0.8.11" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py index 11dca43eb..d6c00fd2d 100644 --- a/pyalp/src/pyalp/__init__.py +++ b/pyalp/src/pyalp/__init__.py @@ -6,6 +6,7 @@ import importlib import pathlib import sys +import os # compiled extension will be available after installation or build try: @@ -45,7 +46,7 @@ def 
get_algorithm_metadata(): return {} -__all__ = ["_pyalp", "version", "get_build_metadata", "get_algorithm_metadata"] +__all__ = ["version", "get_build_metadata", "get_algorithm_metadata", "get_backend", "list_backends"] def version(): @@ -56,25 +57,68 @@ def version(): # Expose available backend submodules (if present in the installed wheel) so users -# can import them as `from pyalp import pyalp_ref` or access `pyalp.pyalp_ref`. -_backend_candidates = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] -for _b in _backend_candidates: +# Backend discovery and selection helpers. +import pkgutil + + +def list_backends(): + """Return a sorted list of backend module names available in the package. + + This inspects the package directory for compiled extension modules with + expected names (e.g. pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp). + """ + pkgdir = pathlib.Path(__file__).parent + found = set() + # Use pkgutil.iter_modules on the package path to discover installed modules try: - _m = importlib.import_module(f"{__package__}.{_b}") - globals()[_b] = _m - if _b not in __all__: - __all__.append(_b) + for mod in pkgutil.iter_modules([str(pkgdir)]): + name = mod.name + if name in ("_pyalp",) or name.startswith("pyalp_"): + found.add(name) except Exception: - # ignore missing backends - continue - else: - # if imported successfully, also register a top-level alias so - # `import pyalp_ref` can work for users expecting the former layout. 
- try: - # ensure the module object is in globals - _mod = globals().get(_b) - if _mod is not None: - # register top-level module name to point to the submodule - sys.modules[_b] = _mod - except Exception: - pass + # fallback: scan filenames + for p in pkgdir.iterdir(): + if p.is_file() and p.suffix in (".so", ".pyd"): + stem = p.name.split(".", 1)[0] + if stem == "_pyalp" or stem.startswith("pyalp_"): + found.add(stem) + return sorted(found) + + +def import_backend(name: str): + """Import and return the backend module `pyalp.`. + + Raises ImportError with a helpful message if the backend is not present. + """ + try: + return importlib.import_module(f"{__package__}.{name}") + except Exception as e: + raise ImportError(f"Backend module '{name}' is not available: {e}") from e + + +def get_backend(name: str | None = None, preferred=("pyalp_omp", "pyalp_nonblocking", "pyalp_ref", "_pyalp")): + """Return an imported backend module. + + Selection order: + - If ``name`` is provided, import that backend or raise ImportError. + - If environment variable PYALP_BACKEND is set, try to import that. + - Otherwise iterate over ``preferred`` and return the first available. + + Raises ImportError if no backend is available. + """ + # explicit name wins + if name: + return import_backend(name) + + # environment override + env = os.environ.get("PYALP_BACKEND") + if env: + return import_backend(env) + + # try preferred list + available = set(list_backends()) + for pref in preferred: + if pref in available: + return import_backend(pref) + + raise ImportError(f"No pyalp backend available. Found: {sorted(available)}") diff --git a/tests/python/backend_smoke_runner.py b/tests/python/backend_smoke_runner.py new file mode 100644 index 000000000..89c52d759 --- /dev/null +++ b/tests/python/backend_smoke_runner.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +""" +Run a small conjugate-gradient smoke test for a single pyalp backend. 
+ +This script is intended to be invoked as a subprocess by tests so each backend +is exercised in a fresh interpreter (avoiding pybind11 registration conflicts). + +Usage: + python backend_smoke_runner.py pyalp_ref + +It prints the iterations, residual, and resulting solution vector to stdout. +""" +import sys +import importlib +import argparse +import numpy as np + + +def run_smoke(backend_name: str) -> int: + # Import backend module as pyalp., fallback to top-level name + try: + m = importlib.import_module(f"pyalp.{backend_name}") + except Exception: + try: + m = importlib.import_module(backend_name) + except Exception as e: + print(f"Failed to import backend '{backend_name}': {e}", file=sys.stderr) + return 2 + + idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) + jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) + vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) + b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) + x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) + r = np.zeros(5, dtype=np.float64) + u = np.zeros(5, dtype=np.float64) + tmp = np.zeros(5, dtype=np.float64) + + try: + A = m.Matrix(5, 5, idata, jdata, vdata) + xv = m.Vector(5, x) + bv = m.Vector(5, b) + rv = m.Vector(5, r) + uv = m.Vector(5, u) + tv = m.Vector(5, tmp) + + iterations, residual = m.conjugate_gradient(A, xv, bv, rv, uv, tv, 2000, 0) + print("iterations=", iterations, "residual=", residual) + print("x_result=", xv.to_numpy()) + except Exception as e: + print("Backend test failed:", e, file=sys.stderr) + return 3 + return 0 + + +def main(argv=None): + parser = argparse.ArgumentParser(description="Run pyalp backend smoke test") + parser.add_argument("backend", help="backend module name (e.g. 
pyalp_ref)") + args = parser.parse_args(argv) + return run_smoke(args.backend) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tests/python/test_backends.py b/tests/python/test_backends.py new file mode 100644 index 000000000..a04edc920 --- /dev/null +++ b/tests/python/test_backends.py @@ -0,0 +1,50 @@ +""" +Parametrized smoke test that runs the conjugate-gradient example against all +available pyalp backend modules (pyalp_ref, pyalp_omp, pyalp_nonblocking). + +This is adapted from tests/python/test.py but runs the same assertions for +each backend installed in the `pyalp` package. If a backend is not present in +the wheel, the test is skipped. +""" +import os +import shutil +import subprocess +import sys +import pytest +from pathlib import Path + + +BACKENDS = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] + + +def backend_exists_in_package(backend: str) -> bool: + # Check installed package dir for a backend shared object + try: + import pyalp + p = Path(pyalp.__file__).parent + patterns = [f"{backend}*.so", f"{backend}*.pyd"] + for pat in patterns: + if any(p.glob(pat)): + return True + except Exception: + return False + return False + + +@pytest.mark.parametrize("backend", BACKENDS) +def test_conjugate_gradient_backend_subprocess(backend): + if not backend_exists_in_package(backend): + pytest.skip(f"backend {backend} not present in installed package") + + # Run the smoke test in a fresh Python subprocess to avoid in-process + # pybind11 type registration conflicts between multiple extension modules. 
+ python_exe = sys.executable + runner = Path(__file__).with_name("backend_smoke_runner.py") + if not runner.exists(): + pytest.skip("backend smoke runner script not found") + proc = subprocess.run([python_exe, str(runner), backend], capture_output=True, text=True) + if proc.returncode != 0: + # Give helpful debug output + print(proc.stdout) + print(proc.stderr) + assert proc.returncode == 0, f"backend {backend} failed with return code {proc.returncode}\nSTDOUT:\n{proc.stdout}\nSTDERR:\n{proc.stderr}" From b6ea295a823a472c10e185c3971842749ff02335 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 13:25:26 +0100 Subject: [PATCH 11/32] Bump version to 0.8.12 in pyproject.toml; update backend import error handling in smoke test --- pyalp/pyproject.toml | 2 +- pyalp/src/pyalp/__init__.py | 29 ++++++---------------------- tests/python/backend_smoke_runner.py | 9 +++------ 3 files changed, 10 insertions(+), 30 deletions(-) diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 9e9ae70f5..cee9b807f 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.11" +version = "0.8.12" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py index d6c00fd2d..bca735792 100644 --- a/pyalp/src/pyalp/__init__.py +++ b/pyalp/src/pyalp/__init__.py @@ -8,29 +8,12 @@ import sys import os -# compiled extension will be available after installation or build -try: - from . import _pyalp # type: ignore -except Exception: # pragma: no cover - fallback for source tree - # Fallback: try to discover any compiled extension in the package directory - _pyalp = None - try: - pkgdir = pathlib.Path(__file__).parent - for p in pkgdir.iterdir(): - if p.suffix == ".so": - # PEP 3149 allows ABI tags in the filename (e.g. 
_pyalp.cpython-311-x86_64-linux-gnu.so) - # The module name is the part before the first dot. - modname = p.name.split(".", 1)[0] - try: - # Use absolute import to avoid import-time package-relative issues - m = importlib.import_module(f"{__package__}.{modname}") - _pyalp = m - break - except Exception: - # ignore and try next candidate - continue - except Exception: - _pyalp = None +# Do NOT auto-import any compiled backend at package import time. +# Importing compiled extension modules here could cause pybind11 type +# registration conflicts if multiple backends are present. Users should +# explicitly select a backend via `get_backend()` or import a specific +# submodule (e.g. `import pyalp.pyalp_ref`). +_pyalp = None # compiled metadata will be available after installation or build try: diff --git a/tests/python/backend_smoke_runner.py b/tests/python/backend_smoke_runner.py index 89c52d759..697aaf94f 100644 --- a/tests/python/backend_smoke_runner.py +++ b/tests/python/backend_smoke_runner.py @@ -20,12 +20,9 @@ def run_smoke(backend_name: str) -> int: # Import backend module as pyalp., fallback to top-level name try: m = importlib.import_module(f"pyalp.{backend_name}") - except Exception: - try: - m = importlib.import_module(backend_name) - except Exception as e: - print(f"Failed to import backend '{backend_name}': {e}", file=sys.stderr) - return 2 + except Exception as e: + print(f"Failed to import backend 'pyalp.{backend_name}': {e}", file=sys.stderr) + return 2 idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) From 2e237188927d1948ef0bd717dadd720faeccf1bd Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 13:57:42 +0100 Subject: [PATCH 12/32] Add scripts for inspecting installed pyalp package and running backend smoke tests --- .github/scripts/inspect_installed_pyalp.py | 23 ++++++++ .../scripts/run_backend_smoke_installed.py | 36 
++++++++++++ .github/workflows/pyalp-publish.yml | 58 +++++++++++++++++++ pyalp/pyproject.toml | 2 +- 4 files changed, 118 insertions(+), 1 deletion(-) create mode 100644 .github/scripts/inspect_installed_pyalp.py create mode 100644 .github/scripts/run_backend_smoke_installed.py diff --git a/.github/scripts/inspect_installed_pyalp.py b/.github/scripts/inspect_installed_pyalp.py new file mode 100644 index 000000000..da8eda299 --- /dev/null +++ b/.github/scripts/inspect_installed_pyalp.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +"""Print basic information about the installed `pyalp` package. + +This script is intended to be invoked from CI after installing the package +from TestPyPI. It prints the package file, available binary modules, and +the runtime build metadata exposed by the package. +""" +import pkgutil +import sys + +try: + import pyalp +except Exception: + print('ERROR: failed to import pyalp', file=sys.stderr) + raise + +print('pyalp package:', getattr(pyalp, '__file__', None)) +print('available modules in package:', [m.name for m in pkgutil.iter_modules(pyalp.__path__)]) +try: + print('build metadata:', pyalp.get_build_metadata()) +except Exception as e: + print('metadata error:', e) +print('listed backends via helper:', pyalp.list_backends()) diff --git a/.github/scripts/run_backend_smoke_installed.py b/.github/scripts/run_backend_smoke_installed.py new file mode 100644 index 000000000..102b4737b --- /dev/null +++ b/.github/scripts/run_backend_smoke_installed.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +"""Run the repo's backend smoke runner against an installed pyalp package. + +This script is intended to be invoked from CI after installing pyalp from +TestPyPI. It accepts a single argument (backend name) and will skip if that +backend is not present in the installed package. 
+""" +import sys +import subprocess + +try: + import pyalp +except Exception: + print('ERROR: failed to import pyalp', file=sys.stderr) + raise + + +def main(argv): + if len(argv) < 2: + print('Usage: run_backend_smoke_installed.py ', file=sys.stderr) + return 2 + backend = argv[1] + backends = pyalp.list_backends() + print('discovered backends:', backends) + if backend not in backends: + print(f'backend {backend} not present in installed package, skipping') + return 0 + + rc = subprocess.call([sys.executable, 'tests/python/backend_smoke_runner.py', backend]) + if rc != 0: + print(f'backend {backend} smoke runner failed with exit {rc}', file=sys.stderr) + return rc + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 6f3bc8613..ae06e24be 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -208,6 +208,11 @@ jobs: permissions: id-token: write steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Download all wheels uses: actions/download-artifact@v4 with: @@ -220,3 +225,56 @@ jobs: repository-url: https://test.pypi.org/legacy/ packages-dir: dist/ verbose: true + + - name: Skip in-publish verification + shell: bash + run: | + echo "Installation verification moved to 'verify-installed' job" + + verify-installed: + needs: publish + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + backend: [pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp] + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Verify installed backend + shell: bash + env: + BACKEND: ${{ matrix.backend }} + run: | + set -euo pipefail + # Determine package version from pyalp/pyproject.toml + PYALP_VERSION=$(grep -E '^version\s*=\s*"' 
pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^\"]+)".*/\1/') + echo "Testing pyalp version: ${PYALP_VERSION}" + + PY=$(which python3 || which python) + echo "Using python: ${PY}" + VENV_DIR="./.venv_test" + rm -rf "${VENV_DIR}" + ${PY} -m venv "${VENV_DIR}" + source "${VENV_DIR}/bin/activate" + python -m pip install --upgrade pip setuptools wheel + + # Install from TestPyPI (use extra-index-url to allow dependencies from PyPI) + echo "Installing pyalp==${PYALP_VERSION} from TestPyPI" + python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyalp==${PYALP_VERSION} --no-deps -v + + # Inspect installed package using the script moved out of the workflow + echo "Inspecting installed package" + python .github/scripts/inspect_installed_pyalp.py + + # Run the smoke runner script for the backend for this matrix job + echo "Running backend smoke runner for ${BACKEND}" + python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index cee9b807f..b59f0b83d 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.12" +version = "0.8.13" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 4f7602637394ead7addeba44e06faff302db4d6a Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 16:01:47 +0100 Subject: [PATCH 13/32] Update pyalp version to 0.8.14 and include numpy in dependencies for wheel building --- .github/workflows/pyalp-publish.yml | 2 +- pyalp/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index ae06e24be..1bcdc0182 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -265,7 +265,7 @@ jobs: rm -rf 
"${VENV_DIR}" ${PY} -m venv "${VENV_DIR}" source "${VENV_DIR}/bin/activate" - python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade pip setuptools wheel numpy # Install from TestPyPI (use extra-index-url to allow dependencies from PyPI) echo "Installing pyalp==${PYALP_VERSION} from TestPyPI" diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index b59f0b83d..d3e29d540 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.13" +version = "0.8.14" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From fd8789e283b394bb039809cc63547e83d7045186 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 16:24:22 +0100 Subject: [PATCH 14/32] Add wait_for_testpypi_release.sh script and update pyalp version to 0.8.15 --- .github/scripts/wait_for_testpypi_release.sh | 51 ++++++++++++++++++++ .github/workflows/pyalp-publish.yml | 4 +- pyalp/pyproject.toml | 2 +- 3 files changed, 55 insertions(+), 2 deletions(-) create mode 100644 .github/scripts/wait_for_testpypi_release.sh diff --git a/.github/scripts/wait_for_testpypi_release.sh b/.github/scripts/wait_for_testpypi_release.sh new file mode 100644 index 000000000..26df59835 --- /dev/null +++ b/.github/scripts/wait_for_testpypi_release.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +set -euo pipefail +# Wait until a given package/version appears on TestPyPI (JSON API), with retries. 
+# Usage: wait_for_testpypi_release.sh [max_attempts] [sleep_seconds] + +pkg=${1:-} +ver=${2:-} +max_attempts=${3:-12} +sleep_secs=${4:-10} + +if [ -z "$pkg" ] || [ -z "$ver" ]; then + echo "Usage: $0 [max_attempts] [sleep_seconds]" >&2 + exit 2 +fi + +url="https://test.pypi.org/pypi/${pkg}/json" +echo "Waiting for ${pkg}==${ver} to appear on TestPyPI (polling ${url})" + +attempt=1 +while [ $attempt -le $max_attempts ]; do + echo "Attempt ${attempt}/${max_attempts}..." + # fetch JSON; tolerate transient network errors by not exiting on curl non-zero + body=$(curl -sS --max-time 10 "${url}" || true) + if [ -n "$body" ]; then + found=$(printf "%s" "$body" | python3 - <&2 +exit 1 diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 1bcdc0182..705e0a3f1 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -267,7 +267,9 @@ jobs: source "${VENV_DIR}/bin/activate" python -m pip install --upgrade pip setuptools wheel numpy - # Install from TestPyPI (use extra-index-url to allow dependencies from PyPI) + # Wait for the published package to propagate to TestPyPI, then install + echo "Waiting for pyalp==${PYALP_VERSION} to appear on TestPyPI" + .github/scripts/wait_for_testpypi_release.sh pyalp "${PYALP_VERSION}" 18 10 echo "Installing pyalp==${PYALP_VERSION} from TestPyPI" python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyalp==${PYALP_VERSION} --no-deps -v diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index d3e29d540..d8fa00194 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.14" +version = "0.8.15" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From e1cc5992c7e42ede028219c3871ad7294fe9556e Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: 
Tue, 28 Oct 2025 16:30:24 +0100 Subject: [PATCH 15/32] Make wait_for_testpypi_release.sh executable --- .github/scripts/wait_for_testpypi_release.sh | 0 pyalp/pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 .github/scripts/wait_for_testpypi_release.sh diff --git a/.github/scripts/wait_for_testpypi_release.sh b/.github/scripts/wait_for_testpypi_release.sh old mode 100644 new mode 100755 diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index d8fa00194..b150f1a23 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.15" +version = "0.8.16" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 13ce446c4532a851c4dc186ada79d33af7f844ff Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 28 Oct 2025 16:46:29 +0100 Subject: [PATCH 16/32] Update wait_for_testpypi_release.sh to improve response handling and retry logic; bump version to 0.8.17 in pyproject.toml --- .github/scripts/wait_for_testpypi_release.sh | 23 +++- pyalp/README.md | 125 +++++++++++++++++-- pyalp/pyproject.toml | 2 +- 3 files changed, 137 insertions(+), 13 deletions(-) diff --git a/.github/scripts/wait_for_testpypi_release.sh b/.github/scripts/wait_for_testpypi_release.sh index 26df59835..e65412a09 100755 --- a/.github/scripts/wait_for_testpypi_release.sh +++ b/.github/scripts/wait_for_testpypi_release.sh @@ -22,10 +22,25 @@ while [ $attempt -le $max_attempts ]; do # fetch JSON; tolerate transient network errors by not exiting on curl non-zero body=$(curl -sS --max-time 10 "${url}" || true) if [ -n "$body" ]; then - found=$(printf "%s" "$body" | python3 - <&2 +exit 1 + releases = j.get('releases', {}) print('1' if '${ver}' in releases else '0') except Exception: diff --git a/pyalp/README.md b/pyalp/README.md index c975439b3..6a4e6d81e 100644 --- a/pyalp/README.md 
+++ b/pyalp/README.md @@ -16,22 +16,59 @@ source venv/bin/activate pip install --index-url https://test.pypi.org/simple/ --no-deps pyalp ``` +Basic usage +----------- +# pyalp (packaged) + +This directory contains the Python package layout for the `pyalp` bindings +that expose parts of the ALP GraphBLAS project via pybind11. + +Quick start +----------- + +Create and activate a virtual environment, then install the package (example +using TestPyPI): + +```bash +python -m venv venv +source venv/bin/activate +pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyalp +``` + Basic usage ----------- +The package exposes a small set of helpers and one or more compiled backend +modules. Use these helpers to list and select available backends and to read +runtime build metadata: + ```python import pyalp -print('pyalp version:', pyalp.version()) -print('build metadata:', pyalp.get_build_metadata()) -print('algorithm readme (first 200 chars):') -print(pyalp.get_algorithm_metadata().get('readme','')[:200]) +print('pyalp build metadata:', pyalp.get_build_metadata()) +print('available backends:', pyalp.list_backends()) + +# Import a specific backend module (returns the compiled module) +backend = pyalp.get_backend('pyalp_ref') # or 'pyalp_omp', 'pyalp_nonblocking' +print('backend module:', backend) ``` +Backends and import caveat +-------------------------- + +Wheels may include multiple compiled backend modules (for example +`pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`). Historically, importing +multiple different compiled backends in the same Python process could raise +pybind11 registration errors (types duplicated). The bindings now use +`py::module_local()` for core wrapper types, which reduces collisions, but if +you encounter issues importing more than one backend in-process, prefer +testing each backend in a separate process (the supplied test runner does +this). 
+ Runtime metadata ---------------- -The package provides a small runtime metadata module generated at build time -from CMake. Useful keys in `pyalp.get_build_metadata()` include: +The package provides a metadata module generated at build time by CMake. Use +`pyalp.get_build_metadata()` to access keys such as: - `version` — pyalp package version - `build_type` — CMake build type used (e.g., Release) @@ -39,8 +76,80 @@ from CMake. Useful keys in `pyalp.get_build_metadata()` include: - `alp_git_commit` / `alp_git_branch` — Git information captured by CI - `license` — detected repository license (e.g. Apache-2.0) -`pyalp.get_algorithm_metadata()` contains available algorithm/backends and -also includes a `readme` key with the package README contents. +`pyalp.get_algorithm_metadata()` contains algorithm/backends info and a +`readme` key with packaged README contents. + +Minimal example — conjugate gradient (small test) +------------------------------------------------ + +Save the following as `test_cg.py` and run `python test_cg.py` after installing +`pyalp` and `numpy`. The example shows selecting a backend explicitly via +`pyalp.get_backend()` and then using the backend's `Matrix`, `Vector`, and +`conjugate_gradient` API. + +```python +#!/usr/bin/env python3 +""" +Test script for the pyalp backend (example uses the OpenMP backend name +`pyalp_omp`, but you can use `pyalp_ref` or another available backend). 
+ +Usage: + python test_cg.py + +Dependencies: + - numpy + - pyalp (installed and providing a backend such as pyalp_omp) +""" + +import numpy as np +import pyalp + +# Choose the backend module (change name if you want a different backend) +pyalp = pyalp.get_backend('pyalp_omp') # or 'pyalp_ref', 'pyalp_nonblocking' + +# Generate a small sparse linear system using numpy arrays +N, M = 5, 5 +idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) +jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) +vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) +b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) +x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) +r = np.zeros(5, dtype=np.float64) +u = np.zeros(5, dtype=np.float64) +tmp = np.zeros(5, dtype=np.float64) + +# Create the pyalp Matrix and Vector objects +alpmatrixA = pyalp.Matrix(5, 5, idata, jdata, vdata) +alpvectorx = pyalp.Vector(5, x) +alpvectorb = pyalp.Vector(5, b) +alpvectorr = pyalp.Vector(5, r) +alpvectoru = pyalp.Vector(5, u) +alpvectortmp = pyalp.Vector(5, tmp) + +maxiterations = 2000 +verbose = 1 + +# Solve the linear system using the conjugate gradient method in the backend +iterations, residual = pyalp.conjugate_gradient( + alpmatrixA, + alpvectorx, + alpvectorb, + alpvectorr, + alpvectoru, + alpvectortmp, + maxiterations, + verbose, +) +print('iterations =', iterations) +print('residual =', residual) + +# Convert the result vector to a numpy array and print it +x_result = alpvectorx.to_numpy() +print('x_result =', x_result) + +# Check if the result is close to the expected solution +assert np.allclose(x_result, np.array([1.0, 1.0, 0.0, 0.13598679, -0.88396565])), 'solution mismatch' +``` Packaging notes (for maintainers) -------------------------------- diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index b150f1a23..8f2984f07 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ 
-4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.16" +version = "0.8.17" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From d2f2899eba325410758ba394f22eb024b3e0a987 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 10:04:15 +0100 Subject: [PATCH 17/32] Add promote-to-pypi workflow and update pyalp version to 0.8.18; enhance publishing steps --- .github/workflows/promote-to-pypi.yml | 36 +++++++++++++++++++++++++++ .github/workflows/pyalp-publish.yml | 34 +++++++++++++++++++++++++ pyalp/pyproject.toml | 2 +- 3 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/promote-to-pypi.yml diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml new file mode 100644 index 000000000..d38eb22af --- /dev/null +++ b/.github/workflows/promote-to-pypi.yml @@ -0,0 +1,36 @@ +name: Promote release to PyPI + +on: + workflow_dispatch: + inputs: + tag: + description: 'Git tag / release to promote (e.g. 
pyalp.v0.8.14)' + required: true + +jobs: + promote: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - name: Checkout (for local scripts) + uses: actions/checkout@v4 + + - name: Download release assets + uses: softprops/action-download-release@v1 + with: + tag: ${{ github.event.inputs.tag }} + # downloads into ./release_assets by default + + - name: List downloaded assets + run: | + echo "Assets in release_assets:" + ls -la release_assets || true + + - name: Publish to PyPI (alp-graphblas) + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: release_assets/ + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 705e0a3f1..e2bca6f58 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -207,6 +207,7 @@ jobs: url: https://test.pypi.org/p/pyalp permissions: id-token: write + contents: write steps: - name: Checkout repository (for tests) uses: actions/checkout@v4 @@ -226,6 +227,13 @@ jobs: packages-dir: dist/ verbose: true + - name: Create GitHub Release and upload wheels + uses: softprops/action-gh-release@v1 + with: + tag_name: ${{ github.ref_name }} + name: ${{ github.ref_name }} + files: dist/*.whl + - name: Skip in-publish verification shell: bash run: | @@ -280,3 +288,29 @@ jobs: # Run the smoke runner script for the backend for this matrix job echo "Running backend smoke runner for ${BACKEND}" python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" + + publish-to-pypi: + needs: verify-installed + runs-on: ubuntu-latest + # This job publishes the already-built artifacts to the real PyPI index. + # It requires a PyPI API token stored in the repository secrets as PYPI_API_TOKEN. 
+ steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: pyalp-wheels-* + merge-multiple: true + + - name: Publish to PyPI (alp-graphblas) + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: dist/ + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 8f2984f07..6e0a25586 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.17" +version = "0.8.18" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 472ecc63faab5e5d7575dc71a9f2ac69b5dd42a9 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 10:22:31 +0100 Subject: [PATCH 18/32] Update promote-to-pypi workflow permissions and bump pyalp version to 0.8.19 --- .github/workflows/promote-to-pypi.yml | 9 +++++++++ pyalp/pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index d38eb22af..32544b2d8 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -7,10 +7,19 @@ on: description: 'Git tag / release to promote (e.g. pyalp.v0.8.14)' required: true +# Request OIDC id-token permissions at workflow level so actions can use +# the GitHub Actions OIDC provider. The pypa publish action requires this +# for trusted publisher flow when using repository-provided credentials. +permissions: + id-token: write + contents: read + jobs: promote: runs-on: ubuntu-latest + # Also explicitly request id-token at the job level to be extra clear. 
permissions: + id-token: write contents: read steps: - name: Checkout (for local scripts) diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 6e0a25586..3d9d94f6d 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.18" +version = "0.8.19" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From f1e0da543dc61b1c839abc67a9ea9dfb85fa8b63 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 10:31:25 +0100 Subject: [PATCH 19/32] Update publish-to-pypi job permissions and bump version to 0.8.20 in pyproject.toml --- .github/workflows/pyalp-publish.yml | 3 +++ pyalp/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index e2bca6f58..6dbc14b97 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -292,6 +292,9 @@ jobs: publish-to-pypi: needs: verify-installed runs-on: ubuntu-latest + permissions: + id-token: write + contents: read # This job publishes the already-built artifacts to the real PyPI index. # It requires a PyPI API token stored in the repository secrets as PYPI_API_TOKEN. 
steps: diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 3d9d94f6d..6f678f4d9 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.19" +version = "0.8.20" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 8b5a52595917609083e8ff048681a8db3a4a6e3f Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 10:42:48 +0100 Subject: [PATCH 20/32] Bump version to 0.8.21 in pyproject.toml and disable publish-to-pypi job in workflow --- .github/workflows/pyalp-publish.yml | 4 ++++ pyalp/pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 6dbc14b97..9198a25cf 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -290,6 +290,10 @@ jobs: python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" publish-to-pypi: + # Disabled by default to avoid triggering PyPI uploads from this workflow. + # PyPI publisher was configured to accept uploads from `promote-to-pypi.yml`. + # Keep the job present for maintainers, but skip execution unless intentionally enabled. 
+ if: false needs: verify-installed runs-on: ubuntu-latest permissions: diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 6f678f4d9..0c6ce341a 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.20" +version = "0.8.21" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 020a00efe1eeca62ebfcab00dc0d53d8e807c1c6 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 11:38:42 +0100 Subject: [PATCH 21/32] Update promote-to-pypi workflow to accept tags from push events and bump version to 0.8.22 in pyproject.toml --- .github/workflows/promote-to-pypi.yml | 10 +++++++++- pyalp/pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index 32544b2d8..b3b66af51 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -6,6 +6,9 @@ on: tag: description: 'Git tag / release to promote (e.g. pyalp.v0.8.14)' required: true + push: + tags: + - 'pyalp.v*' # Request OIDC id-token permissions at workflow level so actions can use # the GitHub Actions OIDC provider. The pypa publish action requires this @@ -17,6 +20,10 @@ permissions: jobs: promote: runs-on: ubuntu-latest + # Require approval from the `production` environment before the job can + # access environment-scoped secrets (e.g. the PyPI API token). Create the + # environment in the repository settings and add the secret `PYPI_API_TOKEN`. + environment: production # Also explicitly request id-token at the job level to be extra clear. 
permissions: id-token: write @@ -28,7 +35,8 @@ jobs: - name: Download release assets uses: softprops/action-download-release@v1 with: - tag: ${{ github.event.inputs.tag }} + # Accept tag from workflow_dispatch input OR from the push event's ref name + tag: ${{ github.event.inputs.tag || github.ref_name }} # downloads into ./release_assets by default - name: List downloaded assets diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 0c6ce341a..056c89ecc 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.21" +version = "0.8.22" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From b55113e656831a3e97ed236d1f5f2737409831cc Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 11:48:59 +0100 Subject: [PATCH 22/32] Update download release assets step in promote-to-pypi workflow and bump version to 0.8.23 in pyproject.toml --- .github/workflows/promote-to-pypi.yml | 27 +++++++++++++++++++++------ pyalp/pyproject.toml | 2 +- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index b3b66af51..a7b47c029 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -32,12 +32,27 @@ jobs: - name: Checkout (for local scripts) uses: actions/checkout@v4 - - name: Download release assets - uses: softprops/action-download-release@v1 - with: - # Accept tag from workflow_dispatch input OR from the push event's ref name - tag: ${{ github.event.inputs.tag || github.ref_name }} - # downloads into ./release_assets by default + - name: Download release assets (via GitHub API) + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + TAG: ${{ github.event.inputs.tag || github.ref_name }} + run: | + set -euo pipefail + echo 
"Downloading release assets for ${REPO} tag ${TAG}" + mkdir -p release_assets + # Fetch release metadata for the tag + release_json=$(curl -sSf -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}") + if [ -z "${release_json}" ]; then + echo "No release metadata found for tag ${TAG}" >&2 + exit 1 + fi + # Iterate assets and download each one using the assets API (requires Accept header) + echo "$release_json" | jq -r '.assets[] | [.id, .name] | @tsv' | while IFS=$'\t' read -r id name; do + echo "Downloading asset: ${name} (id ${id})" + curl -sSfL -H "Authorization: Bearer ${GITHUB_TOKEN}" -H "Accept: application/octet-stream" "https://api.github.com/repos/${REPO}/releases/assets/${id}" -o "release_assets/${name}" + done + echo "Downloaded files:" && ls -la release_assets || true - name: List downloaded assets run: | diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 056c89ecc..fe02e1218 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.22" +version = "0.8.23" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From cd04f5c5bbc7b120daced34f7b1c8ab256ed7ffa Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 13:51:04 +0100 Subject: [PATCH 23/32] Update promote-to-pypi workflow to include diagnostic step for package name and version, and bump version to 0.8.24 in pyproject.toml --- .github/workflows/promote-to-pypi.yml | 4 ++++ pyalp/pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index a7b47c029..a6177ed87 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -59,6 +59,10 @@ jobs: echo "Assets in release_assets:" ls -la release_assets || true + - name: Show 
package name and version (diagnostic) + run: | + python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" + - name: Publish to PyPI (alp-graphblas) uses: pypa/gh-action-pypi-publish@release/v1 with: diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index fe02e1218..dd5cb1c95 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.23" +version = "0.8.24" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From a69c32709ea9c134229c1f9366c7cced395566fd Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 15:02:37 +0100 Subject: [PATCH 24/32] Update package version to 0.8.25 in pyproject.toml and adjust diagnostic step in promote-to-pypi workflow --- .github/workflows/promote-to-pypi.yml | 7 +++---- pyalp/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index a6177ed87..765393db2 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -58,10 +58,9 @@ jobs: run: | echo "Assets in release_assets:" ls -la release_assets || true - - - name: Show package name and version (diagnostic) - run: | - python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name 
if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" + - name: Show package name and version (diagnostic) + run: | + python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" - name: Publish to PyPI (alp-graphblas) uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index dd5cb1c95..93b960030 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pyalp" -version = "0.8.24" +version = "0.8.25" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 32e1ec18a1783801a223ab39d1e7a9c70ead7841 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 16:20:34 +0100 Subject: [PATCH 25/32] Rename package from pyalp to alp-graphblas and bump version to 0.8.26 in pyproject.toml; deprecate legacy workflow pyalp-publish.yml in favor of publish-to-testpypi.yml --- .github/workflows/publish-to-testpypi.yml | 323 ++++++++++++++++++++++ .github/workflows/pyalp-publish.yml | 320 +-------------------- pyalp/pyproject.toml | 4 +- 3 files changed, 
330 insertions(+), 317 deletions(-) create mode 100644 .github/workflows/publish-to-testpypi.yml diff --git a/.github/workflows/publish-to-testpypi.yml b/.github/workflows/publish-to-testpypi.yml new file mode 100644 index 000000000..0ed67873f --- /dev/null +++ b/.github/workflows/publish-to-testpypi.yml @@ -0,0 +1,323 @@ +name: alp-graphblas wheels (cibuildwheel) + +on: + push: + tags: [ 'pyalp.v*' ] + workflow_dispatch: {} + +jobs: + build-wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout (with submodules) + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + + - name: Verify pinned pybind11 submodule commit + if: runner.os == 'Linux' || runner.os == 'macOS' + shell: bash + run: | + set -euo pipefail + if [ -f pyalp/PINNED_PYBIND11 ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/PINNED_PYBIND11) + elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/extern/pybind11/PINNED_COMMIT) + else + echo "No pinned commit file found (pyalp/PINNED_PYBIND11 or pyalp/extern/pybind11/PINNED_COMMIT)" >&2 + exit 2 + fi + ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || true) + echo "Expected pybind11 commit: $PINNED_SHA" + echo "Found pybind11 commit: $ACTUAL" + test "$ACTUAL" = "$PINNED_SHA" + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==2.21.3 + + - name: Build wheels + env: + CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-*" + CIBW_SKIP: "*-musllinux* pp*" + CIBW_ARCHS_LINUX: "x86_64" + CIBW_ARCHS_MACOS: "arm64" + CIBW_BUILD_VERBOSITY: "1" + # Ensure submodule headers are used by setup.py + CIBW_ENVIRONMENT: > + PYTHONUTF8=1 + CIBW_ENVIRONMENT_MACOS: > + PYTHONUTF8=1 + MACOSX_DEPLOYMENT_TARGET=15.0 + # Prebuild the CMake-based 
extension via top-level CMake so all variables/options are defined. + CIBW_BEFORE_BUILD: | + python -m pip install --upgrade pip + python -m pip install cmake ninja + echo "[cibw] Working directory and contents:"; pwd; ls -la + echo "[cibw] Checking for pyalp CMakeLists:"; ls -la pyalp || true; if [ -f pyalp/CMakeLists.txt ]; + then echo "found pyalp/CMakeLists.txt"; else echo "pyalp/CMakeLists.txt NOT found"; fi + # If the wrapper CMakeLists.txt wasn't copied (e.g., untracked file when cibuildwheel uses git ls-files), create a minimal shim + if [ ! -f pyalp/CMakeLists.txt ]; + then + echo "[cibw] Creating pyalp/CMakeLists.txt shim (add_subdirectory(src)) for wheel build" + printf '%s\n' 'add_subdirectory(src)' > pyalp/CMakeLists.txt + fi + # Ensure no stale extension from a previous ABI remains in the source tree + rm -f pyalp/src/pyalp/_pyalp*.so || true + # Overwrite root setup.py inside the container to delegate packaging to pyalp/setup.py (keep git root clean) + printf '%s\n' "import os, runpy; ROOT=os.path.dirname(os.path.abspath(__file__)); PKG=os.path.join(ROOT, 'pyalp'); os.chdir(PKG); runpy.run_path(os.path.join(PKG, 'setup.py'), run_name='__main__')" > setup.py + # Configure from repository root; enable pyalp and choose NUMA setting per-platform + PYEXEC=$(python -c 'import sys; print(sys.executable)') + # Gather Git metadata and package version to pass into CMake so the + # generated runtime metadata contains accurate values even in CI. 
+ # Prefer environment-provided values when available (GITHUB_SHA/REF_NAME) + ALP_GIT_COMMIT="${GITHUB_SHA:-$(git rev-parse --short HEAD)}" + # GITHUB_REF_NAME is available in Actions; fallback to git branch + ALP_GIT_BRANCH="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}" + # Try to pick a semantic/alp version from tags (prefer nearest tag) + ALP_VERSION=$(git describe --tags --match "v*" --abbrev=0 2>/dev/null || true) + if [ -z "${ALP_VERSION}" ]; then + # Fall back to a describe-style value + ALP_VERSION=$(git describe --tags --match "v*" --always 2>/dev/null || echo "unknown") + fi + # Read the pyalp package version from pyalp/pyproject.toml (simple grep) + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') + PYALP_VERSION=${PYALP_VERSION:-0.0.0} + echo "[cibw] Derived ALP_VERSION=${ALP_VERSION}, ALP_GIT_COMMIT=${ALP_GIT_COMMIT}, ALP_GIT_BRANCH=${ALP_GIT_BRANCH}, PYALP_VERSION=${PYALP_VERSION}" + # Use a per-ABI build directory to avoid cross-ABI contamination + ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') + BUILD_DIR="build/${ABI_TAG}" + # Export the per-ABI build dir so setup.py (inside the wheel build) can find + # the CMake-generated metadata file. cibuildwheel runs this before_build + # inside the container and environment variables exported here are visible + # to the subsequent packaging steps in that container. + export CMAKE_BUILD_DIR="${BUILD_DIR}" + echo "[cibw] Exported CMAKE_BUILD_DIR=${CMAKE_BUILD_DIR}" + # Enable NUMA on Linux runners (for linux wheels), keep disabled elsewhere. + if [ "$(uname -s)" = "Linux" ]; + then + echo "[cibw] Linux build container detected — attempting to install NUMA dev libs" + # Try package managers commonly present in manylinux containers. 
Ignore failures + if command -v yum >/dev/null 2>&1; + then + yum -y install numactl-devel || true + elif command -v apt-get >/dev/null 2>&1; + then + apt-get update || true + apt-get install -y libnuma-dev || true + fi + NUMA_FLAG="-DWITH_NUMA=ON" + else + # On macOS install Homebrew libomp but do NOT export CPPFLAGS/LDFLAGS. + # Exporting CPPFLAGS was the cause of incorrect header ordering; instead + # pass a CMake prefix hint so FindOpenMP can locate libomp without + # prepending include paths to the global compiler invocation. + if command -v brew >/dev/null 2>&1; + then + echo "[cibw] Homebrew detected — ensuring libomp is available" + # Only install if not already present to avoid reinstall warnings + if ! brew list libomp >/dev/null 2>&1; then + brew install libomp + fi + + # Locate libomp installation + if [ -d "/opt/homebrew/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/opt/homebrew/opt/libomp" + elif [ -d "/usr/local/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/usr/local/opt/libomp" + else + HOMEBREW_LIBOMP_DIR="" + fi + + if [ -n "${HOMEBREW_LIBOMP_DIR}" ]; then + CMAKE_PREFIX_HINT="-DCMAKE_PREFIX_PATH=${HOMEBREW_LIBOMP_DIR}" + echo "[cibw] Using libomp from ${HOMEBREW_LIBOMP_DIR}" + else + CMAKE_PREFIX_HINT="" + fi + fi + NUMA_FLAG="-DWITH_NUMA=OFF" + # Set macOS deployment target for arm64 to match libomp requirement + export MACOSX_DEPLOYMENT_TARGET=15.0 + OSX_DEPLOY_FLAG="-DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" + fi + # Clean build directory to prevent CMake caching issues + rm -rf "${BUILD_DIR}" + # On macOS, add flag to downgrade template keyword warning from error to warning + if [ "$(uname -s)" = "Darwin" ]; + then + MACOS_FLAGS="-DCMAKE_CXX_FLAGS=-Wno-error=missing-template-arg-list-after-template-kw" + else + MACOS_FLAGS="" + fi + # For wheel builds, request portable flags (avoid -march=native) and disable + # interprocedural optimization (LTO) to improve portability of the produced wheels. 
+ PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" + LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" + # Only enable OMP and nonblocking backends on Linux runners where libomp + # and required build support are available. macOS wheels will build the + # stable reference backend only to avoid SDK/ABI compile issues. + if [ "$(uname -s)" = "Linux" ]; then + BACKEND_FLAGS="-DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON" + BUILD_TARGETS="pyalp_ref pyalp_omp pyalp_nonblocking" + else + BACKEND_FLAGS="-DWITH_OMP_BACKEND=OFF -DWITH_NONBLOCKING_BACKEND=OFF" + BUILD_TARGETS="pyalp_ref" + fi + + cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} ${BACKEND_FLAGS} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" + cmake --build "${BUILD_DIR}" --target ${BUILD_TARGETS} --parallel + # Debug: show the generated metadata file (if present) to the CI logs + echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then + echo "[cibw] Found metadata file:"; ls -l "${CMAKE_BUILD_DIR}/pyalp_metadata.py" + echo "[cibw] First 100 lines of metadata:"; sed -n '1,100p' "${CMAKE_BUILD_DIR}/pyalp_metadata.py" || true + else + echo "[cibw] Metadata file not found at ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + fi + run: | + # Build from repository root so the full CMake project is available in the container + python -m cibuildwheel --output-dir wheelhouse . 
+ + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: alp-graphblas-wheels-${{ matrix.os }} + path: wheelhouse/*.whl + + publish: + needs: build-wheels + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/alp-graphblas + permissions: + id-token: write + contents: write + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Download all wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: alp-graphblas-wheels-* + merge-multiple: true + - name: Publish to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + packages-dir: dist/ + verbose: true + + - name: Create GitHub Release and upload wheels + uses: softprops/action-gh-release@v1 + with: + tag_name: ${{ github.ref_name }} + name: ${{ github.ref_name }} + files: dist/*.whl + + - name: Skip in-publish verification + shell: bash + run: | + echo "Installation verification moved to 'verify-installed' job" + + verify-installed: + needs: publish + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + backend: [pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp] + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Verify installed backend + shell: bash + env: + BACKEND: ${{ matrix.backend }} + run: | + set -euo pipefail + # Determine package version from pyalp/pyproject.toml + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^\"]+)".*/\1/') + echo "Testing alp-graphblas version: ${PYALP_VERSION}" + + PY=$(which python3 || which python) + echo "Using python: ${PY}" + VENV_DIR="./.venv_test" + rm -rf "${VENV_DIR}" + ${PY} -m venv "${VENV_DIR}" + source "${VENV_DIR}/bin/activate" + python -m pip install 
--upgrade pip setuptools wheel numpy + + # Wait for the published package to propagate to TestPyPI, then install + echo "Waiting for alp-graphblas==${PYALP_VERSION} to appear on TestPyPI" + .github/scripts/wait_for_testpypi_release.sh alp-graphblas "${PYALP_VERSION}" 18 10 + echo "Installing alp-graphblas==${PYALP_VERSION} from TestPyPI" + python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas==${PYALP_VERSION} --no-deps -v + + # Inspect installed package using the script moved out of the workflow + echo "Inspecting installed package" + python .github/scripts/inspect_installed_pyalp.py + + # Run the smoke runner script for the backend for this matrix job + echo "Running backend smoke runner for ${BACKEND}" + python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" + + publish-to-pypi: + # Disabled by default to avoid triggering PyPI uploads from this workflow. + # PyPI publisher was configured to accept uploads from `promote-to-pypi.yml`. + # Keep the job present for maintainers, but skip execution unless intentionally enabled. + if: false + needs: verify-installed + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + # This job publishes the already-built artifacts to the real PyPI index. + # It requires a PyPI API token stored in the repository secrets as PYPI_API_TOKEN. 
+ steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: alp-graphblas-wheels-* + merge-multiple: true + + - name: Publish to PyPI (alp-graphblas) + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: dist/ + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml index 9198a25cf..e7ca61de3 100644 --- a/.github/workflows/pyalp-publish.yml +++ b/.github/workflows/pyalp-publish.yml @@ -1,323 +1,13 @@ -name: pyalp wheels (cibuildwheel) +name: legacy pyalp-publish (DEPRECATED) +# This workflow was renamed to `publish-to-testpypi.yml` and is now deprecated. +# Keep a disabled stub for auditing/history to avoid accidental runs. on: - push: - tags: [ 'pyalp.v*' ] workflow_dispatch: {} jobs: - build-wheels: - name: Build wheels on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest] - steps: - - name: Checkout (with submodules) - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - - name: Verify pinned pybind11 submodule commit - if: runner.os == 'Linux' || runner.os == 'macOS' - shell: bash - run: | - set -euo pipefail - if [ -f pyalp/PINNED_PYBIND11 ]; - then - PINNED_SHA=$(tr -d '\n' < pyalp/PINNED_PYBIND11) - elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; - then - PINNED_SHA=$(tr -d '\n' < pyalp/extern/pybind11/PINNED_COMMIT) - else - echo "No pinned commit file found (pyalp/PINNED_PYBIND11 or pyalp/extern/pybind11/PINNED_COMMIT)" >&2 - exit 2 - fi - ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || true) - echo "Expected pybind11 commit: $PINNED_SHA" - echo "Found pybind11 commit: $ACTUAL" - test "$ACTUAL" = "$PINNED_SHA" - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 
'3.11' - - - name: Install cibuildwheel - run: | - python -m pip install --upgrade pip - python -m pip install cibuildwheel==2.21.3 - - - name: Build wheels - env: - CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-*" - CIBW_SKIP: "*-musllinux* pp*" - CIBW_ARCHS_LINUX: "x86_64" - CIBW_ARCHS_MACOS: "arm64" - CIBW_BUILD_VERBOSITY: "1" - # Ensure submodule headers are used by setup.py - CIBW_ENVIRONMENT: > - PYTHONUTF8=1 - CIBW_ENVIRONMENT_MACOS: > - PYTHONUTF8=1 - MACOSX_DEPLOYMENT_TARGET=15.0 - # Prebuild the CMake-based extension via top-level CMake so all variables/options are defined. - CIBW_BEFORE_BUILD: | - python -m pip install --upgrade pip - python -m pip install cmake ninja - echo "[cibw] Working directory and contents:"; pwd; ls -la - echo "[cibw] Checking for pyalp CMakeLists:"; ls -la pyalp || true; if [ -f pyalp/CMakeLists.txt ]; - then echo "found pyalp/CMakeLists.txt"; else echo "pyalp/CMakeLists.txt NOT found"; fi - # If the wrapper CMakeLists.txt wasn't copied (e.g., untracked file when cibuildwheel uses git ls-files), create a minimal shim - if [ ! -f pyalp/CMakeLists.txt ]; - then - echo "[cibw] Creating pyalp/CMakeLists.txt shim (add_subdirectory(src)) for wheel build" - printf '%s\n' 'add_subdirectory(src)' > pyalp/CMakeLists.txt - fi - # Ensure no stale extension from a previous ABI remains in the source tree - rm -f pyalp/src/pyalp/_pyalp*.so || true - # Overwrite root setup.py inside the container to delegate packaging to pyalp/setup.py (keep git root clean) - printf '%s\n' "import os, runpy; ROOT=os.path.dirname(os.path.abspath(__file__)); PKG=os.path.join(ROOT, 'pyalp'); os.chdir(PKG); runpy.run_path(os.path.join(PKG, 'setup.py'), run_name='__main__')" > setup.py - # Configure from repository root; enable pyalp and choose NUMA setting per-platform - PYEXEC=$(python -c 'import sys; print(sys.executable)') - # Gather Git metadata and package version to pass into CMake so the - # generated runtime metadata contains accurate values even in CI. 
- # Prefer environment-provided values when available (GITHUB_SHA/REF_NAME) - ALP_GIT_COMMIT="${GITHUB_SHA:-$(git rev-parse --short HEAD)}" - # GITHUB_REF_NAME is available in Actions; fallback to git branch - ALP_GIT_BRANCH="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}" - # Try to pick a semantic/alp version from tags (prefer nearest tag) - ALP_VERSION=$(git describe --tags --match "v*" --abbrev=0 2>/dev/null || true) - if [ -z "${ALP_VERSION}" ]; then - # Fall back to a describe-style value - ALP_VERSION=$(git describe --tags --match "v*" --always 2>/dev/null || echo "unknown") - fi - # Read the pyalp package version from pyalp/pyproject.toml (simple grep) - PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') - PYALP_VERSION=${PYALP_VERSION:-0.0.0} - echo "[cibw] Derived ALP_VERSION=${ALP_VERSION}, ALP_GIT_COMMIT=${ALP_GIT_COMMIT}, ALP_GIT_BRANCH=${ALP_GIT_BRANCH}, PYALP_VERSION=${PYALP_VERSION}" - # Use a per-ABI build directory to avoid cross-ABI contamination - ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') - BUILD_DIR="build/${ABI_TAG}" - # Export the per-ABI build dir so setup.py (inside the wheel build) can find - # the CMake-generated metadata file. cibuildwheel runs this before_build - # inside the container and environment variables exported here are visible - # to the subsequent packaging steps in that container. - export CMAKE_BUILD_DIR="${BUILD_DIR}" - echo "[cibw] Exported CMAKE_BUILD_DIR=${CMAKE_BUILD_DIR}" - # Enable NUMA on Linux runners (for linux wheels), keep disabled elsewhere. - if [ "$(uname -s)" = "Linux" ]; - then - echo "[cibw] Linux build container detected — attempting to install NUMA dev libs" - # Try package managers commonly present in manylinux containers. 
Ignore failures - if command -v yum >/dev/null 2>&1; - then - yum -y install numactl-devel || true - elif command -v apt-get >/dev/null 2>&1; - then - apt-get update || true - apt-get install -y libnuma-dev || true - fi - NUMA_FLAG="-DWITH_NUMA=ON" - else - # On macOS install Homebrew libomp but do NOT export CPPFLAGS/LDFLAGS. - # Exporting CPPFLAGS was the cause of incorrect header ordering; instead - # pass a CMake prefix hint so FindOpenMP can locate libomp without - # prepending include paths to the global compiler invocation. - if command -v brew >/dev/null 2>&1; - then - echo "[cibw] Homebrew detected — ensuring libomp is available" - # Only install if not already present to avoid reinstall warnings - if ! brew list libomp >/dev/null 2>&1; then - brew install libomp - fi - - # Locate libomp installation - if [ -d "/opt/homebrew/opt/libomp" ]; then - HOMEBREW_LIBOMP_DIR="/opt/homebrew/opt/libomp" - elif [ -d "/usr/local/opt/libomp" ]; then - HOMEBREW_LIBOMP_DIR="/usr/local/opt/libomp" - else - HOMEBREW_LIBOMP_DIR="" - fi - - if [ -n "${HOMEBREW_LIBOMP_DIR}" ]; then - CMAKE_PREFIX_HINT="-DCMAKE_PREFIX_PATH=${HOMEBREW_LIBOMP_DIR}" - echo "[cibw] Using libomp from ${HOMEBREW_LIBOMP_DIR}" - else - CMAKE_PREFIX_HINT="" - fi - fi - NUMA_FLAG="-DWITH_NUMA=OFF" - # Set macOS deployment target for arm64 to match libomp requirement - export MACOSX_DEPLOYMENT_TARGET=15.0 - OSX_DEPLOY_FLAG="-DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" - fi - # Clean build directory to prevent CMake caching issues - rm -rf "${BUILD_DIR}" - # On macOS, add flag to downgrade template keyword warning from error to warning - if [ "$(uname -s)" = "Darwin" ]; - then - MACOS_FLAGS="-DCMAKE_CXX_FLAGS=-Wno-error=missing-template-arg-list-after-template-kw" - else - MACOS_FLAGS="" - fi - # For wheel builds, request portable flags (avoid -march=native) and disable - # interprocedural optimization (LTO) to improve portability of the produced wheels. 
- PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" - LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" - # Only enable OMP and nonblocking backends on Linux runners where libomp - # and required build support are available. macOS wheels will build the - # stable reference backend only to avoid SDK/ABI compile issues. - if [ "$(uname -s)" = "Linux" ]; then - BACKEND_FLAGS="-DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON" - BUILD_TARGETS="pyalp_ref pyalp_omp pyalp_nonblocking" - else - BACKEND_FLAGS="-DWITH_OMP_BACKEND=OFF -DWITH_NONBLOCKING_BACKEND=OFF" - BUILD_TARGETS="pyalp_ref" - fi - - cmake -S . -B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} ${BACKEND_FLAGS} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" - cmake --build "${BUILD_DIR}" --target ${BUILD_TARGETS} --parallel - # Debug: show the generated metadata file (if present) to the CI logs - echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" - if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then - echo "[cibw] Found metadata file:"; ls -l "${CMAKE_BUILD_DIR}/pyalp_metadata.py" - echo "[cibw] First 100 lines of metadata:"; sed -n '1,100p' "${CMAKE_BUILD_DIR}/pyalp_metadata.py" || true - else - echo "[cibw] Metadata file not found at ${CMAKE_BUILD_DIR}/pyalp_metadata.py" - fi - run: | - # Build from repository root so the full CMake project is available in the container - python -m cibuildwheel --output-dir wheelhouse . 
- - - name: Upload wheels - uses: actions/upload-artifact@v4 - with: - name: pyalp-wheels-${{ matrix.os }} - path: wheelhouse/*.whl - - publish: - needs: build-wheels - runs-on: ubuntu-latest - environment: - name: testpypi - url: https://test.pypi.org/p/pyalp - permissions: - id-token: write - contents: write - steps: - - name: Checkout repository (for tests) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Download all wheels - uses: actions/download-artifact@v4 - with: - path: dist - pattern: pyalp-wheels-* - merge-multiple: true - - name: Publish to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - packages-dir: dist/ - verbose: true - - - name: Create GitHub Release and upload wheels - uses: softprops/action-gh-release@v1 - with: - tag_name: ${{ github.ref_name }} - name: ${{ github.ref_name }} - files: dist/*.whl - - - name: Skip in-publish verification - shell: bash - run: | - echo "Installation verification moved to 'verify-installed' job" - - verify-installed: - needs: publish - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - backend: [pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp] - steps: - - name: Checkout repository (for tests) - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - - name: Verify installed backend - shell: bash - env: - BACKEND: ${{ matrix.backend }} - run: | - set -euo pipefail - # Determine package version from pyalp/pyproject.toml - PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^\"]+)".*/\1/') - echo "Testing pyalp version: ${PYALP_VERSION}" - - PY=$(which python3 || which python) - echo "Using python: ${PY}" - VENV_DIR="./.venv_test" - rm -rf "${VENV_DIR}" - ${PY} -m venv "${VENV_DIR}" - source "${VENV_DIR}/bin/activate" - python -m pip install --upgrade pip setuptools wheel numpy - - 
# Wait for the published package to propagate to TestPyPI, then install - echo "Waiting for pyalp==${PYALP_VERSION} to appear on TestPyPI" - .github/scripts/wait_for_testpypi_release.sh pyalp "${PYALP_VERSION}" 18 10 - echo "Installing pyalp==${PYALP_VERSION} from TestPyPI" - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyalp==${PYALP_VERSION} --no-deps -v - - # Inspect installed package using the script moved out of the workflow - echo "Inspecting installed package" - python .github/scripts/inspect_installed_pyalp.py - - # Run the smoke runner script for the backend for this matrix job - echo "Running backend smoke runner for ${BACKEND}" - python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" - - publish-to-pypi: - # Disabled by default to avoid triggering PyPI uploads from this workflow. - # PyPI publisher was configured to accept uploads from `promote-to-pypi.yml`. - # Keep the job present for maintainers, but skip execution unless intentionally enabled. + noop: if: false - needs: verify-installed runs-on: ubuntu-latest - permissions: - id-token: write - contents: read - # This job publishes the already-built artifacts to the real PyPI index. - # It requires a PyPI API token stored in the repository secrets as PYPI_API_TOKEN. steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Download built wheels - uses: actions/download-artifact@v4 - with: - path: dist - pattern: pyalp-wheels-* - merge-multiple: true - - - name: Publish to PyPI (alp-graphblas) - uses: pypa/gh-action-pypi-publish@release/v1 - with: - packages-dir: dist/ - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + - run: echo "This workflow is deprecated; use publish-to-testpypi.yml instead." 
diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 93b960030..9e2b3a186 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -3,8 +3,8 @@ requires = ["setuptools>=61.0", "wheel", "pybind11>=2.6"] build-backend = "setuptools.build_meta" [project] -name = "pyalp" -version = "0.8.25" +name = "alp-graphblas" +version = "0.8.26" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From e68deaeb1ec23a6bd794eec70a4ce96a201326cc Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 17:07:57 +0100 Subject: [PATCH 26/32] Remove deprecated pyalp-publish workflow and update README for package installation; add numpy dependency in pyproject.toml --- .github/workflows/pyalp-publish.yml | 13 ------------- pyalp/README.md | 26 ++++++++++++++++---------- pyalp/pyproject.toml | 4 ++++ 3 files changed, 20 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/pyalp-publish.yml diff --git a/.github/workflows/pyalp-publish.yml b/.github/workflows/pyalp-publish.yml deleted file mode 100644 index e7ca61de3..000000000 --- a/.github/workflows/pyalp-publish.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: legacy pyalp-publish (DEPRECATED) - -# This workflow was renamed to `publish-to-testpypi.yml` and is now deprecated. -# Keep a disabled stub for auditing/history to avoid accidental runs. -on: - workflow_dispatch: {} - -jobs: - noop: - if: false - runs-on: ubuntu-latest - steps: - - run: echo "This workflow is deprecated; use publish-to-testpypi.yml instead." 
diff --git a/pyalp/README.md b/pyalp/README.md index 6a4e6d81e..aafcee8b2 100644 --- a/pyalp/README.md +++ b/pyalp/README.md @@ -8,12 +8,22 @@ Quick start ----------- Create and activate a virtual environment, then install the package (example -using TestPyPI): +using PyPI (recommended)): ```bash python -m venv venv source venv/bin/activate -pip install --index-url https://test.pypi.org/simple/ --no-deps pyalp +pip install alp-graphblas +``` + +If you want to try a pre-release from TestPyPI instead, use the TestPyPI +index but install the same package name `alp-graphblas` (pip will pull the +package and its dependencies from the given index): + +```bash +python -m venv venv +source venv/bin/activate +pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas ``` Basic usage ----------- @@ -26,14 +36,10 @@ that expose parts of the ALP GraphBLAS project via pybind11. Quick start ----------- -Create and activate a virtual environment, then install the package (example -using TestPyPI): - -```bash -python -m venv venv -source venv/bin/activate -pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pyalp -``` +Create and activate a virtual environment, then install the published package +`alp-graphblas` from PyPI or TestPyPI as shown above. Note: the import name +inside Python remains `pyalp` (the package provides the `pyalp` module), so +your code still does `import pyalp` after installation. 
Basic usage ----------- diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 9e2b3a186..125b4c26c 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -11,6 +11,10 @@ readme = "README.md" license = { text = "Apache-2.0" } requires-python = ">=3.8" +dependencies = [ + "numpy>=1.22", +] + [tool.setuptools.packages.find] where = ["src"] From 4dbc5700c26175669eeb4aa5ccdc1df3ee11c754 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 17:17:58 +0100 Subject: [PATCH 27/32] Bump package version to 0.8.28 in pyproject.toml --- pyalp/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 125b4c26c..062f8c7a6 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "alp-graphblas" -version = "0.8.26" +version = "0.8.28" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From eb45f6b8f9af501c685e0dd58a33f21d42c47ef7 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 17:27:43 +0100 Subject: [PATCH 28/32] Bump package version to 0.8.29 in pyproject.toml; update promote-to-pypi workflow to skip publish if no wheel assets are found --- .github/workflows/promote-to-pypi.yml | 22 ++++++++++++++++++---- pyalp/pyproject.toml | 2 +- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index 765393db2..4a9c6733e 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -42,10 +42,11 @@ jobs: echo "Downloading release assets for ${REPO} tag ${TAG}" mkdir -p release_assets # Fetch release metadata for the tag - release_json=$(curl -sSf -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}") - if [ -z "${release_json}" ]; then - echo 
"No release metadata found for tag ${TAG}" >&2 - exit 1 + release_json=$(curl -sS -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}" || true) + if [ -z "${release_json}" ] || [ "${release_json}" = "null" ]; then + echo "No release metadata found for tag ${TAG}; nothing to download. Skipping publish." + mkdir -p release_assets + exit 0 fi # Iterate assets and download each one using the assets API (requires Accept header) echo "$release_json" | jq -r '.assets[] | [.id, .name] | @tsv' | while IFS=$'\t' read -r id name; do @@ -58,11 +59,24 @@ jobs: run: | echo "Assets in release_assets:" ls -la release_assets || true + - name: Check for wheel assets + id: assets_check + run: | + set -euo pipefail + mkdir -p release_assets + count=0 + # Count wheel files (if any). Use glob safely. + shopt -s nullglob || true + files=(release_assets/*.whl) + count=${#files[@]} + echo "Found $count .whl files in release_assets" + echo "asset_count=$count" >> $GITHUB_OUTPUT - name: Show package name and version (diagnostic) run: | python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" - name: Publish to PyPI (alp-graphblas) + if: ${{ steps.assets_check.outputs.asset_count != '0' }} uses: pypa/gh-action-pypi-publish@release/v1 with: packages-dir: release_assets/ diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 062f8c7a6..997873bc6 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = 
"setuptools.build_meta" [project] name = "alp-graphblas" -version = "0.8.28" +version = "0.8.29" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From de02f0db213917541024615131c335a60d0a237f Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 17:32:06 +0100 Subject: [PATCH 29/32] Bump package version to 0.8.30 in pyproject.toml; update promote-to-pypi workflow to skip download if no release assets are found --- .github/workflows/promote-to-pypi.yml | 8 ++++++++ pyalp/pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index 4a9c6733e..0c655145b 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -48,6 +48,14 @@ jobs: mkdir -p release_assets exit 0 fi + # If there are no assets, skip downloading to avoid jq iterating over null + assets_len=$(echo "$release_json" | jq '.assets | length // 0') + if [ "$assets_len" -eq 0 ]; then + echo "Release ${TAG} has no assets (assets length = ${assets_len}); skipping download and publish." 
+ mkdir -p release_assets + exit 0 + fi + # Iterate assets and download each one using the assets API (requires Accept header) echo "$release_json" | jq -r '.assets[] | [.id, .name] | @tsv' | while IFS=$'\t' read -r id name; do echo "Downloading asset: ${name} (id ${id})" diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 997873bc6..6c3b5e494 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "alp-graphblas" -version = "0.8.29" +version = "0.8.30" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From a926e4b0a5e0a92a3bc63e59dc05ecb47e64c8ec Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Wed, 29 Oct 2025 17:46:54 +0100 Subject: [PATCH 30/32] Bump package version to 0.8.31 in pyproject.toml; update promote-to-pypi workflow to handle missing release metadata --- .github/workflows/promote-to-pypi.yml | 30 +++++---------------------- pyalp/pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 26 deletions(-) diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml index 0c655145b..14b02c2e1 100644 --- a/.github/workflows/promote-to-pypi.yml +++ b/.github/workflows/promote-to-pypi.yml @@ -42,18 +42,10 @@ jobs: echo "Downloading release assets for ${REPO} tag ${TAG}" mkdir -p release_assets # Fetch release metadata for the tag - release_json=$(curl -sS -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}" || true) - if [ -z "${release_json}" ] || [ "${release_json}" = "null" ]; then - echo "No release metadata found for tag ${TAG}; nothing to download. Skipping publish." 
- mkdir -p release_assets - exit 0 - fi - # If there are no assets, skip downloading to avoid jq iterating over null - assets_len=$(echo "$release_json" | jq '.assets | length // 0') - if [ "$assets_len" -eq 0 ]; then - echo "Release ${TAG} has no assets (assets length = ${assets_len}); skipping download and publish." - mkdir -p release_assets - exit 0 + release_json=$(curl -sSf -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}") + if [ -z "${release_json}" ]; then + echo "No release metadata found for tag ${TAG}" >&2 + exit 1 fi # Iterate assets and download each one using the assets API (requires Accept header) @@ -67,24 +59,12 @@ jobs: run: | echo "Assets in release_assets:" ls -la release_assets || true - - name: Check for wheel assets - id: assets_check - run: | - set -euo pipefail - mkdir -p release_assets - count=0 - # Count wheel files (if any). Use glob safely. - shopt -s nullglob || true - files=(release_assets/*.whl) - count=${#files[@]} - echo "Found $count .whl files in release_assets" - echo "asset_count=$count" >> $GITHUB_OUTPUT + - name: Show package name and version (diagnostic) run: | python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" - name: Publish to PyPI (alp-graphblas) - if: ${{ steps.assets_check.outputs.asset_count != '0' }} uses: pypa/gh-action-pypi-publish@release/v1 with: packages-dir: release_assets/ diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 6c3b5e494..8b6b3ca30 100644 --- 
a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "alp-graphblas" -version = "0.8.30" +version = "0.8.31" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" From 74cfd52edb5b061aa1d7b0b9a4b4443de9215fed Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Thu, 30 Oct 2025 13:45:05 +0100 Subject: [PATCH 31/32] Remove compatibility shim module `pyalp_ref` as it is no longer needed. --- pyalp/DEVELOPER_GUIDE.md | 206 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 206 insertions(+) create mode 100644 pyalp/DEVELOPER_GUIDE.md diff --git a/pyalp/DEVELOPER_GUIDE.md b/pyalp/DEVELOPER_GUIDE.md new file mode 100644 index 000000000..f4fa50098 --- /dev/null +++ b/pyalp/DEVELOPER_GUIDE.md @@ -0,0 +1,206 @@ +Developer guide — pyalp / alp-graphblas +===================================== + +Author: +Denis Jelovina + +Support: +For support or to report issues, please open an issue on the project's GitHub issue tracker. For direct contact, email denis.jelovina@gmail.com + +This document explains how the Python packaging for the pyalp bindings works, how CI builds wheels, and what to change when you add a new compiled backend (pybind11 module) or Python dependency. + +C++ binding logic and Python usage (summary) +------------------------------------------- +The pyalp package exposes native C++ backends built with pybind11. Each backend is compiled as a separate Python extension module (shared object) with a canonical name like `pyalp_ref`, `pyalp_omp`, or `pyalp_nonblocking`. The packaging layout installs those compiled modules into the `pyalp` package so they are importable as `pyalp.pyalp_ref`, `pyalp.pyalp_omp`, etc. 
+ +How Python code uses the compiled backends +- Direct import: after installation you can import a backend module directly, for example: + + import pyalp.pyalp_ref + M = pyalp.pyalp_ref.Matrix(10, 10) + +- Helper API: the package also provides helper APIs that discover and return backends at runtime, e.g. `pyalp.get_backend('pyalp_ref')` which returns the compiled module object. This is useful for selecting backends dynamically. + +How the Python object maps to C++ +- Each compiled extension is a pybind11 module which registers C++ types (Matrix, Vector, operators) and functions. The pybind11 binding code (in the `pyalp` C++ sources) defines the Python-visible class names and methods, so `pyalp.pyalp_ref.Matrix` is a Python wrapper around the C++ Matrix implementation in the native backend. +- At build time, CMake compiles the C++ sources into a platform-specific shared object; the packaging step copies that shared object into the `pyalp` package so the interpreter can import it as a normal module. + +Current functional limitations and caveats +- Cross-backend imports: importing different backend modules in the same Python process can cause pybind11 type-registration collisions (duplicate registrations of the same C++ types across modules). The bindings now use `py::module_local()` for many wrapper types to reduce collisions, but issues can still occur. If you need repeatable cross-backend usage, either run backends in separate processes or design a shared-registration approach (single module that dispatches to backends or explicit shared-type registration across modules). +- Cross-backend bindings: supporting full cross-backend interoperability requires either + - a single compiled extension exporting a stable API and selecting backends internally, or + - explicit cross-registration code that ensures each type is only registered once (or registered with module-local variants and safe conversion functions).
Both approaches require C++ changes and careful testing. +- Wheel portability and optimization trade-offs: + - Wheels are built per-ABI and per-OS (CI uses per-ABI build dirs). The project disables aggressive target-specific flags (no `-march=native`, LTO off) to improve portability, but wheels are still platform/ABI-specific (glibc versus musl, macOS SDK versions). Expect different wheel filenames per ABI/OS and possible limitations on older OS versions. + - CI currently skips `*-musllinux*` and does not publish Windows wheels by default (see CI matrix). If you need musl or Windows support, update the CI configuration and the before-build steps to provide appropriate toolchains and packaging options. +- Size and dependency implications: bundling multiple backends increases wheel size. + +If you plan to change the bindings or support cross-backend imports, read the `pybind11` docs on module-local registrations and consider writing small integration tests that import multiple backends in isolated subprocesses. + + +Local builds (tested with `pyalp-ci.yml`) +----------------------------------------- +If you prefer fast iteration or want to debug native build issues locally, build and test wheels on your machine. The repository provides `pyalp-ci.yml` to exercise the build steps in CI (useful to validate local changes on pull requests), but local builds let you iterate without pushing tags or waiting for remote runners. + +When to build locally +- Fast iteration when changing bindings, packaging logic, or test code. +- Debugging native-build problems where you need immediate access to compiler and linker output. +- Packaging-only checks: point `pyalp/setup.py` at an existing `.so` (via `PREBUILT_PYALP_SO`) to validate wheel contents without rebuilding native code. 
+ +How to build wheels locally (quick recipe) +- Prepare a per-ABI build directory and run CMake (example for Python 3.11): +- Build a wheel from the `pyalp` package and point it at the per-ABI build dir so the generated metadata and prebuilt `.so` get picked up: + +```bash + cmake -DENABLE_PYALP=ON -DCMAKE_BUILD_TYPE=Release $ALP_REPO_PATH + make pyalp_ref + # append the new path to PYTHONPATH, ie. export PYTHONPATH=$PYTHONPATH:$(pwd)/python +``` + +Advantage of local builds +- Performance, active optimisations for the build architecture +- Speed: no remote queue or tag/push cycle. +- Control: change CMake flags and environment variables and rebuild immediately. +- Debuggability: full compiler/linker logs and the ability to attach tools. + + +Full publish pipeline (publish-to-testpypi.yml + promote-to-pypi.yml) +----------------------------------------------------------------- +The full repository publish flow is implemented in two primary workflows: + +- `publish-to-testpypi.yml` — builds wheels for multiple ABIs/OSes using `cibuildwheel`, publishes them to TestPyPI, uploads wheel artifacts to a GitHub Release, and runs verification steps that install the TestPyPI package into a clean virtualenv for smoke tests. This workflow is triggered by pushing a tag matching `pyalp.v*`. + +- `promote-to-pypi.yml` — a gated workflow that downloads wheel assets from a GitHub Release and uploads them to PyPI. This job requires the `production` environment and uses the `PYPI_API_TOKEN` secret; the environment gating ensures human approval before the token is available to the workflow. + +Key differences vs local builds +- Scope: the publish pipelines run multiple ABIs and platforms, produce canonical release artifacts, and publish them to TestPyPI/PyPI. +- Reproducibility: CI uses standard manylinux containers and controlled macOS runners to produce wheels intended for distribution; this reduces host-specific variation. 
+- Approval and secrets: promote-to-pypi requires an environment approval to access the PyPI token, preventing accidental publishes. + +When to use the publish pipeline +- After local validation and CI runs (e.g., `pyalp-ci.yml` for PRs), create an annotated tag `pyalp.vX.Y.Z` and push it to trigger `publish-to-testpypi.yml`. +- Once TestPyPI artifacts are validated, run `promote-to-pypi.yml` (workflow dispatch) to publish to PyPI; this step requires environment approval and the presence of the `PYPI_API_TOKEN` secret. + +Operational note: TestPyPI propagation and verification +- The verification step that installs wheels from TestPyPI can occasionally fail due to propagation delays between upload and index availability. If the TestPyPI install step fails transiently, re-run the workflow or re-trigger the release; the promote job should only be run once test artifacts are available and verified. + + + + +High-level contract +- Inputs: CMake-based native backends built by the top-level CMake tree, a generated Python metadata file produced by CMake, and the Python package source in `pyalp/src`. +- Output: Platform-specific wheels that contain the compiled shared object(s) and a generated `_metadata.py` file. The published PyPI project name is `alp-graphblas`, but the import name inside Python remains `pyalp`. +- Success criteria: pip install alp-graphblas (from TestPyPI or PyPI) yields a package exposing `pyalp.get_build_metadata()` and one or more backend modules accessible via `pyalp.get_backend()`. + +Where things live (important files) +- `pyalp/pyproject.toml` — project metadata used by CI and for the package release (project name, version, runtime dependencies such as numpy). +- `pyalp/setup.py` — custom setuptools glue. It either copies prebuilt shared objects from the CMake build tree into the wheel (preferred for CI-built wheels) or builds from source with pybind11 when no prebuilt artifact is present. 
+- `pyalp/src/pyalp/_metadata.py.in` — CMake template used to generate `pyalp_metadata.py` (copied into wheels as `_metadata.py`). If you change the runtime metadata shape, update this file and the code that reads it. +- Top-level CMake files (`CMakeLists.txt` and `src/…`) — define native targets such as `pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`. CI runs a top-level CMake configure/build per-Python-ABI and produces the native `.so` files and the generated metadata file. +- `.github/workflows/publish-to-testpypi.yml` — builds wheels with cibuildwheel and publishes to TestPyPI (trigger: push tag `pyalp.v*`). This workflow also creates a GitHub Release with wheel assets. +- `.github/workflows/promote-to-pypi.yml` — promotes a GitHub Release's wheel assets to PyPI. The job requires `environment: production` (see repository settings) and uses the secret `PYPI_API_TOKEN`. +- `.github/scripts/` — helper scripts used by CI (e.g., verification and TestPyPI wait scripts). + +How the CI build produces a wheel (brief) +- cibuildwheel is used to produce wheels for multiple Python ABIs and OSes. +- Before building each wheel, CI runs a `CIBW_BEFORE_BUILD` script which: + - Installs CMake + Ninja inside the build container. + - Derives Git and package version metadata and sets environment variables. + - Configures a per-ABI CMake build directory (e.g. `build/cp311`) and runs CMake to produce the compiled backends and a generated `pyalp_metadata.py` file inside that build dir. + - Exports `CMAKE_BUILD_DIR` pointing to the per-ABI build directory so `pyalp/setup.py` can locate the generated outputs. +- The packaging step runs `pyalp/setup.py` (setup.py will copy discovered prebuilt `.so` files and the generated metadata file into the package build directory). The wheel built by cibuildwheel therefore contains the prebuilt, ABI-specific `.so` and `_metadata.py`. 
+ +How `pyalp/setup.py` cooperates with CMake +- By default `setup.py` searches the repo `../build/**` tree for prebuilt shared objects named like the native targets (`pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`). If it finds them it adds Extension entries with empty sources and uses a custom `build_ext` to copy the prebuilt library into the wheel. +- `setup.py` looks for the generated metadata file in the directory pointed to by the `CMAKE_BUILD_DIR` environment variable (set by the CI before_build script). If present it copies `pyalp_metadata.py` -> `_metadata.py` next to the extension in the wheel. +- If no prebuilt modules are detected and `pybind11` is available, `setup.py` will fall back to building from sources with pybind11. +- Environment variables you can use locally: + - `CMAKE_BUILD_DIR` — path to the per-ABI CMake build dir that contains `pyalp_metadata.py` and the built `.so` files. + - `PREBUILT_PYALP_SO` or `PYALP_PREBUILT_SO` — point to a single prebuilt shared object to include in the wheel (helpful for local testing). + +Adding a new compiled backend (step-by-step) +1) Add a CMake target + - Add a target to your CMake configuration (top-level CMake or `pyalp` subdirectory). Name it with the prefix used by `setup.py` (for example `pyalp_mybackend` if you want the backend import name to be `pyalp_mybackend`). + - Ensure the target produces a shared library file named so that it will be discoverable by the existing glob in `pyalp/setup.py` (the packaging code looks for `build/**/*.(so|pyd)`). + - If the backend needs additional compile flags or third-party deps, add those to the CMake target and to the cibuildwheel before-build step where platform-specific dependencies are installed. 
+ +2) Expose the pybind11 module name correctly + - The module name that Python imports must match the filename stem: for a target `pyalp_mybackend` the shared object should become something like `pyalp_mybackend.cpython-311-x86_64-linux-gnu.so` and will be installed into the `pyalp` package as `pyalp/mybackend` importable as `pyalp.pyalp_mybackend` or accessed by the helper APIs. + - `setup.py` maps module names to the extension name `pyalp.<modname>`; if you introduce a module with a different naming scheme, update `pyalp/setup.py`'s discovery or add an explicit mapping. + +3) Update CI build targets + - The cibuildwheel `CIBW_BEFORE_BUILD` script exports a `BUILD_TARGETS` variable used by CMake to restrict which targets to build. Edit `.github/workflows/publish-to-testpypi.yml` under `CIBW_BEFORE_BUILD` to include your new target name in `BUILD_TARGETS`. + - If your backend requires platform-specific dependency installation (e.g., libnuma, libomp) ensure those package installs are available in the before-build block. + +4) Update packaging helpers if needed + - If your module uses a new stem that the setup script won't detect, add the module name to the `supported` list in `pyalp/setup.py` or rely on the glob search. + - If you want to bundle multiple backends under a different naming convention, update `find_all_prebuilt()` discovery logic and the code that constructs `Extension(f"pyalp.{modname}")` entries. + +5) Add/adjust tests + - Add small smoke tests (ideally under `tests/python/` or `tests/smoke/`) that run the new backend. Prefer running each backend in its own process where feasible to avoid pybind11 registration collisions. + +6) Build and test locally (quick recipe) + - Ensure system deps installed: cmake, ninja, a C++ toolchain and any library dependencies. + - Create a per-ABI build dir and configure CMake as CI does. Example (for Python 3.11): + ```bash + mkdir -p build/cp311 + cmake -S .
-B build/cp311 -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DPython3_EXECUTABLE=$(which python3) + cmake --build build/cp311 --target pyalp_ref pyalp_mybackend --parallel + ``` + + - Build a wheel locally from the `pyalp` package. From the repository root: + ```bash + export CMAKE_BUILD_DIR="$(pwd)/build/cp311" + cd pyalp + # Build a wheel using the package directory's setup.py + python -m pip wheel . --no-deps -w ../wheelhouse + + # Install and test the wheel in a fresh venv + python -m venv /tmp/venv_test + source /tmp/venv_test/bin/activate + python -m pip install --upgrade pip + python -m pip install ../wheelhouse/alp-graphblas-*.whl + ``` + + - Note: `--no-deps` is optional when building locally; published wheels should contain runtime dependency metadata so that pip will pull `numpy` automatically. + +Releases and publishing (how CI is wired) +- Creating a TestPyPI release (normal path): + 1. Bump the version in `pyalp/pyproject.toml` (recommended) and commit. + 2. Create a git tag of the form `pyalp.vX.Y.Z` and push the tag. The `publish-to-testpypi.yml` workflow is triggered on push tags matching `pyalp.v*`. + 3. The workflow builds wheels (cibuildwheel), uploads wheel artifacts as GitHub workflow artifacts, publishes to TestPyPI, and creates a GitHub Release with the wheel assets. + + - Promoting to PyPI (two-step gated publish): + - The `publish-to-testpypi.yml` workflow automatically builds and deploys wheels to TestPyPI and then attempts to install and verify those wheels in a fresh virtual environment. Occasionally this verification can fail due to propagation delays between upload and availability; if that happens, re-run the workflow (or re-trigger the release) until the verification completes successfully. + - The `promote-to-pypi.yml` workflow is triggered manually (`workflow_dispatch`) and it is enabled only with the `pyalp.v*` tag.
It downloads the assets attached to the GitHub Release and uploads them to PyPI using the secret `PYPI_API_TOKEN`. + - The promote job is configured to use the repository `production` environment. Access to the `PYPI_API_TOKEN` secret in that environment requires an approval step by repository administrators (see Settings → Environments → production). + +Checklist before releasing +- Bump `pyalp/pyproject.toml` version. +- Ensure `pyalp/pyproject.toml` includes runtime dependencies (e.g., `numpy>=1.22`) so pip installs them automatically. +- Ensure `CIBW_BEFORE_BUILD` in `.github/workflows/publish-to-testpypi.yml` builds your new backend (`BUILD_TARGETS` updated). +- If your backend needs extra system packages (libnuma, libomp, etc.), add those install steps to the before-build script or document the manual requirements. +- Add smoke tests that import and exercise the backend. Run them against installed wheels (CI verifies installed wheels in a separate job). +- Create the tag `pyalp.vX.Y.Z` and push it; observe the `alp-graphblas wheels (cibuildwheel)` workflow. + +Troubleshooting / common pitfalls +- Missing metadata in wheels: Make sure CMake writes the generated `pyalp_metadata.py` into the per-ABI build dir (CI sets `CMAKE_BUILD_DIR` and `setup.py` copies `pyalp_metadata.py` -> `_metadata.py`). If your metadata template changed, update `pyalp/src/pyalp/_metadata.py.in`. +- Prebuilt `.so` not found: `pyalp/setup.py` discovers prebuilt shared objects under `build/**`. Ensure you used the same target name and that the produced filename contains the Python ABI tag (or set `PREBUILT_PYALP_SO` to the path). +- ABI contamination across wheels: CI uses per-ABI build directories (e.g. `build/cp311`) to avoid cross-ABI contamination. When testing locally, clean build dirs between ABI runs. 
+- pybind11 registration collisions: If you see type-registration errors when importing multiple different backends in the same process, prefer running backends in separate processes or ensure pybind11 wrappers use `py::module_local()` for types that may be defined in multiple modules. + +Security notes +- The promotion workflow uses a `PYPI_API_TOKEN` stored as a secret (likely in the repository environment `production`). If you did not create this token yourself, check: + - Repository Settings → Secrets and variables → Actions + - Environments → production → Secrets + - Organization-level secrets (if applicable) +- Rotate/revoke tokens if you discover an unexpected token. + +Appendix — quick pointers to edit points +- Add CMake target: top-level CMake / `pyalp/src` CMakeLists. +- Ensure discovery in `pyalp/setup.py`: supported names in `find_all_prebuilt()` and the glob-based discovery. +- Include generated metadata: `pyalp/src/pyalp/_metadata.py.in` (CMake variables are substituted into this template). +- CI build targets: `.github/workflows/publish-to-testpypi.yml` (search for `BUILD_TARGETS` and `BACKEND_FLAGS` in `CIBW_BEFORE_BUILD`). +- Promote workflow: `.github/workflows/promote-to-pypi.yml` (uses `PYPI_API_TOKEN` and `environment: production`). + + From 518b44096b7655f0357dc55f98381cfe0c4e1885 Mon Sep 17 00:00:00 2001 From: Denis Jelovina Date: Tue, 4 Nov 2025 10:39:12 +0100 Subject: [PATCH 32/32] pyalp: squash all local changes since github/358-python-api into single commit Consolidated pybind11 bindings and removed legacy duplicate source files; added a single shared binding implementation under pyalp. Packaging moved to top-level CMake with deterministic prebuilt discovery in setup.py and support for CMAKE_BUILD_DIR so wheels are built from the CMake artifacts (avoids needing pybind11 in isolated PEP517 build environment). 
Enabled module-local pybind11 registrations by default to allow importing multiple backend extension modules into the same interpreter safely; made it configurable via CMake option for cases that need cross-module sharing. Fixed an iterator instantiation issue in matrix_wrappers.hpp by iterating directly over M.cbegin()/M.cend(). Added an in-process smoke test test_bckds_inprocess.py to validate multiple-backend imports and object creation, and updated CI to install test wheels from TestPyPI with robust retry-and-verify logic. Updated developer docs and CI workflows to reflect the new packaging, testing, and build flows. --- .github/workflows/publish-to-testpypi.yml | 62 +++++++++ .github/workflows/pyalp-ci.yml | 159 ++++----------------- CMakeLists.txt | 87 ++++++++++++ cmake/CompileFlags.cmake | 19 +++ pyalp/DEVELOPER_GUIDE.md | 50 ++++++- pyalp/pyproject.toml | 2 +- pyalp/setup.py | 162 +++++++++++++++++----- pyalp/src/CMakeLists.txt | 94 ++++++++++++- pyalp/src/conjugate_gradient.hpp | 4 - pyalp/src/matrix_wrappers.hpp | 79 ++++++----- pyalp/src/numpy2alp.cpp | 57 -------- pyalp/src/pyalp.cpp | 84 ----------- pyalp/src/pyalp/CMakeLists.txt | 3 +- pyalp/src/pyalp/bindings.cpp | 56 -------- pyalp/src/pyalp/common_bindings.hpp | 94 +++++++++++++ pyalp/src/pyalp/module_entry.cpp | 14 ++ pyalp/src/pyalp/your_module.py | 4 - pyalp/src/python2alp.cpp | 9 -- pyalp/tests/test_bckds_inprocess.py | 105 ++++++++++++++ tests/python/test.py | 2 +- tests/python/test_backends.py | 111 ++++++++------- 21 files changed, 780 insertions(+), 477 deletions(-) delete mode 100644 pyalp/src/numpy2alp.cpp delete mode 100644 pyalp/src/pyalp.cpp delete mode 100644 pyalp/src/pyalp/bindings.cpp create mode 100644 pyalp/src/pyalp/common_bindings.hpp create mode 100644 pyalp/src/pyalp/module_entry.cpp delete mode 100644 pyalp/src/pyalp/your_module.py delete mode 100644 pyalp/src/python2alp.cpp create mode 100644 pyalp/tests/test_bckds_inprocess.py diff --git 
a/.github/workflows/publish-to-testpypi.yml b/.github/workflows/publish-to-testpypi.yml index 2399ef3d8..4f17061c0 100644 --- a/.github/workflows/publish-to-testpypi.yml +++ b/.github/workflows/publish-to-testpypi.yml @@ -323,3 +323,65 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + + verify-installed-inprocess: + needs: publish + runs-on: ubuntu-latest + name: Verify installed wheel (in-process smoke) + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Create venv and install prerequisites + shell: bash + run: | + set -euo pipefail + PY=$(which python3 || which python) + VENV_DIR="./.venv_test_inprocess" + rm -rf "${VENV_DIR}" + ${PY} -m venv "${VENV_DIR}" + source "${VENV_DIR}/bin/activate" + python -m pip install --upgrade pip setuptools wheel numpy + + # Retry pip install from TestPyPI with exponential backoff (bounded attempts) + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') + echo "Installing alp-graphblas==${PYALP_VERSION} from TestPyPI (with retries)" + + MAX_ATTEMPTS=6 + SLEEP_BASE=10 + SUCCESS=0 + + for attempt in $(seq 1 ${MAX_ATTEMPTS}); do + echo "--- attempt ${attempt} of ${MAX_ATTEMPTS} ---" + # verbose pip output helps debugging in CI logs + python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas==${PYALP_VERSION} -v && SUCCESS=1 && break + echo "pip install failed on attempt ${attempt}" + if [ "${attempt}" -lt "${MAX_ATTEMPTS}" ]; then + SLEEP_SECONDS=$((SLEEP_BASE * attempt)) + echo "Sleeping ${SLEEP_SECONDS}s before retry..." 
+ sleep "${SLEEP_SECONDS}" + fi + done + + if [ "${SUCCESS}" -ne 1 ]; then + echo "ERROR: failed to install alp-graphblas from TestPyPI after ${MAX_ATTEMPTS} attempts" >&2 + exit 1 + fi + + # Print a compact JSON summary of installed backends for easy scanning in CI logs + python -c "import json,importlib,sys; print(json.dumps({'backends': importlib.import_module('pyalp').list_backends()}))" + + - name: Run in-process backend import smoke test + shell: bash + run: | + set -euo pipefail + source ./.venv_test_inprocess/bin/activate + echo "Running pyalp/tests/test_bckds_inprocess.py" + python pyalp/tests/test_bckds_inprocess.py diff --git a/.github/workflows/pyalp-ci.yml b/.github/workflows/pyalp-ci.yml index 6c2407833..829275cfb 100644 --- a/.github/workflows/pyalp-ci.yml +++ b/.github/workflows/pyalp-ci.yml @@ -1,169 +1,60 @@ -name: pyalp CI +name: pyalp CI (local-build smoke test) -# Run only on pushes that create tags starting with 'pyalp' on: push: tags: [ 'pyalp*' ] + workflow_dispatch: {} jobs: - build-bindings: - name: Build C++ bindings + build-and-test-local: + name: Build pyalp with LOCAL profile and run smoke tests runs-on: ubuntu-latest steps: - name: Checkout (with submodules) uses: actions/checkout@v4 with: - submodules: 'recursive' + submodules: recursive fetch-depth: 0 - - name: Verify pinned pybind11 submodule commit - # Fail early if the checked-out pybind11 is not the pinned commit - run: | - set -euo pipefail - # Prefer top-level pinned file so it survives moves; fallback to submodule path - if [ -f pyalp/PINNED_PYBIND11 ]; - then - PINNED_SHA=$(cat pyalp/PINNED_PYBIND11 | tr -d '\n') - elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; - then - PINNED_SHA=$(cat pyalp/extern/pybind11/PINNED_COMMIT | tr -d '\n') - else - echo "No pinned commit file found (tried pyalp/PINNED_PYBIND11 and pyalp/extern/pybind11/PINNED_COMMIT)" >&2 - exit 2 - fi - echo "Expected pybind11 commit: $PINNED_SHA" - ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || 
true) - echo "Found pybind11 commit: $ACTUAL" - if [ "$ACTUAL" != "$PINNED_SHA" ]; - then - echo "ERROR: pybind11 submodule commit does not match pinned commit" >&2 - exit 2 - fi + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' - - name: Install build dependencies + - name: Install system build deps run: | - set -euo pipefail sudo apt-get update - # libnuma-dev provides NUMA headers/libraries needed by FindNuma.cmake sudo apt-get install -y build-essential cmake ninja-build pkg-config python3-venv python3-dev python3-pip libnuma-dev - - name: Configure and build pyalp bindings - run: | - set -euo pipefail - mkdir -p build_alp - cmake -S . -B build_alp -DENABLE_PYALP=ON - # Only attempt to build pyalp targets if the pyalp CMake directory exists - if [ -f pyalp/CMakeLists.txt ]; - then - echo "pyalp CMakeLists found — building pyalp targets" - cmake --build build_alp --target pyalp_ref -- -j || true - cmake --build build_alp --target pyalp_omp -- -j || true - else - echo "pyalp directory or CMakeLists not present — skipping pyalp targets" - fi - - - name: Find and list built shared objects - run: | - set -euo pipefail - echo "Searching for shared objects under build_alp and pyalp" - find build_alp -name "*.so" -maxdepth 8 -print || true - find pyalp -name "*.so" -maxdepth 8 -print || true - - - name: Collect built shared objects into artifacts/ + - name: Configure top-level CMake with LOCAL profile run: | set -euo pipefail - mkdir -p artifacts - # copy any discovered .so files into a flat artifacts directory so upload-artifact can find them - find build_alp -name "*.so" -print0 | xargs -0 -I{} bash -lc 'cp -v "{}" artifacts/ || true' || true - find pyalp -name "*.so" -print0 | xargs -0 -I{} bash -lc 'cp -v "{}" artifacts/ || true' || true - echo "Artifacts now contains:" && ls -la artifacts || true - - - name: Upload built bindings - uses: actions/upload-artifact@v4 - with: - name: pyalp-so - path: | - build_alp/**/*.so - 
artifacts/**/*.so - pyalp/**/pyalp*.so - pyalp/**/_pyalp*.so - pyalp/**/libpyalp*.so - pyalp/**/*.so - - build-wheel-and-test: - name: Build wheel from prebuilt .so and smoke-test - runs-on: ubuntu-latest - needs: build-bindings - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Download built bindings - uses: actions/download-artifact@v4 - with: - name: pyalp-so - path: artifacts - - - name: Show downloaded artifacts - run: ls -la artifacts || true + # Configure from repository root using the LOCAL profile to enable native optimizations + cmake -S . -B build/ci_local -G Ninja \ + -DALP_BUILD_PROFILE=LOCAL \ + -DENABLE_PYALP=ON \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DPython3_EXECUTABLE=$(which python3) - - name: Prepare wheel inputs - id: prep + - name: Build pyalp backends run: | set -euo pipefail - # List candidate shared-object files for debugging - echo "Candidate .so files in artifacts:" && find artifacts -type f -name "*.so" -print || true - # Find likely candidates (prefer _pyalp, pyalp, libpyalp) - SO_PATH=$(find artifacts \( -name "_pyalp*.so" -o -name "pyalp*.so" -o -name "libpyalp*.so" -o -name "*.so" \) | head -n1) - if [ -z "$SO_PATH" ]; - then - echo "ERROR: no built .so artifact found to package" >&2 - echo "Artifacts listing:" && ls -la artifacts || true - exit 2 - fi - echo "so_path=$SO_PATH" >> "$GITHUB_OUTPUT" - # Prefer helper located inside pyalp/ but fall back to top-level tools/ - if [ -f pyalp/tools/make_wheel_from_so.py ]; then - echo "builder=pyalp/tools/make_wheel_from_so.py" >> "$GITHUB_OUTPUT" - else - echo "builder=tools/make_wheel_from_so.py" >> "$GITHUB_OUTPUT" - fi - # Derive Python version from the .so filename (e.g., cpython-311 -> 3.11, cp312 -> 3.12) - PY_VER="" - if [[ "$SO_PATH" =~ cpython-([0-9]{3}) ]]; - then - n=${BASH_REMATCH[1]} - PY_VER="${n:0:1}.${n:1}" - elif [[ "$SO_PATH" =~ cp([0-9]{2,3}) ]]; - then - n=${BASH_REMATCH[1]} - PY_VER="${n:0:1}.${n:1}" - fi - echo "python_version=$PY_VER" >> 
"$GITHUB_OUTPUT" + cmake --build build/ci_local --target pyalp_ref pyalp_omp pyalp_nonblocking --parallel - - name: Run wheel builder + - name: Package pyalp wheel from CMake build run: | set -euo pipefail - echo "builder=${{ steps.prep.outputs.builder }}" - echo "so=${{ steps.prep.outputs.so_path }}" - python3 "${{ steps.prep.outputs.builder }}" "${{ steps.prep.outputs.so_path }}" --out-dir dist_wheel - - - name: Show wheel - run: ls -la dist_wheel || true - - - name: Set up Python matching built extension - if: ${{ steps.prep.outputs.python_version != '' }} - uses: actions/setup-python@v5 - with: - python-version: ${{ steps.prep.outputs.python_version }} + mkdir -p dist_wheel + export CMAKE_BUILD_DIR=$(pwd)/build/ci_local + ( cd pyalp && python -m pip wheel . -w ../dist_wheel ) - name: Smoke test wheel in venv run: | set -euo pipefail python3 -V - which python3 python3 -m venv venv . venv/bin/activate pip install --upgrade pip wheel pip install dist_wheel/*.whl - tools/smoke_test_pyalp.py + # run the smoke test script which should import pyalp and backends e.g. pyalp.pyalp_ref + python tools/smoke_test_pyalp.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 33235939d..c388e8956 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -382,6 +382,93 @@ if( ENABLE_PYALP ) message(STATUS "pyalp subdirectory not present in source tree; skipping add_subdirectory(pyalp)") endif() endif() + +# Provide a top-level convenience packaging target for pyalp so callers can run +# cmake --build --target pyalp --parallel +# even if the pyalp CMakeLists placed a packaging target in a subdirectory or +# the generator didn't expose that target at the top-level. This mirrors the +# packaging flow implemented under pyalp/src/CMakeLists.txt and is only added +# when pyalp is enabled and present in source. 
+if( ENABLE_PYALP AND EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt" ) + # Attempt to find a Python interpreter (non-fatal if already found elsewhere) + find_package(PythonInterp QUIET) + + # Build the list of backend targets that should be packaged. Keep this in + # sync with pyalp/src/CMakeLists.txt. + set(pyalp_package_targets "") + if(WITH_REFERENCE_BACKEND) + list(APPEND pyalp_package_targets pyalp_ref) + endif() + if(WITH_OMP_BACKEND) + list(APPEND pyalp_package_targets pyalp_omp) + endif() + if(WITH_NONBLOCKING_BACKEND) + list(APPEND pyalp_package_targets pyalp_nonblocking) + endif() + string(JOIN " " pyalp_package_targets_str ${pyalp_package_targets}) + + # Only add the top-level pyalp target if one is not already defined. + if(NOT TARGET pyalp) + add_custom_target(pyalp + COMMENT "Build enabled pyalp backends and package wheel(s) into ${CMAKE_BINARY_DIR}/dist" + ) + + add_custom_command(TARGET pyalp + # Build each backend target individually (cmake --build --target accepts one target at a time) + VERBATIM + ) + # Add per-backend build commands so each target is invoked separately. 
+ foreach(_pyalp_backend IN LISTS pyalp_package_targets) + add_custom_command(TARGET pyalp + COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target ${_pyalp_backend} --parallel + VERBATIM + ) + endforeach() + add_custom_command(TARGET pyalp + COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/dist + COMMAND ${CMAKE_COMMAND} -E env CMAKE_BUILD_DIR=${CMAKE_BINARY_DIR} ${PYTHON_EXECUTABLE} -m pip wheel ${CMAKE_SOURCE_DIR}/pyalp -w ${CMAKE_BINARY_DIR}/dist + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + COMMAND ${CMAKE_COMMAND} -E echo "Packaged wheel(s) into: ${CMAKE_BINARY_DIR}/dist" + COMMAND ${CMAKE_COMMAND} -E echo "To install the wheel(s):" + COMMAND ${CMAKE_COMMAND} -E echo " python -m pip install ${CMAKE_BINARY_DIR}/dist/.whl" + COMMAND ${CMAKE_COMMAND} -E echo "or install all wheels in dist:" + COMMAND ${CMAKE_COMMAND} -E echo " python -m pip install ${CMAKE_BINARY_DIR}/dist/*.whl" + COMMAND ${CMAKE_COMMAND} -E echo "After installation, import the package in Python, e.g.:" + COMMAND ${CMAKE_COMMAND} -E echo " python -c \"import alp_graphblas; print(alp_graphblas.__version__)\"" + VERBATIM + ) + endif() +endif() +## Also expose a clearly-named packaging target that avoids name collisions +if( ENABLE_PYALP AND EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt" ) + if(NOT TARGET pyalp-package) + add_custom_target(pyalp-package + COMMENT "(convenience) Build enabled pyalp backends and package wheel(s) into ${CMAKE_BINARY_DIR}/dist" + ) + + foreach(_pyalp_backend IN LISTS pyalp_package_targets) + add_custom_command(TARGET pyalp-package + COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target ${_pyalp_backend} --parallel + VERBATIM + ) + endforeach() + add_custom_command(TARGET pyalp-package + COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/dist + COMMAND ${CMAKE_COMMAND} -E env CMAKE_BUILD_DIR=${CMAKE_BINARY_DIR} ${PYTHON_EXECUTABLE} 
-m pip wheel ${CMAKE_SOURCE_DIR}/pyalp -w ${CMAKE_BINARY_DIR}/dist + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + COMMAND ${CMAKE_COMMAND} -E echo "Packaged wheel(s) into: ${CMAKE_BINARY_DIR}/dist" + COMMAND ${CMAKE_COMMAND} -E echo "To install the wheel(s):" + COMMAND ${CMAKE_COMMAND} -E echo " python -m pip install ${CMAKE_BINARY_DIR}/dist/.whl" + COMMAND ${CMAKE_COMMAND} -E echo "or install all wheels in dist:" + COMMAND ${CMAKE_COMMAND} -E echo " python -m pip install ${CMAKE_BINARY_DIR}/dist/*.whl" + COMMAND ${CMAKE_COMMAND} -E echo "After installation, import the package in Python, e.g.:" + COMMAND ${CMAKE_COMMAND} -E echo " python -c \"import alp_graphblas; print(alp_graphblas.__version__)\"" + VERBATIM + ) + endif() +endif() add_subdirectory( examples ) diff --git a/cmake/CompileFlags.cmake b/cmake/CompileFlags.cmake index 41cf90646..3eb34adc0 100644 --- a/cmake/CompileFlags.cmake +++ b/cmake/CompileFlags.cmake @@ -98,6 +98,25 @@ set( COMMON_PERF_DEFS_Release "NDEBUG" ) # building wheels in CI set -DALP_PORTABLE_BUILD=ON to get portable artifacts. option( ALP_PORTABLE_BUILD "Build portable binaries (disable host-specific optimizations)" OFF ) +# Build profile: controls portability and default LTO/optimization choices. +# Use -DALP_BUILD_PROFILE=LOCAL for developer/local builds (enables native +# host optimizations, enables LTO by default). Use -DALP_BUILD_PROFILE=DEPLOYMENT +# for wheel/deployment builds (portable by default). +set(ALP_BUILD_PROFILE "DEPLOYMENT" CACHE STRING "Build profile: LOCAL or DEPLOYMENT. 
LOCAL enables native optimizations; DEPLOYMENT favors portability for wheels.") +string(TOUPPER "${ALP_BUILD_PROFILE}" ALP_BUILD_PROFILE_UP) + +if(ALP_BUILD_PROFILE_UP STREQUAL "LOCAL") + # Local builds should prefer host-specific optimizations + set(ALP_PORTABLE_BUILD OFF CACHE BOOL "Build portable binaries (disable host-specific optimizations)" FORCE) + # Enable LTO by default for local performance builds; user may override. + set(CMAKE_INTERPROCEDURAL_OPTIMIZATION ON CACHE BOOL "Enable LTO (interprocedural optimization)" FORCE) +else() + # Deployment builds default to portable flags for maximum wheel compatibility + set(ALP_PORTABLE_BUILD ON CACHE BOOL "Build portable binaries (disable host-specific optimizations)" FORCE) + # Disable LTO for portable deployment builds; user may override explicitly + set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF CACHE BOOL "Enable LTO (interprocedural optimization)" FORCE) +endif() + # Avoid GCC/GNU-specific microarchitecture flags on Apple/Clang toolchains if(APPLE) # On macOS with AppleClang, -march/-mtune and aggressive unrolling can diff --git a/pyalp/DEVELOPER_GUIDE.md b/pyalp/DEVELOPER_GUIDE.md index f4fa50098..ef0ba4019 100644 --- a/pyalp/DEVELOPER_GUIDE.md +++ b/pyalp/DEVELOPER_GUIDE.md @@ -179,9 +179,53 @@ Checklist before releasing - Bump `pyalp/pyproject.toml` version. - Ensure `pyalp/pyproject.toml` includes runtime dependencies (e.g., `numpy>=1.22`) so pip installs them automatically. - Ensure `CIBW_BEFORE_BUILD` in `.github/workflows/publish-to-testpypi.yml` builds your new backend (`BUILD_TARGETS` updated). -- If your backend needs extra system packages (libnuma, libomp, etc.), add those install steps to the before-build script or document the manual requirements. -- Add smoke tests that import and exercise the backend. Run them against installed wheels (CI verifies installed wheels in a separate job). -- Create the tag `pyalp.vX.Y.Z` and push it; observe the `alp-graphblas wheels (cibuildwheel)` workflow. 
+ +---------------------- +Local developer workflow (CMake-generated target) +------------------------------------------------ + +The project now exposes a CMake-generated `pyalp` target that builds all +enabled pyalp backends and packages wheel(s) using the same packaging logic +that CI uses. This is the recommended local path and replaces the previous +helper script. + +Usage: + +```bash +# Configure from repo root (LOCAL profile enables host-optimizations) +cmake -S . -B build/host -DALP_BUILD_PROFILE=LOCAL -DENABLE_PYALP=ON -G Ninja + +# Build and package via the CMake target (this will place wheels in build/host/dist) +cmake --build build/host --target pyalp --parallel +``` + +After the target completes you will see a message pointing to the wheel(s). +You can either add the generated python directory to `PYTHONPATH` for quick +iteration: + +```bash +export PYTHONPATH="$PYTHONPATH:$(pwd)/build/host/python" +``` + +Or install the wheel into a venv: + +```bash +python -m venv /tmp/pyalp-venv +source /tmp/pyalp-venv/bin/activate +pip install build/host/dist/*.whl +``` + +If you need to reproduce CI-style portable wheels, configure with the +`DEPLOYMENT` profile instead: + +```bash +cmake -S . -B build/cp311 -DALP_BUILD_PROFILE=DEPLOYMENT -DENABLE_PYALP=ON -G Ninja +cmake --build build/cp311 --target pyalp --parallel +``` + +Notes: +- Ensure system dependencies like `libnuma-dev` and `libomp` are installed when building backends that require them. +- The packaging step relies on `CMAKE_BUILD_DIR` to locate generated metadata and prebuilt `.so` files; the CMake target sets this environment appropriately when invoking `pip wheel`. Troubleshooting / common pitfalls - Missing metadata in wheels: Make sure CMake writes the generated `pyalp_metadata.py` into the per-ABI build dir (CI sets `CMAKE_BUILD_DIR` and `setup.py` copies `pyalp_metadata.py` -> `_metadata.py`). If your metadata template changed, update `pyalp/src/pyalp/_metadata.py.in`. 
diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml index 0ca243737..8b71b194c 100644 --- a/pyalp/pyproject.toml +++ b/pyalp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "alp-graphblas" -version = "0.8.33" +version = "0.8.41" description = "Python bindings for ALP GraphBLAS (minimal package layout)" authors = [ { name = "ALP" } ] readme = "README.md" diff --git a/pyalp/setup.py b/pyalp/setup.py index 4856c5427..4ff84196d 100644 --- a/pyalp/setup.py +++ b/pyalp/setup.py @@ -36,23 +36,47 @@ def finalize_options(self): # Discover prebuilt backend shared objects in the CMake build tree. def find_all_prebuilt(): + """Discover prebuilt shared objects only inside the directory explicitly + provided by the caller via the CMAKE_BUILD_DIR (or PYALP_BUILD_DIR) + environment variable. + + Per the packaging policy, this function will not probe arbitrary + sibling directories or search the source tree; callers must provide a + well-defined build directory. If no build directory is set, an empty + mapping is returned (so callers can fall back to building from + sources when pybind11 is available). + """ supported = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" mapping = {} + + cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") or os.environ.get("PYALP_BUILD_DIR") + # If no explicit build dir is provided, fall back to the conventional + # out-of-source `../build` directory. This keeps discovery inside a + # single well-defined location and preserves prior CI behavior. 
+ if not cmake_build_dir: + cmake_build_dir = os.path.abspath(os.path.join(here, '..', 'build')) + else: + cmake_build_dir = os.path.abspath(cmake_build_dir) + for mod in supported: + found = [] patterns = [ - os.path.join(here, '..', 'build', '**', f'{mod}*.so'), - os.path.join(here, '..', 'build', '**', f'{mod}*.pyd'), + os.path.join(cmake_build_dir, '**', f'{mod}*.so'), + os.path.join(cmake_build_dir, '**', f'{mod}*.pyd'), ] - found = [] for pat in patterns: - found.extend(glob.glob(pat, recursive=True)) + try: + found.extend(glob.glob(pat, recursive=True)) + except Exception: + pass if not found: continue # Prefer candidate matching current ABI tag in filename or parent dir matching = [c for c in found if py_tag in os.path.basename(c) or py_tag in os.path.basename(os.path.dirname(c))] chosen = (matching or found)[0] mapping[mod] = os.path.abspath(chosen) + return mapping # Determine prebuilt modules mapping. If user specified a single PREBUILT env var, @@ -90,13 +114,9 @@ def build_extension(self, ext): if not src: src = prebuilt_modules.get(modname) if not src: - # Try a targeted glob for this module as a last resort - candidates = glob.glob(os.path.join(here, '..', 'build', '**', f'{modname}*.so'), recursive=True) - candidates += glob.glob(os.path.join(here, '..', 'build', '**', f'{modname}*.pyd'), recursive=True) - py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" - matching = [c for c in candidates if py_tag in os.path.basename(c) or py_tag in os.path.basename(os.path.dirname(c))] - src = (matching or candidates)[:1] - src = os.path.abspath(src[0]) if src else None + # No explicit PREBUILT path or discovered prebuilt module in the + # provided build directory. Do not search arbitrary locations. 
+ src = None if not src or not os.path.exists(src): raise RuntimeError(f"Prebuilt pyalp shared object not found for module '{modname}' during build_ext") @@ -105,10 +125,12 @@ def build_extension(self, ext): # The _metadata.py file is generated by CMake in the build directory. # We need to find it and copy it to the same directory as the extension. ext_build_dir = os.path.dirname(target_path) - # CMAKE_BUILD_DIR is set by the cibuildwheel before_build script to the per-ABI build directory - cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") + # Only copy generated metadata when an explicit build directory is + # provided via CMAKE_BUILD_DIR or PYALP_BUILD_DIR. We do not search the + # source tree or other locations for generated metadata. + cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") or os.environ.get("PYALP_BUILD_DIR") if cmake_build_dir: - metadata_src_path = os.path.join(cmake_build_dir, "pyalp_metadata.py") + metadata_src_path = os.path.join(os.path.abspath(cmake_build_dir), "pyalp_metadata.py") metadata_dest_path = os.path.join(ext_build_dir, "_metadata.py") if os.path.exists(metadata_src_path): print(f"Copying generated metadata from {metadata_src_path} to {metadata_dest_path}") @@ -116,23 +138,7 @@ def build_extension(self, ext): else: print(f"Warning: Generated metadata file not found at {metadata_src_path}. Skipping copy.") else: - # Fall back: try to locate the generated metadata under any per-ABI - # build directory (e.g. ../build/cp310, ../build/cp39, ...). - # This avoids relying strictly on the CMAKE_BUILD_DIR env var which - # may not always be propagated into the isolated build environment. 
- search_pattern = os.path.join(here, '..', 'build', '**', 'pyalp_metadata.py') - candidates = glob.glob(search_pattern, recursive=True) - # Prefer candidate matching the current Python ABI tag if present - py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" - matching = [c for c in candidates if py_tag in os.path.basename(os.path.dirname(c)) or py_tag in os.path.basename(c)] - chosen = (matching or candidates)[:1] - if chosen: - metadata_src_path = os.path.abspath(chosen[0]) - metadata_dest_path = os.path.join(ext_build_dir, "_metadata.py") - print(f"Copying generated metadata from {metadata_src_path} to {metadata_dest_path} (discovered by glob search)") - shutil.copyfile(metadata_src_path, metadata_dest_path) - else: - print("Warning: CMAKE_BUILD_DIR not set and no generated metadata found under ../build. Skipping metadata file copy.") + print("CMAKE_BUILD_DIR / PYALP_BUILD_DIR not set; skipping metadata file copy.") if prebuilt_modules: # Create an Extension for each discovered prebuilt module so setuptools will @@ -146,22 +152,64 @@ def build_extension(self, ext): ext_modules = [ Pybind11Extension( "pyalp._pyalp", - ["src/pyalp/bindings.cpp"], + ["src/pyalp/module_entry.cpp"], include_dirs=[ os.path.join(here, "src"), os.path.join(here, "src", "pyalp"), os.path.join(here, "extern", "pybind11", "include"), os.path.normpath(os.path.join(here, "..", "include")), ], - define_macros=[("PYALP_MODULE_NAME", "_pyalp")], + define_macros=[("PYALP_MODULE_NAME", "_pyalp"), ("PYALP_MODULE_LOCAL", "1")], cxx_std=14, ) ] +# Read metadata from pyproject.toml when available to avoid mismatched values +def _read_pyproject_toml(path): + if not os.path.exists(path): + return {} + # Prefer the stdlib tomllib on Python 3.11+, otherwise fall back to + # the third-party `toml` package if available. Avoid importing + # tomllib at module import time on older Pythons to prevent SyntaxError + # when cibuildwheel invokes builds using older interpreters. 
+ try: + if sys.version_info >= (3, 11): + import tomllib + with open(path, "rb") as f: + return tomllib.load(f) or {} + except Exception: + pass + try: + import toml + with open(path, "r", encoding="utf-8") as f: + return toml.load(f) or {} + except Exception: + return {} + +pyproject_path = os.path.abspath(os.path.join(here, "..", "pyproject.toml")) +_pyproject = _read_pyproject_toml(pyproject_path) + +_name = None +_version = None +_description = None + +# PEP 621 [project] table +if isinstance(_pyproject, dict) and "project" in _pyproject: + proj = _pyproject.get("project", {}) + _name = proj.get("name") or _name + _version = proj.get("version") or _version + _description = proj.get("description") or _description +# poetry configuration [tool.poetry] +elif isinstance(_pyproject, dict) and _pyproject.get("tool", {}).get("poetry"): + poetry = _pyproject["tool"]["poetry"] + _name = poetry.get("name") or _name + _version = poetry.get("version") or _version + _description = poetry.get("description") or _description + setup_kwargs = { - "name": "pyalp", - "version": "0.8.1", - "description": "pyalp package (C++ bindings)", + "name": _name or "pyalp", + "version": _version or "0.8.1", + "description": _description or "pyalp package (C++ bindings)", "packages": find_packages(where="src"), "package_dir": {"": "src"}, # Ensure generated metadata is included in the wheel. The build process @@ -171,6 +219,48 @@ def build_extension(self, ext): "include_package_data": True, } +# Prefer generating egg-info in the out-of-source build directory so the +# source tree is not polluted during wheel builds. If the CI or caller set +# CMAKE_BUILD_DIR we use that; otherwise default to ../build relative to the +# package directory. +egg_base = os.environ.get("CMAKE_BUILD_DIR") +if not egg_base: + # Try to auto-detect an out-of-source CMake build directory that is a + # sibling of the repository root. We consider a directory to be a CMake + # build if it contains a CMakeCache.txt file. 
This supports build trees + # named arbitrarily (for example `test_build_dirname`) instead of assuming + # a literal `build` directory. + repo_parent = os.path.abspath(os.path.join(here, '..')) + candidates = [] + try: + for entry in os.listdir(repo_parent): + p = os.path.join(repo_parent, entry) + if os.path.isdir(p) and os.path.exists(os.path.join(p, 'CMakeCache.txt')): + candidates.append(p) + except Exception: + candidates = [] + if candidates: + # Prefer a directory literally named 'build' if present, else pick the + # first candidate found. + build_dir = None + for c in candidates: + if os.path.basename(c) == 'build': + build_dir = c + break + if not build_dir: + build_dir = candidates[0] + egg_base = os.path.abspath(build_dir) + else: + egg_base = os.path.abspath(os.path.join(here, '..', 'build')) + +# Supply setuptools options to place egg-info under the build directory +# Only set egg_info when an explicit build directory environment variable is +# provided. Do not attempt to auto-detect or write egg-info into the source +# tree when no build dir is specified. +if egg_base: + setup_kwargs.setdefault("options", {}) + setup_kwargs["options"]["egg_info"] = {"egg_base": egg_base} + # Supply cmdclass entries for build_ext (copy-prebuilt or pybind11) and bdist_wheel cmdclass = {} # If we detected prebuilt modules, use the copy-prebuilt build_ext which copies diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt index 65d5d77f4..56759a637 100644 --- a/pyalp/src/CMakeLists.txt +++ b/pyalp/src/CMakeLists.txt @@ -22,6 +22,23 @@ if(NOT DEFINED WITH_OMP_BACKEND) set(WITH_OMP_BACKEND ON CACHE BOOL "Build OMP backend (default for pyalp package)") endif() +# Allow callers to choose whether backend modules use py::module_local() +# for type registration. Enabling module-local registrations avoids +# duplicate-type registration errors when importing multiple backend +# extension modules into the same interpreter. 
However, module-local +# registrations isolate types per-module and will prevent passing +# pybind11-wrapped C++ objects between modules unless extra conversion +# glue is implemented. Default: ON (safe for multi-backend imports). +if(NOT DEFINED PYALP_MODULE_LOCAL_DEFAULT) + option(PYALP_MODULE_LOCAL_DEFAULT "Use py::module_local for backend modules" ON) +endif() + +if(PYALP_MODULE_LOCAL_DEFAULT) + set(PYALP_MODULE_LOCAL_VAL 1) +else() + set(PYALP_MODULE_LOCAL_VAL 0) +endif() + assert_defined_variables( WITH_REFERENCE_BACKEND WITH_OMP_BACKEND ) # target listing all examples, to build them at once with 'make examples' @@ -30,8 +47,15 @@ assert_defined_variables( WITH_REFERENCE_BACKEND WITH_OMP_BACKEND ) if( WITH_REFERENCE_BACKEND ) set(PYALP_MODULE_NAME pyalp_ref) - pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # By default enable module-local pybind11 registrations so multiple backend + # extension modules can be imported into the same interpreter without + # colliding over identical C++ type registrations. Set to 0 only when + # explicit cross-module type sharing is required. 
+ target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL}) + # Ensure compiler can find headers placed under pyalp/src and pyalp/src/pyalp + target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp) # Link your required libraries target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference common_flags) # If OpenMP support is available, ensure the module links the OpenMP imported target @@ -51,7 +75,12 @@ if( WITH_REFERENCE_BACKEND ) COMMAND ${CMAKE_COMMAND} -E echo "" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" - COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + # Print the exact built module path and a PYTHONPATH suggestion pointing + # to the directory where the extension module is placed inside the + # top-level build tree for pyalp: ${CMAKE_BINARY_DIR}/pyalp/src + COMMAND ${CMAKE_COMMAND} -E echo "Built module: $" + COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" ) @@ -61,8 +90,11 @@ endif() if( WITH_OMP_BACKEND ) set(PYALP_MODULE_NAME pyalp_omp) - pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # Enable module-local registration for this backend (see note 
above) + target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL}) + target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp) # Link your required libraries target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags) # If OpenMP support is available, ensure the module links the OpenMP imported target @@ -77,7 +109,9 @@ if( WITH_OMP_BACKEND ) COMMAND ${CMAKE_COMMAND} -E echo "" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" - COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "Built module: $" + COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" ) @@ -87,8 +121,11 @@ endif() if( WITH_NONBLOCKING_BACKEND ) set(PYALP_MODULE_NAME pyalp_nonblocking) - pybind11_add_module( ${PYALP_MODULE_NAME} pyalp.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # Enable module-local registration for this backend (see note above) + target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL}) + target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp) # Link 
your required libraries target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags) # If OpenMP support is available, ensure the module links the OpenMP imported target @@ -103,7 +140,9 @@ if( WITH_NONBLOCKING_BACKEND ) COMMAND ${CMAKE_COMMAND} -E echo "" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" - COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/python to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "Built module: $" + COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:" + COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}" COMMAND ${CMAKE_COMMAND} -E echo "============================================================" ) @@ -209,3 +248,46 @@ configure_file(${METADATA_TEMPLATE} ${METADATA_OUTPUT} @ONLY) # COMMAND ${CMAKE_COMMAND} -E cat ${METADATA_OUTPUT} # ) + +# Add a convenience CMake target to build all enabled pyalp backends and +# package them into a wheel using the standard Python packaging path. +# This creates a "pyalp" top-level build target you can invoke via +# cmake --build --target pyalp --parallel +# The target will build the enabled backend extension targets and then +# run `python -m pip wheel pyalp` with CMAKE_BUILD_DIR set so +# `pyalp/setup.py` can discover the prebuilt shared objects and metadata. 
+ +set(pyalp_package_targets "") +if(WITH_REFERENCE_BACKEND) + list(APPEND pyalp_package_targets pyalp_ref) +endif() +if(WITH_OMP_BACKEND) + list(APPEND pyalp_package_targets pyalp_omp) +endif() +if(WITH_NONBLOCKING_BACKEND) + list(APPEND pyalp_package_targets pyalp_nonblocking) +endif() + +if(NOT pyalp_package_targets) + # No backends enabled: provide a dummy target that still attempts packaging + list(APPEND pyalp_package_targets "") +endif() + +string(JOIN " " pyalp_package_targets_str ${pyalp_package_targets}) + +# Create a simple, top-level alias target `pyalp` that just builds the +# enabled backend extension targets. This avoids packing logic in the +# subdirectory and makes `make pyalp` (or `cmake --build --target pyalp`) a +# straightforward way to build the Python extension modules. +if(NOT pyalp_package_targets) + # No enabled backends: provide an empty phony target. + add_custom_target(pyalp + COMMENT "pyalp: no backends enabled" + ) +else() + add_custom_target(pyalp + DEPENDS ${pyalp_package_targets} + COMMENT "Build enabled pyalp backend extension modules" + ) +endif() + diff --git a/pyalp/src/conjugate_gradient.hpp b/pyalp/src/conjugate_gradient.hpp index dee9cf154..e663f6256 100644 --- a/pyalp/src/conjugate_gradient.hpp +++ b/pyalp/src/conjugate_gradient.hpp @@ -1,5 +1,3 @@ -#include -#include #include #include #include @@ -25,8 +23,6 @@ #include -namespace py = pybind11; - using BaseScalarType = double; #ifdef _CG_COMPLEX using ScalarType = std::complex< BaseScalarType >; diff --git a/pyalp/src/matrix_wrappers.hpp b/pyalp/src/matrix_wrappers.hpp index b1644b429..b7173d005 100644 --- a/pyalp/src/matrix_wrappers.hpp +++ b/pyalp/src/matrix_wrappers.hpp @@ -11,40 +11,6 @@ namespace py = pybind11; -// using BaseScalarType = double; -// #ifdef _CG_COMPLEX -// using ScalarType = std::complex< BaseScalarType >; -// #else -// using ScalarType = BaseScalarType; -// #endif - -// /** Parser type */ -// typedef grb::utils::MatrixFileReader< -// ScalarType, -// 
std::conditional< -// (sizeof(grb::config::RowIndexType) > sizeof(grb::config::ColIndexType)), -// grb::config::RowIndexType, -// grb::config::ColIndexType -// >::type -// > Parser; - -// /** Nonzero type */ -// typedef grb::internal::NonzeroStorage< -// grb::config::RowIndexType, -// grb::config::ColIndexType, -// ScalarType -// > NonzeroT; - -// /** In-memory storage type */ -// typedef grb::utils::Singleton< -// std::pair< -// // stores n and nz (according to parser) -// std::pair< size_t, size_t >, -// // stores the actual nonzeroes -// std::vector< NonzeroT > -// > -// > Storage; - template< typename IntType , typename ScalarType @@ -113,3 +79,48 @@ grb::Matrix matrix_factory( return mat; } + +// Convert a GraphBLAS matrix to COO (i, j, values) numpy arrays and return +// a tuple: (i_array, j_array, values_array, nrows, ncols) +template +py::tuple matrix_to_coo(grb::Matrix &M) { + // Iterate using the matrix const iterators directly. Using the + // nonzeroIterator adapter here triggered instantiation issues due to + // incomplete iterator types in some compilation units. Iterating via the + // matrix's own const_iterator works across backends and avoids the + // incomplete-type problem. + std::vector rows; + std::vector cols; + std::vector vals; + + for (auto it = M.cbegin(); it != M.cend(); ++it) { + // Dereferenced iterator is expected to be a pair where the first + // element contains a pair (i,j) and the second element is the value. + // This matches the ALP/GraphBLAS iterator contract used by backends. 
+ auto entry = *it; + rows.push_back( static_cast( entry.first.first ) ); + cols.push_back( static_cast( entry.first.second ) ); + vals.push_back( static_cast( entry.second ) ); + } + + // Create numpy arrays (copies are fine for interoperability) + py::array_t i_arr(rows.size()); + py::buffer_info i_info = i_arr.request(); + size_t *i_ptr = static_cast(i_info.ptr); + for (size_t k = 0; k < rows.size(); ++k) i_ptr[k] = rows[k]; + + py::array_t j_arr(cols.size()); + py::buffer_info j_info = j_arr.request(); + size_t *j_ptr = static_cast(j_info.ptr); + for (size_t k = 0; k < cols.size(); ++k) j_ptr[k] = cols[k]; + + py::array_t v_arr(vals.size()); + py::buffer_info v_info = v_arr.request(); + ScalarType *v_ptr = static_cast(v_info.ptr); + for (size_t k = 0; k < vals.size(); ++k) v_ptr[k] = vals[k]; + + size_t nrows = grb::nrows(M); + size_t ncols = grb::ncols(M); + + return py::make_tuple(i_arr, j_arr, v_arr, nrows, ncols); +} diff --git a/pyalp/src/numpy2alp.cpp b/pyalp/src/numpy2alp.cpp deleted file mode 100644 index 3407090c0..000000000 --- a/pyalp/src/numpy2alp.cpp +++ /dev/null @@ -1,57 +0,0 @@ -#include -#include -#include -#include -#include - -namespace py = pybind11; - -// Print a NumPy array as a std::vector (flattened) -void print_numpy_array(py::array_t input) { - py::buffer_info buf = input.request(); - double* ptr = static_cast(buf.ptr); - std::vector vec(ptr, ptr + buf.size); - - std::cout << "Vector contents (flattened): "; - for (double v : vec) { - std::cout << v << " "; - } - std::cout << std::endl; -} - -// Add two NumPy arrays (supports multi-dimensional, as long as shapes match) -py::array_t add_numpy_arrays(py::array_t a, py::array_t b) { - py::buffer_info buf_a = a.request(); - py::buffer_info buf_b = b.request(); - - // Check that shapes match - if (buf_a.ndim != buf_b.ndim) - throw std::runtime_error("Input arrays must have the same number of dimensions"); - for (ssize_t i = 0; i < buf_a.ndim; ++i) { - if (buf_a.shape[i] != buf_b.shape[i]) - 
throw std::runtime_error("Input array shapes must match"); - } - - // Prepare output array with the same shape - auto result = py::array_t(buf_a.size); - py::buffer_info buf_result = result.request(); - - double* ptr_a = static_cast(buf_a.ptr); - double* ptr_b = static_cast(buf_b.ptr); - double* ptr_result = static_cast(buf_result.ptr); - - // Element-wise addition (flat) - for (ssize_t i = 0; i < buf_a.size; ++i) { - ptr_result[i] = ptr_a[i] + ptr_b[i]; - } - - // Reshape result to match input shape - result.resize(buf_a.shape); - - return result; -} - -PYBIND11_MODULE(numpy2alp, m) { - m.def("print_numpy_array", &print_numpy_array, "Print a numpy array as a flattened std::vector"); - m.def("add_numpy_arrays", &add_numpy_arrays, "Add two numpy arrays element-wise (supports multi-dimensional arrays)"); -} diff --git a/pyalp/src/pyalp.cpp b/pyalp/src/pyalp.cpp deleted file mode 100644 index a84cc3410..000000000 --- a/pyalp/src/pyalp.cpp +++ /dev/null @@ -1,84 +0,0 @@ -#include -#include - -#include - -#include "utils.hpp" -#include "matrix_wrappers.hpp" -#include "vector_wrappers.hpp" -#include "conjugate_gradient.hpp" - -namespace py = pybind11; - - -//PYBIND11_MODULE(pyalp, m) { -// Use a macro for the module name -#ifndef PYALP_MODULE_NAME -#define PYALP_MODULE_NAME pyalp -#endif - -PYBIND11_MODULE(PYALP_MODULE_NAME, m) { - // Common bindings for all backends - m.def("backend_name", [](){ return "backend"; }); - py::class_>(m, "Matrix") - .def(py::init([](size_t m, size_t n, - py::array data1, - py::array data2, - py::array_t data3) { - return matrix_factory(m, n, data1, data2, data3); - }), - py::arg("m"), py::arg("n"), - py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); - // simple constructor - // .def(py::init([](size_t m, size_t n, - // py::array_t data1, - // py::array_t data2, - // py::array_t data3) { - // grb::Matrix< ScalarType > mat(m, n); // call the basic constructor - // buildMatrix(mat, data1, data2, data3); // initialize with data - // 
return mat; - // }), - // py::arg("m"), py::arg("n"), - // py::arg("i_array"), py::arg("j_array"), py::arg("k_array") - // ) - - // add some existing things - // .def("get", &Matrix::get) - // .def("set", &Matrix::set) - // .def("rows", &Matrix::rows) - // .def("cols", &Matrix::cols) - ; // - py::class_>(m, "Vector") - .def(py::init()) - .def(py::init([](size_t m, - py::array_t data3) { - grb::Vector< ScalarType > vec(m); // call the basic constructor - buildVector(vec, data3); // initialize with data - return vec; - }), - py::arg("m"), - py::arg("k_array") - ) - - .def("to_numpy", &to_numpy, "Convert to numpy array"); - ; // - - - -//m.def("buildMatrix", &buildMatrix, "Fill Matrix from 3 NumPy arrays"); // - - m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); - m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); - m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG sover", - py::arg("L"), - py::arg("x"), - py::arg("b"), - py::arg("r"), - py::arg("u"), - py::arg("temp"), - py::arg("solver_iterations") = 1000, - py::arg("verbose") = 0 - ); -} - - diff --git a/pyalp/src/pyalp/CMakeLists.txt b/pyalp/src/pyalp/CMakeLists.txt index 2d86c07dd..ddd4524ca 100644 --- a/pyalp/src/pyalp/CMakeLists.txt +++ b/pyalp/src/pyalp/CMakeLists.txt @@ -1,8 +1,9 @@ cmake_minimum_required(VERSION 3.14) project(pyalp_bindings LANGUAGES CXX) -pybind11_add_module(_pyalp bindings.cpp ../matrix_wrappers.hpp ../utils.hpp ../vector_wrappers.hpp ../conjugate_gradient.hpp) +pybind11_add_module(_pyalp module_entry.cpp ../matrix_wrappers.hpp ../utils.hpp ../vector_wrappers.hpp ../conjugate_gradient.hpp) target_compile_definitions(_pyalp PRIVATE PYALP_MODULE_NAME=_pyalp) +target_compile_definitions(_pyalp PRIVATE PYALP_MODULE_LOCAL=1) set_target_properties(_pyalp PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) target_include_directories(_pyalp PRIVATE ${CMAKE_SOURCE_DIR}/include 
${CMAKE_CURRENT_SOURCE_DIR}/..) diff --git a/pyalp/src/pyalp/bindings.cpp b/pyalp/src/pyalp/bindings.cpp deleted file mode 100644 index ef545ccd9..000000000 --- a/pyalp/src/pyalp/bindings.cpp +++ /dev/null @@ -1,56 +0,0 @@ -#include -#include - -#include - -#include "utils.hpp" -#include "matrix_wrappers.hpp" -#include "vector_wrappers.hpp" -#include "conjugate_gradient.hpp" - -namespace py = pybind11; - -// Build extension named _pyalp (private extension module) so package can expose it -#ifndef PYALP_MODULE_NAME -#define PYALP_MODULE_NAME _pyalp -#endif - -PYBIND11_MODULE(PYALP_MODULE_NAME, m) { - // Common bindings for all backends (kept minimal here) - m.def("backend_name", [](){ return "backend"; }); - py::class_>(m, "Matrix", py::module_local()) - .def(py::init([](size_t m_, size_t n_, - py::array data1, - py::array data2, - py::array_t data3) { - return matrix_factory(m_, n_, data1, data2, data3); - }), - py::arg("m"), py::arg("n"), - py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); - - py::class_>(m, "Vector", py::module_local()) - .def(py::init()) - .def(py::init([](size_t m, - py::array_t data3) { - grb::Vector< ScalarType > vec(m); // call the basic constructor - buildVector(vec, data3); // initialize with data - return vec; - }), - py::arg("m"), - py::arg("k_array") - ) - .def("to_numpy", &to_numpy, "Convert to numpy array"); - - m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); - m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); - m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG sover", - py::arg("L"), - py::arg("x"), - py::arg("b"), - py::arg("r"), - py::arg("u"), - py::arg("temp"), - py::arg("solver_iterations") = 1000, - py::arg("verbose") = 0 - ); -} diff --git a/pyalp/src/pyalp/common_bindings.hpp b/pyalp/src/pyalp/common_bindings.hpp new file mode 100644 index 000000000..cc14b19aa --- /dev/null +++ b/pyalp/src/pyalp/common_bindings.hpp 
@@ -0,0 +1,94 @@ +// Common pybind11 bindings shared by CMake targets and setuptools builds. +#pragma once + +#include +#include + +#include + +#include "utils.hpp" +#include "matrix_wrappers.hpp" +#include "vector_wrappers.hpp" +#include "conjugate_gradient.hpp" + +namespace py = pybind11; + +// Register all pyalp bindings. Module-local registration can be enabled by +// instantiating with ModuleLocal = true. When ModuleLocal==true the +// py::module_local() policy is applied to class bindings to avoid symbol +// collisions when multiple compiled variants are imported in the same +// interpreter. +template +void register_pyalp(py::module_ &m) { + // Common bindings for all backends + m.def("backend_name", [](){ return "backend"; }); + + if constexpr (ModuleLocal) { + py::class_>(m, "Matrix", py::module_local()) + .def(py::init([](size_t m_, size_t n_, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m_, n_, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + + // Expose a COO serializer so Matrix instances can be moved between + // modules/processes without depending on pybind11 cross-module + // type registration. Returns (i_array, j_array, values_array, nrows, ncols). 
+ m.def("matrix_to_coo", &matrix_to_coo, "Serialize Matrix to COO arrays"); + + py::class_>(m, "Vector", py::module_local()) + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + .def("to_numpy", &to_numpy, "Convert to numpy array"); + } else { + py::class_>(m, "Matrix") + .def(py::init([](size_t m_, size_t n_, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m_, n_, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + + // Expose the matrix_to_coo helper in the non-module_local case as well. + m.def("matrix_to_coo", &matrix_to_coo, "Serialize Matrix to COO arrays"); + + py::class_>(m, "Vector") + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + .def("to_numpy", &to_numpy, "Convert to numpy array"); + } + + m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); + m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); + m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG solver", + py::arg("L"), + py::arg("x"), + py::arg("b"), + py::arg("r"), + py::arg("u"), + py::arg("temp"), + py::arg("solver_iterations") = 1000, + py::arg("verbose") = 0 + ); +} diff --git a/pyalp/src/pyalp/module_entry.cpp b/pyalp/src/pyalp/module_entry.cpp new file mode 100644 index 000000000..f9bf49e06 --- /dev/null +++ b/pyalp/src/pyalp/module_entry.cpp @@ -0,0 +1,14 @@ +#include +#include "common_bindings.hpp" + +#ifndef PYALP_MODULE_LOCAL +#define PYALP_MODULE_LOCAL 1 +#endif + +PYBIND11_MODULE(PYALP_MODULE_NAME, 
m) { +#if PYALP_MODULE_LOCAL + register_pyalp(m); +#else + register_pyalp(m); +#endif +} diff --git a/pyalp/src/pyalp/your_module.py b/pyalp/src/pyalp/your_module.py deleted file mode 100644 index 0acf81b9c..000000000 --- a/pyalp/src/pyalp/your_module.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Small pure-Python utilities for pyalp package.""" - -def hello_py(): - return "Hello from pure Python module" diff --git a/pyalp/src/python2alp.cpp b/pyalp/src/python2alp.cpp deleted file mode 100644 index 0d70140f0..000000000 --- a/pyalp/src/python2alp.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include - -std::string say_hello() { - return "Hello, world from C++!"; -} - -PYBIND11_MODULE(python2alp, m) { - m.def("say_hello", &say_hello, "A function that returns a greeting"); -} diff --git a/pyalp/tests/test_bckds_inprocess.py b/pyalp/tests/test_bckds_inprocess.py new file mode 100644 index 000000000..b8af3f074 --- /dev/null +++ b/pyalp/tests/test_bckds_inprocess.py @@ -0,0 +1,105 @@ +""" +Simple in-process test to verify multiple pyalp backend extension modules +can be imported and used in the same Python interpreter without pybind11 +duplicate-type registration collisions. + +Usage (when building locally): + + # configure & build top-level project with pyalp enabled + cmake -S . -B build -DENABLE_PYALP=ON + cmake --build build --target pyalp + + # run the test pointing PYTHONPATH to the build output + PYTHONPATH=build/pyalp/src python3 pyalp/tests/test_bckds_inprocess.py + +If the build places extensions elsewhere, adjust PYTHONPATH to include that +directory. +""" + +import sys +import importlib +import numpy as np + +# Optionally prepend a build directory. If you're running inside the repo and +# built into ../build, uncomment and adjust the path below. 
+# sys.path.insert(0, '/path/to/your/build/pyalp/src') + +BACKENDS = ['pyalp_ref', 'pyalp_omp', 'pyalp_nonblocking'] + +def make_simple_matrix(): + # Create arrays for a single non-zero entry at (0,0) with value 1.0 + i = np.array([0], dtype=np.int64) + j = np.array([0], dtype=np.int64) + v = np.array([1.0], dtype=np.float64) + return 1, 1, i, j, v + + +def main(): + m,n,i,j,v = make_simple_matrix() + exercised = 0 + + # If the installed package exposes a `pyalp` package, prefer to query + # it for the list of available backends and skip any that aren't present + # (useful for platform-specific wheels that omit some backends). + installed_backends = None + try: + pkg = importlib.import_module('pyalp') + try: + installed_backends = set(pkg.list_backends()) + except Exception: + installed_backends = None + except ModuleNotFoundError: + installed_backends = None + + for backend in BACKENDS: + # If we detected an installed pyalp package and it doesn't list this + # backend, skip it rather than failing the whole test. + if installed_backends is not None and backend not in installed_backends: + print(f"Backend {backend} not present in installed package, skipping") + continue + + # Try importing the module as a top-level module first (old-style), + # then as a submodule of the installed `pyalp` package. This mirrors + # how the wheel packages the compiled extensions under the `pyalp` + # package (pyalp.pyalp_ref, etc.). We also attach the imported + # submodule to the `pyalp` package object for convenience. 
+ mod = None + try: + mod = importlib.import_module(backend) + except ModuleNotFoundError: + try: + fq = f"pyalp.{backend}" + mod = importlib.import_module(fq) + # Attach to pyalp package so attribute access works + try: + pkg = importlib.import_module('pyalp') + setattr(pkg, backend, mod) + except Exception: + pass + except Exception as e: + print(f"FAILED IMPORT {backend}: {e}") + raise + print(f"Imported {backend}: {mod}") + try: + Matrix = getattr(mod, 'Matrix') + except AttributeError: + print(f"{backend} does not expose Matrix") + raise + # Construct an instance + try: + mat = Matrix(m, n, i, j, v) + exercised += 1 + print(f"Constructed Matrix from {backend}:", type(mat)) + except Exception as e: + print(f"FAILED TO CONSTRUCT Matrix from {backend}: {e}") + raise + + print('\nALL BACKENDS IMPORTED AND INSTANCES CREATED SUCCESSFULLY') + if exercised == 0: + print('ERROR: no backends were exercised (none installed).', file=sys.stderr) + raise SystemExit(2) + else: + print(f'SUCCESS: exercised {exercised} backend(s).') + +if __name__ == '__main__': + main() diff --git a/tests/python/test.py b/tests/python/test.py index 577917b42..b6628612f 100644 --- a/tests/python/test.py +++ b/tests/python/test.py @@ -21,7 +21,7 @@ - pyalp_ref (should be available in the Python path) """ -import pyalp_ref as pyalp +import pyalp.pyalp_ref as pyalp import numpy as np # Gnerate a small sparse linear system using numpy arrays diff --git a/tests/python/test_backends.py b/tests/python/test_backends.py index a04edc920..bb0ba2658 100644 --- a/tests/python/test_backends.py +++ b/tests/python/test_backends.py @@ -1,50 +1,67 @@ +#!/usr/bin/env python3 """ -Parametrized smoke test that runs the conjugate-gradient example against all -available pyalp backend modules (pyalp_ref, pyalp_omp, pyalp_nonblocking). +Test script for the pyalp backend (example uses the OpenMP backend name +`pyalp_omp`, but you can use `pyalp_ref` or another available backend). 
-This is adapted from tests/python/test.py but runs the same assertions for -each backend installed in the `pyalp` package. If a backend is not present in -the wheel, the test is skipped. +Usage: + python test_cg.py + +Dependencies: + - numpy + - pyalp (installed and providing a backend such as pyalp_omp) """ -import os -import shutil -import subprocess -import sys -import pytest -from pathlib import Path - - -BACKENDS = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] - - -def backend_exists_in_package(backend: str) -> bool: - # Check installed package dir for a backend shared object - try: - import pyalp - p = Path(pyalp.__file__).parent - patterns = [f"{backend}*.so", f"{backend}*.pyd"] - for pat in patterns: - if any(p.glob(pat)): - return True - except Exception: - return False - return False - - -@pytest.mark.parametrize("backend", BACKENDS) -def test_conjugate_gradient_backend_subprocess(backend): - if not backend_exists_in_package(backend): - pytest.skip(f"backend {backend} not present in installed package") - - # Run the smoke test in a fresh Python subprocess to avoid in-process - # pybind11 type registration conflicts between multiple extension modules. 
- python_exe = sys.executable - runner = Path(__file__).with_name("backend_smoke_runner.py") - if not runner.exists(): - pytest.skip("backend smoke runner script not found") - proc = subprocess.run([python_exe, str(runner), backend], capture_output=True, text=True) - if proc.returncode != 0: - # Give helpful debug output - print(proc.stdout) - print(proc.stderr) - assert proc.returncode == 0, f"backend {backend} failed with return code {proc.returncode}\nSTDOUT:\n{proc.stdout}\nSTDERR:\n{proc.stderr}" + +import numpy as np + + + +for backendname in ['pyalp_ref','pyalp_omp','pyalp_nonblocking']: + + import pyalp + # Choose the backend module (change name if you want a different backend) + pyalp = pyalp.get_backend(backendname) # or 'pyalp_ref', 'pyalp_nonblocking' + + # Generate a small sparse linear system using numpy arrays + N, M = 5, 5 + idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) + jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) + vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) + b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) + x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) + r = np.zeros(5, dtype=np.float64) + u = np.zeros(5, dtype=np.float64) + tmp = np.zeros(5, dtype=np.float64) + + # Create the pyalp Matrix and Vector objects + alpmatrixA = pyalp.Matrix(5, 5, idata, jdata, vdata) + alpvectorx = pyalp.Vector(5, x) + alpvectorb = pyalp.Vector(5, b) + alpvectorr = pyalp.Vector(5, r) + alpvectoru = pyalp.Vector(5, u) + alpvectortmp = pyalp.Vector(5, tmp) + + maxiterations = 2000 + verbose = 1 + + # Solve the linear system using the conjugate gradient method in the backend + iterations, residual = pyalp.conjugate_gradient( + alpmatrixA, + alpvectorx, + alpvectorb, + alpvectorr, + alpvectoru, + alpvectortmp, + maxiterations, + verbose, + ) + print('iterations =', iterations) + print('residual =', residual) + + # Convert the result 
vector to a numpy array and print it + x_result = alpvectorx.to_numpy() + print('x_result =', x_result) + + # Check if the result is close to the expected solution + assert np.allclose(x_result, np.array([1.0, 1.0, 0.0, 0.13598679, -0.88396565])), 'solution mismatch' + + print("backend ", backendname, " OK")