name: Offline Installation Archive

on:
  workflow_dispatch:
  workflow_call:

jobs:
  # Run CI on workflow_dispatch to guarantee a clean build. When invoked via
  # workflow_call, the caller (python_release.yaml) is expected to have run
  # CI already.
  # NOTE(review): the workflow_dispatch webhook payload itself includes a
  # `workflow` field, so `!github.event.workflow` may also be false for a
  # direct dispatch — confirm this gate actually fires as intended.
  python-ci:
    if: github.event_name == 'workflow_dispatch' && !github.event.workflow
    uses: ./.github/workflows/python_ci.yaml

  get-matrix-config:
    name: Get matrix configuration
    runs-on: ubuntu-latest
    outputs:
      supported_python_versions: ${{ steps.get-python-versions.outputs.versions }}
      platforms: ${{ steps.set-matrix.outputs.matrix }}
      sift_package_version: ${{ steps.get-sift-version.outputs.version }}
    steps:
      - uses: actions/checkout@v4

      - name: Get supported Python versions from pyproject.toml
        id: get-python-versions
        run: |
          versions=$(grep "Programming Language :: Python :: " python/pyproject.toml | sed 's/.*Python :: \([0-9.]*\).*/\1/' | jq -R -s -c 'split("\n")[:-1]')
          echo "versions=$versions" >> $GITHUB_OUTPUT

      - name: Get sift-stack-py package version from pyproject.toml
        id: get-sift-version
        run: |
          version=$(grep '^version = ' python/pyproject.toml | sed 's/version = "\(.*\)"/\1/')
          echo "version=$version" >> $GITHUB_OUTPUT

      # The platform list is defined once here so every downstream job can
      # share it — Actions has no native way to reuse one matrix across jobs.
      - name: Set platform matrix
        id: set-matrix
        uses: actions/github-script@v7
        with:
          script: |
            const matrix = [
              {os: 'ubuntu', arch: 'x86_64', runner: 'ubuntu-latest', platform_tag: 'linux_x86_64'},
              {os: 'ubuntu', arch: 'aarch64', runner: 'ubuntu-latest', platform_tag: 'linux_aarch64'},
              {os: 'macos', arch: 'x86_64', runner: 'macos-latest', platform_tag: 'macos_x86_64'},
              {os: 'macos', arch: 'arm64', runner: 'macos-latest', platform_tag: 'macos_arm64'},
              {os: 'windows', arch: 'amd64', runner: 'windows-latest', platform_tag: 'win_amd64'}
            ];
            core.setOutput('matrix', JSON.stringify(matrix));

  build_wheel:
    name: Build sift-stack-py distributions
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.8" # Use lowest supported version for maximum compatibility

      - name: Install build tools
        run: |
          python -m pip install --upgrade pip
          pip install build twine

      - name: Build distributions
        working-directory: python
        run: |
          python -m build

      - name: Verify distributions
        working-directory: python
        shell: bash
        run: |
          # Check all distributions with twine
          twine check dist/*

          # Verify we have a universal wheel
          # We want to ensure that the wheel is compatible with all Python versions
          # and all architectures. If this fails, we will need to update our build strategy to
          # build separate wheels for each Python version and architecture.
          WHEEL_NAME=$(ls dist/sift_stack_py*.whl)
          if [[ ! $WHEEL_NAME =~ "py3-none-any.whl" ]]; then
            echo "Error: Expected a universal wheel (py3-none-any) but got: $WHEEL_NAME"
            exit 1
          fi
          echo "Verified universal wheel: $WHEEL_NAME"

      - name: Upload distributions
        uses: actions/upload-artifact@v4
        with:
          name: sift-stack-py-dist
          path: python/dist/*
          retention-days: 14

  # One offline archive per (platform, python-version) pair. Dependency wheels
  # are built natively on each runner so platform-specific wheels are correct.
  build_and_verify:
    name: Build offline archive for ${{ matrix.platform.os }} (${{ matrix.platform.arch }}) with Python ${{ matrix.python-version }}
    needs: [get-matrix-config, build_wheel]
    runs-on: ${{ matrix.platform.runner }}
    strategy:
      fail-fast: false
      matrix:
        platform: ${{ fromJson(needs.get-matrix-config.outputs.platforms) }}
        python-version: ${{ fromJson(needs.get-matrix-config.outputs.supported_python_versions) }}
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install build tools
        shell: bash
        run: |
          python -m pip install --upgrade pip
          pip install build pip-tools

      - name: Generate requirements
        working-directory: python
        shell: bash
        run: |
          # Generate requirements file with all extras
          pip-compile pyproject.toml --all-extras -o requirements-all.txt

      - name: Build dependency wheels
        working-directory: python
        shell: bash
        run: |
          # Build wheels for all dependencies directly to dist directory
          # First build wheels for build dependencies
          pip wheel -w dist \
            setuptools wheel Cython \
            --prefer-binary \
            --no-deps

          # Then build wheels for package dependencies
          pip wheel -r requirements-all.txt -w dist \
            --prefer-binary \
            --no-deps

      - name: Download sift-stack-py wheel
        uses: actions/download-artifact@v4
        with:
          name: sift-stack-py-dist
          path: python/dist/

      - name: Test installations
        working-directory: python
        shell: bash
        run: |
          python scripts/build_utils.py \
            --dist-dir dist \
            --package-name sift-stack-py \
            ${{ matrix.platform.os == 'windows' && '--is-windows' || '' }}

      - name: Create distribution archive
        working-directory: python
        shell: bash
        run: |
          if [ "${{ matrix.platform.os }}" = "windows" ]; then
            pwsh -Command "Compress-Archive -Path dist/* -DestinationPath dist/sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py${{ matrix.python-version }}-${{ matrix.platform.platform_tag }}.zip -Force"
          else
            cd dist && zip -r "sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py${{ matrix.python-version }}-${{ matrix.platform.platform_tag }}.zip" *
          fi

      - name: Upload distribution archive
        uses: actions/upload-artifact@v4
        with:
          name: sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py${{ matrix.python-version }}-${{ matrix.platform.platform_tag }}
          path: python/dist/sift-py-dist-*.zip
          retention-days: 14

  # Collapse the per-python-version archives into one archive per platform
  # (named "...-py3-<tag>") so the release only carries one file per platform.
  merge_platform_archives:
    name: Merge archives for ${{ matrix.platform.os }} (${{ matrix.platform.arch }})
    needs: [build_and_verify, get-matrix-config]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform: ${{ fromJson(needs.get-matrix-config.outputs.platforms) }}
    steps:
      - name: Download platform archives
        uses: actions/download-artifact@v4
        with:
          pattern: sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py*-${{ matrix.platform.platform_tag }}
          path: platform_archives
          merge-multiple: false

      - name: Merge archives
        shell: bash
        run: |
          # Create directory for merged files
          mkdir -p merged

          # Extract and merge all archives for this platform
          for zip in platform_archives/*/sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py*-${{ matrix.platform.platform_tag }}.zip; do
            echo "Processing archive: $zip"
            unzip -o "$zip" -d "merged"
            echo "Contents after extracting $zip:"
            ls -R merged
          done

          # Create base name for archives
          ARCHIVE_BASE="sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py3-${{ matrix.platform.platform_tag }}"

          # Create zip archive
          cd merged
          zip -r "../${ARCHIVE_BASE}.zip" *

          # Create tar.gz archive
          tar -czf "../${ARCHIVE_BASE}.tar.gz" *
          cd ..

      - name: Upload merged archives
        uses: actions/upload-artifact@v4
        with:
          name: sift-py-dist-${{ needs.get-matrix-config.outputs.sift_package_version }}-py3-${{ matrix.platform.platform_tag }}
          path: |
            sift-py-dist-*.zip
            sift-py-dist-*.tar.gz
          retention-days: 14
# python_release.yaml — jobs section (post-change state reconstructed from
# the patch; the python-ci gate lines are unchanged context).
jobs:
  python-ci:
    if: github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/tags')
    uses: ./.github/workflows/python_ci.yaml

  # Build the per-platform offline archives; python_build.yaml skips its own
  # CI job when invoked via workflow_call because we gate on python-ci here.
  build-offline-archives:
    if: github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/tags')
    needs: python-ci
    uses: ./.github/workflows/python_build.yaml

  publish-to-pypi:
    name: Upload release to PyPI
    needs: [python-ci, build-offline-archives]
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/sift_py
    permissions:
      id-token: write # required for PyPI trusted publishing
    steps:
      # Distributions were built once by python_build.yaml; download them
      # instead of rebuilding, so PyPI gets the exact verified artifacts.
      - name: Download distributions
        uses: actions/download-artifact@v4
        with:
          name: sift-stack-py-dist
          path: python/dist/

      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: python/dist/

  create-github-release:
    name: Create GitHub Release
    needs: [build-offline-archives]
    runs-on: ubuntu-latest
    permissions:
      contents: write # required to create the release and upload assets
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Download distributions
        uses: actions/download-artifact@v4
        with:
          name: sift-stack-py-dist
          path: python/dist/

      - name: Download all platform archives
        uses: actions/download-artifact@v4
        with:
          pattern: sift-py-dist-*-py3-*
          path: platform_archives
          merge-multiple: false

      - name: Create Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TAG_NAME: ${{ github.ref_name }}
        run: |
          # Create release notes
          cat > release_notes.md << 'EOL'
          See [CHANGELOG.md](https://github.com/sift-stack/sift/blob/main/python/CHANGELOG.md) for details.

          Offline archives are available for download for multiple platforms. Offline archives include wheels for all dependencies including build extras, e.g. openssl, development, and build.

          Use `pip install sift-stack-py --find-links={path/to/archive} --no-index` to install.
          EOL

          # Create the release
          gh release create "$TAG_NAME" \
            --title "sift-stack-py $TAG_NAME" \
            --notes-file release_notes.md

          # Upload Python package distributions
          for dist in python/dist/*; do
            echo "Uploading distribution: $dist"
            gh release upload "$TAG_NAME" "$dist" --clobber
          done

          # Upload platform archives (both .zip and .tar.gz)
          for archive in platform_archives/*/sift-py-dist-*-py3-*.{zip,tar.gz}; do
            echo "Uploading archive: $archive"
            gh release upload "$TAG_NAME" "$archive" --clobber
          done
+ EOL + + # Create the release + gh release create "$TAG_NAME" \ + --title "sift-stack-py $TAG_NAME" \ + --notes-file release_notes.md + + # Upload Python package distributions + for dist in python/dist/*; do + echo "Uploading distribution: $dist" + gh release upload "$TAG_NAME" "$dist" --clobber + done + + # Upload platform archives (both .zip and .tar.gz) + for archive in platform_archives/*/sift-py-dist-*-py3-*.{zip,tar.gz}; do + echo "Uploading archive: $archive" + gh release upload "$TAG_NAME" "$archive" --clobber + done + diff --git a/.gitignore b/.gitignore index 9f7ad8e80..e7d1e7bef 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ **/.env **/venv +**/.venv **/__pycache__ **/dist diff --git a/python/pyproject.toml b/python/pyproject.toml index 1010474ab..a86b4bff0 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -47,7 +47,7 @@ Changelog = "https://github.com/sift-stack/sift/tree/main/python/CHANGELOG.md" [project.optional-dependencies] development = [ - "grpcio-testing==1.13", + "grpcio-testing~=1.13", "mypy==1.10.0", "pyright==1.1.386", "pytest==8.2.2", diff --git a/python/scripts/build_utils.py b/python/scripts/build_utils.py new file mode 100644 index 000000000..b519d6141 --- /dev/null +++ b/python/scripts/build_utils.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 + +import argparse +import os +import subprocess +import venv +from itertools import combinations +from pathlib import Path +from typing import List, Optional +from zipfile import ZipFile + + +def get_extras_from_wheel(wheel_path: str) -> List[str]: + """Extract the list of extras from a wheel's metadata. + + Args: + wheel_path: Path to the wheel file to inspect. + + Returns: + List of extra names declared in the wheel metadata. 
+ """ + with ZipFile(wheel_path) as wheel: + # Find the METADATA file in the .dist-info directory + metadata_file = next( + name for name in wheel.namelist() if name.endswith(".dist-info/METADATA") + ) + metadata = wheel.read(metadata_file).decode("utf-8") + + # Parse Provides-Extra lines from metadata + extras = [] + for line in metadata.splitlines(): + if line.startswith("Provides-Extra:"): + extras.append(line.split(":", 1)[1].strip()) + return extras + + +def get_extra_combinations(extras: List[str]) -> List[str]: + """Generate all possible combinations of extras. + + Args: + extras: List of extra names to generate combinations from. + + Returns: + List of comma-separated strings representing each combination of extras. + """ + all_combinations = [] + for r in range(len(extras) + 1): + all_combinations.extend(",".join(c) for c in combinations(extras, r)) + return all_combinations + + +def test_install( + package_name: str, extras: Optional[str], dist_dir: str, venv_dir: str, is_windows: bool +) -> None: + """Test package installation with given extras in a fresh venv. + + Args: + package_name: Name of the package to install. + extras: Optional comma-separated string of extras to install. + dist_dir: Directory containing wheel and dependencies. + venv_dir: Directory to create virtual environment in. + is_windows: Whether running on Windows platform. 
+ """ + print(f"Testing installation with extras: {extras or 'none'}") + + # Create and activate venv + venv.create(venv_dir, with_pip=True) + + # Build installation command + if extras: + install_cmd = f'pip install --no-index --find-links="{dist_dir}" "{package_name}[{extras}]"' + else: + install_cmd = f'pip install --no-index --find-links="{dist_dir}" {package_name}' + + if is_windows: + # Windows uses different activation and command syntax + activate_script = os.path.join(venv_dir, "Scripts", "activate.bat") + full_cmd = f'"{activate_script}" && {install_cmd} && deactivate' + subprocess.run(full_cmd, shell=True, check=True) + else: + # Unix systems use bash + activate_script = os.path.join(venv_dir, "bin", "activate") + full_cmd = f'source "{activate_script}" && {install_cmd} && deactivate' + subprocess.run(full_cmd, shell=True, check=True, executable="/bin/bash") + + +def main(): + parser = argparse.ArgumentParser( + description="Test package installation with all extra combinations" + ) + parser.add_argument( + "--dist-dir", required=True, help="Directory containing wheel and dependencies" + ) + parser.add_argument("--package-name", required=True, help="Name of the package to install") + parser.add_argument("--is-windows", action="store_true", help="Whether running on Windows") + args = parser.parse_args() + + dist_dir = Path(args.dist_dir) + wheel_file = next(dist_dir.glob(f"{args.package_name.replace('-', '_')}*.whl")) + + # Get all extras from the wheel + extras = get_extras_from_wheel(str(wheel_file)) + combinations = get_extra_combinations(extras) + + # Test base installation first + test_install( + package_name=args.package_name, + extras=None, + dist_dir=str(dist_dir), + venv_dir="test_venv_base", + is_windows=args.is_windows, + ) + + # Test each combination of extras + for combo in combinations: + if combo: # Skip empty string from base combination + test_install( + package_name=args.package_name, + extras=combo, + dist_dir=str(dist_dir), + 
venv_dir=f'test_venv_{combo.replace(",", "_")}', + is_windows=args.is_windows, + ) + + +if __name__ == "__main__": + main()