diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..39bed5e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,291 @@ +name: CI/CD Pipeline + +on: + push: + branches: + - main + pull_request: + types: [opened, synchronize, reopened] + workflow_dispatch: + inputs: + bump_type: + description: 'Version bump type' + required: true + type: choice + options: + - patch + - minor + - major + description: + description: 'Release description (optional)' + required: false + type: string + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + # REQUIRED CI CHECKS - All must pass before release + # These jobs ensure code quality and tests pass before any release + + # Linting and formatting + lint: + name: Lint and Format Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[dev]" + + - name: Run Ruff linting + run: ruff check . + + - name: Check Ruff formatting + run: ruff format --check . 
+ + - name: Run mypy + run: mypy src + + - name: Check file size limit + run: python scripts/check_file_size.py + + # Test on latest Python version only + test: + name: Test (Python 3.13) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[dev]" + + - name: Run tests + run: pytest tests/ -v --cov=src --cov-report=xml --cov-report=term + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + file: ./coverage.xml + fail_ci_if_error: false + + # Build package - only runs if lint and test pass + build: + name: Build Package + runs-on: ubuntu-latest + needs: [lint, test] + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + + - name: Build package + run: python -m build + + - name: Check package + run: twine check dist/* + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/ + + # Check for changelog fragments in PRs (similar to changesets check) + changelog: + name: Changelog Fragment Check + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Install scriv + run: pip install "scriv[toml]" + + - name: Check for changelog fragments + run: | + # Get list of fragment files (excluding README and template) + FRAGMENTS=$(find changelog.d -name "*.md" ! -name "README.md" ! 
-name "*.j2" 2>/dev/null | wc -l) + + # Get changed files in PR + CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}...HEAD) + + # Check if any source files changed (excluding docs and config) + SOURCE_CHANGED=$(echo "$CHANGED_FILES" | grep -E "^(src/|tests/|scripts/)" | wc -l) + + if [ "$SOURCE_CHANGED" -gt 0 ] && [ "$FRAGMENTS" -eq 0 ]; then + echo "::warning::No changelog fragment found. Please run 'scriv create' and document your changes." + echo "" + echo "To create a changelog fragment:" + echo " pip install 'scriv[toml]'" + echo " scriv create" + echo "" + echo "This is similar to adding a changeset in JavaScript projects." + echo "See changelog.d/README.md for more information." + # Note: This is a warning, not a failure, to allow flexibility + # Change 'exit 0' to 'exit 1' to make it required + exit 0 + fi + + echo "✓ Changelog check passed" + + # RELEASE JOBS - Only run after all CI checks pass + + # Automatic release on push to main (if version changed) + auto-release: + name: Auto Release + needs: [lint, test, build] + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + runs-on: ubuntu-latest + permissions: + contents: write + id-token: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + + - name: Check if version changed + id: version_check + run: | + # Get current version from pyproject.toml + CURRENT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' pyproject.toml) + echo "current_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT + + # Check if tag exists + if git rev-parse "v$CURRENT_VERSION" >/dev/null 2>&1; then + echo "Tag v$CURRENT_VERSION already exists, skipping release" + echo "should_release=false" >> $GITHUB_OUTPUT + else + echo "New version detected: $CURRENT_VERSION" + echo "should_release=true" >> 
$GITHUB_OUTPUT + fi + + - name: Download artifacts + if: steps.version_check.outputs.should_release == 'true' + uses: actions/download-artifact@v4 + with: + name: dist + path: dist/ + + - name: Publish to PyPI + if: steps.version_check.outputs.should_release == 'true' + uses: pypa/gh-action-pypi-publish@release/v1 + + - name: Create GitHub Release + if: steps.version_check.outputs.should_release == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + python scripts/create_github_release.py \ + --version "${{ steps.version_check.outputs.current_version }}" \ + --repository "${{ github.repository }}" + + # Manual release via workflow_dispatch - only after CI passes + manual-release: + name: Manual Release + needs: [lint, test, build] + if: github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + permissions: + contents: write + id-token: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build twine "scriv[toml]" + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Collect changelog fragments + run: | + # Check if there are any fragments to collect + FRAGMENTS=$(find changelog.d -name "*.md" ! -name "README.md" ! -name "*.j2" 2>/dev/null | wc -l) + if [ "$FRAGMENTS" -gt 0 ]; then + echo "Found $FRAGMENTS changelog fragment(s), collecting..." 
+ scriv collect --version "${{ github.event.inputs.bump_type }}" + else + echo "No changelog fragments found, skipping collection" + fi + + - name: Version and commit + id: version + run: | + python scripts/version_and_commit.py \ + --bump-type "${{ github.event.inputs.bump_type }}" \ + --description "${{ github.event.inputs.description }}" + + - name: Build package + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + run: python -m build + + - name: Check package + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + run: twine check dist/* + + - name: Publish to PyPI + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + uses: pypa/gh-action-pypi-publish@release/v1 + + - name: Create GitHub Release + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + python scripts/create_github_release.py \ + --version "${{ steps.version.outputs.new_version }}" \ + --repository "${{ github.repository }}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..36a6207 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,25 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: check-merge-conflict + - id: check-toml + - id: debug-statements + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.4 + hooks: + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.13.0 + hooks: + - id: mypy + additional_dependencies: [pytest, pytest-asyncio] + args: [--strict, --ignore-missing-imports] diff --git a/.ruff.toml b/.ruff.toml 
new file mode 100644 index 0000000..e6a459e --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,5 @@ +# Ruff configuration +# This file provides additional settings beyond pyproject.toml + +[lint.isort] +known-first-party = ["my_package"] diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..f3f84c7 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,22 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + + + +## [0.1.0] - 2025-01-XX + +### Added + +- Initial project structure +- Basic example functions (add, multiply, delay) +- Comprehensive test suite with pytest +- Code quality tools (ruff, mypy) +- Pre-commit hooks configuration +- GitHub Actions CI/CD pipeline +- Scriv for changelog management (similar to Changesets) +- Release automation (PyPI + GitHub releases) +- Template structure for AI-driven Python development diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..6d9596c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,269 @@ +# Contributing to python-ai-driven-development-pipeline-template + +Thank you for your interest in contributing! This document provides guidelines and instructions for contributing to this project. + +## Development Setup + +1. **Fork and clone the repository** + + ```bash + git clone https://github.com/YOUR-USERNAME/python-ai-driven-development-pipeline-template.git + cd python-ai-driven-development-pipeline-template + ``` + +2. **Create a virtual environment** + + ```bash + python -m venv .venv + source .venv/bin/activate # On Windows: .venv\Scripts\activate + ``` + +3. **Install dependencies** + + ```bash + pip install -e ".[dev]" + ``` + +4. **Install pre-commit hooks** + + ```bash + pip install pre-commit + pre-commit install + ``` + +## Development Workflow + +1. 
**Create a feature branch** + + ```bash + git checkout -b feature/my-feature + ``` + +2. **Make your changes** + + - Write code following the project's style guidelines + - Add tests for any new functionality + - Update documentation as needed + +3. **Run quality checks** + + ```bash + # Lint code + ruff check . + + # Format code + ruff format . + + # Type check + mypy src/ + + # Check file sizes + python scripts/check_file_size.py + + # Run all checks together + ruff check . && ruff format --check . && mypy src/ && python scripts/check_file_size.py + ``` + +4. **Run tests** + + ```bash + # Run tests + pytest + + # Run tests with coverage + pytest --cov=src --cov-report=term --cov-report=html + ``` + +5. **Add a changelog fragment** + + For any user-facing changes, create a changelog fragment: + + ```bash + # Create a new changelog fragment (similar to `npx changeset` in JS) + scriv create + ``` + + This will create a new file in `changelog.d/`. Edit it to document your changes: + + ```markdown + ### Added + - Description of new feature + + ### Fixed + - Description of bug fix + ``` + + **Why fragments?** This prevents merge conflicts in CHANGELOG.md when multiple PRs are open simultaneously (same as Changesets in JavaScript). + +6. **Commit your changes** + + ```bash + git add . + git commit -m "feat: add new feature" + ``` + + Pre-commit hooks will automatically run and check your code. + +7. **Push and create a Pull Request** + + ```bash + git push origin feature/my-feature + ``` + + Then create a Pull Request on GitHub. 
+ +## Code Style Guidelines + +This project uses: + +- **Ruff** for linting and formatting (replaces black, isort, flake8) +- **mypy** for static type checking +- **pytest** for testing + +### Code Standards + +- Follow PEP 8 style guidelines +- Use type hints for all functions and methods +- Write docstrings for all public APIs (Google style) +- Keep functions under 50 lines when possible +- Keep files under 1000 lines +- Maintain test coverage above 80% + +### Docstring Format + +Use Google-style docstrings: + +```python +def example_function(arg1: str, arg2: int) -> bool: + """Brief description of the function. + + Longer description if needed. + + Args: + arg1: Description of arg1 + arg2: Description of arg2 + + Returns: + Description of return value + + Raises: + ValueError: Description of when this is raised + """ + pass +``` + +## Testing Guidelines + +- Write tests for all new features +- Maintain or improve test coverage +- Use descriptive test names +- Organize tests using classes when appropriate +- Use pytest fixtures for common setup + +Example test structure: + +```python +class TestMyFeature: + """Tests for my feature.""" + + def test_basic_functionality(self) -> None: + """Test basic functionality.""" + assert my_function() == expected_result + + def test_edge_case(self) -> None: + """Test edge case.""" + assert my_function(edge_case_input) == expected_result +``` + +## Pull Request Process + +1. Ensure all tests pass locally +2. Update documentation if needed +3. Add a changelog fragment with `scriv create` (see step 5 in Development Workflow) +4. Ensure the PR description clearly describes the changes +5. Link any related issues in the PR description +6. Wait for CI checks to pass +7. Address any review feedback + +## Changelog Management + +This project uses [Scriv](https://scriv.readthedocs.io/) for changelog management, which works similarly to [Changesets](https://github.com/changesets/changesets) in JavaScript projects. 
+ +### Creating a Fragment + +```bash +# Install scriv (included in dev dependencies) +pip install -e ".[dev]" + +# Create a new fragment +scriv create +``` + +### Fragment Categories + +Use these categories in your fragments: + +- **Added**: New features +- **Changed**: Changes to existing functionality +- **Deprecated**: Features that will be removed in future +- **Removed**: Features that were removed +- **Fixed**: Bug fixes +- **Security**: Security-related changes + +### During Release + +Fragments are automatically collected into CHANGELOG.md during the release process. The release workflow: + +1. Collects all fragments with `scriv collect` +2. Updates CHANGELOG.md with the new version entry +3. Removes processed fragment files +4. Bumps the version in pyproject.toml +5. Creates a git tag and GitHub release +6. Publishes to PyPI + +## Project Structure + +``` +. +├── .github/workflows/ # GitHub Actions CI/CD +├── changelog.d/ # Changelog fragments (like .changeset/) +│ ├── README.md # Fragment instructions +│ └── *.md # Individual changelog fragments +├── examples/ # Usage examples +├── scripts/ # Utility scripts +├── src/my_package/ # Source code +│ ├── __init__.py # Package entry point +│ └── py.typed # Type marker file +├── tests/ # Test files +├── .pre-commit-config.yaml # Pre-commit hooks +├── .ruff.toml # Ruff configuration +├── pyproject.toml # Project configuration +├── CHANGELOG.md # Project changelog +├── CONTRIBUTING.md # This file +└── README.md # Project README +``` + +## Release Process + +This project uses semantic versioning (MAJOR.MINOR.PATCH): + +- **MAJOR**: Breaking changes +- **MINOR**: New features (backward compatible) +- **PATCH**: Bug fixes (backward compatible) + +Releases are managed through GitHub releases and PyPI publishing is handled via GitHub Actions. 
+ +## Getting Help + +- Open an issue for bugs or feature requests +- Use discussions for questions and general help +- Check existing issues and PRs before creating new ones + +## Code of Conduct + +- Be respectful and inclusive +- Provide constructive feedback +- Focus on what is best for the community +- Show empathy towards other community members + +Thank you for contributing! diff --git a/README.md b/README.md index c217e10..646875d 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,314 @@ # python-ai-driven-development-pipeline-template -A template for AI driven development in Python + +A comprehensive template for AI-driven Python development with full CI/CD pipeline support. + +[![CI/CD Pipeline](https://github.com/link-foundation/python-ai-driven-development-pipeline-template/workflows/CI/CD%20Pipeline/badge.svg)](https://github.com/link-foundation/python-ai-driven-development-pipeline-template/actions) +[![Python Version](https://img.shields.io/badge/python-3.9%2B-blue.svg)](https://www.python.org/downloads/) +[![License: Unlicense](https://img.shields.io/badge/license-Unlicense-blue.svg)](http://unlicense.org/) + +## Features + +- **Multi-version Python support**: Works with Python 3.9-3.13 +- **Comprehensive testing**: pytest with async support and coverage reporting +- **Code quality**: Ruff (linting + formatting) + mypy (type checking) +- **Pre-commit hooks**: Automated code quality checks before commits +- **CI/CD pipeline**: GitHub Actions CI/CD with Python 3.13 +- **Changelog management**: Scriv for conflict-free changelog (like Changesets in JS) +- **Release automation**: Automatic PyPI publishing and GitHub releases + +## Quick Start + +### Using This Template + +1. Click "Use this template" on GitHub to create a new repository +2. Clone your new repository +3. Update `pyproject.toml` with your package name and description +4. Rename `src/my_package` to your package name +5. Update imports in tests and examples +6. 
Install dependencies and start developing! + +### Development Setup + +```bash +# Clone the repository +git clone https://github.com/link-foundation/python-ai-driven-development-pipeline-template.git +cd python-ai-driven-development-pipeline-template + +# Create a virtual environment +python -m venv .venv +source .venv/bin/activate # On Windows: .venv\Scripts\activate + +# Install in editable mode with development dependencies +pip install -e ".[dev]" + +# Install pre-commit hooks +pip install pre-commit +pre-commit install +``` + +### Running Tests + +```bash +# Run all tests +pytest + +# Run with coverage +pytest --cov=src --cov-report=term --cov-report=html + +# Run specific test file +pytest tests/test_my_package.py + +# Run with verbose output +pytest -v +``` + +### Code Quality Checks + +```bash +# Lint code (check for issues) +ruff check . + +# Format code +ruff format . + +# Type check +mypy src/ + +# Check file size limits +python scripts/check_file_size.py + +# Run all checks +ruff check . && ruff format --check . && mypy src/ && python scripts/check_file_size.py +``` + +## Project Structure + +``` +. 
+├── .github/ +│ └── workflows/ +│ ├── ci.yml # CI/CD pipeline configuration +│ └── release.yml # Release automation (PyPI + GitHub) +├── changelog.d/ # Changelog fragments (like .changeset/) +│ ├── README.md # Fragment instructions +│ └── *.md # Individual changelog entries +├── examples/ +│ └── basic_usage.py # Usage examples +├── scripts/ +│ ├── check_file_size.py # File size validation script +│ ├── bump_version.py # Version bumping utility +│ ├── version_and_commit.py # CI/CD version management +│ ├── publish_to_pypi.py # PyPI publishing script +│ └── create_github_release.py # GitHub release creation +├── src/ +│ └── my_package/ +│ ├── __init__.py # Package entry point +│ └── py.typed # Type marker file +├── tests/ +│ ├── __init__.py +│ └── test_my_package.py # Test suite +├── .gitignore # Git ignore patterns +├── .pre-commit-config.yaml # Pre-commit hooks configuration +├── .ruff.toml # Ruff additional configuration +├── pyproject.toml # Project configuration and dependencies +├── CHANGELOG.md # Project changelog +├── CONTRIBUTING.md # Contribution guidelines +├── LICENSE # Unlicense (public domain) +└── README.md # This file +``` + +## Design Choices + +### Package Management + +This template uses modern Python packaging standards: + +- **pyproject.toml**: Single source of truth for project configuration +- **hatchling**: Modern build backend (PEP 517) +- **src layout**: Prevents accidental imports from source directory +- **py.typed**: Marks package as type-hinted for mypy + +### Code Quality Tools + +- **Ruff**: Ultra-fast Python linter and formatter (replaces flake8, black, isort) + - Configured for strict code quality standards + - Integrates with pre-commit hooks + - Consistent formatting across the project + +- **mypy**: Static type checker + - Strict mode enabled for maximum type safety + - Ensures code correctness before runtime + +- **pytest**: Modern testing framework + - Support for async tests via pytest-asyncio + - Coverage reporting via 
pytest-cov + - Organized test structure with classes + +### Pre-commit Hooks + +Automated checks run before each commit: + +1. Basic checks (trailing whitespace, file endings, etc.) +2. Ruff linting and formatting +3. mypy type checking + +This ensures code quality is maintained throughout development. + +### Changelog Management (Scriv) + +This template uses [Scriv](https://scriv.readthedocs.io/) for changelog management, which works similarly to [Changesets](https://github.com/changesets/changesets) in JavaScript projects: + +- **Fragment-based**: Each PR adds a changelog fragment to `changelog.d/` +- **Conflict-free**: Multiple PRs can add fragments without merge conflicts +- **Auto-collection**: Fragments are automatically merged during release +- **Category-based**: Supports Added, Changed, Deprecated, Removed, Fixed, Security + +```bash +# Create a changelog fragment (similar to `npx changeset`) +scriv create + +# View pending fragments +ls changelog.d/*.md +``` + +### CI/CD Pipeline + +The GitHub Actions workflow provides: + +1. **Linting**: Ruff linting, formatting, and mypy type checking +2. **Changelog check**: Warns if PRs are missing changelog fragments +3. **Testing**: Python 3.13 test suite +4. **Building**: Package building and validation +5. **Coverage**: Automatic upload to Codecov + +### Release Automation + +The release workflow (`release.yml`) provides: + +1. **Integrated CI checks**: Runs lint, test, and build before any release +2. **Auto-release on push**: Detects version changes and publishes automatically +3. **Manual release**: Trigger releases via workflow_dispatch +4. **Fragment collection**: Automatically collects changelog fragments +5. **PyPI publishing**: OIDC trusted publishing (no tokens needed) +6. **GitHub releases**: Automatic creation with CHANGELOG content + +**Important**: All releases require passing CI checks (lint + test + build). No release will ever happen without passing tests, ensuring code quality and stability. 
+ +## Configuration + +### Updating Package Name + +After creating a repository from this template: + +1. Update `pyproject.toml`: + - Change `name` field + - Update `project.urls` + - Update `tool.hatch.build.targets.wheel.packages` + +2. Rename `src/my_package/` directory to your package name + +3. Update imports: + - `tests/test_my_package.py` + - `examples/basic_usage.py` + - `.ruff.toml` (known-first-party) + +### Ruff Configuration + +Customize Ruff in `pyproject.toml` under `[tool.ruff]`. Current configuration: + +- 88-character line length (Black-compatible) +- Comprehensive linting rules (E, W, F, I, N, UP, B, etc.) +- Strict equality enforcement +- Automatic import sorting + +### mypy Configuration + +Configured in `pyproject.toml` under `[tool.mypy]`: + +- Strict mode enabled +- No implicit optionals +- Warn on unused ignores +- Full type checking coverage + +### pytest Configuration + +Configured in `pyproject.toml` under `[tool.pytest.ini_options]`: + +- Test discovery in `tests/` directory +- Source path includes `src/` +- Strict marker enforcement +- Coverage configuration included + +## Scripts Reference + +| Script | Description | +| ------------------------------ | ---------------------------------------- | +| `pytest` | Run all tests | +| `pytest --cov=src` | Run tests with coverage | +| `ruff check .` | Lint code | +| `ruff format .` | Format code | +| `mypy src/` | Type check code | +| `python scripts/check_file_size.py` | Check file size limits | +| `pre-commit run --all-files` | Run all pre-commit hooks | +| `scriv create` | Create a changelog fragment | +| `scriv collect --version X.Y.Z`| Collect fragments into CHANGELOG.md | + +## Example Usage + +```python +from my_package import add, multiply, delay +import asyncio + +# Basic arithmetic +result = add(2, 3) # 5 +product = multiply(2, 3) # 6 + +# Async operations +async def main(): + await delay(1.0) # Wait for 1 second + +asyncio.run(main()) +``` + +See `examples/basic_usage.py` for more 
examples. + +## Contributing + +Contributions are welcome! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines. + +### Development Workflow + +1. Fork the repository +2. Create a feature branch: `git checkout -b feature/my-feature` +3. Make your changes and add tests +4. Run quality checks: `ruff check . && ruff format . && mypy src/ && pytest` +5. Commit your changes (pre-commit hooks will run automatically) +6. Push and create a Pull Request + +## Testing + +This project maintains high test coverage and uses pytest for testing: + +- Unit tests for all functions +- Async test support +- Coverage reporting +- Cross-platform compatibility testing + +## License + +[Unlicense](LICENSE) - Public Domain + +This is free and unencumbered software released into the public domain. See [LICENSE](LICENSE) for details. + +## Acknowledgments + +Inspired by [js-ai-driven-development-pipeline-template](https://github.com/link-foundation/js-ai-driven-development-pipeline-template). + +## Resources + +- [Python Packaging Guide](https://packaging.python.org/) +- [pytest Documentation](https://docs.pytest.org/) +- [Ruff Documentation](https://docs.astral.sh/ruff/) +- [mypy Documentation](https://mypy.readthedocs.io/) +- [Pre-commit Documentation](https://pre-commit.com/) +- [Scriv Documentation](https://scriv.readthedocs.io/) \ No newline at end of file diff --git a/changelog.d/20251218_133759_drakonard_issue_1_3b50e2f12be6.md b/changelog.d/20251218_133759_drakonard_issue_1_3b50e2f12be6.md new file mode 100644 index 0000000..2f15a70 --- /dev/null +++ b/changelog.d/20251218_133759_drakonard_issue_1_3b50e2f12be6.md @@ -0,0 +1,7 @@ +### Added + +- Scriv for changelog management (Python equivalent of Changesets) +- Fragment-based changelog workflow to prevent merge conflicts +- CI check for changelog fragments in pull requests +- Automated fragment collection in release workflow +- Documentation for changelog workflow in CONTRIBUTING.md and README.md diff --git 
a/changelog.d/README.md b/changelog.d/README.md new file mode 100644 index 0000000..fc68734 --- /dev/null +++ b/changelog.d/README.md @@ -0,0 +1,57 @@ +# Changelog Fragments + +This directory contains changelog fragments that will be collected into `CHANGELOG.md` during releases. + +## How to Add a Changelog Fragment + +When making changes that should be documented in the changelog, create a fragment file: + +```bash +# Create a new fragment (recommended - auto-generates filename with branch/timestamp) +scriv create + +# Or manually create a file matching the pattern: YYYYMMDD_HHMMSS_username.md +``` + +## Fragment Format + +Each fragment should contain relevant sections. Uncomment and fill in the appropriate sections: + +```markdown +### Added +- Description of new feature + +### Changed +- Description of change to existing functionality + +### Fixed +- Description of bug fix + +### Removed +- Description of removed feature + +### Deprecated +- Description of deprecated feature + +### Security +- Description of security fix +``` + +## Why Fragments? + +Using changelog fragments (similar to [Changesets](https://github.com/changesets/changesets) in JavaScript): + +1. **No merge conflicts**: Multiple PRs can add fragments without conflicts +2. **Per-PR documentation**: Each PR documents its own changes +3. **Automated collection**: Fragments are automatically collected during release +4. **Consistent format**: Template ensures consistent changelog entries + +## During Release + +Fragments are automatically collected into `CHANGELOG.md` by running: + +```bash +scriv collect --version X.Y.Z +``` + +This is handled automatically by the release workflow. 
diff --git a/changelog.d/fragment_template.md.j2 b/changelog.d/fragment_template.md.j2 new file mode 100644 index 0000000..62d6e5b --- /dev/null +++ b/changelog.d/fragment_template.md.j2 @@ -0,0 +1,31 @@ + diff --git a/examples/basic_usage.py b/examples/basic_usage.py new file mode 100644 index 0000000..68f7122 --- /dev/null +++ b/examples/basic_usage.py @@ -0,0 +1,39 @@ +"""Basic usage example for my-package. + +This example demonstrates the basic functionality of the package. +""" + +from __future__ import annotations + +import asyncio + +from my_package import add, delay, multiply + + +def main() -> None: + """Run basic examples.""" + # Example 1: Basic arithmetic + print("Example 1: Basic arithmetic") + print(f"2 + 3 = {add(2, 3)}") + print(f"2 * 3 = {multiply(2, 3)}") + print() + + # Example 2: Working with floats + print("Example 2: Working with floats") + print(f"2.5 + 3.5 = {add(2.5, 3.5)}") + print(f"2.5 * 2 = {multiply(2.5, 2)}") + print() + + +async def async_example() -> None: + """Run async examples.""" + print("Example 3: Async delay") + print("Waiting for 1 second...") + await delay(1.0) + print("Done!") + print() + + +if __name__ == "__main__": + main() + asyncio.run(async_example()) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ae38cf0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,159 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "my-package" +version = "0.1.0" +description = "A Python package template for AI-driven development" +readme = "README.md" +license = {text = "Unlicense"} +requires-python = ">=3.9" +authors = [ + {name = "Your Name", email = "your.email@example.com"}, +] +keywords = ["template", "python", "ai-driven"] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: The Unlicense (Unlicense)", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + 
"Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] + +[project.optional-dependencies] +dev = [ + "ruff>=0.8.0", + "mypy>=1.13.0", + "pytest>=8.3.0", + "pytest-asyncio>=0.24.0", + "pytest-cov>=6.0.0", + "pre-commit>=4.0.0", + "scriv[toml]>=1.7.0", +] + +[project.urls] +Homepage = "https://github.com/link-foundation/python-ai-driven-development-pipeline-template" +Repository = "https://github.com/link-foundation/python-ai-driven-development-pipeline-template" +Issues = "https://github.com/link-foundation/python-ai-driven-development-pipeline-template/issues" + +[tool.hatch.build.targets.wheel] +packages = ["src/my_package"] + +[tool.ruff] +line-length = 88 +target-version = "py39" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "DTZ", # flake8-datetimez + "T10", # flake8-debugger + "EM", # flake8-errmsg + "ISC", # flake8-implicit-str-concat + "ICN", # flake8-import-conventions + "PIE", # flake8-pie + "PT", # flake8-pytest-style + "Q", # flake8-quotes + "RSE", # flake8-raise + "RET", # flake8-return + "SIM", # flake8-simplify + "TID", # flake8-tidy-imports + "ARG", # flake8-unused-arguments + "PTH", # flake8-use-pathlib + "ERA", # eradicate + "PL", # pylint + "PERF", # perflint + "RUF", # ruff-specific rules +] +ignore = [ + "E501", # line too long (handled by formatter) + "PLR0913", # too many arguments + "PLR2004", # magic value comparison +] + +[tool.ruff.lint.per-file-ignores] +"tests/**/*.py" = [ + "S101", # allow assert in tests + "ARG", # allow unused arguments in tests + "PLR2004", # allow magic values in tests +] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +line-ending = "lf" +skip-magic-trailing-comma = false + +[tool.mypy] 
+python_version = "3.9"
+warn_return_any = true
+warn_unused_configs = true
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+check_untyped_defs = true
+no_implicit_optional = true
+warn_redundant_casts = true
+warn_unused_ignores = true
+warn_no_return = true
+strict_equality = true
+strict_concatenate = true
+
+[tool.pytest.ini_options]
+minversion = "7.0"
+addopts = "-ra -q --strict-markers"
+testpaths = ["tests"]
+pythonpath = ["src"]
+
+[tool.coverage.run]
+source = ["src"]
+branch = true
+
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "def __repr__",
+    "if __name__ == .__main__.:",
+    "raise AssertionError",
+    "raise NotImplementedError",
+    "if TYPE_CHECKING:",
+]
+
+# Scriv configuration for changelog management
+# Similar to @changesets/cli but for Python projects
+[tool.scriv]
+# Fragment format (markdown for better GitHub compatibility)
+format = "md"
+# Fragment directory
+fragment_directory = "changelog.d"
+# Changelog file
+output_file = "CHANGELOG.md"
+# Categories for changelog entries
+categories = [
+    "Removed",
+    "Added",
+    "Changed",
+    "Deprecated",
+    "Fixed",
+    "Security",
+]
+# Version header format
+entry_title_template = "## [{{ version }}] - {{ date.strftime('%Y-%m-%d') }}"
+# Insert marker (where new entries go)
+insert_marker = "<!-- scriv-insert-here -->"
+# Main branch name
+main_branches = ["main"]
+# New fragment template
+new_fragment_template = """file:changelog.d/fragment_template.md.j2
+"""
diff --git a/scripts/bump_version.py b/scripts/bump_version.py
new file mode 100755
index 0000000..6850af3
--- /dev/null
+++ b/scripts/bump_version.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python3
+"""
+Bump version in pyproject.toml and update CHANGELOG.md
+
+Usage:
+    python scripts/bump_version.py <major|minor|patch> [--description "..."]
+
+Examples:
+    python scripts/bump_version.py patch
+    python scripts/bump_version.py minor --description "Add new feature"
+    python scripts/bump_version.py major --description "Breaking changes"
+"""
+
+import argparse
+import re +import sys +from datetime import datetime +from pathlib import Path + + +def get_current_version(pyproject_path: Path) -> str: + """Extract current version from pyproject.toml.""" + content = pyproject_path.read_text() + match = re.search(r'^version\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE) + if not match: + raise ValueError("Could not find version in pyproject.toml") + return match.group(1) + + +def bump_version(current: str, bump_type: str) -> str: + """Bump version according to semantic versioning.""" + parts = current.split(".") + if len(parts) != 3: + raise ValueError(f"Invalid version format: {current}") + + major, minor, patch = map(int, parts) + + if bump_type == "major": + return f"{major + 1}.0.0" + elif bump_type == "minor": + return f"{major}.{minor + 1}.0" + elif bump_type == "patch": + return f"{major}.{minor}.{patch + 1}" + else: + raise ValueError(f"Invalid bump type: {bump_type}") + + +def update_pyproject(pyproject_path: Path, old_version: str, new_version: str) -> None: + """Update version in pyproject.toml.""" + content = pyproject_path.read_text() + pattern = rf'^(version\s*=\s*["\']){re.escape(old_version)}(["\'])' + new_content = re.sub( + pattern, rf"\g<1>{new_version}\g<2>", content, flags=re.MULTILINE + ) + + if content == new_content: + raise ValueError( + f"Failed to update version from {old_version} to {new_version}" + ) + + pyproject_path.write_text(new_content) + print(f"✓ Updated pyproject.toml: {old_version} → {new_version}") + + +def update_changelog( + changelog_path: Path, version: str, bump_type: str, description: str +) -> None: + """Update CHANGELOG.md with new version entry.""" + if not changelog_path.exists(): + print(f"Warning: {changelog_path} not found, skipping changelog update") + return + + content = changelog_path.read_text() + today = datetime.now().strftime("%Y-%m-%d") + + # Create new entry + bump_type_title = bump_type.capitalize() + new_entry = f"""## {version} - {today} + +### 
{bump_type_title} Changes + +- {description} + +""" + + # Find insertion point (after first heading, before first version section) + match = re.search(r"^## ", content, re.MULTILINE) + + if match: + # Insert before first version section + insert_pos = match.start() + new_content = content[:insert_pos] + new_entry + content[insert_pos:] + else: + # If no version sections, insert after main heading + main_heading_match = re.search(r"^# .+$", content, re.MULTILINE) + if main_heading_match: + insert_pos = main_heading_match.end() + new_content = ( + content[:insert_pos] + "\n\n" + new_entry + content[insert_pos:] + ) + else: + # Prepend if no headings at all + new_content = new_entry + "\n" + content + + changelog_path.write_text(new_content) + print(f"✓ Updated {changelog_path.name}") + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Bump version and update changelog", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument( + "bump_type", + choices=["major", "minor", "patch"], + help="Type of version bump", + ) + parser.add_argument( + "--description", + "-d", + default="", + help="Description of changes for changelog", + ) + + args = parser.parse_args() + + # Determine project root and files + script_dir = Path(__file__).parent + project_root = script_dir.parent + pyproject_path = project_root / "pyproject.toml" + changelog_path = project_root / "CHANGELOG.md" + + if not pyproject_path.exists(): + print(f"Error: {pyproject_path} not found", file=sys.stderr) + return 1 + + try: + # Get current version + old_version = get_current_version(pyproject_path) + print(f"Current version: {old_version}") + + # Calculate new version + new_version = bump_version(old_version, args.bump_type) + print(f"New version: {new_version}") + + # Update files + update_pyproject(pyproject_path, old_version, new_version) + + description = args.description or f"Manual {args.bump_type} release" + 
+        update_changelog(changelog_path, new_version, args.bump_type, description)
+
+        print(f"\n✅ Version bump complete: {old_version} → {new_version}")
+        print("\nNext steps:")
+        print("  1. Review changes: git diff")
+        print(
+            f"  2. Commit: git add . && git commit -m 'chore: bump version to {new_version}'"
+        )
+        print(f"  3. Tag: git tag v{new_version}")
+        print("  4. Push: git push && git push --tags")
+
+        return 0
+
+    except Exception as e:
+        print(f"Error: {e}", file=sys.stderr)
+        return 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/scripts/check_file_size.py b/scripts/check_file_size.py
new file mode 100755
index 0000000..e90ac79
--- /dev/null
+++ b/scripts/check_file_size.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+"""Check for files exceeding the maximum allowed line count.
+
+Exits with error code 1 if any files exceed the limit.
+"""
+
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+
+MAX_LINES = 1000
+FILE_EXTENSIONS = [".py"]
+EXCLUDE_PATTERNS = [
+    "node_modules",
+    ".venv",
+    "venv",
+    "env",
+    "__pycache__",
+    ".git",
+    "build",
+    "dist",
+    ".eggs",
+    "*.egg-info",
+]
+
+
+def should_exclude(path: Path, exclude_patterns: list[str]) -> bool:
+    """Check if a path should be excluded.
+
+    Args:
+        path: Path to check
+        exclude_patterns: List of patterns to exclude
+
+    Returns:
+        True if path should be excluded
+    """
+    path_str = str(path)
+    return any(pattern in path_str for pattern in exclude_patterns)
+
+
+def find_python_files(directory: Path, exclude_patterns: list[str]) -> list[Path]:
+    """Recursively find all Python files in a directory.
+ + Args: + directory: Directory to search + exclude_patterns: Patterns to exclude + + Returns: + List of file paths + """ + files = [] + for path in directory.rglob("*"): + if should_exclude(path, exclude_patterns): + continue + if path.is_file() and path.suffix in FILE_EXTENSIONS: + files.append(path) + return files + + +def count_lines(file_path: Path) -> int: + """Count lines in a file. + + Args: + file_path: Path to the file + + Returns: + Number of lines + """ + return len(file_path.read_text(encoding="utf-8").split("\n")) + + +def main() -> None: + """Main function.""" + cwd = Path.cwd() + print(f"\nChecking Python files for maximum {MAX_LINES} lines...\n") + + files = find_python_files(cwd, EXCLUDE_PATTERNS) + violations = [] + + for file in files: + line_count = count_lines(file) + if line_count > MAX_LINES: + violations.append({"file": file.relative_to(cwd), "lines": line_count}) + + if not violations: + print("✓ All files are within the line limit\n") + sys.exit(0) + else: + print("✗ Found files exceeding the line limit:\n") + for violation in violations: + print( + f" {violation['file']}: {violation['lines']} lines " + f"(exceeds {MAX_LINES})" + ) + print(f"\nPlease refactor these files to be under {MAX_LINES} lines\n") + sys.exit(1) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + if "DEBUG" in sys.modules: + import traceback + + traceback.print_exc() + sys.exit(1) diff --git a/scripts/create_github_release.py b/scripts/create_github_release.py new file mode 100755 index 0000000..6d62d3a --- /dev/null +++ b/scripts/create_github_release.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +""" +Create a GitHub release from CHANGELOG.md content. 
+ +Usage: + python scripts/create_github_release.py --version VERSION --repository REPO + +Example: + python scripts/create_github_release.py --version 1.2.3 --repository owner/repo + +Environment variables: + GH_TOKEN or GITHUB_TOKEN: GitHub token for authentication +""" + +import argparse +import os +import re +import subprocess +import sys +from pathlib import Path + + +def run_command(cmd: list[str], check: bool = True) -> subprocess.CompletedProcess: + """Run a command and handle errors.""" + print(f"Running: {' '.join(cmd)}") + result = subprocess.run(cmd, capture_output=True, text=True, check=False) + + if result.stdout: + print(result.stdout) + if result.stderr and result.returncode != 0: + print(result.stderr, file=sys.stderr) + + if check and result.returncode != 0: + print( + f"Error: Command failed with exit code {result.returncode}", + file=sys.stderr, + ) + sys.exit(result.returncode) + + return result + + +def extract_changelog_entry(changelog_path: Path, version: str) -> str: + """Extract the changelog entry for a specific version.""" + if not changelog_path.exists(): + print(f"Warning: {changelog_path} not found", file=sys.stderr) + return f"Release {version}" + + content = changelog_path.read_text() + + # Look for version section (e.g., "## 1.2.3" or "## 1.2.3 - 2024-01-15") + version_pattern = rf"^## {re.escape(version)}(\s|$)" + match = re.search(version_pattern, content, re.MULTILINE) + + if not match: + print( + f"Warning: Version {version} not found in {changelog_path}", + file=sys.stderr, + ) + return f"Release {version}" + + # Extract content until next version section or end of file + start = match.end() + next_version = re.search(r"^## \d+\.\d+\.\d+", content[start:], re.MULTILINE) + + if next_version: + entry = content[start : start + next_version.start()].strip() + else: + entry = content[start:].strip() + + return entry if entry else f"Release {version}" + + +def create_release( + version: str, repository: str, release_notes: str, 
prerelease: bool = False +) -> None: + """Create a GitHub release using gh CLI.""" + tag = f"v{version}" + + print(f"\nCreating GitHub release for {tag}...") + print(f"Repository: {repository}") + print(f"Prerelease: {prerelease}") + print(f"\nRelease notes:\n{release_notes}\n") + + cmd = [ + "gh", + "release", + "create", + tag, + "--repo", + repository, + "--title", + tag, + "--notes", + release_notes, + ] + + if prerelease: + cmd.append("--prerelease") + + run_command(cmd) + print(f"\n✅ GitHub release {tag} created successfully!") + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Create GitHub release from CHANGELOG.md", + ) + parser.add_argument( + "--version", + "-v", + required=True, + help="Version to release (e.g., 1.2.3)", + ) + parser.add_argument( + "--repository", + "-r", + required=True, + help="GitHub repository (owner/repo)", + ) + parser.add_argument( + "--prerelease", + action="store_true", + help="Mark as prerelease", + ) + + args = parser.parse_args() + + # Check for GitHub token + if not os.environ.get("GH_TOKEN") and not os.environ.get("GITHUB_TOKEN"): + print( + "Error: GH_TOKEN or GITHUB_TOKEN environment variable required", + file=sys.stderr, + ) + return 1 + + # Check if gh CLI is available + result = run_command(["gh", "--version"], check=False) + if result.returncode != 0: + print( + "Error: gh CLI not found. 
Install from https://cli.github.com/", + file=sys.stderr, + ) + return 1 + + # Determine project root + script_dir = Path(__file__).parent + project_root = script_dir.parent + changelog_path = project_root / "CHANGELOG.md" + + try: + # Extract changelog entry + release_notes = extract_changelog_entry(changelog_path, args.version) + + # Create release + create_release(args.version, args.repository, release_notes, args.prerelease) + + return 0 + + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/create_manual_changeset.py b/scripts/create_manual_changeset.py new file mode 100644 index 0000000..083a88d --- /dev/null +++ b/scripts/create_manual_changeset.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python3 +""" +Create a manual changelog fragment for releases. + +This script is the Python equivalent of create-manual-changeset.mjs from the JS template. +It creates a changelog fragment in the changelog.d/ directory for documenting changes. + +Usage: + python scripts/create_manual_changeset.py [--description "..."] + +Examples: + python scripts/create_manual_changeset.py patch + python scripts/create_manual_changeset.py minor --description "Add new feature" + python scripts/create_manual_changeset.py major --description "Breaking changes" + +Note: This wraps 'scriv create' but can also create fragments manually if scriv +is not installed. 
+""" + +import argparse +import os +import re +import shutil +import subprocess +import sys +from datetime import datetime +from pathlib import Path + + +def get_branch_name() -> str: + """Get current git branch name.""" + try: + result = subprocess.run( + ["git", "branch", "--show-current"], + capture_output=True, + text=True, + check=True, + ) + return result.stdout.strip() + except (subprocess.CalledProcessError, FileNotFoundError): + return "manual" + + +def get_username() -> str: + """Get current user name for fragment filename.""" + # Try git user.name first + try: + result = subprocess.run( + ["git", "config", "user.name"], + capture_output=True, + text=True, + check=True, + ) + username = result.stdout.strip() + if username: + # Sanitize username for filename + return re.sub(r"[^a-zA-Z0-9_-]", "_", username).lower() + except (subprocess.CalledProcessError, FileNotFoundError): + pass + + # Fall back to environment variable or default + return os.environ.get("USER", os.environ.get("USERNAME", "user")).lower() + + +def has_scriv() -> bool: + """Check if scriv is installed.""" + return shutil.which("scriv") is not None + + +def create_with_scriv(bump_type: str, description: str) -> int: + """Create fragment using scriv create command.""" + print("Using scriv to create changelog fragment...") + + try: + result = subprocess.run( + ["scriv", "create"], + capture_output=True, + text=True, + check=False, + ) + + if result.returncode != 0: + print(f"Warning: scriv create returned non-zero: {result.stderr}") + return result.returncode + + print(result.stdout) + + # Find the created fragment + changelog_dir = Path("changelog.d") + if changelog_dir.exists(): + fragments = sorted( + [ + f + for f in changelog_dir.glob("*.md") + if f.name != "README.md" and not f.name.endswith(".j2") + ], + key=lambda x: x.stat().st_mtime, + reverse=True, + ) + + if fragments: + fragment_path = fragments[0] + print(f"\nCreated fragment: {fragment_path}") + print("\nPlease edit the 
fragment file to document your changes.") + + if description: + # Update fragment with provided description + bump_category = { + "major": "Changed", # Major = breaking changes + "minor": "Added", # Minor = new features + "patch": "Fixed", # Patch = bug fixes + }.get(bump_type, "Changed") + + # Add the description under the appropriate category + new_content = f"### {bump_category}\n\n- {description}\n" + fragment_path.write_text(new_content) + print(f"Updated fragment with {bump_type} change: {description}") + + return 0 + + except FileNotFoundError: + print("Error: scriv command not found") + return 1 + + +def create_manual_fragment( + changelog_dir: Path, bump_type: str, description: str +) -> int: + """Create a changelog fragment manually without scriv.""" + # Generate filename + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + username = get_username() + branch = get_branch_name() + + # Sanitize branch name + safe_branch = re.sub(r"[^a-zA-Z0-9_-]", "_", branch) + + filename = f"{timestamp}_{username}_{safe_branch}.md" + fragment_path = changelog_dir / filename + + # Determine category based on bump type + bump_category = { + "major": "Changed", # Major = breaking changes + "minor": "Added", # Minor = new features + "patch": "Fixed", # Patch = bug fixes + }.get(bump_type, "Changed") + + # Create fragment content + if description: + content = f"### {bump_category}\n\n- {description}\n" + else: + content = """ + +### Added + +- New feature description + +### Changed + +- Change to existing functionality + +### Fixed + +- Bug fix description + + +""" + + fragment_path.write_text(content) + print(f"Created changelog fragment: {fragment_path}") + + if not description: + print("\nPlease edit the fragment file to document your changes.") + print(f" File: {fragment_path}") + + return 0 + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Create a changelog fragment for release documentation", + 
formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +This script creates a changelog fragment in changelog.d/ to document changes. +It's the Python equivalent of 'npx changeset' in JavaScript projects. + +The fragment will be collected into CHANGELOG.md during release. + """, + ) + parser.add_argument( + "bump_type", + choices=["major", "minor", "patch"], + help="Type of version bump (determines default category)", + ) + parser.add_argument( + "--description", + "-d", + default="", + help="Description of changes (optional, can edit file later)", + ) + parser.add_argument( + "--no-scriv", + action="store_true", + help="Create fragment manually without using scriv", + ) + + args = parser.parse_args() + + # Determine project root and changelog directory + script_dir = Path(__file__).parent + project_root = script_dir.parent + changelog_dir = project_root / "changelog.d" + + # Ensure changelog directory exists + if not changelog_dir.exists(): + changelog_dir.mkdir(parents=True) + print(f"Created directory: {changelog_dir}") + + # Use scriv if available, unless --no-scriv is specified + if has_scriv() and not args.no_scriv: + return create_with_scriv(args.bump_type, args.description) + else: + if not args.no_scriv: + print("Note: scriv not found, creating fragment manually") + print( + "Install scriv for better fragment management: pip install scriv[toml]" + ) + print() + return create_manual_fragment(changelog_dir, args.bump_type, args.description) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/format_release_notes.py b/scripts/format_release_notes.py new file mode 100644 index 0000000..e965087 --- /dev/null +++ b/scripts/format_release_notes.py @@ -0,0 +1,254 @@ +#!/usr/bin/env python3 +""" +Format GitHub release notes with enhanced information. + +This script is the Python equivalent of format-release-notes.mjs from the JS template. 
+It enhances GitHub release notes with: +- PyPI version badge +- Link to associated pull request +- Clean formatting + +Usage: + python scripts/format_release_notes.py --release-id --version \\ + --repository [--commit-sha ] + +Example: + python scripts/format_release_notes.py --release-id 12345 --version 1.0.0 \\ + --repository link-foundation/my-package --commit-sha abc123 +""" + +from __future__ import annotations + +import argparse +import re +import subprocess +import sys +from typing import Optional + + +def run_gh_command(args: list[str]) -> tuple[bool, str]: + """Run a gh CLI command and return (success, output).""" + try: + result = subprocess.run( + ["gh"] + args, + capture_output=True, + text=True, + check=False, + ) + if result.returncode != 0: + return False, result.stderr + return True, result.stdout + except FileNotFoundError: + return False, "gh CLI not found. Install from https://cli.github.com/" + + +def get_release_body(repository: str, release_id: str) -> tuple[bool, str]: + """Get the body of a GitHub release.""" + success, output = run_gh_command( + ["api", f"repos/{repository}/releases/{release_id}", "--jq", ".body"] + ) + return success, output.strip() if success else output + + +def find_pr_for_commit(repository: str, commit_sha: str) -> Optional[str]: + """Find the pull request that contains a specific commit.""" + if not commit_sha: + return None + + success, output = run_gh_command( + [ + "api", + f"repos/{repository}/commits/{commit_sha}/pulls", + "--jq", + ".[0].number", + ] + ) + + if success and output.strip(): + try: + pr_number = int(output.strip()) + return str(pr_number) + except ValueError: + pass + + return None + + +def format_release_body( + body: str, + version: str, + repository: str, + pr_number: Optional[str], + package_name: str, +) -> str: + """Format the release body with enhanced information.""" + # Check if already formatted (has PyPI badge) + if "pypi.org/project" in body.lower() or "img.shields.io" in body.lower(): 
+ print("Release notes already formatted, skipping") + return body + + formatted_parts = [] + + # Add PyPI badge + pypi_badge = ( + f"[![PyPI version](https://img.shields.io/pypi/v/{package_name}.svg)]" + f"(https://pypi.org/project/{package_name}/)" + ) + formatted_parts.append(pypi_badge) + formatted_parts.append("") + + # Add PR link if available + if pr_number: + pr_link = f"**Pull Request:** [#{pr_number}](https://github.com/{repository}/pull/{pr_number})" + formatted_parts.append(pr_link) + formatted_parts.append("") + + # Clean up the existing body + cleaned_body = body.strip() + + # Fix escaped newlines and special characters + cleaned_body = cleaned_body.replace("\\n", "\n") + cleaned_body = cleaned_body.replace("\\r", "") + cleaned_body = cleaned_body.replace('\\"', '"') + + # Remove duplicate version headers if present + version_pattern = rf"^#+\s*v?{re.escape(version)}\s*$" + cleaned_body = re.sub(version_pattern, "", cleaned_body, flags=re.MULTILINE) + + # Clean up excessive whitespace + cleaned_body = re.sub(r"\n{3,}", "\n\n", cleaned_body) + cleaned_body = cleaned_body.strip() + + if cleaned_body: + formatted_parts.append(cleaned_body) + + return "\n".join(formatted_parts) + + +def update_release(repository: str, release_id: str, new_body: str) -> bool: + """Update the release body on GitHub.""" + # Use gh api to update the release + success, output = run_gh_command( + [ + "api", + "-X", + "PATCH", + f"repos/{repository}/releases/{release_id}", + "-f", + f"body={new_body}", + ] + ) + + if not success: + print(f"Error updating release: {output}", file=sys.stderr) + return False + + return True + + +def get_package_name() -> str: + """Get the package name from pyproject.toml.""" + try: + with open("pyproject.toml") as f: + content = f.read() + match = re.search(r'^name\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE) + if match: + return match.group(1) + except FileNotFoundError: + pass + + return "my-package" + + +def main() -> int: + """Main entry 
point.""" + parser = argparse.ArgumentParser( + description="Format GitHub release notes with enhanced information", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument( + "--release-id", + required=True, + help="GitHub release ID", + ) + parser.add_argument( + "--version", + required=True, + help="Release version (e.g., 1.0.0)", + ) + parser.add_argument( + "--repository", + required=True, + help="Repository in owner/repo format", + ) + parser.add_argument( + "--commit-sha", + default="", + help="Commit SHA to find associated PR", + ) + parser.add_argument( + "--package-name", + default="", + help="Package name for PyPI badge (auto-detected from pyproject.toml if not provided)", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Print formatted notes without updating release", + ) + + args = parser.parse_args() + + # Get package name + package_name = args.package_name or get_package_name() + print(f"Package name: {package_name}") + + # Get current release body + print(f"Fetching release {args.release_id}...") + success, body = get_release_body(args.repository, args.release_id) + if not success: + print(f"Error fetching release: {body}", file=sys.stderr) + return 1 + + print(f"Current body length: {len(body)} characters") + + # Find associated PR + pr_number = None + if args.commit_sha: + print(f"Looking for PR associated with commit {args.commit_sha}...") + pr_number = find_pr_for_commit(args.repository, args.commit_sha) + if pr_number: + print(f"Found PR: #{pr_number}") + else: + print("No associated PR found") + + # Format the release body + formatted_body = format_release_body( + body, + args.version, + args.repository, + pr_number, + package_name, + ) + + if args.dry_run: + print("\n--- Formatted Release Notes ---") + print(formatted_body) + print("--- End ---\n") + return 0 + + # Update release + if formatted_body != body: + print("Updating release notes...") + if update_release(args.repository, 
args.release_id, formatted_body): + print("Release notes updated successfully!") + return 0 + else: + return 1 + else: + print("No changes needed") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/publish_to_pypi.py b/scripts/publish_to_pypi.py new file mode 100755 index 0000000..641bede --- /dev/null +++ b/scripts/publish_to_pypi.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 +""" +Build and publish package to PyPI using trusted publishing (OIDC). + +This script: +1. Cleans previous build artifacts +2. Builds the package using hatchling +3. Validates the built distribution +4. Publishes to PyPI using OIDC (no token needed in CI) + +Usage: + python scripts/publish_to_pypi.py [--dry-run] + +Note: In GitHub Actions, this uses OIDC trusted publishing. + For local testing, use --dry-run or set TWINE_USERNAME/TWINE_PASSWORD. +""" + +import argparse +import shutil +import subprocess +import sys +from pathlib import Path + + +def run_command(cmd: list[str], check: bool = True) -> subprocess.CompletedProcess: + """Run a command and handle errors.""" + print(f"Running: {' '.join(cmd)}") + result = subprocess.run(cmd, capture_output=True, text=True, check=False) + + if result.stdout: + print(result.stdout) + if result.stderr: + print(result.stderr, file=sys.stderr) + + if check and result.returncode != 0: + print( + f"Error: Command failed with exit code {result.returncode}", file=sys.stderr + ) + sys.exit(result.returncode) + + return result + + +def clean_build_artifacts(project_root: Path) -> None: + """Remove previous build artifacts.""" + print("Cleaning build artifacts...") + dirs_to_remove = ["dist", "build", "*.egg-info"] + + for pattern in dirs_to_remove: + if "*" in pattern: + for path in project_root.glob(pattern): + if path.is_dir(): + shutil.rmtree(path) + print(f" Removed: {path}") + else: + path = project_root / pattern + if path.exists(): + shutil.rmtree(path) + print(f" Removed: {path}") + + +def build_package(project_root: 
Path) -> None: + """Build the package using python -m build.""" + print("\nBuilding package...") + run_command([sys.executable, "-m", "build", str(project_root)]) + + +def check_package(dist_dir: Path) -> None: + """Validate the built package using twine.""" + print("\nValidating package...") + dist_files = list(dist_dir.glob("*")) + + if not dist_files: + print("Error: No distribution files found in dist/", file=sys.stderr) + sys.exit(1) + + print(f"Found {len(dist_files)} distribution file(s):") + for file in dist_files: + print(f" - {file.name}") + + run_command([sys.executable, "-m", "twine", "check"] + [str(f) for f in dist_files]) + + +def publish_package(dist_dir: Path, dry_run: bool = False) -> None: + """Publish package to PyPI.""" + dist_files = list(dist_dir.glob("*")) + + if not dist_files: + print("Error: No distribution files found in dist/", file=sys.stderr) + sys.exit(1) + + if dry_run: + print("\n[DRY RUN] Would publish the following files:") + for file in dist_files: + print(f" - {file.name}") + print("\nSkipping actual upload (dry run mode)") + return + + print("\nPublishing to PyPI...") + + # Use twine upload with OIDC if in CI, otherwise use credentials + cmd = [sys.executable, "-m", "twine", "upload"] + cmd.extend([str(f) for f in dist_files]) + + run_command(cmd) + print("\n✅ Package published successfully!") + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Build and publish package to PyPI", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Build and validate but don't publish", + ) + + args = parser.parse_args() + + # Determine project root + script_dir = Path(__file__).parent + project_root = script_dir.parent + dist_dir = project_root / "dist" + + try: + # Ensure required tools are available + for tool in ["build", "twine"]: + result = run_command( + [sys.executable, "-m", tool, "--version"], + check=False, + ) + if result.returncode != 0: + print( + f"Error: 
{tool} is not installed. Install with: pip install {tool}", + file=sys.stderr, + ) + return 1 + + # Clean, build, check + clean_build_artifacts(project_root) + build_package(project_root) + check_package(dist_dir) + + # Publish (unless dry run) + publish_package(dist_dir, dry_run=args.dry_run) + + return 0 + + except Exception as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/validate_changeset.py b/scripts/validate_changeset.py new file mode 100644 index 0000000..c968488 --- /dev/null +++ b/scripts/validate_changeset.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python3 +""" +Validate that PRs contain proper changelog fragments. + +This script is the Python equivalent of validate-changeset.mjs from the JS template. +It ensures that pull requests include changelog documentation. + +Usage: + python scripts/validate_changeset.py + +Exit codes: + 0 - Validation passed (fragment found or no source changes) + 1 - Validation failed (source changes without fragment) + +Example CI usage: + - name: Validate changelog fragment + run: python scripts/validate_changeset.py +""" + +import re +import sys +from pathlib import Path + + +def get_fragment_files(changelog_dir: Path) -> list[Path]: + """Get list of changelog fragment files (excluding README and template).""" + if not changelog_dir.exists(): + return [] + + return [ + f + for f in changelog_dir.glob("*.md") + if f.name != "README.md" and not f.name.endswith(".j2") + ] + + +def validate_fragment_content(fragment_path: Path) -> tuple[bool, str]: + """ + Validate that a fragment has proper content. + + Returns (is_valid, error_message). 
+    """
+    content = fragment_path.read_text().strip()
+
+    if not content:
+        return False, f"Fragment {fragment_path.name} is empty"
+
+    # Check for at least one category heading
+    category_pattern = re.compile(
+        r"^###\s*(Added|Changed|Deprecated|Fixed|Removed|Security)",
+        re.MULTILINE | re.IGNORECASE,
+    )
+
+    if not category_pattern.search(content):
+        return False, (
+            f"Fragment {fragment_path.name} missing category heading.\n"
+            "Expected one of: ### Added, ### Changed, ### Deprecated, "
+            "### Fixed, ### Removed, ### Security"
+        )
+
+    # Check for actual content (not just commented template)
+    # Remove HTML comments
+    content_without_comments = re.sub(r"<!--.*?-->", "", content, flags=re.DOTALL)
+    # Check if there's meaningful content after headings
+    lines = [
+        line.strip()
+        for line in content_without_comments.split("\n")
+        if line.strip() and not line.strip().startswith("#")
+    ]
+
+    if not lines:
+        return False, (
+            f"Fragment {fragment_path.name} has no content.\n"
+            "Please add a description of your changes under the appropriate category."
+ ) + + return True, "" + + +def main() -> int: + """Main entry point.""" + # Determine project root and changelog directory + script_dir = Path(__file__).parent + project_root = script_dir.parent + changelog_dir = project_root / "changelog.d" + + print("Validating changelog fragments...") + print() + + # Get fragment files + fragments = get_fragment_files(changelog_dir) + fragment_count = len(fragments) + + print(f"Found {fragment_count} changelog fragment(s)") + + if fragment_count == 0: + print() + print("WARNING: No changelog fragment found!") + print() + print("To document your changes, create a changelog fragment:") + print() + print(" # Using scriv (recommended):") + print(" pip install 'scriv[toml]'") + print(" scriv create") + print() + print(" # Or using the helper script:") + print( + " python scripts/create_manual_changeset.py patch --description 'Your changes'" + ) + print() + print("See changelog.d/README.md for more information.") + print() + + # This is currently a warning, not a failure + # Change to "return 1" to make it required + return 0 + + if fragment_count > 1: + print() + print( + f"WARNING: Found {fragment_count} fragments. Usually PRs should have only one." 
+ ) + print("Fragments found:") + for f in fragments: + print(f" - {f.name}") + print() + + # Validate each fragment + all_valid = True + for fragment in fragments: + is_valid, error = validate_fragment_content(fragment) + if is_valid: + print(f" [OK] {fragment.name}") + else: + print(f" [FAIL] {error}") + all_valid = False + + print() + + if all_valid: + print("Changelog validation passed!") + return 0 + else: + print("Changelog validation FAILED!") + print() + print("Expected fragment format:") + print() + print(" ### Added") + print(" - Description of new feature") + print() + print(" ### Changed") + print(" - Description of change") + print() + print(" ### Fixed") + print(" - Description of bug fix") + print() + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/version_and_commit.py b/scripts/version_and_commit.py new file mode 100755 index 0000000..5080e82 --- /dev/null +++ b/scripts/version_and_commit.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python3 +""" +Version packages and commit to main branch. + +This script handles version bumping and committing for CI/CD workflows. +It supports idempotent re-runs and detects when work was already completed. 
import argparse
import os
import re
import subprocess
import sys
from pathlib import Path


def run_command(
    cmd: list[str], check: bool = True, capture: bool = False
) -> subprocess.CompletedProcess:
    """Run *cmd*, echoing it first; optionally capture its output.

    When *check* is true and the command fails, diagnostics are printed
    and this process exits with the command's return code.
    """
    print(f"Running: {' '.join(cmd)}")

    proc = subprocess.run(
        cmd,
        capture_output=capture,
        text=True,
        check=False,
    )

    if not capture:
        # Forward whatever the CompletedProcess carries (it holds output
        # only when capture_output was requested).
        if proc.stdout:
            print(proc.stdout)
        if proc.stderr:
            print(proc.stderr, file=sys.stderr)

    if check and proc.returncode != 0:
        if capture:
            print(proc.stdout)
            print(proc.stderr, file=sys.stderr)
        print(
            f"Error: Command failed with exit code {proc.returncode}",
            file=sys.stderr,
        )
        sys.exit(proc.returncode)

    return proc


def set_github_output(key: str, value: str) -> None:
    """Append a ``key=value`` line to the GitHub Actions output file.

    No-op when GITHUB_OUTPUT is not set (e.g. when run locally).
    """
    path = os.environ.get("GITHUB_OUTPUT")
    if not path:
        return
    with open(path, "a") as fh:
        fh.write(f"{key}={value}\n")
    print(f"Set output: {key}={value}")


def get_current_version(pyproject_path: Path) -> str:
    """Return the ``version = "..."`` value from pyproject.toml.

    Raises ValueError when no version assignment is found.
    """
    pattern = re.compile(r'^version\s*=\s*["\']([^"\']+)["\']', re.MULTILINE)
    found = pattern.search(pyproject_path.read_text())
    if found is None:
        raise ValueError("Could not find version in pyproject.toml")
    return found.group(1)


def configure_git() -> None:
    """Point git's commit identity at the GitHub Actions bot."""
    print("Configuring git...")
    for key, value in (
        ("user.name", "github-actions[bot]"),
        ("user.email", "github-actions[bot]@users.noreply.github.com"),
    ):
        run_command(["git", "config", key, value])
def check_remote_changes(pyproject_path: Path) -> tuple[bool, str]:
    """
    Check if remote main has advanced (handles re-runs).

    A workflow re-run after a partial failure may start from a commit that
    origin/main has already moved past; this detects that case so the version
    bump is not applied twice.

    Returns (already_released, remote_version) — remote_version is "" when
    the remote state could not be compared.
    """
    print("\nChecking for remote changes...")
    run_command(["git", "fetch", "origin", "main"])

    # Get commit SHAs for local HEAD and the freshly-fetched origin/main.
    local_head = run_command(
        ["git", "rev-parse", "HEAD"],
        capture=True,
    ).stdout.strip()

    remote_head = run_command(
        ["git", "rev-parse", "origin/main"],
        capture=True,
    ).stdout.strip()

    if local_head != remote_head:
        print(f"Remote main has advanced (local: {local_head}, remote: {remote_head})")
        print("This may indicate a previous attempt partially succeeded.")

        # Get remote version straight from origin/main's pyproject.toml
        # without checking it out.
        remote_content = run_command(
            ["git", "show", "origin/main:pyproject.toml"],
            capture=True,
        ).stdout

        remote_match = re.search(
            r'^version\s*=\s*["\']([^"\']+)["\']',
            remote_content,
            re.MULTILINE,
        )
        if remote_match:
            remote_version = remote_match.group(1)
            print(f"Remote version: {remote_version}")

            # Check if versions differ (indicating work was done).
            local_version = get_current_version(pyproject_path)
            if local_version != remote_version:
                # Remote moved but did not bump the version: catch up and
                # let the caller proceed with the bump.
                print("Local and remote versions differ, rebasing...")
                run_command(["git", "rebase", "origin/main"])
                return False, remote_version
            else:
                # Same version on both sides: the previous run already
                # committed the bump, so there is nothing left to do.
                print("Versions match, assuming previous run completed successfully")
                return True, remote_version

    return False, ""


def main() -> int:
    """Main entry point: parse args, bump the version, commit and push.

    Emits GitHub Actions outputs (new_version, version_committed,
    already_released) so downstream jobs can react.
    """
    parser = argparse.ArgumentParser(
        description="Version bump and commit for CI/CD",
    )
    parser.add_argument(
        "--bump-type",
        choices=["major", "minor", "patch"],
        required=True,
        help="Type of version bump",
    )
    parser.add_argument(
        "--description",
        default="",
        help="Description for changelog",
    )

    args = parser.parse_args()

    # Determine project root (scripts/ lives directly under it).
    script_dir = Path(__file__).parent
    project_root = script_dir.parent
    pyproject_path = project_root / "pyproject.toml"

    if not pyproject_path.exists():
        print(f"Error: {pyproject_path} not found", file=sys.stderr)
        return 1

    try:
        # Configure git
        configure_git()

        # Check for remote changes (idempotent re-run support).
        already_released, remote_version = check_remote_changes(pyproject_path)

        if already_released:
            print("Version bump already completed in previous run")
            set_github_output("version_committed", "false")
            set_github_output("already_released", "true")
            set_github_output("new_version", remote_version)
            return 0

        # Get current version
        old_version = get_current_version(pyproject_path)
        print(f"\nCurrent version: {old_version}")

        # Run version bump via the sibling bump_version.py script, which
        # rewrites pyproject.toml (we re-read it below to pick up the result).
        print(f"\nBumping version ({args.bump_type})...")
        bump_cmd = [
            sys.executable,
            "scripts/bump_version.py",
            args.bump_type,
        ]
        if args.description:
            bump_cmd.extend(["--description", args.description])

        run_command(bump_cmd)

        # Get new version
        new_version = get_current_version(pyproject_path)
        print(f"New version: {new_version}")
        set_github_output("new_version", new_version)

        # Check for changes
        status = run_command(
            ["git", "status", "--porcelain"],
            capture=True,
        ).stdout.strip()

        if status:
            print("\nChanges detected, committing...")

            # Stage all changes
            run_command(["git", "add", "-A"])

            # Commit with version as message
            run_command(["git", "commit", "-m", new_version])

            # Push to main
            run_command(["git", "push", "origin", "main"])

            print(
                f"\n✅ Version bump committed and pushed: {old_version} → {new_version}"
            )
            set_github_output("version_committed", "true")
        else:
            print("\nNo changes to commit")
            set_github_output("version_committed", "false")

        return 0

    except Exception as e:
        # Broad catch is deliberate: this is the script's top-level CI
        # boundary, and any failure must become a non-zero exit code.
        print(f"Error: {e}", file=sys.stderr)
        return 1


if __name__ == "__main__":
    sys.exit(main())
"""Example module entry point.

Replace this with your actual implementation.
"""

from __future__ import annotations

__version__ = "0.1.0"

__all__ = ["add", "multiply", "delay"]


def add(a: int | float, b: int | float) -> int | float:
    """Return the sum of *a* and *b*."""
    return a + b


def multiply(a: int | float, b: int | float) -> int | float:
    """Return the product of *a* and *b*."""
    return a * b


async def delay(seconds: float) -> None:
    """Asynchronously sleep for *seconds* seconds."""
    # Imported locally so merely importing the package never pulls in asyncio.
    import asyncio

    await asyncio.sleep(seconds)
from __future__ import annotations

import pytest

from my_package import add, delay, multiply


class TestAdd:
    """Tests for add function."""

    def test_add_positive_numbers(self) -> None:
        """Test adding two positive numbers."""
        assert add(2, 3) == 5

    def test_add_negative_numbers(self) -> None:
        """Test adding negative numbers."""
        assert add(-1, -2) == -3

    def test_add_zero(self) -> None:
        """Test adding zero."""
        assert add(5, 0) == 5

    def test_add_floats(self) -> None:
        """Test adding floating point numbers."""
        assert add(2.5, 3.5) == 6.0


class TestMultiply:
    """Tests for multiply function."""

    def test_multiply_positive_numbers(self) -> None:
        """Test multiplying two positive numbers."""
        assert multiply(2, 3) == 6

    def test_multiply_by_zero(self) -> None:
        """Test multiplying by zero."""
        assert multiply(5, 0) == 0

    def test_multiply_negative_numbers(self) -> None:
        """Test multiplying negative numbers."""
        assert multiply(-2, 3) == -6

    def test_multiply_floats(self) -> None:
        """Test multiplying floating point numbers."""
        assert multiply(2.5, 2) == 5.0


class TestDelay:
    """Tests for delay function."""

    @pytest.mark.asyncio
    async def test_delay(self) -> None:
        """Test async delay function."""
        import time

        # FIX: measure with time.monotonic() instead of time.time().
        # The wall clock is not monotonic (NTP/DST adjustments can move it
        # backwards mid-test), which made this assertion flaky; the
        # monotonic clock is the documented tool for measuring elapsed time.
        start = time.monotonic()
        await delay(0.1)
        elapsed = time.monotonic() - start
        assert elapsed >= 0.1