From 8b7077cae52f4d1378e48090d9788286a3ff6cb4 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Sat, 8 Nov 2025 20:19:46 -0800 Subject: [PATCH 1/3] chore(copilot): add GitHub Copilot instructions for repository (#1354) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> --- .github/copilot-instructions.md | 550 ++++++++++++++++++++++++++++++++ 1 file changed, 550 insertions(+) create mode 100644 .github/copilot-instructions.md diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 000000000..97a92c06a --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,550 @@ +# Copilot Instructions for python-semantic-release + +This document explains how GitHub Copilot-like automated agents should interact with +the python-semantic-release repository. + +## Project Overview + +Python Semantic Release is a tool for automating semantic versioning and marking releases for +various types of software projects. It analyzes commit messages with various commit parsers +(the most notable being the Conventional Commits specification) to determine what the next +version should be and facilitates release steps that the developer generally has to do. This +includes generating changelogs, stamping the code with version strings, creating a repository +tag and annotating releases on a remote Git server with version-specific release notes. + +**Key Components:** +- **CLI**: Command-line interface for version management, changelog generation, and publishing +- **Commit Parsers**: Parse commit messages to determine version bumps + (Supports Conventional-Commits, Emoji, and Scipy format) +- **HVCS Integration**: Integrations with GitHub, GitLab, Gitea, and Bitbucket for releasing +- **Version Management**: Semantic versioning logic and version calculation +- **Changelog Generation**: Automated and customizable changelog creation using Jinja2 templates + +## Development Setup + +### Installation + +Requires 3.9+ for development dependencies, but runtime supports 3.8+. + +```bash +# Set up for development +pip install -e .[build,dev,docs,mypy,test] +``` + +### Running the Application + +```bash +# See the CLI help +semantic-release --help + +# Common commands +semantic-release version +semantic-release changelog +semantic-release publish +``` + +### Making Changes + +Minimal PR checklist (run locally before proposing a PR): + +- [ ] Run pre-PR checklist script (see below) +- [ ] If you added dependencies: update `pyproject.toml` and mention them in the PR. +- [ ] Review the helpful tips at the bottom of this document to ensure best practices. +- [ ] Verify that commit messages follow the Commit Message Conventions section below. + +Runnable pre-PR checklist script (copyable): + +```bash +# lint & format +ruff format . +ruff check --unsafe-fixes . +# run type checks +mypy . +# run unit tests +pytest -m unit +# run e2e tests +pytest -m e2e +# optional docs build when docs changed +sphinx-build -b html docs docs/_build/html +``` + +## Code Style and Quality + +### Linting and Formatting + +- **Ruff**: Primary linter and formatter (replaces Black, isort, flake8) + + ```bash + # run check for lint errors + ruff check --unsafe-fixes . + + # apply lint fixes + ruff check --unsafe-fixes --fix . + + # check for formatting issues + ruff format --check . + + # apply formatting fixes + ruff format . + ``` + +- **Type Checking**: Use mypy for type checking + + ```bash + mypy . 
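  # a scoped run over just the package sources is also possible while iterating;
  # the path below is only illustrative of the src/ layout described in the Architecture section
  mypy src/semantic_release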
+ ``` + +### Code Style Guidelines + +1. **Type Hints**: All functions must have complete type hints (enforced by mypy) + +2. **Docstrings**: Use sphinx-style docstrings (though currently many are missing - add + only when modifying a function or adding new code) + +3. **Line Length**: 88 characters (enforced by Ruff) + +4. **Import Style**: + + - Absolute imports only (no relative imports) + - All files must use `from __future__ import annotations` for ignoring type hints at runtime + - Prefer `from module import Class` over `import module` when using classes/functions + - Running Ruff with `--unsafe-fixes` and `--fix` will automatically sort and group imports + - All files should have a `if TYPE_CHECKING: # pragma: no cover` block for type-only imports + - Prefer renaming `re` imports for clarity (e.g. `from re import compile as regexp, escape as regexp_escape`) + +5. **String Quotes**: Use double quotes for strings + +6. **Error Handling**: Create specific exception classes inheriting from `SemanticReleaseBaseError` + and defined in `errors.py` + +### Common Patterns + +- Configuration uses Pydantic models (v2) for validation +- CLI uses Click framework with click-option-group for organization +- Git operations use GitPython library +- Templating uses Jinja2 for changelogs and release notes + +## Testing + +### Test Structure + +- **Unit Tests**: `tests/unit/` - Fast, isolated tests + +- **E2E Tests**: `tests/e2e/` - End-to-end integration tests performed on real git repos + (as little mocking as possible, external network calls to HVCS should be mocked). Repos are + cached into `.pytest_cache/` for faster test setup/runs after the first build. E2E tests are + built to exercise the CLI commands and options against real git repositories with various commit + histories and configurations. + +- **Fixtures**: `tests/fixtures/` - Reusable test data and fixtures + +- **Repository Fixtures**: `tests/fixtures/repos/` - Example git repositories for testing and rely on + `tests/fixtures/example_project.py` and `tests/fixtures/git_repo.py` for setup + +- **Monorepo Fixtures**: `tests/fixtures/monorepo/` - Example monorepos for testing monorepo support + +- **GitHub Action Tests**: `tests/gh_action/` - Tests for simulating GitHub Docker Action functionality + +### Running Tests + +```bash +# Run only unit tests +pytest -m unit + +# Run only e2e tests +pytest -m e2e + +# Run comprehensive (unit & e2e) test suite with full verbosity (all variations of repositories) +# Warning: long runtime (14mins+) only necessary for testing all git repository variations +pytest -vv --comprehensive + +# Run GitHub Docker Action tests (requires Docker, see .github/workflows/validate.yml for setup) +# Only required when modifying the GitHub Action code (src/gh_action/, and action.yml) +bash tests/gh_action/run.sh +``` + +### Testing Guidelines + +1. **Test Organization**: + + - Group unit tests by module structure mirroring `src/` under `tests/unit/` + - Group e2e tests by CLI command under `tests/e2e/` + - Use descriptive test function names that clearly indicate the scenario being tested + - Test docstrings should follow the format: `"""Given , when , then ."""` + +2. **Fixtures**: Use pytest fixtures from `tests/conftest.py` and `tests/fixtures/` + +3. **Markers**: Apply appropriate markers (`@pytest.mark.unit`, `@pytest.mark.e2e`, `@pytest.mark.comprehensive`) + +4. **Mocking**: Use `pytest-mock` for mocking, `responses` for HTTP mocking + +5. 
**Parametrization**: Use `@pytest.mark.parametrize` for testing multiple scenarios

6. **Test Data**: Use `tests/fixtures/repos/` for specific git repository workflow strategies

   - Git repository strategies include:

     - Git Flow:
       - branch & merge commit strategy
       - varying number of branches & release branches

     - GitHub Flow:
       - squash merge strategy
       - branch & merge commit strategy
       - varying number of release branches & simulated simultaneous work branches
       - varying branch update strategies (e.g. rebasing, merging)

     - Trunk-Based Development (no branches):
       - unreleased repo (no tags)
       - trunk with only official release tags
       - trunk with mixed release and pre-release tags
       - concurrent major version support

     - ReleaseFlow (not supported yet)

     - Monorepo (multiple packages):
       - trunk-based development with only official release tags
       - GitHub Flow with squash merge strategy
       - GitHub Flow with branch & merge commit strategy

### Test Coverage

- Maintain high test coverage for core functionality
- Unit tests should be fast and not touch the filesystem/network when possible
- E2E tests should test realistic workflows with actual git operations

### Pull Request Testing

Each PR will be evaluated through a GitHub Actions workflow before it can be merged.
The workflow is specialized to run the tests in a specific order and with specific
parameters. Please refer to `.github/workflows/ci.yml` for details on how the tests are
structured and run.

## Commit Message Conventions

This project uses the **Conventional Commits** specification and is versioned by itself.
See `CHANGELOG.rst` for a reference of how the conventional commits, and the specific
rules this project applies, are used in practice to communicate changes to users.

It is highly important to separate code changes into their respective commit types and
scopes to ensure that the changelog is generated correctly and that users can understand
the changes in each release. The commit message format is strictly enforced and should be
followed for all commits.

When submitting a pull request, it is recommended to commit any end-to-end test cases
first as a `test` type commit, then the implementation changes as `feat`, `fix`, etc.
This order allows reviewers to run the test which demonstrates the failure case before
validating the implementation changes by doing a `git merge origin/<branch>` to run the
test again and see it pass. Unit test cases will need to be committed after the source
code implementation changes as they will not run without the implementation code.
Documentation changes should be committed last, and the commit scope should be a short
reference to the page being modified (e.g. `docs(github-actions): <description>` or
`docs(configuration): <description>`). Commit types should be chosen relative to the
default branch rather than to earlier commits on the same branch. For example, if you are
fixing a bug in a feature that was added in the same branch, the commit type should be
`refactor` instead of `fix`, since the bug was introduced in the same branch and is not
present in the default branch.

### Format

```
<type>(<scope>): <description>

[optional body]

[optional footer(s)]
```

Scopes are optional by the specification, but for this project they are required and may
be omitted only by exception.
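As an illustration (the scope, wording, and issue number below are hypothetical and do
not describe a real change in this repository), a complete commit message following this
format might look like:

```
fix(parser-conventional): handle multi-paragraph bodies in squashed commits

Previously only the first paragraph of a squashed commit body was parsed,
which truncated the descriptions rendered into the changelog.

Resolves: #1234
```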

Footers include:

- `BREAKING CHANGE: <description>` for breaking changes

- `NOTICE: <description>` for additional release information that should be included
  in the changelog to give users more context about the release

- `Resolves: #<issue-number>` for linking to bug fixes. Use `Implements: #<issue-number>`
  for new features.

You should not have a breaking change and a notice in the same commit. If you have a
breaking change, the breaking change description should include all relevant information
about the change and how to update.

### Types

- `feat`: New feature (minor version bump)
- `fix`: Bug fix (patch version bump)
- `perf`: Performance improvement (patch version bump)
- `docs`: Documentation-only changes
- `style`: Code style changes (formatting, missing semicolons, etc.)
- `refactor`: Code refactoring without feature changes or bug fixes
- `test`: Adding or updating tests
- `build`: Changes to the build system or dependencies
- `ci`: Changes to CI configuration
- `chore`: Other changes that don't modify src or test files

### Breaking Changes

- Add `!` after the scope (`feat(scope)!: breaking change`) and add a
  `BREAKING CHANGE: <description>` footer with a detailed description of what was
  changed, why, and how to update.

### Notices

- Add `NOTICE: <description>` in the footer to include important information about the
  release that should be included in the changelog. This is for things that require more
  explanation than a simple commit message and are not breaking changes.

### Scopes

Use scopes as categories to indicate the area of change. They are most important for the
types of changes that are included in the changelog (bug fixes, features, performance
improvements, documentation, build dependencies) to tell the user what area was changed.

Common scopes include:

- `changelog`: Changes related to changelog generation
- `config`: Changes related to user configuration
- `fixtures`: Changes related to test fixtures
- `deps`: Changes related to runtime dependencies
- `deps-dev`: Changes related to development dependencies
  (as defined in `pyproject.toml:project.optional-dependencies.dev`)
- `deps-build`: Changes related to build dependencies
  (as defined in `pyproject.toml:project.optional-dependencies.build`)
- `deps-docs`: Changes related to documentation dependencies
  (as defined in `pyproject.toml:project.optional-dependencies.docs`)
- `deps-test`: Changes related to testing dependencies
  (as defined in `pyproject.toml:project.optional-dependencies.test`)

We use hyphenated scopes to group related changes together in a category-to-subcategory
format. The most common categories are:

- `cmd-<command>`: Changes related to a specific CLI command
- `parser-<parser>`: Changes related to a specific commit parser
- `hvcs-<service>`: Changes related to a specific hosting service integration

## Architecture

The project's primary entrypoint is `src/semantic_release/__main__.py:main`, as defined
in `pyproject.toml:project.scripts`. This is the CLI interface that users interact with.
The CLI is built using Click with lazy-loaded subcommands for version management,
changelog generation, and publishing.

Although the project is primarily a CLI tool, the code is under development to become
more modular and pluggable to allow for more flexible usage in other contexts (e.g. as a
library).

This repository is also provided as a GitHub Action (see `src/gh_action/`) for users
who want a pre-built solution for their GitHub repositories.
The action is built using Docker +and wraps the built wheel of the project before it runs the CLI version command in a +containerized environment. The publish command is also available as a GitHub Action but +that code is hosted in a separate repository (https://github.com/python-semantic-release/publish-action). + +### Key Components + +- `src/semantic_release/cli/`: Click-based CLI interface + - `commands/`: Individual CLI commands (version, changelog, publish) + - `config.py`: Configuration loading and validation with Pydantic + +- `src/semantic_release/commit_parser/`: Commit message parsers + - `_base.py`: Base parser interface + - `conventional/parser.py`: Conventional Commits parser + - `conventional/options.py`: Conventional Commits parser options + - `conventional/parser_monorepo.py`: Conventional Commits parser for monorepos + - `conventional/options_monorepo.py`: Conventional Commits monorepo parser options + - `angular.py`, `emoji.py`, `scipy.py`, `tag.py`: Parser implementations + +- `src/semantic_release/hvcs/`: Hosting service integrations + - `_base.py`: Base HVCS interface + - `remote_hvcs_base.py`: Base class for remote HVCS implementations + - `github.py`, `gitlab.py`, `gitea.py`, `bitbucket.py`: Service implementations + +- `src/semantic_release/version/`: Version management + - `version.py`: Version class and comparison logic + - `declarations/`: Implementations of how to stamp versions into various types of code + from users' configuration + - `translator.py`: Version translation between different formats + +- `src/gh_action/`: GitHub Docker Action implementation + - `action.sh`: Main entrypoint for the action + - `Dockerfile`: Dockerfile for the action + +- `action.yml`: GitHub Action definition + +### Design Patterns + +- **Strategy Pattern**: Commit parsers and HVCS implementations are pluggable +- **Template Method**: Base classes define workflow, subclasses implement specifics +- **Builder Pattern**: Version calculation builds up changes from commits +- **Factory Pattern**: Parser and HVCS selection based on configuration +- **Composition Pattern**: The future of the project's design for pluggable components + +## Building and Releasing + +### Local Build + +```bash +pip install -e .[build] +bash scripts/build.sh +``` + +### Release Process + +This project is released via GitHub Actions (see `.github/workflows/cicd.yml`) after +a successful validation workflow. During release, it runs a previous version of +itself to perform the release steps. The release process includes: + +1. Commits are analyzed from the last tag that exists on the current branch +2. Version is determined based on commit types +3. Changelog is generated from commits +4. Source code is stamped with new version +5. Documentation is stamped with the new version (`$NEW_VERSION`) + or new release tag (`$NEW_RELEASE_TAG`) (see `scripts/bump_version_in_docs.py` for details) +6. Package is built with stamped version +7. Code changes from steps 4-6 are committed and pushed to the repository +8. A new tag is created for the release and pushed to the repository +9. A new release is created on the hosting service with version-specific generated release notes +10. Assets are uploaded to the release +11. The package is published to PyPI +12. 
ReadTheDocs is triggered to build & publish the documentation + +### Version Configuration + +- Version stored in `pyproject.toml:project.version` +- Additional version locations in `tool.semantic_release.version_variables` +- Follows semantic versioning: MAJOR.MINOR.PATCH + +## Common Tasks + +### Adding a New Commit Parser + +1. Create new parser in `src/semantic_release/commit_parser/` +2. Inherit from `CommitParser` base class +3. Implement `parse()` method +4. Add parser to `KNOWN_COMMIT_PARSERS` in config +5. Add tests in `tests/unit/semantic_release/commit_parser/` +6. Add fixtures that can select the new parser for e2e tests + +### Adding a New HVCS Integration + +1. Create new HVCS in `src/semantic_release/hvcs/` +2. Inherit from `HvcsBase` base class or `RemoteHvcsBase` if it is a remote service +3. Implement required methods (token creation, asset upload, release creation) +4. Add HVCS to configuration options +5. Add tests in `tests/unit/semantic_release/hvcs/` +6. Add fixtures that can select the new HVCS for e2e tests + +### Adding a New CLI Command + +1. Create command in `src/semantic_release/cli/commands/` +2. Use Click decorators for arguments/options +3. Access shared context via `ctx.obj` (RuntimeContext) +4. Add command to main command group in `src/semantic_release/cli/commands/main.py` +5. Add tests in `tests/e2e/cmd_/` +6. Add documentation for the command in `docs/api/commands.rst` + +### Modifying the included default changelog templates source + +1. Update the default templates in `src/semantic_release/data/templates///` +2. Update the fixtures in `tests/fixtures/git_repo.py` to correctly replicate the + format of the new templates via code. +3. Update the unit tests for changelog generation + +### Adding a new configuration option + +1. Update the Pydantic models in `cli/config.py` with validation +2. Add option over into the RuntimeContext if necessary +3. Add option description to documentation in + `docs/configuration/configuration.rst` +4. Add unit tests for the validation of the option in `tests/unit/semantic_release/cli/config.py` +5. Add e2e tests for the option in `tests/e2e/` depending on the option's scope + and functionality. + +### Adding a new command line option + +1. Add the option to the appropriate CLI command in + `src/semantic_release/cli/commands/` +2. Add the option to the GitHub Action if it is for the `version` command +3. Add the option to the documentation in `docs/api/commands.rst` +4. Add the option to the GitHub Action documentation in + `docs/configuration/automatic-releases/github-actions.rst` +5. Add e2e tests for the option in `tests/e2e/cmd_/` + +### Adding a new changelog context filter + +1. Implement the filter in `src/semantic_release/changelog/context.py` +2. Add the filter to the changelog context and release notes context objects +3. Add unit tests for the filter in `tests/unit/semantic_release/changelog/**` +4. 
Add description and example of how to use the filter in the documentation + in `docs/concepts/changelog_templates.rst` + +## Important Files + +- `pyproject.toml`: Project configuration, dependencies, tool settings +- `action.yml`: GitHub Action definition +- `config/release-templates/`: Project-specific Jinja2 templates for changelog and release notes +- `.pre-commit-config.yaml`: Pre-commit hooks configuration +- `.readthedocs.yml`: ReadTheDocs configuration +- `CONTRIBUTING.rst`: Contribution guidelines + +## Documentation + +- Hosted on ReadTheDocs: https://python-semantic-release.readthedocs.io +- Source in `docs/` directory +- Uses Sphinx with Furo theme +- Build locally: `sphinx-build -b html docs docs/_build/html` +- View locally: open `docs/_build/html/index.html` + +## Python Version Support + +- Runtime Minimum: Python 3.8 +- Development Dependencies: Python 3.9+ +- Tested on: Python 3.8, 3.14 +- Target version for type checking: Python 3.8 + +## Dependencies to Know + +- **Click**: CLI framework +- **GitPython**: Git operations +- **Pydantic v2**: Configuration validation and models +- **Jinja2**: Template engine for changelogs +- **requests**: HTTP client for HVCS APIs +- **python-gitlab**: GitLab API client +- **tomlkit**: TOML parsing with formatting preservation +- **rich**: Rich terminal output + +## Helpful Tips + +- Never add real secrets, tokens, or credentials to source, commits, fixtures, or logs. + +- All proposed changes must include tests (unit and/or e2e as appropriate) and pass the + local quality gate before creating a PR. + +- When modifying configuration, update the Pydantic models in `cli/config.py` + +- Jinja2 changelog templates for this project are in `config/release-templates/`, whereas + the default changelog templates provided to users as a part of this project are in + `src/semantic_release/data/templates///**`. + +- The `RuntimeContext` object holds shared state across CLI commands + +- Use `--noop` flag to test commands without making changes + +- Version detection respects git tags - use annotated tags + +- The project uses its own tool for versioning, so commit messages matter! + +- When creating a Pull Request, create a PR description that fills out the + PR template found in `.github/PULL_REQUEST_TEMPLATE.md`. This will help + reviewers understand the changes and the impact of the PR. + +- If creating an issue, fill out one of the issue templates found in + `.github/ISSUE_TEMPLATE/` related to the type of issue (bug, feature request, etc.). + This will help maintainers understand the issue and its impact. + +- When adding new features, consider how they will affect the changelog and + versioning. Make as few breaking changes as possible by adding backwards compatibility + and if you do make a breaking change, be sure to include a detailed description in the + `BREAKING CHANGE` footer of the commit message. 
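As a concrete example of the `--noop` tip above, you can preview the release steps
without touching the repository (a sketch; the exact output depends on your
configuration and commit history):

```bash
# print what the next version would be and what actions would be taken,
# without committing, tagging, pushing, or publishing anything
semantic-release --noop version

# preview the changelog content that would be generated
semantic-release --noop changelog
```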
From c95c6083749972aaef1e949eb596192309d0d8d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 8 Nov 2025 20:24:11 -0800 Subject: [PATCH 2/3] ci(deps): bump `mikepenz/action-junit-report@v6.0.0` action to `v6.0.1` (#1361) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/validate.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 72aa94207..6f447d461 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -195,7 +195,7 @@ jobs: --junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Test Results - uses: mikepenz/action-junit-report@5b7ee5a21e8674b695313d769f3cbdfd5d4d53a4 # v6.0.0 + uses: mikepenz/action-junit-report@e08919a3b1fb83a78393dfb775a9c37f17d8eea6 # v6.0.1 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml @@ -285,7 +285,7 @@ jobs: retention-days: 1 - name: Report | Upload Test Results - uses: mikepenz/action-junit-report@5b7ee5a21e8674b695313d769f3cbdfd5d4d53a4 # v6.0.0 + uses: mikepenz/action-junit-report@e08919a3b1fb83a78393dfb775a9c37f17d8eea6 # v6.0.1 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml @@ -383,7 +383,7 @@ jobs: retention-days: 1 - name: Report | Upload Test Results - uses: mikepenz/action-junit-report@5b7ee5a21e8674b695313d769f3cbdfd5d4d53a4 # v6.0.0 + uses: mikepenz/action-junit-report@e08919a3b1fb83a78393dfb775a9c37f17d8eea6 # v6.0.1 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml From 90a1ffa55c5a1605c59cb26a1797f9a37fdfa784 Mon Sep 17 00:00:00 2001 From: codejedi365 Date: Sat, 8 Nov 2025 20:28:24 -0800 Subject: [PATCH 3/3] feat(cmd-version): add automatic repository un-shallowing to version workflow (#1366) NOTICE: If you were previously handling the unshallowing of a repository clone in your CI/CD pipelines, you may now remove that step from your workflow. PSR will now detect a shallow repository and unshallow it before evaluating the commit history. Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> * docs(github-actions): update example to remove need to specify repo checkout's fetch depth * docs(uv-integration): update example to remove need to specify repo checkout's fetch depth * test(cmd-version): add E2E test cases to verify automatic un-shallowing of repos * test(gitproject): refactor verify_upstream tests to use common mocking fixture * test(gitproject): add unit tests to exercise the auto-unshallow use & error cases --- .../automatic-releases/github-actions.rst | 26 +- .../configuration-guides/uv_integration.rst | 2 - src/semantic_release/cli/commands/version.py | 17 +- src/semantic_release/gitproject.py | 36 +++ tests/e2e/cmd_version/test_version_shallow.py | 306 ++++++++++++++++++ .../unit/semantic_release/test_gitproject.py | 254 +++++++++------ 6 files changed, 515 insertions(+), 126 deletions(-) create mode 100644 tests/e2e/cmd_version/test_version_shallow.py diff --git a/docs/configuration/automatic-releases/github-actions.rst b/docs/configuration/automatic-releases/github-actions.rst index c515ed474..cf8610c10 100644 --- a/docs/configuration/automatic-releases/github-actions.rst +++ b/docs/configuration/automatic-releases/github-actions.rst @@ -875,17 +875,16 @@ to the GitHub Release Assets as well. 
contents: write steps: - # Note: We checkout the repository at the branch that triggered the workflow - # with the entire history to ensure to match PSR's release branch detection - # and history evaluation. - # However, we forcefully reset the branch to the workflow sha because it is - # possible that the branch was updated while the workflow was running. This - # prevents accidentally releasing un-evaluated changes. + # Note: We checkout the repository at the branch that triggered the workflow. + # Python Semantic Release will automatically convert shallow clones to full clones + # if needed to ensure proper history evaluation. However, we forcefully reset the + # branch to the workflow sha because it is possible that the branch was updated + # while the workflow was running, which prevents accidentally releasing un-evaluated + # changes. - name: Setup | Checkout Repository on Release Branch uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} - fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: | @@ -959,11 +958,6 @@ to the GitHub Release Assets as well. one release job in the case if there are multiple pushes to ``main`` in a short period of time. -.. warning:: - You must set ``fetch-depth`` to 0 when using ``actions/checkout@v4``, since - Python Semantic Release needs access to the full history to build a changelog - and at least the latest tags to determine the next version. - .. warning:: The ``GITHUB_TOKEN`` secret is automatically configured by GitHub, with the same permissions role as the user who triggered the workflow run. This causes @@ -974,6 +968,14 @@ to the GitHub Release Assets as well. case, you will also need to pass the new token to ``actions/checkout`` (as the ``token`` input) in order to gain push access. +.. note:: + As of $NEW_RELEASE_TAG, Python Semantic Release automatically detects and converts + shallow clones to full clones when needed. While you can still use ``fetch-depth: 0`` + with ``actions/checkout@v4`` to fetch the full history upfront, it is no longer + required. If you use the default shallow clone, Python Semantic Release will + automatically fetch the full history before evaluating commits. If you are using + an older version of PSR, you will need to unshallow the repository prior to use. + .. note:: As of $NEW_RELEASE_TAG, the verify upstream step is no longer required as it has been integrated into PSR directly. 
If you are using an older version of PSR, you will need diff --git a/docs/configuration/configuration-guides/uv_integration.rst b/docs/configuration/configuration-guides/uv_integration.rst index bc794832e..ac9f2359e 100644 --- a/docs/configuration/configuration-guides/uv_integration.rst +++ b/docs/configuration/configuration-guides/uv_integration.rst @@ -161,7 +161,6 @@ look like this: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} - fetch-depth: 0 - name: Setup | Force correct release branch on workflow sha run: git checkout -B ${{ github.ref_name }} @@ -259,7 +258,6 @@ look like this: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.ref_name }} - fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: git reset --hard ${{ github.sha }} diff --git a/src/semantic_release/cli/commands/version.py b/src/semantic_release/cli/commands/version.py index 627cc1fc6..ad60b95aa 100644 --- a/src/semantic_release/cli/commands/version.py +++ b/src/semantic_release/cli/commands/version.py @@ -496,6 +496,17 @@ def version( # noqa: C901 logger.info("Forcing use of %s as the prerelease token", prerelease_token) translator.prerelease_token = prerelease_token + # Check if the repository is shallow and unshallow it if necessary + # This ensures we have the full history for commit analysis + project = GitProject( + directory=runtime.repo_dir, + commit_author=runtime.commit_author, + credential_masker=runtime.masker, + ) + if project.is_shallow_clone(): + logger.info("Repository is a shallow clone, converting to full clone...") + project.git_unshallow(noop=opts.noop) + # Only push if we're committing changes if push_changes and not commit_changes and not create_tag: logger.info("changes will not be pushed because --no-commit disables pushing") @@ -688,12 +699,6 @@ def version( # noqa: C901 license_name="" if not isinstance(license_cfg, str) else license_cfg, ) - project = GitProject( - directory=runtime.repo_dir, - commit_author=runtime.commit_author, - credential_masker=runtime.masker, - ) - # Preparing for committing changes; we always stage files even if we're not committing them in order to support a two-stage commit project.git_add(paths=all_paths_to_add, noop=opts.noop) if commit_changes: diff --git a/src/semantic_release/gitproject.py b/src/semantic_release/gitproject.py index 05c4b1015..a29bb41de 100644 --- a/src/semantic_release/gitproject.py +++ b/src/semantic_release/gitproject.py @@ -90,6 +90,42 @@ def is_dirty(self) -> bool: with Repo(str(self.project_root)) as repo: return repo.is_dirty() + def is_shallow_clone(self) -> bool: + """ + Check if the repository is a shallow clone. + + :return: True if the repository is a shallow clone, False otherwise + """ + with Repo(str(self.project_root)) as repo: + shallow_file = Path(repo.git_dir, "shallow") + return shallow_file.exists() + + def git_unshallow(self, noop: bool = False) -> None: + """ + Convert a shallow clone to a full clone by fetching the full history. 
+ + :param noop: Whether or not to actually run the unshallow command + """ + if noop: + noop_report("would have run:\n" " git fetch --unshallow") + return + + with Repo(str(self.project_root)) as repo: + try: + self.logger.info("Converting shallow clone to full clone...") + repo.git.fetch("--unshallow") + self.logger.info("Repository unshallowed successfully") + except GitCommandError as err: + # If the repository is already a full clone, git fetch --unshallow will fail + # with "fatal: --unshallow on a complete repository does not make sense" + # We can safely ignore this error by checking the stderr message + stderr = str(err.stderr) if err.stderr else "" + if "does not make sense" in stderr or "complete repository" in stderr: + self.logger.debug("Repository is already a full clone") + else: + self.logger.exception(str(err)) + raise + def git_add( self, paths: Sequence[Path | str], diff --git a/tests/e2e/cmd_version/test_version_shallow.py b/tests/e2e/cmd_version/test_version_shallow.py new file mode 100644 index 000000000..fa088e548 --- /dev/null +++ b/tests/e2e/cmd_version/test_version_shallow.py @@ -0,0 +1,306 @@ +"""Tests for version command with shallow repositories.""" + +from __future__ import annotations + +from contextlib import suppress +from pathlib import Path +from typing import TYPE_CHECKING, cast + +import pytest +from git import Repo +from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture + +from semantic_release.hvcs.github import Github + +from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD +from tests.fixtures.example_project import change_to_ex_proj_dir +from tests.fixtures.repos import repo_w_trunk_only_conventional_commits +from tests.fixtures.repos.trunk_based_dev.repo_w_tags import ( + build_trunk_only_repo_w_tags, +) +from tests.util import assert_successful_exit_code, temporary_working_directory + +if TYPE_CHECKING: + from requests_mock import Mocker + + from tests.conftest import RunCliFn + from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn + from tests.fixtures.git_repo import ( + BuildSpecificRepoFn, + CommitConvention, + GetCfgValueFromDefFn, + GetGitRepo4DirFn, + GetVersionsFromRepoBuildDefFn, + ) + + +@pytest.mark.parametrize( + "repo_fixture_name, build_repo_fn", + [ + ( + repo_fixture_name, + lazy_fixture(build_repo_fn_name), + ) + for repo_fixture_name, build_repo_fn_name in [ + ( + repo_w_trunk_only_conventional_commits.__name__, + build_trunk_only_repo_w_tags.__name__, + ), + ] + ], +) +@pytest.mark.usefixtures(change_to_ex_proj_dir.__name__) +def test_version_w_shallow_repo_unshallows( + repo_fixture_name: str, + run_cli: RunCliFn, + build_repo_fn: BuildSpecificRepoFn, + example_project_dir: ExProjectDir, + git_repo_for_directory: GetGitRepo4DirFn, + post_mocker: Mocker, + get_cfg_value_from_def: GetCfgValueFromDefFn, + get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, +) -> None: + """ + Test that the version command automatically unshallows a shallow repository. 
+ + Given a shallow repository, + When running the version command, + Then the repository should be unshallowed and release should succeed + """ + remote_name = "origin" + + # Create a bare remote (simulating origin) + local_origin = Repo.init(str(example_project_dir / "local_origin"), bare=True) + + # build target repo into a temporary directory + target_repo_dir = example_project_dir / repo_fixture_name + commit_type: CommitConvention = ( + repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] + ) + target_repo_definition = build_repo_fn( + repo_name=repo_fixture_name, + commit_type=commit_type, + dest_dir=target_repo_dir, + ) + target_git_repo = git_repo_for_directory(target_repo_dir) + + # Configure the source repo to use the bare remote (removing any existing 'origin') + with suppress(AttributeError): + target_git_repo.delete_remote(target_git_repo.remotes[remote_name]) + + target_git_repo.create_remote(remote_name, str(local_origin.working_dir)) + + # Remove last release before pushing to upstream + tag_format_str = cast( + "str", get_cfg_value_from_def(target_repo_definition, "tag_format_str") + ) + latest_tag = tag_format_str.format( + version=get_versions_from_repo_build_def(target_repo_definition)[-1] + ) + target_git_repo.git.tag("-d", latest_tag) + target_git_repo.git.reset("--hard", "HEAD~1") + + # TODO: when available, switch this to use hvcs=none or similar config to avoid token use for push + update_pyproject_toml( + "tool.semantic_release.remote.ignore_token_for_push", + True, + target_repo_dir / pyproject_toml_file, + ) + target_git_repo.git.commit(amend=True, no_edit=True, all=True) + + # push the current state to establish the remote (cannot push tags and branches at the same time) + target_git_repo.git.push(remote_name, all=True) # all branches + target_git_repo.git.push(remote_name, tags=True) # all tags + + # ensure bare remote HEAD points to the active branch so clones can checkout + local_origin.git.symbolic_ref( + "HEAD", f"refs/heads/{target_git_repo.active_branch.name}" + ) + + # current remote tags + remote_origin_tags_before = {tag.name for tag in local_origin.tags} + + # Create a shallow clone from the remote using file:// protocol for depth support + shallow_repo = Repo.clone_from( + f"file://{local_origin.working_dir}", + str(example_project_dir / "shallow_clone"), + no_local=True, + depth=1, + ) + with shallow_repo.config_writer("repository") as config: + config.set_value("core", "hookspath", "") + config.set_value("commit", "gpgsign", False) + config.set_value("tag", "gpgsign", False) + + with shallow_repo: + # Verify it's a shallow clone + shallow_file = Path(shallow_repo.git_dir, "shallow") + assert shallow_file.exists(), "Repository should be shallow" + + # Capture expected values from the full repo + expected_vcs_url_post = 1 + commit_sha_before = shallow_repo.head.commit.hexsha + + # Run PSR on the shallow clone + with temporary_working_directory(str(shallow_repo.working_dir)): + cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch"] + result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) + + # Initial execution check + assert_successful_exit_code(result, cli_cmd) + + # Take measurements after running PSR + remote_origin_tags_after = {tag.name for tag in local_origin.tags} + different_tags = remote_origin_tags_after.difference(remote_origin_tags_before) + with shallow_repo: + parent_commit_shas = [ + parent.hexsha for parent in shallow_repo.head.commit.parents + ] + commit_sha_after = 
shallow_repo.head.commit.hexsha + + # Verify the shallow file is gone (repo was unshallowed) + assert not shallow_file.exists(), "Repository should be unshallowed" + + # Verify release was successful + assert commit_sha_before != commit_sha_after, "Expected commit SHA to change" + assert ( + commit_sha_before in parent_commit_shas + ), "Expected new commit to be created on HEAD" + assert ( + latest_tag in different_tags + ), "Expected a new tag to be created and pushed to remote" + assert expected_vcs_url_post == post_mocker.call_count # 1x vcs release created + + +@pytest.mark.parametrize( + "repo_fixture_name, build_repo_fn", + [ + ( + repo_fixture_name, + lazy_fixture(build_repo_fn_name), + ) + for repo_fixture_name, build_repo_fn_name in [ + ( + repo_w_trunk_only_conventional_commits.__name__, + build_trunk_only_repo_w_tags.__name__, + ), + ] + ], +) +@pytest.mark.usefixtures(change_to_ex_proj_dir.__name__) +def test_version_noop_w_shallow_repo( + repo_fixture_name: str, + run_cli: RunCliFn, + build_repo_fn: BuildSpecificRepoFn, + example_project_dir: ExProjectDir, + git_repo_for_directory: GetGitRepo4DirFn, + post_mocker: Mocker, + get_cfg_value_from_def: GetCfgValueFromDefFn, + get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, +) -> None: + """ + Test that the version command in noop mode reports unshallow action. + + Given a shallow repository, + When running the version command with --noop, + Then the command should report what it would do but not actually unshallow + """ + remote_name = "origin" + + # Create a bare remote (simulating origin) + local_origin = Repo.init(str(example_project_dir / "local_origin"), bare=True) + + # build target repo into a temporary directory + target_repo_dir = example_project_dir / repo_fixture_name + commit_type: CommitConvention = ( + repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] + ) + target_repo_definition = build_repo_fn( + repo_name=repo_fixture_name, + commit_type=commit_type, + dest_dir=target_repo_dir, + ) + target_git_repo = git_repo_for_directory(target_repo_dir) + + # Configure the source repo to use the bare remote (removing any existing 'origin') + with suppress(AttributeError): + target_git_repo.delete_remote(target_git_repo.remotes[remote_name]) + + target_git_repo.create_remote(remote_name, str(local_origin.working_dir)) + + # Remove last release before pushing to upstream + tag_format_str = cast( + "str", get_cfg_value_from_def(target_repo_definition, "tag_format_str") + ) + latest_tag = tag_format_str.format( + version=get_versions_from_repo_build_def(target_repo_definition)[-1] + ) + target_git_repo.git.tag("-d", latest_tag) + target_git_repo.git.reset("--hard", "HEAD~1") + + # TODO: when available, switch this to use hvcs=none or similar config to avoid token use for push + update_pyproject_toml( + "tool.semantic_release.remote.ignore_token_for_push", + True, + target_repo_dir / pyproject_toml_file, + ) + target_git_repo.git.commit(amend=True, no_edit=True, all=True) + + # push the current state to establish the remote (cannot push tags and branches at the same time) + target_git_repo.git.push(remote_name, all=True) # all branches + target_git_repo.git.push(remote_name, tags=True) # all tags + + # ensure bare remote HEAD points to the active branch so clones can checkout + local_origin.git.symbolic_ref( + "HEAD", f"refs/heads/{target_git_repo.active_branch.name}" + ) + + # Create a shallow clone from 
the remote using file:// protocol for depth support + shallow_repo = Repo.clone_from( + f"file://{local_origin.working_dir}", + str(example_project_dir / "shallow_clone"), + no_local=True, + depth=1, + ) + with shallow_repo.config_writer("repository") as config: + config.set_value("core", "hookspath", "") + config.set_value("commit", "gpgsign", False) + config.set_value("tag", "gpgsign", False) + + with shallow_repo: + # Verify it's a shallow clone + shallow_file = Path(shallow_repo.git_dir, "shallow") + assert shallow_file.exists(), "Repository should be shallow" + + # Capture expected values from the full repo + expected_vcs_url_post = 0 + commit_sha_before = shallow_repo.head.commit.hexsha + remote_origin_tags_before = {tag.name for tag in local_origin.tags} + + # Run PSR in noop mode on the shallow clone + with temporary_working_directory(str(shallow_repo.working_dir)): + cli_cmd = [MAIN_PROG_NAME, "--noop", VERSION_SUBCMD, "--patch"] + result = run_cli(cli_cmd[1:], env={Github.DEFAULT_ENV_TOKEN_NAME: "1234"}) + + # Initial execution check + assert_successful_exit_code(result, cli_cmd) + + # Take measurements after running PSR + remote_origin_tags_after = {tag.name for tag in local_origin.tags} + different_tags = remote_origin_tags_after.difference(remote_origin_tags_before) + with shallow_repo: + commit_sha_after = shallow_repo.head.commit.hexsha + + # Verify the shallow file still exists (repo was NOT actually unshallowed in noop) + assert shallow_file.exists(), "Repository should still be shallow in noop mode" + + # Verify no actual changes were made + assert ( + commit_sha_before == commit_sha_after + ), "Expected commit SHA to remain unchanged in noop mode" + assert not different_tags, "Expected no new tags to be created in noop mode" + assert expected_vcs_url_post == post_mocker.call_count diff --git a/tests/unit/semantic_release/test_gitproject.py b/tests/unit/semantic_release/test_gitproject.py index d5795fff7..7b37a9756 100644 --- a/tests/unit/semantic_release/test_gitproject.py +++ b/tests/unit/semantic_release/test_gitproject.py @@ -1,11 +1,14 @@ +"""Tests for the GitProject class.""" + from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from unittest.mock import MagicMock, PropertyMock, patch import pytest from git import GitCommandError +import semantic_release.gitproject from semantic_release.errors import ( DetachedHeadGitError, GitFetchError, @@ -13,16 +16,36 @@ UnknownUpstreamBranchError, UpstreamBranchChangedError, ) -from semantic_release.gitproject import GitProject if TYPE_CHECKING: from pathlib import Path + from typing import Generator + + from semantic_release.gitproject import GitProject + + class MockGit(MagicMock): + """A mock Git object that can be used in tests.""" + + rev_parse: MagicMock + fetch: MagicMock + push: MagicMock + + class RepoMock(MagicMock): + """A mock Git repository that can be used in tests.""" + + active_branch: MagicMock + remotes: dict[str, MagicMock] + git: MockGit + git_dir: str + commit: MagicMock @pytest.fixture -def mock_repo(): +def mock_repo(tmp_path: Path) -> RepoMock: """Create a mock Git repository with proper structure for new implementation.""" - repo = MagicMock() + repo = cast("RepoMock", MagicMock()) + + repo.git_dir = str(tmp_path / ".git") # Mock active branch active_branch = MagicMock() @@ -60,17 +83,30 @@ def mock_repo(): return repo -def test_verify_upstream_unchanged_success(tmp_path: Path, mock_repo: MagicMock): - """Test that verify_upstream_unchanged 
succeeds when upstream has not changed.""" - git_project = GitProject(directory=tmp_path) +@pytest.fixture +def git_project(tmp_path: Path) -> GitProject: + """Create a GitProject instance for testing.""" + return semantic_release.gitproject.GitProject(directory=tmp_path) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: + +@pytest.fixture +def mock_gitproject( + git_project: GitProject, mock_repo: RepoMock +) -> Generator[GitProject, None, None]: + """Patch the GitProject to use the mock Repo.""" + module_path = semantic_release.gitproject.__name__ + with patch(f"{module_path}.Repo") as mock_repo_class: mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) + yield git_project - # Should not raise an exception - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + +def test_verify_upstream_unchanged_success( + mock_gitproject: GitProject, mock_repo: RepoMock +): + """Test that verify_upstream_unchanged succeeds when upstream has not changed.""" + # Should not raise an exception + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) # Verify fetch was called mock_repo.remotes["origin"].fetch.assert_called_once() @@ -79,11 +115,9 @@ def test_verify_upstream_unchanged_success(tmp_path: Path, mock_repo: MagicMock) def test_verify_upstream_unchanged_fails_when_changed( - tmp_path: Path, mock_repo: MagicMock + mock_gitproject: GitProject, mock_repo: RepoMock ): """Test that verify_upstream_unchanged raises error when upstream has changed.""" - git_project = GitProject(directory=tmp_path) - # Mock git operations with different SHAs mock_repo.git.rev_parse = MagicMock( return_value="def456" # Different from upstream @@ -95,152 +129,160 @@ def test_verify_upstream_unchanged_fails_when_changed( changed_commit.iter_parents = MagicMock(return_value=[]) mock_repo.commit = MagicMock(return_value=changed_commit) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - with pytest.raises( - UpstreamBranchChangedError, match=r"Upstream branch .* has changed" - ): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + with pytest.raises( + UpstreamBranchChangedError, match=r"Upstream branch .* has changed" + ): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) -def test_verify_upstream_unchanged_noop(tmp_path: Path): +def test_verify_upstream_unchanged_noop( + mock_gitproject: GitProject, mock_repo: RepoMock +): """Test that verify_upstream_unchanged does nothing in noop mode.""" - git_project = GitProject(directory=tmp_path) - - mock_repo = MagicMock() - - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - # Should not raise an exception and should not call git operations - git_project.verify_upstream_unchanged(noop=True) + # Should not raise an exception and should not call git operations + mock_gitproject.verify_upstream_unchanged(noop=True) # Verify Repo was not instantiated at all in noop mode - mock_repo_class.assert_not_called() + mock_repo.assert_not_called() def 
test_verify_upstream_unchanged_no_tracking_branch( - tmp_path: Path, mock_repo: MagicMock + mock_gitproject: GitProject, mock_repo: RepoMock ): """Test that verify_upstream_unchanged raises error when no tracking branch exists.""" - git_project = GitProject(directory=tmp_path) - # Mock no tracking branch mock_repo.active_branch.tracking_branch = MagicMock(return_value=None) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - # Should raise UnknownUpstreamBranchError - with pytest.raises( - UnknownUpstreamBranchError, match="No upstream branch found" - ): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + # Should raise UnknownUpstreamBranchError + with pytest.raises(UnknownUpstreamBranchError, match="No upstream branch found"): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) -def test_verify_upstream_unchanged_detached_head(tmp_path: Path): +def test_verify_upstream_unchanged_detached_head( + mock_gitproject: GitProject, mock_repo: RepoMock +): """Test that verify_upstream_unchanged raises error in detached HEAD state.""" - git_project = GitProject(directory=tmp_path) - - mock_repo = MagicMock() # Simulate detached HEAD by having active_branch raise TypeError # This is what GitPython does when in a detached HEAD state type(mock_repo).active_branch = PropertyMock(side_effect=TypeError("detached HEAD")) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) + # Should raise DetachedHeadGitError + with pytest.raises(DetachedHeadGitError, match="detached HEAD state"): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) - # Should raise DetachedHeadGitError - with pytest.raises(DetachedHeadGitError, match="detached HEAD state"): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) - -def test_verify_upstream_unchanged_fetch_fails(tmp_path: Path, mock_repo: MagicMock): +def test_verify_upstream_unchanged_fetch_fails( + mock_gitproject: GitProject, mock_repo: RepoMock +): """Test that verify_upstream_unchanged raises GitFetchError when fetch fails.""" - git_project = GitProject(directory=tmp_path) - # Mock fetch to raise an error mock_repo.remotes["origin"].fetch = MagicMock( side_effect=GitCommandError("fetch", "error") ) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - with pytest.raises(GitFetchError, match="Failed to fetch from remote"): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + with pytest.raises(GitFetchError, match="Failed to fetch from remote"): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) def test_verify_upstream_unchanged_upstream_sha_fails( - tmp_path: Path, mock_repo: MagicMock + mock_gitproject: GitProject, mock_repo: RepoMock ): """Test that verify_upstream_unchanged raises error when upstream SHA cannot be determined.""" - git_project = GitProject(directory=tmp_path) - # Mock refs to raise AttributeError (simulating missing branch) 
mock_repo.remotes["origin"].refs = MagicMock() mock_repo.remotes["origin"].refs.__getitem__ = MagicMock( side_effect=AttributeError("No such ref") ) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - with pytest.raises( - GitFetchError, match="Unable to determine upstream branch SHA" - ): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + with pytest.raises(GitFetchError, match="Unable to determine upstream branch SHA"): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) def test_verify_upstream_unchanged_local_ref_sha_fails( - tmp_path: Path, mock_repo: MagicMock + mock_gitproject: GitProject, mock_repo: RepoMock ): """Test that verify_upstream_unchanged raises error when local ref SHA cannot be determined.""" - git_project = GitProject(directory=tmp_path) - # Mock git operations - rev_parse fails mock_repo.git.rev_parse = MagicMock( side_effect=GitCommandError("rev-parse", "error") ) - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - with pytest.raises( - LocalGitError, - match="Unable to determine the SHA for local ref", - ): - git_project.verify_upstream_unchanged(local_ref="HEAD", noop=False) + with pytest.raises( + LocalGitError, + match="Unable to determine the SHA for local ref", + ): + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD", noop=False) def test_verify_upstream_unchanged_with_custom_ref( - tmp_path: Path, mock_repo: MagicMock + mock_gitproject: GitProject, mock_repo: RepoMock ): """Test that verify_upstream_unchanged works with a custom ref like HEAD~1.""" - git_project = GitProject(directory=tmp_path) - - # Mock Repo as a context manager - with patch("semantic_release.gitproject.Repo") as mock_repo_class: - mock_repo_class.return_value.__enter__ = MagicMock(return_value=mock_repo) - mock_repo_class.return_value.__exit__ = MagicMock(return_value=False) - - # Should not raise an exception - git_project.verify_upstream_unchanged(local_ref="HEAD~1", noop=False) + # Should not raise an exception + mock_gitproject.verify_upstream_unchanged(local_ref="HEAD~1", noop=False) # Verify rev_parse was called with custom ref mock_repo.git.rev_parse.assert_called_once_with("HEAD~1") + + +def test_is_shallow_clone_true(mock_gitproject: GitProject, tmp_path: Path) -> None: + """Test is_shallow_clone returns True when shallow file exists.""" + # Create a shallow file + shallow_file = tmp_path / ".git" / "shallow" + shallow_file.parent.mkdir(parents=True, exist_ok=True) + shallow_file.touch() + + assert mock_gitproject.is_shallow_clone() is True + + +def test_is_shallow_clone_false(mock_gitproject: GitProject, tmp_path: Path) -> None: + """Test is_shallow_clone returns False when shallow file does not exist.""" + # Ensure shallow file does not exist + shallow_file = tmp_path / ".git" / "shallow" + if shallow_file.exists(): + shallow_file.unlink() + + assert mock_gitproject.is_shallow_clone() is False + + +def test_git_unshallow_success( + mock_gitproject: GitProject, mock_repo: RepoMock +) -> None: + """Test git_unshallow successfully unshallows a repository.""" + mock_gitproject.git_unshallow(noop=False) + 
mock_repo.git.fetch.assert_called_once_with("--unshallow") + + +def test_git_unshallow_noop(mock_gitproject: GitProject, mock_repo: RepoMock) -> None: + """Test git_unshallow in noop mode does not execute the command.""" + mock_gitproject.git_unshallow(noop=True) + mock_repo.git.fetch.assert_not_called() + + +def test_git_unshallow_already_complete( + mock_gitproject: GitProject, mock_repo: RepoMock +) -> None: + """Test git_unshallow handles already-complete repository gracefully.""" + # Simulate error from git when repo is already complete + error_msg = "fatal: --unshallow on a complete repository does not make sense" + mock_repo.git.fetch.side_effect = GitCommandError( + "fetch", status=128, stderr=error_msg + ) + + # Should not raise an exception + mock_gitproject.git_unshallow(noop=False) + + +def test_git_unshallow_other_error( + mock_gitproject: GitProject, mock_repo: RepoMock +) -> None: + """Test git_unshallow raises exception for other errors.""" + # Simulate a different error + error_msg = "fatal: some other error" + mock_repo.git.fetch.side_effect = GitCommandError( + "fetch", status=128, stderr=error_msg + ) + + # Should raise the exception + with pytest.raises(GitCommandError): + mock_gitproject.git_unshallow(noop=False)