From a937eeb859be244d1eb147defb2fa41216276ab6 Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:20:43 -0800 Subject: [PATCH 1/8] fix(releasekit): fix git push argument order for --set-upstream The command was 'git push origin --set-upstream' which is invalid. Correct syntax: 'git push --set-upstream origin '. The --set-upstream flag must come before the remote, and the branch name must be explicitly specified as a refspec. --- py/tools/releasekit/src/releasekit/backends/vcs/git.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/git.py b/py/tools/releasekit/src/releasekit/backends/vcs/git.py index 293ce52844..0737175b01 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/git.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/git.py @@ -227,9 +227,15 @@ async def push( dry_run: bool = False, ) -> CommandResult: """Push commits and/or tags.""" - cmd_parts = ['push', remote] + cmd_parts = ['push'] if set_upstream: cmd_parts.append('--set-upstream') + cmd_parts.append(remote) + if set_upstream: + # --set-upstream requires an explicit refspec (branch name). + branch = await self.current_branch() + if branch: + cmd_parts.append(branch) if tags: cmd_parts.append('--tags') log.info('push', remote=remote, tags=tags, set_upstream=set_upstream) From 6dc022f43cc6385f7b16028272e393c976509e26 Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:29:23 -0800 Subject: [PATCH 2/8] fix(releasekit): fix multiple pipeline issues found in audit 1. git.py: restructure push() per review feedback; skip --set-upstream for tag-only pushes (--set-upstream + --tags conflict) 2. github.py: add url to list_prs --json fields; use --body-file for create_pr to avoid shell argument size limits 3. github_api.py: add url to list_prs results; extract html_url from create_pr response instead of returning raw JSON 4. prepare.py: fall back to bootstrap_sha when per-package tag doesn't exist (bootstrapping issue on first release) 5. tags.py: make tag push failure fatal (raise RuntimeError) 6. commitback.py: make push failure fatal (raise RuntimeError) All 1301 tests pass (83% coverage). --- .../src/releasekit/backends/forge/github.py | 18 +++++++++-- .../releasekit/backends/forge/github_api.py | 12 ++++++- .../src/releasekit/backends/vcs/git.py | 13 ++++---- .../releasekit/src/releasekit/commitback.py | 16 +++++----- py/tools/releasekit/src/releasekit/prepare.py | 4 +++ py/tools/releasekit/src/releasekit/tags.py | 31 ++++++++----------- .../tests/rk_backends_forge_github_test.py | 2 +- py/tools/releasekit/tests/rk_tags_test.py | 19 ++++++------ 8 files changed, 69 insertions(+), 46 deletions(-) diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github.py b/py/tools/releasekit/src/releasekit/backends/forge/github.py index a5ec060263..cdd9363d95 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github.py @@ -28,6 +28,7 @@ import asyncio import json import shutil +import tempfile from pathlib import Path from typing import Any @@ -181,10 +182,21 @@ async def create_pr( ) -> CommandResult: """Create a GitHub Pull Request.""" cmd_parts = ['pr', 'create', '--title', title, '--head', head, '--base', base] - if body: - cmd_parts.extend(['--body', body]) log.info('create_pr', title=title, head=head, base=base) + if body: + # Use --body-file to avoid shell argument size limits with large + # PR bodies (e.g. 
60+ package changelogs + embedded manifest). + with tempfile.NamedTemporaryFile( + mode='w', suffix='.md', delete=False, encoding='utf-8', + ) as f: + f.write(body) + body_file = f.name + try: + cmd_parts.extend(['--body-file', body_file]) + return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) + finally: + Path(body_file).unlink(missing_ok=True) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: @@ -223,7 +235,7 @@ async def list_prs( '--limit', str(limit), '--json', - 'number,title,state,labels,headRefName,mergeCommit', + 'number,title,state,labels,headRefName,mergeCommit,url', ] if label: cmd_parts.extend(['--label', label]) diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github_api.py b/py/tools/releasekit/src/releasekit/backends/forge/github_api.py index eeaeb5c9fe..2b6caf2729 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github_api.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github_api.py @@ -348,10 +348,19 @@ async def create_pr( response = await request_with_retry(client, 'POST', url, json=payload) log.info('create_pr', title=title, status=response.status_code) + # Extract html_url from response so stdout matches gh CLI behavior + # (prepare.py expects stdout to be the PR URL). + stdout = response.text + if response.is_success: + try: + pr_data = response.json() + stdout = pr_data.get('html_url', response.text) + except (ValueError, json.JSONDecodeError): + pass return CommandResult( command=['POST', url], returncode=0 if response.is_success else response.status_code, - stdout=response.text, + stdout=stdout, stderr='' if response.is_success else response.text, ) @@ -447,6 +456,7 @@ async def list_prs( 'number': pr.get('number', 0), 'title': pr.get('title', ''), 'state': pr.get('state', ''), + 'url': pr.get('html_url', ''), 'labels': pr_labels, 'headRefName': pr.get('head', {}).get('ref', ''), 'mergeCommit': { diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/git.py b/py/tools/releasekit/src/releasekit/backends/vcs/git.py index 0737175b01..032e3b4fb7 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/git.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/git.py @@ -228,14 +228,15 @@ async def push( ) -> CommandResult: """Push commits and/or tags.""" cmd_parts = ['push'] - if set_upstream: + # --set-upstream is only meaningful for branch pushes, not tag-only pushes. + branch_refspec: str = '' + if set_upstream and not tags: cmd_parts.append('--set-upstream') - cmd_parts.append(remote) - if set_upstream: # --set-upstream requires an explicit refspec (branch name). 
- branch = await self.current_branch() - if branch: - cmd_parts.append(branch) + branch_refspec = await self.current_branch() + cmd_parts.append(remote) + if branch_refspec: + cmd_parts.append(branch_refspec) if tags: cmd_parts.append('--tags') log.info('push', remote=remote, tags=tags, set_upstream=set_upstream) diff --git a/py/tools/releasekit/src/releasekit/commitback.py b/py/tools/releasekit/src/releasekit/commitback.py index ba3bfd7617..34229c4936 100644 --- a/py/tools/releasekit/src/releasekit/commitback.py +++ b/py/tools/releasekit/src/releasekit/commitback.py @@ -213,14 +213,14 @@ async def create_commitback_pr( return result commit_msg = f'chore: bump to next dev version after {umbrella_version}' - try: - await vcs.commit(commit_msg, dry_run=dry_run) - await vcs.push(remote='origin', dry_run=dry_run) - logger.info('commitback_pushed', branch=branch_name) - except Exception as exc: - result.errors.append(f'Push failed: {exc}') - logger.error('commitback_push_error', error=str(exc)) - return result + await vcs.commit(commit_msg, dry_run=dry_run) + push_result = await vcs.push(remote='origin', dry_run=dry_run) + if not push_result.ok: + raise RuntimeError( + f'Failed to push commit-back branch {branch_name!r}: ' + f'{push_result.stderr.strip()}' + ) + logger.info('commitback_pushed', branch=branch_name) if forge is not None and hasattr(forge, 'is_available') and await forge.is_available(): try: diff --git a/py/tools/releasekit/src/releasekit/prepare.py b/py/tools/releasekit/src/releasekit/prepare.py index c9dec98aac..b2126276ac 100644 --- a/py/tools/releasekit/src/releasekit/prepare.py +++ b/py/tools/releasekit/src/releasekit/prepare.py @@ -306,6 +306,10 @@ async def prepare_release( pkg_paths = _package_paths(packages) for ver in bumped: since_tag = format_tag(ws_config.tag_format, name=ver.name, version=ver.old_version, label=ws_config.label) + # Fall back to bootstrap_sha (or None for full history) when the + # per-package tag doesn't exist yet — e.g. on the very first release. + if not await vcs.tag_exists(since_tag): + since_tag = ws_config.bootstrap_sha or None changelog = await generate_changelog( vcs=vcs, version=ver.new_version, diff --git a/py/tools/releasekit/src/releasekit/tags.py b/py/tools/releasekit/src/releasekit/tags.py index 206e9f1b2f..f43fca0f76 100644 --- a/py/tools/releasekit/src/releasekit/tags.py +++ b/py/tools/releasekit/src/releasekit/tags.py @@ -361,25 +361,20 @@ async def create_tags( ) if result.created and not result.failed: - try: - await vcs.push(tags=True, dry_run=dry_run) - # Mutate the frozen dataclass via object.__setattr__ for - # the pushed flag — TagResult is frozen for safety but we - # need to set this after the push succeeds. - object.__setattr__(result, 'pushed', True) - logger.info( - 'tags_pushed', - count=len(result.created), - tags=result.created, - ) - except Exception as exc: - # Tag push failure is non-fatal — tags exist locally and - # can be pushed manually. - logger.error( - 'tags_push_failed', - error=str(exc), - hint='Tags were created locally. Push manually with: git push --tags', + push_result = await vcs.push(tags=True, dry_run=dry_run) + if not push_result.ok: + raise RuntimeError( + f'Failed to push tags to remote: {push_result.stderr.strip()}' ) + # Mutate the frozen dataclass via object.__setattr__ for + # the pushed flag — TagResult is frozen for safety but we + # need to set this after the push succeeds. 
+ object.__setattr__(result, 'pushed', True) + logger.info( + 'tags_pushed', + count=len(result.created), + tags=result.created, + ) await _create_release_if_available( forge=forge, diff --git a/py/tools/releasekit/tests/rk_backends_forge_github_test.py b/py/tools/releasekit/tests/rk_backends_forge_github_test.py index bba305d1d4..5189e8f821 100644 --- a/py/tools/releasekit/tests/rk_backends_forge_github_test.py +++ b/py/tools/releasekit/tests/rk_backends_forge_github_test.py @@ -185,7 +185,7 @@ async def test_basic(self, gh: GitHubCLIBackend) -> None: args = m.call_args[0] assert 'pr' in args assert 'create' in args - assert '--body' in args + assert '--body-file' in args class TestPRData: diff --git a/py/tools/releasekit/tests/rk_tags_test.py b/py/tools/releasekit/tests/rk_tags_test.py index feeb3195eb..bec577966b 100644 --- a/py/tools/releasekit/tests/rk_tags_test.py +++ b/py/tools/releasekit/tests/rk_tags_test.py @@ -96,7 +96,12 @@ async def push( ) -> CommandResult: """Push tags (records for assertion).""" if self.push_error: - raise RuntimeError(self.push_error) + return CommandResult( + command=['git', 'push'], + returncode=128, + stdout='', + stderr=self.push_error, + ) self.push_calls.append({'tags': tags, 'remote': remote}) return _OK @@ -476,17 +481,13 @@ async def test_tag_error_recorded(self) -> None: raise AssertionError(f'Expected genkit-v0.5.0 in failed: {result.failed}') @pytest.mark.asyncio - async def test_push_error_non_fatal(self) -> None: - """Push failure is non-fatal — tags exist locally.""" + async def test_push_error_is_fatal(self) -> None: + """Push failure raises RuntimeError — fail fast.""" manifest = _make_manifest('genkit') vcs = FakeVCS(push_error='Network error') - result = await create_tags(manifest=manifest, vcs=vcs) - - if 'genkit-v0.5.0' not in result.created: - raise AssertionError(f'Expected genkit-v0.5.0 created: {result.created}') - if result.pushed: - raise AssertionError('Expected pushed=False after push error') + with pytest.raises(RuntimeError, match='Failed to push tags'): + await create_tags(manifest=manifest, vcs=vcs) @pytest.mark.asyncio async def test_no_push_when_failures_exist(self) -> None: From ba942d757334c96a1ef669fc185865505660b7d6 Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:34:35 -0800 Subject: [PATCH 3/8] fix(releasekit): fix 7 remaining issues from command audit MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit R1: github.py update_pr — use --body-file to avoid shell arg size limits R2: git.py commit — fix git add -A logic (don't run add -A in dry_run) R3: git.py delete_tag — return remote push failure instead of swallowing R4: gitlab.py create_pr — use temp file for large MR descriptions R5: bitbucket.py list_prs — add missing url field R6: gitlab.py list_prs — add missing url field R7: pnpm.py lock — don't discard check_only when upgrade_package is set All 1301 tests pass (83% coverage). 
--- .../src/releasekit/backends/forge/bitbucket.py | 1 + .../src/releasekit/backends/forge/github.py | 15 +++++++++++++-- .../src/releasekit/backends/forge/gitlab.py | 18 +++++++++++++++--- .../src/releasekit/backends/pm/pnpm.py | 10 +++++----- .../src/releasekit/backends/vcs/git.py | 13 ++++++++----- .../releasekit/tests/backends/rk_forge_test.py | 2 +- .../tests/rk_backends_forge_github_test.py | 2 +- 7 files changed, 44 insertions(+), 17 deletions(-) diff --git a/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py b/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py index f7013375b8..f946a3aef7 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py @@ -433,6 +433,7 @@ async def list_prs( 'number': pr.get('id', 0), 'title': pr_title, 'state': pr.get('state', '').lower(), + 'url': pr.get('links', {}).get('html', {}).get('href', ''), 'labels': [], # Bitbucket PRs don't have labels. 'headRefName': source_branch, 'mergeCommit': { diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github.py b/py/tools/releasekit/src/releasekit/backends/forge/github.py index cdd9363d95..a732cfa28f 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github.py @@ -294,10 +294,21 @@ async def update_pr( cmd_parts = ['pr', 'edit', str(pr_number)] if title: cmd_parts.extend(['--title', title]) - if body: - cmd_parts.extend(['--body', body]) log.info('update_pr', pr=pr_number, has_title=bool(title), has_body=bool(body)) + if body: + # Use --body-file to avoid shell argument size limits with large + # PR bodies (e.g. 60+ package changelogs + embedded manifest). + with tempfile.NamedTemporaryFile( + mode='w', suffix='.md', delete=False, encoding='utf-8', + ) as f: + f.write(body) + body_file = f.name + try: + cmd_parts.extend(['--body-file', body_file]) + return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) + finally: + Path(body_file).unlink(missing_ok=True) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def merge_pr( diff --git a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py index 6d6c6b3b24..5cff5b74f6 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py @@ -34,6 +34,7 @@ import asyncio import json import shutil +import tempfile from pathlib import Path from typing import Any @@ -204,10 +205,20 @@ async def create_pr( base, '--remove-source-branch', ] - if body: - cmd_parts.extend(['--description', body]) - log.info('create_mr', title=title, head=head, base=base) + if body: + # Use a temp file to avoid shell argument size limits with large + # MR descriptions (e.g. 60+ package changelogs + embedded manifest). 
+ with tempfile.NamedTemporaryFile( + mode='w', suffix='.md', delete=False, encoding='utf-8', + ) as f: + f.write(body) + body_file = f.name + try: + cmd_parts.extend(['--description', f'@{body_file}']) + return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) + finally: + Path(body_file).unlink(missing_ok=True) return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: @@ -292,6 +303,7 @@ async def list_prs( 'number': mr.get('iid', 0), 'title': mr.get('title', ''), 'state': mr.get('state', ''), + 'url': mr.get('web_url', ''), 'labels': mr.get('labels', []), 'headRefName': mr.get('source_branch', ''), 'mergeCommit': {'oid': mr.get('merge_commit_sha', '')}, diff --git a/py/tools/releasekit/src/releasekit/backends/pm/pnpm.py b/py/tools/releasekit/src/releasekit/backends/pm/pnpm.py index cd46bc3083..5a72439d3d 100644 --- a/py/tools/releasekit/src/releasekit/backends/pm/pnpm.py +++ b/py/tools/releasekit/src/releasekit/backends/pm/pnpm.py @@ -152,15 +152,15 @@ async def lock( See: https://pnpm.io/cli/install#--lockfile-only See: https://pnpm.io/cli/install#--frozen-lockfile """ - if check_only: + if upgrade_package: + # pnpm update is the equivalent of upgrading a single dep. + # Note: check_only is not applicable with upgrade_package. + cmd = ['pnpm', 'update', upgrade_package] + elif check_only: cmd = ['pnpm', 'install', '--frozen-lockfile'] else: cmd = ['pnpm', 'install', '--lockfile-only'] - if upgrade_package: - # pnpm update is the equivalent. - cmd = ['pnpm', 'update', upgrade_package] - effective_cwd = cwd or self._root log.info('lock', check_only=check_only, upgrade_package=upgrade_package) return await asyncio.to_thread(run_command, cmd, cwd=effective_cwd, dry_run=dry_run) diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/git.py b/py/tools/releasekit/src/releasekit/backends/vcs/git.py index 032e3b4fb7..83bb174a14 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/git.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/git.py @@ -166,10 +166,11 @@ async def commit( dry_run: bool = False, ) -> CommandResult: """Create a commit, staging specified paths first.""" - if paths and not dry_run: - await asyncio.to_thread(self._git, 'add', *paths) - else: - await asyncio.to_thread(self._git, 'add', '-A', dry_run=dry_run) + if paths: + if not dry_run: + await asyncio.to_thread(self._git, 'add', *paths) + elif not dry_run: + await asyncio.to_thread(self._git, 'add', '-A') log.info('commit', message=message[:80]) return await asyncio.to_thread(self._git, 'commit', '-m', message, dry_run=dry_run) @@ -209,13 +210,15 @@ async def delete_tag( """Delete a tag locally and optionally on the remote.""" result = await asyncio.to_thread(self._git, 'tag', '-d', tag_name, dry_run=dry_run) if remote and result.ok: - await asyncio.to_thread( + remote_result = await asyncio.to_thread( self._git, 'push', 'origin', f':refs/tags/{tag_name}', dry_run=dry_run, ) + if not remote_result.ok: + return remote_result return result async def push( diff --git a/py/tools/releasekit/tests/backends/rk_forge_test.py b/py/tools/releasekit/tests/backends/rk_forge_test.py index 72f90d3bfd..e9e6cc958f 100644 --- a/py/tools/releasekit/tests/backends/rk_forge_test.py +++ b/py/tools/releasekit/tests/backends/rk_forge_test.py @@ -129,4 +129,4 @@ async def test_update_pr_dry_run(self, tmp_path: Path) -> None: assert result.ok assert result.dry_run assert '--title' in result.command - assert '--body' in result.command + assert 
'--body-file' in result.command diff --git a/py/tools/releasekit/tests/rk_backends_forge_github_test.py b/py/tools/releasekit/tests/rk_backends_forge_github_test.py index 5189e8f821..8b89866f27 100644 --- a/py/tools/releasekit/tests/rk_backends_forge_github_test.py +++ b/py/tools/releasekit/tests/rk_backends_forge_github_test.py @@ -272,7 +272,7 @@ async def test_update(self, gh: GitHubCLIBackend) -> None: await gh.update_pr(42, title='New title', body='New body') args = m.call_args[0] assert '--title' in args - assert '--body' in args + assert '--body-file' in args class TestMergePR: From bcb71237fb98fc518fb92d98620002c5953d2f5e Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:36:48 -0800 Subject: [PATCH 4/8] fix(releasekit): fix ASYNC240 lint errors in forge backends Replace Path.unlink() with os.unlink() in async functions to avoid blocking pathlib calls flagged by the ASYNC240 linter rule. --- .../src/releasekit/backends/forge/github.py | 15 +++++++++++---- .../src/releasekit/backends/forge/gitlab.py | 8 ++++++-- py/tools/releasekit/src/releasekit/commitback.py | 5 +---- py/tools/releasekit/src/releasekit/tags.py | 4 +--- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github.py b/py/tools/releasekit/src/releasekit/backends/forge/github.py index a732cfa28f..7ca71495de 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github.py @@ -27,6 +27,7 @@ import asyncio import json +import os import shutil import tempfile from pathlib import Path @@ -188,7 +189,10 @@ async def create_pr( # Use --body-file to avoid shell argument size limits with large # PR bodies (e.g. 60+ package changelogs + embedded manifest). with tempfile.NamedTemporaryFile( - mode='w', suffix='.md', delete=False, encoding='utf-8', + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', ) as f: f.write(body) body_file = f.name @@ -196,7 +200,7 @@ async def create_pr( cmd_parts.extend(['--body-file', body_file]) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) finally: - Path(body_file).unlink(missing_ok=True) + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: @@ -300,7 +304,10 @@ async def update_pr( # Use --body-file to avoid shell argument size limits with large # PR bodies (e.g. 60+ package changelogs + embedded manifest). 
with tempfile.NamedTemporaryFile( - mode='w', suffix='.md', delete=False, encoding='utf-8', + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', ) as f: f.write(body) body_file = f.name @@ -308,7 +315,7 @@ async def update_pr( cmd_parts.extend(['--body-file', body_file]) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) finally: - Path(body_file).unlink(missing_ok=True) + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def merge_pr( diff --git a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py index 5cff5b74f6..c6595cf5d1 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py @@ -33,6 +33,7 @@ import asyncio import json +import os import shutil import tempfile from pathlib import Path @@ -210,7 +211,10 @@ async def create_pr( # Use a temp file to avoid shell argument size limits with large # MR descriptions (e.g. 60+ package changelogs + embedded manifest). with tempfile.NamedTemporaryFile( - mode='w', suffix='.md', delete=False, encoding='utf-8', + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', ) as f: f.write(body) body_file = f.name @@ -218,7 +222,7 @@ async def create_pr( cmd_parts.extend(['--description', f'@{body_file}']) return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) finally: - Path(body_file).unlink(missing_ok=True) + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: diff --git a/py/tools/releasekit/src/releasekit/commitback.py b/py/tools/releasekit/src/releasekit/commitback.py index 34229c4936..937852689c 100644 --- a/py/tools/releasekit/src/releasekit/commitback.py +++ b/py/tools/releasekit/src/releasekit/commitback.py @@ -216,10 +216,7 @@ async def create_commitback_pr( await vcs.commit(commit_msg, dry_run=dry_run) push_result = await vcs.push(remote='origin', dry_run=dry_run) if not push_result.ok: - raise RuntimeError( - f'Failed to push commit-back branch {branch_name!r}: ' - f'{push_result.stderr.strip()}' - ) + raise RuntimeError(f'Failed to push commit-back branch {branch_name!r}: {push_result.stderr.strip()}') logger.info('commitback_pushed', branch=branch_name) if forge is not None and hasattr(forge, 'is_available') and await forge.is_available(): diff --git a/py/tools/releasekit/src/releasekit/tags.py b/py/tools/releasekit/src/releasekit/tags.py index f43fca0f76..f2b207b134 100644 --- a/py/tools/releasekit/src/releasekit/tags.py +++ b/py/tools/releasekit/src/releasekit/tags.py @@ -363,9 +363,7 @@ async def create_tags( if result.created and not result.failed: push_result = await vcs.push(tags=True, dry_run=dry_run) if not push_result.ok: - raise RuntimeError( - f'Failed to push tags to remote: {push_result.stderr.strip()}' - ) + raise RuntimeError(f'Failed to push tags to remote: {push_result.stderr.strip()}') # Mutate the frozen dataclass via object.__setattr__ for # the pushed flag — TagResult is frozen for safety but we # need to set this after the push succeeds. 
From d727200ce84fe39f14991ca41dc02694b626c4e8 Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:39:31 -0800 Subject: [PATCH 5/8] test(releasekit): add git integration tests exercising real commands 20 tests against real git repos (with bare remote): - push --set-upstream on new branch - push --tags (verifies no --set-upstream conflict) - push set_upstream=False - checkout_branch create/switch - current_branch - tag, tag_exists, list_tags, list_tags with pattern - delete_tag local and remote (verifies remote failure propagation) - log with since_tag, paths, max_commits - diff_files since_tag - commit with paths (partial stage), without paths (git add -A), dry_run - is_shallow, default_branch All 1321 tests pass (83% coverage). --- .../tests/backends/rk_vcs_integration_test.py | 379 ++++++++++++++++++ 1 file changed, 379 insertions(+) create mode 100644 py/tools/releasekit/tests/backends/rk_vcs_integration_test.py diff --git a/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py new file mode 100644 index 0000000000..f85a5eb904 --- /dev/null +++ b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py @@ -0,0 +1,379 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Integration tests for GitCLIBackend against real git repos. + +These tests create real git repositories (with a bare repo as "remote") +and exercise every command that GitCLIBackend constructs, verifying +argument order, flag correctness, and error propagation. + +The tests do NOT touch the network — the "remote" is a local bare repo. +""" + +from __future__ import annotations + +from pathlib import Path + +import pytest +from releasekit.backends._run import run_command +from releasekit.backends.vcs.git import GitCLIBackend +from releasekit.logging import configure_logging + +configure_logging(quiet=True) + + +def _init_repo_with_remote(tmp_path: Path) -> tuple[GitCLIBackend, Path, Path]: + """Create a git repo with a bare remote and one commit. + + Returns (backend, work_dir, bare_dir). + """ + bare = tmp_path / 'remote.git' + bare.mkdir() + run_command(['git', 'init', '--bare'], cwd=bare, check=True) + + work = tmp_path / 'work' + work.mkdir() + run_command(['git', 'init'], cwd=work, check=True) + run_command(['git', 'config', 'user.email', 'test@example.com'], cwd=work, check=True) + run_command(['git', 'config', 'user.name', 'Test User'], cwd=work, check=True) + run_command(['git', 'remote', 'add', 'origin', str(bare)], cwd=work, check=True) + + (work / 'README.md').write_text('# Test\n') + run_command(['git', 'add', '.'], cwd=work, check=True) + run_command(['git', 'commit', '-m', 'Initial commit'], cwd=work, check=True) + + # Push main so the remote has a branch. 
+ run_command(['git', 'push', '-u', 'origin', 'main'], cwd=work, check=True) + + return GitCLIBackend(repo_root=work), work, bare + + +# --------------------------------------------------------------------------- +# Push: branch with --set-upstream +# --------------------------------------------------------------------------- + + +class TestPushBranchSetUpstream: + """Test git push --set-upstream for new branches.""" + + @pytest.mark.asyncio + async def test_push_new_branch_set_upstream(self, tmp_path: Path) -> None: + """push() on a new branch should use --set-upstream origin .""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + # Create and switch to a new branch. + await backend.checkout_branch('release/v1.0', create=True) + (work / 'release.txt').write_text('release notes') + await backend.commit('release commit', paths=['release.txt']) + + # Push with set_upstream=True (default). + result = await backend.push() + assert result.ok, f'push failed: {result.stderr}' + + # Verify the branch exists on the remote. + ls = run_command( + ['git', 'ls-remote', '--heads', 'origin', 'release/v1.0'], + cwd=work, + ) + assert 'release/v1.0' in ls.stdout + + @pytest.mark.asyncio + async def test_push_set_upstream_false(self, tmp_path: Path) -> None: + """push(set_upstream=False) should push without --set-upstream.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + # We're on main which already has upstream. + (work / 'change.txt').write_text('change') + await backend.commit('another commit', paths=['change.txt']) + + result = await backend.push(set_upstream=False) + assert result.ok, f'push failed: {result.stderr}' + + +# --------------------------------------------------------------------------- +# Push: tags +# --------------------------------------------------------------------------- + + +class TestPushTags: + """Test git push --tags.""" + + @pytest.mark.asyncio + async def test_push_tags(self, tmp_path: Path) -> None: + """push(tags=True) should push tags to the remote.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0', message='Release v1.0.0') + result = await backend.push(tags=True) + assert result.ok, f'tag push failed: {result.stderr}' + + # Verify the tag exists on the remote. + ls = run_command( + ['git', 'ls-remote', '--tags', 'origin', 'v1.0.0'], + cwd=work, + ) + assert 'v1.0.0' in ls.stdout + + @pytest.mark.asyncio + async def test_push_tags_does_not_use_set_upstream(self, tmp_path: Path) -> None: + """push(tags=True) should NOT include --set-upstream.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v2.0.0', message='Release v2.0.0') + # This should succeed — --set-upstream is skipped for tag pushes. 
+ result = await backend.push(tags=True, set_upstream=True) + assert result.ok, f'tag push failed: {result.stderr}' + + +# --------------------------------------------------------------------------- +# Branch operations +# --------------------------------------------------------------------------- + + +class TestBranchOperations: + """Test checkout_branch and current_branch.""" + + @pytest.mark.asyncio + async def test_checkout_create_branch(self, tmp_path: Path) -> None: + """checkout_branch(create=True) should create and switch to a new branch.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + + result = await backend.checkout_branch('feat/new', create=True) + assert result.ok + + branch = await backend.current_branch() + assert branch == 'feat/new' + + @pytest.mark.asyncio + async def test_checkout_existing_branch(self, tmp_path: Path) -> None: + """checkout_branch() should switch to an existing branch.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + + await backend.checkout_branch('feat/x', create=True) + await backend.checkout_branch('main') + branch = await backend.current_branch() + assert branch == 'main' + + @pytest.mark.asyncio + async def test_current_branch_on_main(self, tmp_path: Path) -> None: + """current_branch() should return 'main' on the default branch.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + branch = await backend.current_branch() + assert branch == 'main' + + +# --------------------------------------------------------------------------- +# Tag operations +# --------------------------------------------------------------------------- + + +class TestTagOperations: + """Test tag, tag_exists, delete_tag, list_tags.""" + + @pytest.mark.asyncio + async def test_create_and_list_tags(self, tmp_path: Path) -> None: + """tag() + list_tags() should create and list tags.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + await backend.tag('v1.1.0') + await backend.tag('v2.0.0') + + tags = await backend.list_tags() + assert 'v1.0.0' in tags + assert 'v1.1.0' in tags + assert 'v2.0.0' in tags + + @pytest.mark.asyncio + async def test_list_tags_with_pattern(self, tmp_path: Path) -> None: + """list_tags(pattern=...) should filter tags.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + await backend.tag('v2.0.0') + await backend.tag('pkg-v1.0.0') + + tags = await backend.list_tags(pattern='v*') + assert 'v1.0.0' in tags + assert 'v2.0.0' in tags + assert 'pkg-v1.0.0' not in tags + + @pytest.mark.asyncio + async def test_delete_tag_local(self, tmp_path: Path) -> None: + """delete_tag() should remove a local tag.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + assert await backend.tag_exists('v1.0.0') + + result = await backend.delete_tag('v1.0.0') + assert result.ok + assert not await backend.tag_exists('v1.0.0') + + @pytest.mark.asyncio + async def test_delete_tag_remote(self, tmp_path: Path) -> None: + """delete_tag(remote=True) should remove from both local and remote.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + await backend.push(tags=True) + + # Verify tag is on remote. + ls = run_command(['git', 'ls-remote', '--tags', 'origin', 'v1.0.0'], cwd=work) + assert 'v1.0.0' in ls.stdout + + # Delete locally and remotely. + result = await backend.delete_tag('v1.0.0', remote=True) + assert result.ok + + # Verify tag is gone from remote. 
+ ls = run_command(['git', 'ls-remote', '--tags', 'origin', 'v1.0.0'], cwd=work) + assert 'v1.0.0' not in ls.stdout + + +# --------------------------------------------------------------------------- +# Log and diff_files +# --------------------------------------------------------------------------- + + +class TestLogAndDiff: + """Test log() and diff_files() with since_tag.""" + + @pytest.mark.asyncio + async def test_log_since_tag(self, tmp_path: Path) -> None: + """log(since_tag=...) should only return commits after the tag.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + + (work / 'file1.txt').write_text('one') + await backend.commit('feat: add file1', paths=['file1.txt']) + (work / 'file2.txt').write_text('two') + await backend.commit('feat: add file2', paths=['file2.txt']) + + lines = await backend.log(since_tag='v1.0.0') + assert len(lines) == 2 + assert any('file1' in line for line in lines) + assert any('file2' in line for line in lines) + + @pytest.mark.asyncio + async def test_log_with_paths(self, tmp_path: Path) -> None: + """log(paths=...) should filter to commits touching those paths.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + (work / 'a.txt').write_text('a') + await backend.commit('add a', paths=['a.txt']) + (work / 'b.txt').write_text('b') + await backend.commit('add b', paths=['b.txt']) + + lines = await backend.log(paths=['a.txt']) + assert len(lines) == 1 + assert 'add a' in lines[0] + + @pytest.mark.asyncio + async def test_log_max_commits(self, tmp_path: Path) -> None: + """log(max_commits=N) should limit output.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + for i in range(5): + (work / f'f{i}.txt').write_text(str(i)) + await backend.commit(f'commit {i}', paths=[f'f{i}.txt']) + + lines = await backend.log(max_commits=2) + assert len(lines) == 2 + + @pytest.mark.asyncio + async def test_diff_files_since_tag(self, tmp_path: Path) -> None: + """diff_files(since_tag=...) should return changed files.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + await backend.tag('v1.0.0') + + (work / 'new.txt').write_text('new') + await backend.commit('add new', paths=['new.txt']) + + files = await backend.diff_files(since_tag='v1.0.0') + assert 'new.txt' in files + + +# --------------------------------------------------------------------------- +# Commit edge cases +# --------------------------------------------------------------------------- + + +class TestCommitEdgeCases: + """Test commit() with various path/dry_run combinations.""" + + @pytest.mark.asyncio + async def test_commit_with_paths(self, tmp_path: Path) -> None: + """commit(paths=[...]) should only stage specified files.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + (work / 'staged.txt').write_text('staged') + (work / 'unstaged.txt').write_text('unstaged') + + result = await backend.commit('partial commit', paths=['staged.txt']) + assert result.ok + + # unstaged.txt should still be untracked. 
+ assert not await backend.is_clean() + + @pytest.mark.asyncio + async def test_commit_without_paths_stages_all(self, tmp_path: Path) -> None: + """commit(paths=None) should stage everything via git add -A.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + (work / 'a.txt').write_text('a') + (work / 'b.txt').write_text('b') + + result = await backend.commit('stage all') + assert result.ok + assert await backend.is_clean() + + @pytest.mark.asyncio + async def test_commit_dry_run_does_not_stage(self, tmp_path: Path) -> None: + """commit(dry_run=True) should not stage or commit anything.""" + backend, work, _ = _init_repo_with_remote(tmp_path) + + (work / 'dirty.txt').write_text('dirty') + + result = await backend.commit('dry run commit', dry_run=True) + assert result.dry_run + + # File should still be untracked. + assert not await backend.is_clean() + + +# --------------------------------------------------------------------------- +# is_shallow, default_branch +# --------------------------------------------------------------------------- + + +class TestRepoMetadata: + """Test is_shallow() and default_branch().""" + + @pytest.mark.asyncio + async def test_not_shallow(self, tmp_path: Path) -> None: + """A locally-created repo should not be shallow.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + assert not await backend.is_shallow() + + @pytest.mark.asyncio + async def test_default_branch(self, tmp_path: Path) -> None: + """default_branch() should detect 'main'.""" + backend, _, _ = _init_repo_with_remote(tmp_path) + branch = await backend.default_branch() + assert branch == 'main' From fbc12dd7f1d2338307816f7ac05f89bc1afd967b Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:42:20 -0800 Subject: [PATCH 6/8] style(releasekit): remove section marker banners per GEMINI.md guidelines --- .../tests/backends/rk_vcs_integration_test.py | 35 ------------------- 1 file changed, 35 deletions(-) diff --git a/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py index f85a5eb904..f5299e04b2 100644 --- a/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py +++ b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py @@ -61,11 +61,6 @@ def _init_repo_with_remote(tmp_path: Path) -> tuple[GitCLIBackend, Path, Path]: return GitCLIBackend(repo_root=work), work, bare -# --------------------------------------------------------------------------- -# Push: branch with --set-upstream -# --------------------------------------------------------------------------- - - class TestPushBranchSetUpstream: """Test git push --set-upstream for new branches.""" @@ -103,11 +98,6 @@ async def test_push_set_upstream_false(self, tmp_path: Path) -> None: assert result.ok, f'push failed: {result.stderr}' -# --------------------------------------------------------------------------- -# Push: tags -# --------------------------------------------------------------------------- - - class TestPushTags: """Test git push --tags.""" @@ -138,11 +128,6 @@ async def test_push_tags_does_not_use_set_upstream(self, tmp_path: Path) -> None assert result.ok, f'tag push failed: {result.stderr}' -# --------------------------------------------------------------------------- -# Branch operations -# --------------------------------------------------------------------------- - - class TestBranchOperations: """Test checkout_branch and current_branch.""" @@ -175,11 +160,6 @@ async def test_current_branch_on_main(self, 
tmp_path: Path) -> None: assert branch == 'main' -# --------------------------------------------------------------------------- -# Tag operations -# --------------------------------------------------------------------------- - - class TestTagOperations: """Test tag, tag_exists, delete_tag, list_tags.""" @@ -244,11 +224,6 @@ async def test_delete_tag_remote(self, tmp_path: Path) -> None: assert 'v1.0.0' not in ls.stdout -# --------------------------------------------------------------------------- -# Log and diff_files -# --------------------------------------------------------------------------- - - class TestLogAndDiff: """Test log() and diff_files() with since_tag.""" @@ -309,11 +284,6 @@ async def test_diff_files_since_tag(self, tmp_path: Path) -> None: assert 'new.txt' in files -# --------------------------------------------------------------------------- -# Commit edge cases -# --------------------------------------------------------------------------- - - class TestCommitEdgeCases: """Test commit() with various path/dry_run combinations.""" @@ -357,11 +327,6 @@ async def test_commit_dry_run_does_not_stage(self, tmp_path: Path) -> None: assert not await backend.is_clean() -# --------------------------------------------------------------------------- -# is_shallow, default_branch -# --------------------------------------------------------------------------- - - class TestRepoMetadata: """Test is_shallow() and default_branch().""" From 33be775abdcccc14b9d863c079dcabb061b43d42 Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Fri, 13 Feb 2026 17:51:16 -0800 Subject: [PATCH 7/8] test(releasekit): add integration tests for uv, pnpm, and gh backends 45 integration tests exercising real CLI commands on temp repos: - git (20): push, tags, branches, commit, log, diff, delete_tag - uv (7): build, lock, lock --check, version_bump + dry-run variants - pnpm (7): pack, lock, frozen-lockfile, npm version + dry-run variants - gh (11): release create, pr create/update/merge, labels (dry-run) Each test file uses shutil.which() skipif markers so tests warn and skip locally when a tool is missing, but run in CI where all tools are installed. CI workflow (python.yml) updated to install pnpm, node, and gh CLI in the tool-tests job. 
--- .github/workflows/python.yml | 19 ++ .../backends/rk_forge_gh_integration_test.py | 193 +++++++++++++++++ .../backends/rk_pm_pnpm_integration_test.py | 205 ++++++++++++++++++ .../backends/rk_pm_uv_integration_test.py | 197 +++++++++++++++++ .../tests/backends/rk_vcs_integration_test.py | 6 + 5 files changed, 620 insertions(+) create mode 100644 py/tools/releasekit/tests/backends/rk_forge_gh_integration_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_pnpm_integration_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_uv_integration_test.py diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index fd1007bfb9..c33902f0e9 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -289,6 +289,25 @@ jobs: enable-cache: true python-version: ${{ matrix.python-version }} + - uses: pnpm/action-setup@v4 + + - name: Set up Node + uses: actions/setup-node@v6 + with: + node-version: 20.x + cache: "pnpm" + + - name: Install gh CLI + run: | + (type -p wget >/dev/null || (sudo apt update && sudo apt-get install wget -y)) \ + && sudo mkdir -p -m 755 /etc/apt/keyrings \ + && out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + && cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \ + && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && sudo apt update \ + && sudo apt install gh -y + - name: Install tool dependencies run: | cd py/tools/${{ matrix.tool }} diff --git a/py/tools/releasekit/tests/backends/rk_forge_gh_integration_test.py b/py/tools/releasekit/tests/backends/rk_forge_gh_integration_test.py new file mode 100644 index 0000000000..116e88a52b --- /dev/null +++ b/py/tools/releasekit/tests/backends/rk_forge_gh_integration_test.py @@ -0,0 +1,193 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Integration tests for GitHubCLIBackend command construction. + +These tests verify that the ``gh`` CLI commands are constructed +correctly using dry-run mode. They require ``gh`` to be installed +but do NOT require authentication or network access. + +Dry-run mode returns a synthetic ``CommandResult`` with the full +command line, allowing us to verify argument order and flag +correctness without executing anything. +""" + +from __future__ import annotations + +import shutil +from pathlib import Path + +import pytest +from releasekit.backends.forge.github import GitHubCLIBackend +from releasekit.logging import configure_logging + +configure_logging(quiet=True) + +pytestmark = pytest.mark.skipif( + shutil.which('gh') is None, + reason='gh not found on PATH. 
Install gh: https://cli.github.com/', +) + + +class TestCreateReleaseDryRun: + """Test gh release create command construction.""" + + @pytest.mark.asyncio + async def test_basic_release(self, tmp_path: Path) -> None: + """create_release() should produce correct gh release create command.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.create_release('v1.0.0', title='Release v1.0.0', dry_run=True) + assert result.ok + cmd = result.command + assert cmd[:2] == ['gh', 'release'] + assert 'create' in cmd + assert 'v1.0.0' in cmd + assert '--title' in cmd + assert '--repo' in cmd + assert 'firebase/genkit' in cmd + + @pytest.mark.asyncio + async def test_draft_prerelease(self, tmp_path: Path) -> None: + """create_release(draft=True, prerelease=True) should include both flags.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.create_release( + 'v1.0.0-rc.1', + draft=True, + prerelease=True, + dry_run=True, + ) + assert '--draft' in result.command + assert '--prerelease' in result.command + + @pytest.mark.asyncio + async def test_generate_notes_when_no_body(self, tmp_path: Path) -> None: + """create_release() without body should use --generate-notes.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.create_release('v1.0.0', dry_run=True) + assert '--generate-notes' in result.command + + +class TestCreatePRDryRun: + """Test gh pr create command construction.""" + + @pytest.mark.asyncio + async def test_pr_with_body(self, tmp_path: Path) -> None: + """create_pr() with body should use --body-file.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.create_pr( + title='chore(release): v1.0.0', + body='## Release\n\nChangelog here.', + head='release/v1.0.0', + base='main', + dry_run=True, + ) + assert result.ok + cmd = result.command + assert '--body-file' in cmd + assert '--head' in cmd + assert '--base' in cmd + assert 'release/v1.0.0' in cmd + + @pytest.mark.asyncio + async def test_pr_without_body(self, tmp_path: Path) -> None: + """create_pr() without body should not include --body-file.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.create_pr( + title='chore: fix', + head='fix/bug', + dry_run=True, + ) + assert '--body-file' not in result.command + + +class TestUpdatePRDryRun: + """Test gh pr edit command construction.""" + + @pytest.mark.asyncio + async def test_update_with_body(self, tmp_path: Path) -> None: + """update_pr() with body should use --body-file.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.update_pr( + 42, + title='Updated title', + body='Updated body', + dry_run=True, + ) + assert result.ok + assert '--body-file' in result.command + assert '--title' in result.command + + @pytest.mark.asyncio + async def test_update_title_only(self, tmp_path: Path) -> None: + """update_pr() with title only should not include --body-file.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.update_pr(42, title='New title', dry_run=True) + assert '--title' in result.command + assert '--body-file' not in result.command + + +class TestMergePRDryRun: + """Test gh pr merge command construction.""" + + @pytest.mark.asyncio + async def test_squash_merge(self, tmp_path: Path) -> None: + """merge_pr() should default to --squash --auto --delete-branch.""" + backend = 
GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.merge_pr(42, dry_run=True) + assert result.ok + cmd = result.command + assert '--squash' in cmd + assert '--auto' in cmd + assert '--delete-branch' in cmd + + @pytest.mark.asyncio + async def test_rebase_merge(self, tmp_path: Path) -> None: + """merge_pr(method='rebase') should use --rebase.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.merge_pr(42, method='rebase', dry_run=True) + assert '--rebase' in result.command + assert '--squash' not in result.command + + +class TestLabelsDryRun: + """Test gh pr edit label commands.""" + + @pytest.mark.asyncio + async def test_add_labels(self, tmp_path: Path) -> None: + """add_labels() should include --add-label for each label.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.add_labels( + 42, + ['autorelease: pending', 'release'], + dry_run=True, + ) + assert result.ok + assert result.command.count('--add-label') == 2 + assert 'autorelease: pending' in result.command + assert 'release' in result.command + + @pytest.mark.asyncio + async def test_remove_labels(self, tmp_path: Path) -> None: + """remove_labels() should include --remove-label for each label.""" + backend = GitHubCLIBackend(repo='firebase/genkit', cwd=tmp_path) + result = await backend.remove_labels( + 42, + ['autorelease: pending'], + dry_run=True, + ) + assert result.ok + assert '--remove-label' in result.command + assert 'autorelease: pending' in result.command diff --git a/py/tools/releasekit/tests/backends/rk_pm_pnpm_integration_test.py b/py/tools/releasekit/tests/backends/rk_pm_pnpm_integration_test.py new file mode 100644 index 0000000000..6a3ce4dad6 --- /dev/null +++ b/py/tools/releasekit/tests/backends/rk_pm_pnpm_integration_test.py @@ -0,0 +1,205 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Integration tests for PnpmBackend against real pnpm/npm commands. + +These tests create real Node.js projects in temp directories and exercise +the ``pnpm pack``, ``pnpm install``, and ``npm version`` commands that +PnpmBackend constructs. + +The tests do NOT publish to any registry — only local operations. +""" + +from __future__ import annotations + +import json +import shutil +import textwrap +from pathlib import Path + +import pytest +from releasekit.backends.pm.pnpm import PnpmBackend +from releasekit.logging import configure_logging + +configure_logging(quiet=True) + +_pnpm_missing = shutil.which('pnpm') is None +_npm_missing = shutil.which('npm') is None + + +def _init_pnpm_package(tmp_path: Path) -> Path: + """Create a minimal npm package suitable for ``pnpm pack``. + + Returns the package directory. 
+ """ + pkg = tmp_path / 'test-pkg' + pkg.mkdir() + (pkg / 'package.json').write_text( + json.dumps( + { + 'name': 'test-pkg', + 'version': '0.1.0', + 'main': 'index.js', + }, + indent=2, + ) + + '\n' + ) + (pkg / 'index.js').write_text('module.exports = {};\n') + return pkg + + +def _init_pnpm_workspace(tmp_path: Path) -> Path: + """Create a minimal pnpm workspace with one member package. + + Returns the workspace root. + """ + root = tmp_path / 'workspace' + root.mkdir() + (root / 'package.json').write_text( + json.dumps( + { + 'name': 'test-workspace', + 'version': '0.0.0', + 'private': True, + }, + indent=2, + ) + + '\n' + ) + (root / 'pnpm-workspace.yaml').write_text( + textwrap.dedent("""\ + packages: + - 'packages/*' + """) + ) + + pkg = root / 'packages' / 'test-pkg' + pkg.mkdir(parents=True) + (pkg / 'package.json').write_text( + json.dumps( + { + 'name': 'test-pkg', + 'version': '0.1.0', + 'main': 'index.js', + }, + indent=2, + ) + + '\n' + ) + (pkg / 'index.js').write_text('module.exports = {};\n') + return root + + +@pytest.mark.skipif(_pnpm_missing, reason='pnpm not found on PATH. Install pnpm: https://pnpm.io/installation') +class TestPnpmBuild: + """Test pnpm pack with real packages.""" + + @pytest.mark.asyncio + async def test_pack_produces_tarball(self, tmp_path: Path) -> None: + """build() should produce a .tgz tarball.""" + pkg = _init_pnpm_package(tmp_path) + dist = tmp_path / 'dist' + dist.mkdir() + + backend = PnpmBackend(workspace_root=tmp_path) + result = await backend.build(pkg, output_dir=dist) + assert result.ok, f'pnpm pack failed: {result.stderr}' + + tarballs = list(dist.glob('*.tgz')) + assert len(tarballs) >= 1, f'No tarballs found in {dist}' + + @pytest.mark.asyncio + async def test_build_dry_run(self, tmp_path: Path) -> None: + """build(dry_run=True) should not produce files.""" + pkg = _init_pnpm_package(tmp_path) + dist = tmp_path / 'dist' + dist.mkdir() + + backend = PnpmBackend(workspace_root=tmp_path) + result = await backend.build(pkg, output_dir=dist, dry_run=True) + assert result.ok + assert result.dry_run + assert list(dist.iterdir()) == [] + + +@pytest.mark.skipif(_pnpm_missing, reason='pnpm not found on PATH. Install pnpm: https://pnpm.io/installation') +class TestPnpmLock: + """Test pnpm install --lockfile-only with real workspaces.""" + + @pytest.mark.asyncio + async def test_lock_generates_lockfile(self, tmp_path: Path) -> None: + """lock() should generate a pnpm-lock.yaml file.""" + root = _init_pnpm_workspace(tmp_path) + backend = PnpmBackend(workspace_root=root) + result = await backend.lock() + assert result.ok, f'pnpm install --lockfile-only failed: {result.stderr}' + assert (root / 'pnpm-lock.yaml').is_file() + + @pytest.mark.asyncio + async def test_lock_frozen_passes_after_lock(self, tmp_path: Path) -> None: + """lock(check_only=True) should pass after a fresh lock.""" + root = _init_pnpm_workspace(tmp_path) + backend = PnpmBackend(workspace_root=root) + + # First, generate the lockfile. + result = await backend.lock() + assert result.ok, f'pnpm install --lockfile-only failed: {result.stderr}' + + # Then verify it. 
+ check = await backend.lock(check_only=True) + assert check.ok, f'pnpm install --frozen-lockfile failed: {check.stderr}' + + @pytest.mark.asyncio + async def test_lock_dry_run(self, tmp_path: Path) -> None: + """lock(dry_run=True) should not create a lockfile.""" + root = _init_pnpm_workspace(tmp_path) + backend = PnpmBackend(workspace_root=root) + result = await backend.lock(dry_run=True) + assert result.ok + assert result.dry_run + assert not (root / 'pnpm-lock.yaml').is_file() + + +@pytest.mark.skipif(_npm_missing, reason='npm not found on PATH. Install npm (comes with Node.js): https://nodejs.org/') +class TestPnpmVersionBump: + """Test npm version with real packages.""" + + @pytest.mark.asyncio + async def test_version_bump(self, tmp_path: Path) -> None: + """version_bump() should update the version in package.json.""" + pkg = _init_pnpm_package(tmp_path) + backend = PnpmBackend(workspace_root=tmp_path) + + result = await backend.version_bump(pkg, '2.0.0') + assert result.ok, f'npm version failed: {result.stderr}' + + # Verify the version was updated. + data = json.loads((pkg / 'package.json').read_text()) + assert data['version'] == '2.0.0' + + @pytest.mark.asyncio + async def test_version_bump_dry_run(self, tmp_path: Path) -> None: + """version_bump(dry_run=True) should not change the file.""" + pkg = _init_pnpm_package(tmp_path) + backend = PnpmBackend(workspace_root=tmp_path) + + result = await backend.version_bump(pkg, '2.0.0', dry_run=True) + assert result.ok + assert result.dry_run + + data = json.loads((pkg / 'package.json').read_text()) + assert data['version'] == '0.1.0' diff --git a/py/tools/releasekit/tests/backends/rk_pm_uv_integration_test.py b/py/tools/releasekit/tests/backends/rk_pm_uv_integration_test.py new file mode 100644 index 0000000000..d7fd1032f4 --- /dev/null +++ b/py/tools/releasekit/tests/backends/rk_pm_uv_integration_test.py @@ -0,0 +1,197 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Integration tests for UvBackend against real uv commands. + +These tests create real Python projects in temp directories and exercise +the ``uv build``, ``uv lock``, and ``uv version`` commands that +UvBackend constructs. + +The tests do NOT publish to any registry — only local operations. +""" + +from __future__ import annotations + +import shutil +import textwrap +from pathlib import Path + +import pytest +from releasekit.backends.pm.uv import UvBackend +from releasekit.logging import configure_logging + +configure_logging(quiet=True) + +pytestmark = pytest.mark.skipif( + shutil.which('uv') is None, + reason='uv not found on PATH. 
Install uv: https://docs.astral.sh/uv/', +) + +_MINIMAL_PYPROJECT = textwrap.dedent("""\ + [project] + name = "test-pkg" + version = "0.1.0" + requires-python = ">=3.10" + + [build-system] + requires = ["setuptools>=68.0"] + build-backend = "setuptools.build_meta" + + [tool.setuptools.packages.find] + where = ["src"] +""") + + +def _init_uv_project(tmp_path: Path) -> Path: + """Create a minimal Python package suitable for ``uv build``. + + Returns the package directory. + """ + pkg = tmp_path / 'test-pkg' + pkg.mkdir() + (pkg / 'pyproject.toml').write_text(_MINIMAL_PYPROJECT) + src = pkg / 'src' / 'test_pkg' + src.mkdir(parents=True) + (src / '__init__.py').write_text('__version__ = "0.1.0"\n') + return pkg + + +def _init_uv_workspace(tmp_path: Path) -> Path: + """Create a minimal uv workspace with one member package. + + Returns the workspace root. + """ + root = tmp_path / 'workspace' + root.mkdir() + (root / 'pyproject.toml').write_text( + textwrap.dedent("""\ + [project] + name = "test-workspace" + version = "0.0.0" + requires-python = ">=3.10" + + [tool.uv.workspace] + members = ["packages/*"] + """) + ) + + pkg = root / 'packages' / 'test-pkg' + pkg.mkdir(parents=True) + (pkg / 'pyproject.toml').write_text(_MINIMAL_PYPROJECT) + src = pkg / 'src' / 'test_pkg' + src.mkdir(parents=True) + (src / '__init__.py').write_text('__version__ = "0.1.0"\n') + return root + + +class TestUvBuild: + """Test uv build with real packages.""" + + @pytest.mark.asyncio + async def test_build_produces_dist(self, tmp_path: Path) -> None: + """build() should produce .whl and .tar.gz files.""" + pkg = _init_uv_project(tmp_path) + dist = tmp_path / 'dist' + dist.mkdir() + + backend = UvBackend(workspace_root=tmp_path) + result = await backend.build(pkg, output_dir=dist, no_sources=False) + assert result.ok, f'uv build failed: {result.stderr}' + + files = list(dist.iterdir()) + extensions = {f.suffix for f in files} + assert '.whl' in extensions or '.gz' in extensions, f'No dist files: {files}' + + @pytest.mark.asyncio + async def test_build_dry_run(self, tmp_path: Path) -> None: + """build(dry_run=True) should not produce files.""" + pkg = _init_uv_project(tmp_path) + dist = tmp_path / 'dist' + dist.mkdir() + + backend = UvBackend(workspace_root=tmp_path) + result = await backend.build(pkg, output_dir=dist, dry_run=True) + assert result.ok + assert result.dry_run + assert list(dist.iterdir()) == [] + + +class TestUvLock: + """Test uv lock with real workspaces.""" + + @pytest.mark.asyncio + async def test_lock_generates_lockfile(self, tmp_path: Path) -> None: + """lock() should generate a uv.lock file.""" + root = _init_uv_workspace(tmp_path) + backend = UvBackend(workspace_root=root) + result = await backend.lock() + assert result.ok, f'uv lock failed: {result.stderr}' + assert (root / 'uv.lock').is_file() + + @pytest.mark.asyncio + async def test_lock_check_passes_after_lock(self, tmp_path: Path) -> None: + """lock(check_only=True) should pass after a fresh lock.""" + root = _init_uv_workspace(tmp_path) + backend = UvBackend(workspace_root=root) + + # First, generate the lockfile. + result = await backend.lock() + assert result.ok, f'uv lock failed: {result.stderr}' + + # Then verify it. 
+ check = await backend.lock(check_only=True) + assert check.ok, f'uv lock --check failed: {check.stderr}' + + @pytest.mark.asyncio + async def test_lock_dry_run(self, tmp_path: Path) -> None: + """lock(dry_run=True) should not create a lockfile.""" + root = _init_uv_workspace(tmp_path) + backend = UvBackend(workspace_root=root) + result = await backend.lock(dry_run=True) + assert result.ok + assert result.dry_run + assert not (root / 'uv.lock').is_file() + + +class TestUvVersionBump: + """Test uv version with real packages.""" + + @pytest.mark.asyncio + async def test_version_bump(self, tmp_path: Path) -> None: + """version_bump() should update the version in pyproject.toml.""" + pkg = _init_uv_project(tmp_path) + backend = UvBackend(workspace_root=tmp_path) + + result = await backend.version_bump(pkg, '2.0.0') + assert result.ok, f'uv version failed: {result.stderr}' + + # Verify the version was updated. + content = (pkg / 'pyproject.toml').read_text() + assert '2.0.0' in content + + @pytest.mark.asyncio + async def test_version_bump_dry_run(self, tmp_path: Path) -> None: + """version_bump(dry_run=True) should not change the file.""" + pkg = _init_uv_project(tmp_path) + backend = UvBackend(workspace_root=tmp_path) + + result = await backend.version_bump(pkg, '2.0.0', dry_run=True) + assert result.ok + assert result.dry_run + + content = (pkg / 'pyproject.toml').read_text() + assert '0.1.0' in content + assert '2.0.0' not in content diff --git a/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py index f5299e04b2..46ac225268 100644 --- a/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py +++ b/py/tools/releasekit/tests/backends/rk_vcs_integration_test.py @@ -25,6 +25,7 @@ from __future__ import annotations +import shutil from pathlib import Path import pytest @@ -34,6 +35,11 @@ configure_logging(quiet=True) +pytestmark = pytest.mark.skipif( + shutil.which('git') is None, + reason='git not found on PATH. Install git: https://git-scm.com/', +) + def _init_repo_with_remote(tmp_path: Path) -> tuple[GitCLIBackend, Path, Path]: """Create a git repo with a bare remote and one commit. From b204361e34997dc6487871ddb21c44a575ca011e Mon Sep 17 00:00:00 2001 From: Yesudeep Mangalapilly Date: Sun, 15 Feb 2026 06:50:38 -0800 Subject: [PATCH 8/8] fix(releasekit): use git init -b main in integration tests for CI compat CI runners may have 'master' as the default branch. Explicitly set '-b main' in both bare and work repos to ensure consistent behavior. 
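
As a minimal, hypothetical sketch of the fixture shape this change targets (the
helper name follows _init_repo_with_remote in rk_vcs_integration_test.py, but the
internals below are illustrative and assumed, not quoted from the diff):

```python
import subprocess
from pathlib import Path


def _sketch_init_repos(tmp_path: Path) -> tuple[Path, Path]:
    """Pin 'main' in both repos so tests ignore the runner's init.defaultBranch."""
    remote = tmp_path / 'remote.git'
    work = tmp_path / 'work'
    # Bare "remote" and working clone both set the initial branch explicitly.
    subprocess.run(['git', 'init', '--bare', '-b', 'main', str(remote)], check=True)
    subprocess.run(['git', 'init', '-b', 'main', str(work)], check=True)
    return remote, work
```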
--- bin/lint | 126 +- py/GEMINI.md | 991 +--- py/bin/fix_missing_test_docstrings.py | 155 + py/docs/WHY_MONOREPO.md | 54 +- py/pyproject.toml | 3 + .../src/main.py | 2 +- py/samples/web-endpoints-hello/GEMINI.md | 84 + .../tests/telemetry_test.py | 2 +- py/tools/releasekit/GEMINI.md | 4193 +++++++++++++++++ py/tools/releasekit/README.md | 163 +- .../docs/competitive-gap-analysis.md | 451 +- py/tools/releasekit/docs/docs/guides/ci-cd.md | 117 + .../docs/docs/guides/configuration.md | 149 + py/tools/releasekit/docs/docs/index.md | 14 + .../docs/docs/internals/preflight.md | 60 +- .../github/workflows/releasekit-cargo.yml | 449 ++ .../github/workflows/releasekit-dart.yml | 448 ++ .../github/workflows/releasekit-go.yml | 429 ++ .../github/workflows/releasekit-gradle.yml | 470 ++ .../github/workflows/releasekit-pnpm.yml | 5 +- .../github/workflows/releasekit-uv.yml | 6 +- py/tools/releasekit/pyproject.toml | 15 +- py/tools/releasekit/roadmap.md | 1120 ++++- .../src/releasekit/backends/_run.py | 32 +- .../releasekit/backends/forge/bitbucket.py | 10 +- .../src/releasekit/backends/forge/github.py | 66 +- .../releasekit/backends/forge/github_api.py | 26 +- .../src/releasekit/backends/forge/gitlab.py | 22 +- .../src/releasekit/backends/pm/__init__.py | 18 + .../src/releasekit/backends/pm/bazel.py | 412 ++ .../src/releasekit/backends/pm/cargo.py | 262 + .../src/releasekit/backends/pm/dart.py | 207 + .../src/releasekit/backends/pm/go.py | 201 + .../src/releasekit/backends/pm/maturin.py | 281 ++ .../src/releasekit/backends/pm/maven.py | 269 ++ .../releasekit/backends/registry/__init__.py | 12 + .../releasekit/backends/registry/crates_io.py | 202 + .../releasekit/backends/registry/goproxy.py | 208 + .../backends/registry/maven_central.py | 217 + .../src/releasekit/backends/registry/npm.py | 10 +- .../releasekit/backends/registry/pubdev.py | 165 + .../src/releasekit/backends/registry/pypi.py | 6 + .../src/releasekit/backends/vcs/__init__.py | 15 + .../src/releasekit/backends/vcs/git.py | 12 +- .../src/releasekit/backends/vcs/mercurial.py | 12 + .../releasekit/backends/workspace/__init__.py | 15 + .../src/releasekit/backends/workspace/_io.py | 57 + .../releasekit/backends/workspace/bazel.py | 400 ++ .../releasekit/backends/workspace/cargo.py | 389 ++ .../src/releasekit/backends/workspace/dart.py | 289 ++ .../src/releasekit/backends/workspace/go.py | 222 + .../releasekit/backends/workspace/maven.py | 522 ++ .../src/releasekit/backends/workspace/pnpm.py | 29 +- .../src/releasekit/backends/workspace/uv.py | 44 +- py/tools/releasekit/src/releasekit/bump.py | 5 + .../src/releasekit/checks/__init__.py | 57 +- .../releasekit/src/releasekit/checks/_base.py | 314 ++ .../releasekit/src/releasekit/checks/_dart.py | 307 ++ .../src/releasekit/checks/_dart_fixers.py | 228 + .../releasekit/src/releasekit/checks/_go.py | 256 + .../src/releasekit/checks/_go_fixers.py | 137 + .../releasekit/src/releasekit/checks/_java.py | 490 ++ .../src/releasekit/checks/_java_fixers.py | 210 + .../releasekit/src/releasekit/checks/_js.py | 369 ++ .../src/releasekit/checks/_js_fixers.py | 194 + .../src/releasekit/checks/_python.py | 214 +- .../src/releasekit/checks/_runner.py | 167 +- .../releasekit/src/releasekit/checks/_rust.py | 307 ++ .../src/releasekit/checks/_rust_fixers.py | 215 + .../src/releasekit/checks/_universal.py | 21 + py/tools/releasekit/src/releasekit/cli.py | 508 +- .../releasekit/src/releasekit/commitback.py | 13 +- py/tools/releasekit/src/releasekit/config.py | 19 +- .../releasekit/src/releasekit/detection.py | 229 +- 
py/tools/releasekit/src/releasekit/init.py | 370 +- py/tools/releasekit/src/releasekit/lock.py | 34 +- py/tools/releasekit/src/releasekit/migrate.py | 487 ++ py/tools/releasekit/src/releasekit/pin.py | 4 + .../releasekit/src/releasekit/preflight.py | 280 +- py/tools/releasekit/src/releasekit/prepare.py | 6 +- .../releasekit/src/releasekit/publisher.py | 1 + .../src/releasekit/release_notes.py | 8 +- py/tools/releasekit/src/releasekit/sbom.py | 6 +- py/tools/releasekit/src/releasekit/signing.py | 326 ++ py/tools/releasekit/src/releasekit/state.py | 1 + py/tools/releasekit/src/releasekit/tracing.py | 106 +- .../src/releasekit/utils/__init__.py | 21 + .../releasekit/src/releasekit/utils/date.py | 40 + .../src/releasekit/utils/packaging.py | 57 + .../releasekit/src/releasekit/versioning.py | 12 +- .../releasekit/src/releasekit/workspace.py | 127 +- py/tools/releasekit/tests/_fakes/__init__.py | 42 + py/tools/releasekit/tests/_fakes/_forge.py | 176 + py/tools/releasekit/tests/_fakes/_pm.py | 130 + py/tools/releasekit/tests/_fakes/_registry.py | 91 + py/tools/releasekit/tests/_fakes/_vcs.py | 203 + .../tests/backends/rk_pm_bazel_test.py | 307 ++ .../tests/backends/rk_pm_cargo_test.py | 319 ++ .../tests/backends/rk_pm_dart_test.py | 187 + .../tests/backends/rk_pm_go_test.py | 198 + .../tests/backends/rk_pm_maturin_test.py | 164 + .../tests/backends/rk_pm_maven_test.py | 320 ++ .../backends/rk_registry_crates_io_test.py | 318 ++ .../backends/rk_registry_goproxy_test.py | 345 ++ .../rk_registry_maven_central_test.py | 405 ++ .../tests/backends/rk_registry_pubdev_test.py | 252 + .../releasekit/tests/backends/rk_run_test.py | 24 +- .../tests/backends/rk_vcs_integration_test.py | 6 +- .../tests/backends/rk_vcs_mercurial_test.py | 186 + .../tests/backends/rk_workspace_bazel_test.py | 287 ++ .../tests/backends/rk_workspace_maven_test.py | 537 +++ .../tests/rk_backends_cargo_test.py | 409 ++ .../releasekit/tests/rk_backends_dart_test.py | 338 ++ .../tests/rk_backends_forge_bitbucket_test.py | 4 +- .../tests/rk_backends_forge_github_test.py | 6 +- .../tests/rk_backends_forge_gitlab_test.py | 4 +- .../releasekit/tests/rk_backends_go_test.py | 334 ++ .../tests/rk_backends_maven_test.py | 455 ++ .../tests/rk_backends_vcs_git_test.py | 4 +- .../tests/rk_backends_vcs_hg_test.py | 4 +- py/tools/releasekit/tests/rk_branch_test.py | 55 + py/tools/releasekit/tests/rk_bump_test.py | 38 + .../releasekit/tests/rk_changelog_test.py | 106 +- .../tests/rk_check_backend_fixers_test.py | 740 +++ .../tests/rk_check_backends_test.py | 720 +++ .../tests/rk_checks_source_context_test.py | 1326 ++++++ .../releasekit/tests/rk_cli_backends_test.py | 246 + .../releasekit/tests/rk_commitback_test.py | 344 +- .../releasekit/tests/rk_detection_test.py | 297 +- py/tools/releasekit/tests/rk_doctor_test.py | 462 ++ py/tools/releasekit/tests/rk_errors_test.py | 152 + .../releasekit/tests/rk_formatters_test.py | 50 + py/tools/releasekit/tests/rk_graph_test.py | 29 + py/tools/releasekit/tests/rk_init_test.py | 846 ++++ .../tests/rk_integration_backends_test.py | 896 ++++ .../releasekit/tests/rk_integration_test.py | 894 ++++ .../releasekit/tests/rk_invariants_test.py | 741 +++ py/tools/releasekit/tests/rk_lock_test.py | 136 + py/tools/releasekit/tests/rk_migrate_test.py | 621 +++ py/tools/releasekit/tests/rk_pin_test.py | 122 + py/tools/releasekit/tests/rk_plan_test.py | 38 + .../releasekit/tests/rk_preflight_test.py | 736 +-- py/tools/releasekit/tests/rk_prepare_test.py | 761 ++- .../releasekit/tests/rk_profiling_test.py | 248 + 
.../releasekit/tests/rk_publisher_test.py | 540 +-- .../releasekit/tests/rk_release_notes_test.py | 85 +- py/tools/releasekit/tests/rk_release_test.py | 210 +- py/tools/releasekit/tests/rk_sbom_test.py | 495 ++ .../releasekit/tests/rk_scheduler_test.py | 228 + py/tools/releasekit/tests/rk_security_test.py | 332 ++ py/tools/releasekit/tests/rk_signing_test.py | 297 ++ .../tests/rk_source_context_test.py | 286 ++ py/tools/releasekit/tests/rk_state_test.py | 56 + py/tools/releasekit/tests/rk_tags_test.py | 371 +- py/tools/releasekit/tests/rk_tracing_test.py | 176 + .../releasekit/tests/rk_utils_date_test.py | 113 + .../tests/rk_utils_packaging_test.py | 128 + .../releasekit/tests/rk_versioning_test.py | 116 +- .../releasekit/tests/rk_workspace_test.py | 111 + py/tools/releasekit/tests/schemas/NOTICE | 12 + .../tests/schemas/bom-1.5.schema.json | 3799 +++++++++++++++ .../tests/schemas/spdx-2.3.schema.json | 740 +++ py/tools/releasekit/uv.lock | 800 +++- py/uv.lock | 285 +- releasekit.toml | 36 + 165 files changed, 42978 insertions(+), 3181 deletions(-) create mode 100644 py/bin/fix_missing_test_docstrings.py create mode 100644 py/tools/releasekit/GEMINI.md create mode 100644 py/tools/releasekit/github/workflows/releasekit-cargo.yml create mode 100644 py/tools/releasekit/github/workflows/releasekit-dart.yml create mode 100644 py/tools/releasekit/github/workflows/releasekit-go.yml create mode 100644 py/tools/releasekit/github/workflows/releasekit-gradle.yml create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/bazel.py create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/cargo.py create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/dart.py create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/go.py create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/maturin.py create mode 100644 py/tools/releasekit/src/releasekit/backends/pm/maven.py create mode 100644 py/tools/releasekit/src/releasekit/backends/registry/crates_io.py create mode 100644 py/tools/releasekit/src/releasekit/backends/registry/goproxy.py create mode 100644 py/tools/releasekit/src/releasekit/backends/registry/maven_central.py create mode 100644 py/tools/releasekit/src/releasekit/backends/registry/pubdev.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/_io.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/bazel.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/cargo.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/dart.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/go.py create mode 100644 py/tools/releasekit/src/releasekit/backends/workspace/maven.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_base.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_dart.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_dart_fixers.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_go.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_go_fixers.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_java.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_java_fixers.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_js.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_js_fixers.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_rust.py create mode 100644 py/tools/releasekit/src/releasekit/checks/_rust_fixers.py create 
mode 100644 py/tools/releasekit/src/releasekit/migrate.py create mode 100644 py/tools/releasekit/src/releasekit/signing.py create mode 100644 py/tools/releasekit/src/releasekit/utils/__init__.py create mode 100644 py/tools/releasekit/src/releasekit/utils/date.py create mode 100644 py/tools/releasekit/src/releasekit/utils/packaging.py create mode 100644 py/tools/releasekit/tests/_fakes/__init__.py create mode 100644 py/tools/releasekit/tests/_fakes/_forge.py create mode 100644 py/tools/releasekit/tests/_fakes/_pm.py create mode 100644 py/tools/releasekit/tests/_fakes/_registry.py create mode 100644 py/tools/releasekit/tests/_fakes/_vcs.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_bazel_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_cargo_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_dart_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_go_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_maturin_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_pm_maven_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_registry_crates_io_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_registry_goproxy_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_registry_maven_central_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_registry_pubdev_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_vcs_mercurial_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_workspace_bazel_test.py create mode 100644 py/tools/releasekit/tests/backends/rk_workspace_maven_test.py create mode 100644 py/tools/releasekit/tests/rk_backends_cargo_test.py create mode 100644 py/tools/releasekit/tests/rk_backends_dart_test.py create mode 100644 py/tools/releasekit/tests/rk_backends_go_test.py create mode 100644 py/tools/releasekit/tests/rk_backends_maven_test.py create mode 100644 py/tools/releasekit/tests/rk_branch_test.py create mode 100644 py/tools/releasekit/tests/rk_check_backend_fixers_test.py create mode 100644 py/tools/releasekit/tests/rk_check_backends_test.py create mode 100644 py/tools/releasekit/tests/rk_checks_source_context_test.py create mode 100644 py/tools/releasekit/tests/rk_cli_backends_test.py create mode 100644 py/tools/releasekit/tests/rk_doctor_test.py create mode 100644 py/tools/releasekit/tests/rk_integration_backends_test.py create mode 100644 py/tools/releasekit/tests/rk_integration_test.py create mode 100644 py/tools/releasekit/tests/rk_invariants_test.py create mode 100644 py/tools/releasekit/tests/rk_migrate_test.py create mode 100644 py/tools/releasekit/tests/rk_profiling_test.py create mode 100644 py/tools/releasekit/tests/rk_sbom_test.py create mode 100644 py/tools/releasekit/tests/rk_security_test.py create mode 100644 py/tools/releasekit/tests/rk_signing_test.py create mode 100644 py/tools/releasekit/tests/rk_source_context_test.py create mode 100644 py/tools/releasekit/tests/rk_tracing_test.py create mode 100644 py/tools/releasekit/tests/rk_utils_date_test.py create mode 100644 py/tools/releasekit/tests/rk_utils_packaging_test.py create mode 100644 py/tools/releasekit/tests/schemas/NOTICE create mode 100644 py/tools/releasekit/tests/schemas/bom-1.5.schema.json create mode 100644 py/tools/releasekit/tests/schemas/spdx-2.3.schema.json diff --git a/bin/lint b/bin/lint index e6f0dae9c2..737c64509e 100755 --- a/bin/lint +++ b/bin/lint @@ -25,63 +25,66 @@ PY_DIR="${TOP_DIR}/py" # shellcheck disable=SC2034 
JS_DIR="${TOP_DIR}/js" # Reserved for future JS linting +# ── Phase 1: Sequential (modifies files) ────────────────────────────── +# ruff fix/format must complete before any read-only checks see the source. uv run --directory "${PY_DIR}" ruff check --fix --preview --unsafe-fixes . uv run --directory "${PY_DIR}" ruff format --preview . -# Check lockfile is up to date echo "--- 🔒 Checking lockfile is up to date ---" uv lock --check --directory "${PY_DIR}" -# Fast type checkers first (blocking) -# Note: ty reads its config (environment.root) from py/pyproject.toml -echo "--- 🔍 Running Ty Type Check ---" -uv run --directory "${PY_DIR}" ty check . - -# Pyrefly checks the full workspace (config in pyproject.toml handles PEP 420 namespace packages) -echo "--- 🔍 Running Pyrefly Type Check ---" -uv run --directory "${PY_DIR}" pyrefly check . - -# Pyright runs on packages/ only (blocking) -echo "--- 🔍 Running Pyright Type Check ---" -uv run --directory "${PY_DIR}" pyright packages/ - -"${PY_DIR}/bin/run_python_security_checks" - -# License checks -echo "--- 📜 Running License Check ---" -"${TOP_DIR}/bin/check_license" - -# Dependency license check -echo "--- 📜 Running Dependency License Check ---" -uv run --directory "${PY_DIR}" liccheck -s pyproject.toml - -# Consistency checks (Python versions, plugin versions, naming, workspace completeness) -echo "--- 🔍 Running Consistency Checks ---" -"${PY_DIR}/bin/check_consistency" - -# Releasekit workspace health checks (PyPI metadata, publish classifiers, changelog URLs, etc.) -echo "--- 📦 Running Releasekit Checks ---" -if ! uv run --directory "${TOP_DIR}/py/tools/releasekit" releasekit check 2>&1; then - echo "⚠️ releasekit check found issues (see above)" - exit 1 -fi -echo "✅ All releasekit checks passed" - -# Shell script linting -echo "--- 🐚 Running Shell Script Lint (shellcheck) ---" -if command -v shellcheck &> /dev/null; then - shell_errors=0 - - # Collect all shell scripts to check: - # 1. bin/* and py/bin/* (scripts without extensions) - # 2. All *.sh files in py/samples/ (run.sh, deploy_*.sh, test_*.sh, etc.) - shell_scripts=() +# ── Phase 2: Parallel read-only checks ──────────────────────────────── +# All checks below are read-only and independent. Run them concurrently +# and collect results at the end. + +TMPDIR_LINT=$(mktemp -d) +# shellcheck disable=SC2064 +trap "rm -rf '${TMPDIR_LINT}'" EXIT + +declare -a PIDS=() +declare -a NAMES=() + +# Helper: launch a check in the background, capturing output. +run_check() { + local name="$1"; shift + local logfile="${TMPDIR_LINT}/${name}.log" + ( + echo "--- ${name} ---" + "$@" 2>&1 + ) > "${logfile}" 2>&1 & + PIDS+=($!) + NAMES+=("${name}") +} + +# Type checkers +run_check "🔍 Ty Type Check" uv run --directory "${PY_DIR}" ty check . +run_check "🔍 Pyrefly Type Check" uv run --directory "${PY_DIR}" pyrefly check . +run_check "🔍 Pyright Type Check" uv run --directory "${PY_DIR}" pyright packages/ + +# Security +run_check "🔒 Security Checks" "${PY_DIR}/bin/run_python_security_checks" + +# License +run_check "📜 License Check" "${TOP_DIR}/bin/check_license" +run_check "📜 Dep License Check" uv run --directory "${PY_DIR}" liccheck -s pyproject.toml + +# Consistency + releasekit +run_check "🔍 Consistency Checks" "${PY_DIR}/bin/check_consistency" +run_check "📦 Releasekit Checks" uv run --directory "${TOP_DIR}/py/tools/releasekit" releasekit check + +# Shellcheck (inline — slightly more complex, but still read-only) +_run_shellcheck() { + if ! 
command -v shellcheck &> /dev/null; then + echo "⚠️ shellcheck not installed (brew install shellcheck) - skipping" + return 0 + fi + local shell_errors=0 + local shell_scripts=() - # bin/ scripts (detect by file type since they have no extension). for script in "${TOP_DIR}"/bin/* "${PY_DIR}"/bin/*; do if [ -f "$script" ] && file "$script" | grep -qE "shell|bash|sh script" 2>/dev/null; then + local script_name script_name=$(basename "$script") - # Skip .py files and .venv directories. if [[ "$script_name" == *.py ]] || [[ "$script" == */.venv/* ]]; then continue fi @@ -89,14 +92,11 @@ if command -v shellcheck &> /dev/null; then fi done - # py/samples/ shell scripts (find all .sh files recursively). while IFS= read -r -d '' script; do shell_scripts+=("$script") done < <(find "${PY_DIR}/samples" -not -path '*/.venv/*' -name '*.sh' -type f -print0 2>/dev/null) for script in "${shell_scripts[@]}"; do - # -x follows sourced files; -e SC1091 skips "not following" warnings - # for files like _common.sh that live in parent directories. if ! shellcheck -x -e SC1091 "$script" 2>&1; then shell_errors=$((shell_errors + 1)) fi @@ -104,12 +104,32 @@ if command -v shellcheck &> /dev/null; then if [ $shell_errors -gt 0 ]; then echo "⚠️ $shell_errors shell script(s) have shellcheck warnings" - exit 1 + return 1 else echo "✅ All ${#shell_scripts[@]} shell scripts pass shellcheck" fi -else - echo "⚠️ shellcheck not installed (brew install shellcheck) - skipping" +} +run_check "🐚 Shellcheck" _run_shellcheck + +# ── Collect results ─────────────────────────────────────────────────── +failures=0 +for i in "${!PIDS[@]}"; do + pid="${PIDS[$i]}" + name="${NAMES[$i]}" + if ! wait "${pid}"; then + echo "" + echo "❌ FAILED: ${name}" + cat "${TMPDIR_LINT}/${name}.log" + failures=$((failures + 1)) + else + echo "✅ ${name}" + fi +done + +if [ $failures -gt 0 ]; then + echo "" + echo "❌ ${failures} check(s) failed" + exit 1 fi # Disabled because there are many lint errors. diff --git a/py/GEMINI.md b/py/GEMINI.md index e9a87cf36e..4129ae034f 100644 --- a/py/GEMINI.md +++ b/py/GEMINI.md @@ -97,21 +97,6 @@ * Plugins: `plugins/{name}/` → package name `genkit-plugin-{name}` * Samples: `samples/{name}/` → package name `{name}` * Use hyphens (`-`) not underscores (`_`) in package names - * Manual verification: - ```bash - # Check plugins - for d in plugins/*/; do - name=$(basename "$d") - pkg=$(grep '^name = ' "$d/pyproject.toml" | cut -d'"' -f2) - [ "$pkg" != "genkit-plugin-$name" ] && echo "MISMATCH: $d -> $pkg" - done - # Check samples - for d in samples/*/; do - name=$(basename "$d") - pkg=$(grep '^name = ' "$d/pyproject.toml" | cut -d'"' -f2) - [ "$pkg" != "$name" ] && echo "MISMATCH: $d -> $pkg" - done - ``` * **Dependency Verification**: All dependencies must resolve correctly. Run these checks before submitting PRs: ```bash @@ -148,22 +133,6 @@ ] ``` - **Manual verification** (run from `py/` directory): - ```bash - for sample_dir in samples/*/; do - pyproject="$sample_dir/pyproject.toml" - [ ! -f "$pyproject" ] && continue - imports=$(grep -rh 'from genkit\.plugins\.' "$sample_dir/src/" 2>/dev/null \ - | sed 's/.*from genkit\.plugins\.\([a-z_]*\).*/\1/' | sort -u) - for imp in $imports; do - pkg="genkit-plugin-$(echo "$imp" | tr '_' '-')" - if ! 
grep -q "\"$pkg\"" "$pyproject" 2>/dev/null; then - echo "⚠️ $sample_dir: imports genkit.plugins.$imp but missing $pkg in pyproject.toml" - fi - done - done - ``` - **Note**: Imports inside `try/except ImportError` blocks (for optional platform auto-detection) are exempt from this rule. @@ -173,48 +142,142 @@ * Use version constraints (e.g., `>=1.0.0`) to allow flexibility * Pin exact versions only when necessary for compatibility * Remove unused dependencies to keep packages lean -* **Python Version Consistency**: All packages MUST use the same `requires-python` - version. Currently, all packages should specify `requires-python = ">=3.10"`. +* **Python Version Consistency**: All packages MUST specify `requires-python = ">=3.10"`. **This is automatically checked by `py/bin/check_consistency`.** - Manual verification: - ```bash - # Check all pyproject.toml files have consistent Python version - expected=">=3.10" - for f in packages/*/pyproject.toml plugins/*/pyproject.toml samples/*/pyproject.toml; do - version=$(grep 'requires-python' "$f" | cut -d'"' -f2) - if [ "$version" != "$expected" ]; then - echo "MISMATCH: $f has '$version' (expected '$expected')" - fi - done - ``` - **Note**: The `.python-version` file specifies `3.12` for local development, but - CI tests against Python 3.10, 3.11, 3.12, 3.13, and 3.14. Scripts using `uv run` - should use `--active` flag to respect the CI matrix Python version. -* **Plugin Version Sync**: All plugin versions should stay in sync with the core - framework version. When releasing, update all plugin versions together. - **This is automatically checked by `py/bin/check_consistency`.** - Manual verification: - ```bash - # Get core framework version - core_version=$(grep '^version = ' packages/genkit/pyproject.toml | cut -d'"' -f2) - echo "Core version: $core_version" - - # Check all plugins have the same version - for f in plugins/*/pyproject.toml; do - plugin_version=$(grep '^version = ' "$f" | cut -d'"' -f2) - plugin_name=$(grep '^name = ' "$f" | cut -d'"' -f2) - if [ "$plugin_version" != "$core_version" ]; then - echo "MISMATCH: $plugin_name has version '$plugin_version' (expected '$core_version')" - fi - done - ``` - **Version Policy**: + The `.python-version` file specifies `3.12` for local development, but CI tests + against Python 3.10–3.14. Scripts using `uv run` should use `--active` flag to + respect the CI matrix Python version. +* **Plugin Version Sync**: All plugin versions stay in sync with the core framework + version. **This is automatically checked by `py/bin/check_consistency`.** * Core framework and all plugins share the same version number * Samples can have independent versions (typically `0.1.0`) * Use semantic versioning (MAJOR.MINOR.PATCH) * Bump versions together during releases * **Production Ready**: The objective is to produce production-grade code. * **Shift Left**: Employ a "shift left" strategy—catch errors early. +* **Configurability Over Hardcoding**: All tools, scripts, and libraries MUST be + configurable rather than hardcoded. This is a hard design requirement that applies + to URLs, registry endpoints, file paths, tool names, thresholds, timeouts, and + any other value that a user or CI environment might need to override. + + **Rules**: + * **Never hardcode URLs** — use constructor parameters, config fields, environment + variables, or CLI flags. Every URL that appears as a string literal must also be + overridable (e.g. `base_url` parameter with a sensible default). 
+ * **Expose constants as class attributes** — use `DEFAULT_BASE_URL` / `TEST_BASE_URL` + patterns so users can reference well-known values without string literals. + * **CLI flags override config files** — when both a config file field and a CLI flag + exist for the same setting, the CLI flag takes precedence. + * **Config files override defaults** — dataclass/struct defaults are the last + fallback. Config file values override them. CLI flags override config files. + * **Environment variables for CI** — settings that CI pipelines commonly override + (registry URLs, tokens, pool sizes, timeouts) should be readable from environment + variables when a CLI flag is impractical. + * **No magic constants in business logic** — extract thresholds, retry counts, + pool sizes, and timeouts into named constants or config fields with docstrings + explaining the default value. + + **Priority order** (highest wins): + ``` + CLI flag > environment variable > config file > class/struct default + ``` + + **Examples**: + ```python + # WRONG — hardcoded registry URL, not overridable + class MyRegistry: + def check(self, pkg: str) -> bool: + url = f"https://registry.example.com/api/{pkg}" # ❌ Hardcoded + ... + + # CORRECT — configurable with sensible default + well-known constant + class MyRegistry: + DEFAULT_BASE_URL: str = "https://registry.example.com" + TEST_BASE_URL: str = "http://localhost:8080" + + def __init__(self, *, base_url: str = DEFAULT_BASE_URL) -> None: + self._base_url = base_url.rstrip("/") + + def check(self, pkg: str) -> bool: + url = f"{self._base_url}/api/{pkg}" # ✅ Configurable + ... + ``` + + This principle ensures that every tool can be tested against staging/local + registries, used in air-gapped environments, and adapted to non-standard + infrastructure without code changes. +* **Fixer Scripts Over Shell Eval**: When fixing lint errors, formatting issues, + or performing bulk code transformations, **always write a dedicated fixer script** + instead of evaluating code snippets or one-liners at the shell. This is a hard + requirement. + + **Rules**: + * **Never `eval` or `exec` strings at the command line** to fix code. Shell + one-liners with `sed`, `awk`, `perl -pi -e`, or `python -c` are fragile, + unreviewable, and unreproducible. They also bypass linting and type checking. + * **Write a Python fixer script** (e.g. `py/bin/fix_*.py`) that uses the `ast` + module or `libcst` for syntax-aware transformations. Text-based regex fixes + are acceptable only for non-Python files (TOML, YAML, Markdown). + * **Prefer AST-based transforms** over regex for Python code. The `ast` module + can parse, inspect, and rewrite Python source without breaking syntax. Use + `ast.parse()` + `ast.NodeVisitor`/`ast.NodeTransformer` for structural changes. + Use `libcst` when you need to preserve comments and whitespace. + * **Use `ruff check --fix`** for auto-fixable lint rules before writing custom + fixers. Ruff can auto-fix many categories (unused imports, formatting, simple + refactors). Only write a custom fixer for issues Ruff cannot auto-fix. + * **Fixer scripts must be idempotent** — running them twice produces the same + result. This allows safe re-runs and CI integration. + * **Commit fixer scripts** to the repo (in `py/bin/`) so the team can re-run + them and review the transformation logic. 
+ + **Example — adding missing docstrings to test methods**: + ```python + #!/usr/bin/env python3 + """Add missing docstrings to test methods (fixes D102).""" + import ast + import sys + from pathlib import Path + + def fix_file(path: Path) -> int: + source = path.read_text(encoding='utf-8') + tree = ast.parse(source) + # ... walk tree, find methods without docstrings, insert them ... + path.write_text(new_source, encoding='utf-8') + return count + + for p in Path(sys.argv[1]).rglob('*_test.py'): + fix_file(p) + ``` + + **Why this matters**: Shell one-liners are invisible to code review, cannot be + tested, and often introduce subtle bugs (wrong quoting, partial matches, broken + indentation). A committed fixer script is reviewable, testable, and documents + the transformation for future maintainers. + +* **Rust-Style Errors with Hints**: Every user-facing error MUST follow the Rust + compiler's diagnostic style: a **machine-readable error code**, a **human-readable + message**, and an actionable **hint** that tells the user (or an AI agent) exactly + how to fix the problem. + + **Rules**: + * Every custom exception raise MUST include a non-empty `hint` (or equivalent + guidance field). A raise site without a hint is a bug. + * The `hint` must be **actionable** — it tells the reader what to do, not just + what went wrong. Good: `"Run 'git fetch --unshallow' to fetch full history."` + Bad: `"The repository is shallow."` (that's the message, not a hint). + * Error codes should use a `PREFIX-NAMED-KEY` format (e.g. `RK-CONFIG-NOT-FOUND`, + `GK-PLUGIN-NOT-FOUND`). Define codes as enums, not raw strings. + * For CLI tools, render errors in Rust-style format: + ``` + error[RK-CONFIG-NOT-FOUND]: No releasekit.toml found in /repo. + | + = hint: Run 'releasekit init' to generate a default configuration. + ``` + + **Why hints matter**: Hints are the single most important part of an error for + both humans and AI agents. An AI reading a hint can self-correct without + needing to understand the full codebase. A human reading a hint can fix the + issue without searching docs. Treat a missing hint as a P1 bug. * **Strict Typing**: Strict type checking is required. Do not use `Any` unless absolutely necessary and documented. * **Security & Async Best Practices**: Ruff is configured with security (S), async (ASYNC), @@ -373,6 +436,51 @@ don't expire, include auth headers in `get_cached_client()` * **WeakKeyDictionary cleanup**: The cache automatically cleans up clients when their event loop is garbage collected + * **Testing**: Mock `get_cached_client` instead of `httpx.AsyncClient`: + ```python + @patch('my_module.get_cached_client') + async def test_api_call(mock_get_client): + mock_client = AsyncMock() + mock_client.post = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + result = await my_api_call() + ``` + * **Related**: [#4420](https://github.com/firebase/genkit/issues/4420) +* **Security Vulnerability Checks**: Beyond Ruff's S rules, the codebase enforces + additional security invariants. ReleaseKit has an automated security test suite + (`py/tools/releasekit/tests/rk_security_test.py`) that demonstrates the pattern. 
+ Apply these checks to all Python code in the repository: + + **Automated Checks (enforced in CI via test suites)**: + + | # | Check | What It Catches | Severity | + |---|-------|-----------------|----------| + | 1 | No `shell=True` | Command injection via subprocess | Critical | + | 2 | No `pickle`/`yaml.load`/`eval`/`exec` | Arbitrary code execution via deserialization | Critical | + | 3 | No hardcoded secrets | Literal tokens, AWS keys, GitHub PATs in source | Critical | + | 4 | No `verify=False` / `CERT_NONE` | TLS certificate verification bypass | Critical | + | 5 | `NamedTemporaryFile(delete=False)` in `try/finally` | Temp file leak on exception | High | + | 6 | No bare `except:` | Swallows `KeyboardInterrupt`/`SystemExit` | Medium | + | 7 | API backends define `__repr__` | Credential leak in tracebacks/logs | High | + | 8 | Lock files use `O_CREAT\|O_EXCL` | TOCTOU race condition | High | + | 9 | No `http://` URLs in runtime code | Plaintext traffic (no TLS) | Medium | + | 10 | State files use `mkstemp` + `os.replace` | Crash corruption on partial writes | High | + | 11 | `resolve()` on discovered paths | Symlink traversal attacks | Medium | + + **Manual Review Checklist** (for PR reviews): + + | Category | What to Look For | Fix | + |----------|-----------------|-----| + | TOCTOU races | Check-then-act on files without atomic ops | `O_CREAT\|O_EXCL`, `mkstemp` + `os.replace` | + | Log injection | User data in structlog event names | Literals for event names; user data in kwargs | + | Path traversal | `Path(user_input)` without validation | `.resolve()` + verify under expected root | + | Credential logging | Objects with tokens in `log.*()` calls | `__repr__` that redacts sensitive fields | + | Subprocess args | User input in command lists | Validate inputs; never `shell=True` | + | Temp file cleanup | `NamedTemporaryFile(delete=False)` | Wrap in `try/finally` with `os.unlink` | + | Atomic writes | `write_text()` for state/config files | `mkstemp` + `os.write` + `os.replace` | + | Exception swallowing | `except Exception` hiding real errors | Log exception; re-raise if not recoverable | + | ReDoS | Regex with nested quantifiers on untrusted input | Avoid catastrophic backtracking patterns | + * **Error Suppression Policy**: Avoid ignoring warnings from the type checker (`# type: ignore`, `# pyrefly: ignore`, etc.) or linter (`# noqa`) unless there is a compelling, documented reason. @@ -492,6 +600,28 @@ * All dependencies in this codebase are mandatory * Standard library imports are negligible cost +* **No Kitchen-Sink `utils.py`**: Do not dump unrelated helpers into a single + `utils.py` file. Instead, organise shared utilities into focused modules + grouped by domain: + + ``` + utils/ + ├── __init__.py + ├── date.py # UTC date/time helpers + ├── packaging.py # PEP 503/508 name normalisation + └── text.py # String formatting helpers + ``` + + **Rules**: + * Each module in `utils/` must have a single, clear responsibility described + in its module docstring. + * If a helper is only used by one module, keep it private in that module + (prefixed with `_`). Only promote to `utils/` when a second consumer appears. + * Never create a bare `utils.py` at the package root — always use a `utils/` + package with sub-modules. + * Name the sub-module after the *domain* it serves (e.g. `date`, `packaging`, + `text`), not after the caller (e.g. ~~`prepare_helpers`~~). 
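+
+  As a minimal, hypothetical sketch of one focused sub-module (the name and
+  helper are illustrative only and are not taken from the actual `utils/`
+  package):
+
+  ```python
+  """UTC date helpers (single responsibility: release timestamps)."""
+
+  from datetime import datetime, timezone
+
+
+  def utc_today() -> str:
+      """Return today's date in UTC as YYYY-MM-DD (e.g. for changelog headings)."""
+      return datetime.now(timezone.utc).strftime('%Y-%m-%d')
+  ```
+
+  Keeping each sub-module this small makes its single responsibility obvious
+  from the module docstring alone and avoids the slow drift back toward a
+  kitchen-sink `utils.py`.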
+ ## Shell Scripts Reference @@ -1713,25 +1843,9 @@ For PRs that add new plugins or modify system architecture, include ASCII diagra ### Data Flow Diagrams -For PRs involving data processing or multi-step operations: - -``` -Data Flow:: - - User Request - │ - ▼ - ┌─────────┐ ┌─────────┐ ┌─────────┐ - │ Flow │ ──▶ │ Model │ ──▶ │ Tool │ - │ (span) │ │ (span) │ │ (span) │ - └─────────┘ └─────────┘ └─────────┘ - │ │ │ - └───────────────┼───────────────┘ - ▼ - ┌─────────────┐ - │ Exporter │ ──▶ Cloud Backend - └─────────────┘ -``` +For PRs involving data processing or multi-step operations, include ASCII data +flow diagrams. See the "Docstrings > Data Flow Diagram" section above for the +standard format and examples. ### PR Template Examples @@ -2043,30 +2157,6 @@ When implementing model provider plugins: 4. **Support all provider-specific parameters** (reasoning\_effort, thinking, etc.) 5. **Handle model-specific restrictions** (e.g., grok-4 doesn't support frequency\_penalty) -### Verification Workflow - -When reviewing or updating a plugin: - -``` -1. Read the plugin's current implementation - └── Focus on: env vars, model names, API params, auth - -2. Search provider's official documentation - └── Find: current model list, env var names, API reference - -3. Compare and document differences - └── Create: table of inconsistencies found - -4. Fix issues and update documentation - └── Update: code, docstrings, README, GEMINI.md - -5. Run linter and type checkers - └── Verify: bin/lint passes with 0 errors - -6. Update PR description with verification status - └── Include: table showing what was verified -``` - ### Python Version Compatibility When using features from newer Python versions: @@ -2090,53 +2180,9 @@ When using features from newer Python versions: ## Session Learning Documentation -**IMPORTANT**: At the end of each development session, document new learnings, -patterns, and insights discovered during the session into this file (`py/GEMINI.md`). - -This creates a feedback loop where: - -1. Future sessions benefit from past learnings -2. Patterns and best practices are captured permanently -3. Common issues and their solutions are documented -4. The guidelines evolve based on real-world experience - -### What to Document - -After completing tasks in a session, add relevant learnings to appropriate sections: - -| Learning Type | Where to Add | Example | -|---------------|--------------|---------| -| **New provider env vars** | "Official Environment Variables by Provider" table | `MISTRAL_API_KEY` for Mistral AI | -| **Plugin verification findings** | "Provider Documentation Links" table | New provider docs URLs | -| **Authentication patterns** | "Telemetry Plugin Authentication Patterns" | New auth header formats | -| **Common issues** | "Common Issues Found During Verification" | Model prefix mistakes | -| **Python compatibility** | "Python Version Compatibility" | New conditional imports | -| **New coding patterns** | Appropriate section or create new | Reusable patterns discovered | - -### Session Documentation Workflow - -``` -1. Complete development tasks - └── Implement features, fix bugs, verify plugins - -2. Identify learnings worth preserving - └── New patterns, gotchas, best practices, provider details - -3. Update py/GEMINI.md with learnings - └── Add to existing sections or create new ones - -4. 
Commit changes - └── Include GEMINI.md updates in the commit -``` - -### Example Learnings to Document - -* Added Mistral AI and Hugging Face to provider documentation table -* Documented that `@hf/` prefix is used for Hugging Face-hosted models on Cloudflare -* Added OpenRouter as a viable option via `compat-oai` plugin -* Sentry uses `x-sentry-auth` header format, not standard Bearer token -* Grafana Cloud requires Base64 encoding of `user_id:api_key` -* Added structured logging pattern for trace correlation +Document new learnings, patterns, and gotchas at the end of each development +session. Add to existing sections when possible; create new subsections only +when the topic is genuinely new. ### Session Learnings (2026-02-01): Mistral AI and Hugging Face Plugins @@ -2776,111 +2822,6 @@ classifiers = [ ] ``` -### CHANGELOG Format - -Follow [Keep a Changelog](https://keepachangelog.com/) format: - -```markdown -## [X.Y.Z] - YYYY-MM-DD - -### Added -- New features - -### Changed -- Changes to existing functionality - -### Deprecated -- Features to be removed in future - -### Removed -- Removed features - -### Fixed -- Bug fixes - -### Security -- Security fixes -``` - -### Session Learnings (2026-02-03): HTTP Client Event Loop Binding - -#### The Problem: `httpx.AsyncClient` Event Loop Binding - -`httpx.AsyncClient` instances are **bound to the event loop** they were created in. -This causes issues in production when: - -1. **Different event loops**: The client was created in one event loop but is used - in another (common in test frameworks, async workers, or web servers) -2. **Closed event loops**: The original event loop was closed but the client is - still being used - -**Error message**: `RuntimeError: Event loop is closed` or -`RuntimeError: cannot schedule new futures after interpreter shutdown` - -#### The Solution: Per-Event-Loop Client Caching - -Created `genkit.core.http_client` module with a shared utility: - -```python -from genkit.core.http_client import get_cached_client - -# Get or create cached client for current event loop -client = get_cached_client( - cache_key='my-plugin', - headers={'Authorization': 'Bearer token'}, - timeout=60.0, -) -``` - -**Key design decisions**: - -| Decision | Rationale | -|----------|-----------| -| **WeakKeyDictionary** | Automatically cleanup when event loop is GC'd | -| **Two-level cache** | `loop -> cache_key -> client` allows multiple configs per loop | -| **Per-request auth** | For expiring tokens (GCP), pass headers in request not client | -| **Static auth in client** | For static API keys (Cloudflare), include in cached client | - -#### Plugins Updated - -| Plugin | Change | -|--------|--------| -| **cloudflare-workers-ai** | Refactored `_get_client()` to use `get_cached_client()` | -| **google-genai/rerankers** | Changed from `async with httpx.AsyncClient()` to cached client | -| **google-genai/evaluators** | Changed from `async with httpx.AsyncClient()` to cached client | - -#### When to Use Which Pattern - -| Pattern | Use When | -|---------|----------| -| `async with httpx.AsyncClient()` | One-off requests, infrequent calls | -| `get_cached_client()` | Frequent requests, performance-critical paths | -| Client stored at init | **Never** - causes event loop binding issues | - -#### Testing Cached Clients - -When mocking HTTP clients in tests, mock `get_cached_client` instead of -`httpx.AsyncClient`: - -```python -from unittest.mock import AsyncMock, patch - -@patch('my_module.get_cached_client') -async def 
test_api_call(mock_get_client): - mock_client = AsyncMock() - mock_client.post = AsyncMock(return_value=mock_response) - mock_client.is_closed = False - mock_get_client.return_value = mock_client - - result = await my_api_call() - - mock_client.post.assert_called_once() -``` - -#### Related Issue - -* GitHub Issue: [#4420](https://github.com/firebase/genkit/issues/4420) - ### Session Learnings (2026-02-04): Release PRs and Changelogs When drafting release PRs and changelogs, follow these guidelines to create @@ -2914,111 +2855,10 @@ Use this checklist when drafting a release PR: #### Automated Release Documentation Validation -Run this validation script before finalizing any release documentation. Save as -`py/bin/validate_release_docs` and run before committing: - -```bash -#!/bin/bash -# Release Documentation Validator -# Run from py/ directory: ./bin/validate_release_docs - -set -e -echo "=== Release Documentation Validation ===" -ERRORS=0 - -# 1. Check branding: No "Firebase Genkit" references -echo -n "Checking branding (no 'Firebase Genkit')... " -if grep -ri "Firebase Genkit" engdoc/ .github/PR_DESCRIPTION_*.md CHANGELOG.md 2>/dev/null; then - echo "FAIL: Found 'Firebase Genkit' - use 'Genkit' instead" - ERRORS=$((ERRORS + 1)) -else - echo "OK" -fi - -# 2. Check for non-existent plugins -echo -n "Checking plugin names... " -FAKE_PLUGINS="genkit-plugin-aim|genkit-plugin-firestore" -if grep -rE "$FAKE_PLUGINS" engdoc/ .github/PR_DESCRIPTION_*.md CHANGELOG.md 2>/dev/null; then - echo "FAIL: Found non-existent plugin names" - ERRORS=$((ERRORS + 1)) -else - echo "OK" -fi - -# 3. Check for unshipped features (DAP, MCP unless actually shipped) -echo -n "Checking for unshipped features... " -UNSHIPPED="Dynamic Action Provider|DAP factory|MCP resource|action_provider" -if grep -rE "$UNSHIPPED" engdoc/ .github/PR_DESCRIPTION_*.md CHANGELOG.md 2>/dev/null; then - echo "WARN: Found references to features that may not be shipped yet" - # Not a hard error, just a warning -else - echo "OK" -fi - -# 4. Check blog code syntax errors -echo -n "Checking blog code syntax... " -WRONG_PATTERNS='response\.text\(\)|output_schema=|asyncio\.run\(main|from genkit import Genkit[^.]' -if grep -rE "$WRONG_PATTERNS" engdoc/blog-*.md 2>/dev/null; then - echo "FAIL: Found incorrect API patterns in blog code examples" - ERRORS=$((ERRORS + 1)) -else - echo "OK" -fi - -# 5. Verify all mentioned plugins exist -echo -n "Verifying plugin references... " -for plugin in $(grep -ohE 'genkit-plugin-[a-z-]+' engdoc/ .github/PR_DESCRIPTION_*.md CHANGELOG.md 2>/dev/null | sort -u); do - dir_name=$(echo "$plugin" | sed 's/genkit-plugin-//') - if [ ! -d "plugins/$dir_name" ]; then - echo "FAIL: Plugin $plugin does not exist (no plugins/$dir_name/)" - ERRORS=$((ERRORS + 1)) - fi -done -echo "OK" - -# 6. Check contributor GitHub links are properly formatted -echo -n "Checking contributor links... " -if grep -E '\[@[a-zA-Z0-9]+\]' .github/PR_DESCRIPTION_*.md CHANGELOG.md 2>/dev/null | \ - grep -vE '\[@[a-zA-Z0-9_-]+\]\(https://github\.com/[a-zA-Z0-9_-]+\)' 2>/dev/null; then - echo "WARN: Some contributor links may not have GitHub URLs" -else - echo "OK" -fi - -# 7. Verify imports work -echo -n "Checking Python imports... " -if python -c "from genkit.ai import Genkit, Output; print('OK')" 2>/dev/null; then - : -else - echo "WARN: Could not verify imports (genkit may not be installed)" -fi - -# Summary -echo "" -echo "=== Validation Complete ===" -if [ $ERRORS -gt 0 ]; then - echo "FAILED: $ERRORS error(s) found. 
Fix before releasing." - exit 1 -else - echo "PASSED: All checks passed!" - exit 0 -fi -``` - -**Quick manual validation commands:** - -```bash -# All-in-one check for common issues -cd py && grep -rE \ - 'Firebase Genkit|genkit-plugin-aim|response\.text\(\)|DAP factory|output_schema=' \ - engdoc/ .github/PR_DESCRIPTION_*.md CHANGELOG.md - -# List all plugin references and verify they exist -for p in $(grep -ohE 'genkit-plugin-[a-z-]+' CHANGELOG.md | sort -u); do - d=$(echo $p | sed 's/genkit-plugin-//'); - [ -d "plugins/$d" ] && echo "✓ $p" || echo "✗ $p (not found)"; -done -``` +Run `py/bin/validate_release_docs` before finalizing release documentation. It +checks branding ("Genkit" not "Firebase Genkit"), non-existent plugin names, +unshipped feature references, blog code syntax, contributor link formatting, +and import validity. #### Key Principles @@ -3037,11 +2877,6 @@ done Major releases may include a blog article (e.g. in the PR description or an external blog). -**Branding Note**: The project is called **"Genkit"** (not "Firebase Genkit"). While the -repository is hosted at `github.com/firebase/genkit` and some blog posts may be published -on the Firebase blog, the product name is simply "Genkit". Use this consistently in all -documentation, blog articles, and release notes. - **Required Sections:** 1. **Headline**: "Genkit Python SDK X.Y.Z: [Catchy Subtitle]" 2. **Stats paragraph**: Commits, files changed, contributors, PRs @@ -3055,57 +2890,19 @@ documentation, blog articles, and release notes. 10. **What's Next**: Roadmap items 11. **Get Involved**: Community links -**Code Example Accuracy Checklist:** - -Before publishing, verify ALL code examples match the actual API: +**Code Example Accuracy** — verify ALL examples match the actual API: | Pattern | Correct | Wrong | |---------|---------|-------| | Text response | `response.text` | `response.text()` | | Structured output | `output=Output(schema=Model)` | `output_schema=Model` | | Dynamic tools | `ai.dynamic_tool(name, fn, description=...)` | `@ai.action_provider()` | -| Partials | `ai.define_partial('name', 'template')` | `Dotprompt.define_partial()` | | Main function | `ai.run_main(main())` | `asyncio.run(main())` | | Genkit init | Module-level `ai = Genkit(...)` | Inside `async def main()` | | Imports | `from genkit.ai import Genkit, Output` | `from genkit import Genkit` | -**Verify examples against actual samples:** - -```bash -# Check API patterns in existing samples -grep -r "response.text" py/samples/*/src/main.py | head -5 -grep -r "Output(schema=" py/samples/*/src/main.py | head -5 -grep -r "ai.run_main" py/samples/*/src/main.py | head -5 -``` - -**Verify blog code syntax matches codebase patterns:** - -CRITICAL: Before publishing any blog article, extract and validate ALL code snippets -against the actual codebase to ensure they would compile/run correctly. 
- -```bash -# Verify import statements match actual module structure -python -c "from genkit.ai import Genkit, Output; print('Imports OK')" - -# Check that decorator patterns exist in codebase -grep -r "@ai.flow()" py/samples/*/src/main.py | head -3 -grep -r "@ai.tool()" py/samples/*/src/main.py | head -3 -``` - -**Blog Article Code Review Checklist:** - -Before finalizing a blog article, manually verify: - -| Check | How to Verify | -|-------|---------------| -| No `response.text()` | Search for `\.text\(\)` - should find nothing | -| Correct Output usage | Search for `output=Output(schema=` | -| Module-level Genkit | `ai = Genkit(...)` outside any function | -| `ai.run_main()` used | Not `asyncio.run()` for samples with Dev UI | -| Pydantic imports | `from pydantic import BaseModel, Field` | -| Tool decorator | `@ai.tool()` with Pydantic input schema | -| Flow decorator | `@ai.flow()` for async functions | -| No fictional features | DAP, MCP, etc. - only document shipped features | +Cross-check against actual samples: `grep -r "pattern" py/samples/*/src/main.py`. +Only document shipped features — never reference DAP, MCP, etc. unless actually shipped. **Verify plugin names exist before documenting:** @@ -3164,265 +2961,41 @@ Follow [Keep a Changelog](https://keepachangelog.com/) format with these section #### Gathering Release Statistics -Use these commands to gather comprehensive release statistics: - ```bash -# Count commits since previous version -git log "genkit-python@0.4.0"..HEAD --oneline -- py/ | wc -l - -# Get contributors by commit count -git log "genkit-python@0.4.0"..HEAD --pretty=format:"%an" -- py/ | sort | uniq -c | sort -rn - -# Get commits by each contributor with messages -git log "genkit-python@0.4.0"..HEAD --pretty=format:"%an|%s" -- py/ - -# Get PR counts by contributor (requires gh CLI) -gh pr list --repo firebase/genkit --state merged \ - --search "label:python merged:>=2025-05-26" \ - --json author,number,title --limit 200 | \ - jq -r '.[].author.login' | sort | uniq -c | sort -rn - -# Get files changed count -git diff --stat "genkit-python@0.4.0"..HEAD -- py/ | tail -1 -``` - -#### Contributor Acknowledgment Table - -Include a detailed contributors table with both PRs and commits: - -```markdown -### Contributors - -This release includes contributions from **N developers** across **M PRs**. -Thank you to everyone who contributed! - -| Contributor | PRs | Commits | Key Contributions | -|-------------|-----|---------|-------------------| -| **Name** | 91 | 93 | Core framework, type safety, plugins | -| **Name** | 42 | 42 | Resource support, samples | -... - -**[external/repo](https://github.com/org/repo) Contributors** (Feature integration): - -| Contributor | PRs | Key Contributions | -|-------------|-----|-------------------| -| **Name** | 42 | CI/CD improvements, release automation | -``` - -#### PR Description File - -Create a `.github/PR_DESCRIPTION_X.Y.Z.md` file for each major release: - -```markdown -# Genkit Python SDK vX.Y.Z Release - -## Overview - -Brief description of the release (one paragraph). - -## Impact Summary - -| Category | Description | -|----------|-------------| -| **New Capabilities** | Summary | -| **Breaking Changes** | Summary | -| **Performance** | Summary | - -## New Features - -### Feature Category 1 -- **Feature Name**: Description - -## Breaking Changes - -### Change 1 -**Before**: Old behavior... -**After**: New behavior... -**Migration**: How to migrate... 
- -## Critical Fixes +# Commits, contributors, and file changes since last release +git log "genkit-python@PREV"..HEAD --oneline -- py/ | wc -l +git log "genkit-python@PREV"..HEAD --pretty=format:"%an" -- py/ | sort | uniq -c | sort -rn +git diff --stat "genkit-python@PREV"..HEAD -- py/ | tail -1 -- **Fix Name**: Description (PR #) - -## Testing - -All X plugins and Y+ samples have been tested. CI runs on Python 3.10-3.14. - -## Contributors -(Same table format as CHANGELOG) - -## Full Changelog - -See [CHANGELOG.md](py/CHANGELOG.md) for the complete list of changes. +# Map git names to GitHub handles (requires gh CLI) +gh pr list --state merged --search "label:python" --json author --limit 200 \ + | jq -r '.[].author | "\(.name) -> @\(.login)"' | sort -u ``` -#### Updating the PR on GitHub - -Use gh CLI to update the PR body from the file: - -```bash -gh pr edit --body-file py/.github/PR_DESCRIPTION_X.Y.Z.md -``` +#### PR Description & Contributors -#### Key Sections to Include +Create `.github/PR_DESCRIPTION_X.Y.Z.md` for each major release. Required sections: | Section | Purpose | |---------|---------| | **Impact Summary** | Quick overview table with categories | -| **Critical Fixes** | Highlight race conditions, thread safety, security | -| **Performance** | Document speedups and optimizations | +| **Critical Fixes** | Race conditions, thread safety, security (with PR #s) | | **Breaking Changes** | Migration guide with before/after examples | | **Contributors** | Table with PRs, commits, and key contributions | -#### Commit Messages for Release Documentation - -Use conventional commits with `--no-verify` for release documentation: - -```bash -git commit --no-verify -m "docs(py): add contributor acknowledgments to changelog - -Genkit Python SDK Contributors (N developers): -- Name: Core framework, type safety -- Name: Plugins, samples -... - -External Contributors: -- Name: CI/CD improvements" -``` - -#### Highlighting Critical Information - -**When documenting fixes, emphasize:** - -1. **Race conditions**: Dev server startup, async operations -2. **Thread safety**: Event loop binding, HTTP client caching -3. **Security**: CVE/CWE references, audit results -4. **Infinite recursion**: Cycle detection, recursion limits - -**Example format:** - -```markdown -### Critical Fixes - -- **Race Condition**: Dev server startup race condition resolved (#4225) -- **Thread Safety**: Per-event-loop HTTP client caching prevents event loop - binding errors (#4419, #4429) -- **Infinite Recursion**: Cycle detection in Handlebars partial resolution -- **Security**: Path traversal hardening (CWE-22), SigV4 signing (#4402) -``` - -#### External Project Contributions - -When integrating external projects (e.g., dotprompt), include their contributors: - -```bash -# Check commits in external repo -# Navigate to GitHub contributors page or use git log on local clone -``` - -Include a separate table linking to the external repository. - -#### Contributor Profile Links and Exhaustive Contributions - -**Always include clickable GitHub profile links** for contributors in release notes: +Contributor table format — use clickable GitHub links: ```markdown | Contributor | PRs | Commits | Key Contributions | |-------------|-----|---------|-------------------| -| [**@username**](https://github.com/username) | 91 | 93 | Exhaustive list... 
| +| [**@user**](https://github.com/user) | 91 | 93 | Core framework, plugins | ``` -**Finding GitHub usernames from git log names:** - -```bash -# Get GitHub username from PR data (most reliable) -gh pr list --repo firebase/genkit --state merged \ - --search "author:USERNAME label:python" \ - --json author,title --limit 5 - -# Map git log author names to GitHub handles -gh pr list --repo firebase/genkit --state merged \ - --search "label:python" \ - --json author --limit 200 | \ - jq -r '.[].author | "\(.name) -> @\(.login)"' | sort -u -``` - -**Make key contributions exhaustive by reviewing each contributor's commits:** - -```bash -# Get detailed commits for each contributor -git log "genkit-python@0.4.0"..HEAD --pretty=format:"%s" \ - --author="Contributor Name" -- py/ | head -20 -``` - -Then summarize their work comprehensively, including: -- Specific plugins/features implemented -- Specific samples maintained -- API/config changes made -- Bug fix categories -- Documentation contributions - -**Handle cross-name contributors:** - -If a contributor uses different names in git vs GitHub (e.g., "Elisa Shen" in git but -"@MengqinShen" on GitHub), add the real name in parentheses: - -```markdown -| [**@MengqinShen**](https://github.com/MengqinShen) (Elisa Shen) | 42 | 42 | ... | -``` - -**Filter contributors by SDK:** - -For Python SDK releases, only include contributors with commits under `py/`: - -```bash -# Get ONLY Python contributors -git log "genkit-python@0.4.0"..HEAD --pretty=format:"%an" -- py/ | sort | uniq -c | sort -rn -``` - -Exclude contributors whose work is Go-only, JS-only, or infrastructure-only (unless -their infrastructure work directly benefits the Python SDK). - -#### Separate External Repository Contributors - -When the Python SDK integrates external projects (like dotprompt), add a separate -contributor table for that project with the **same columns** as the main table: - -```markdown -**[google/dotprompt](https://github.com/google/dotprompt) Contributors** (Dotprompt Python integration): - -| Contributor | PRs | Commits | Key Contributions | -|-------------|-----|---------|-------------------| -| [**@username**](https://github.com/username) | 50+ | 100+ | **Category**: Feature descriptions with PR numbers. | -| [**@contributor2**](https://github.com/contributor2) | 42 | 45 | **CI/CD**: Package publishing, release automation. | -``` - -This clearly distinguishes between core SDK contributions and external project contributions. - -#### Fast Iteration with --no-verify - -When iterating on release documentation, use `--no-verify` to skip pre-commit/pre-push -hooks for faster feedback: - -```bash -# Fast commit -git commit --no-verify -m "docs(py): update contributor tables" - -# Fast push -git push --no-verify -``` - -**Only use this for documentation-only changes** where CI verification is not critical. -For code changes, always run full verification. 
- -#### Updating PR Description on GitHub - -After updating the PR description file, push it to GitHub: - -```bash -# Update the PR body from the file -gh pr edit --body-file py/.github/PR_DESCRIPTION_X.Y.Z.md -``` +- Only include contributors with commits under `py/` +- For cross-name contributors: `@GitHubName (Real Name)` +- For external repos (e.g., dotprompt), add a separate table with same columns +- Use `--no-verify` for documentation-only commits/pushes +- Update PR body: `gh pr edit --body-file py/.github/PR_DESCRIPTION_X.Y.Z.md` ### Release Publishing Process @@ -3507,118 +3080,10 @@ pip install genkit genkit-plugin-google-genai python -c "from genkit.ai import Genkit; print('Success!')" ``` -#### v0.5.0 Release Summary - -For the v0.5.0 release specifically: - -| Metric | Value | -|--------|-------| -| **Commits** | 178 | -| **Files Changed** | 680+ | -| **Contributors** | 13 developers | -| **PRs** | 188 | -| **New PyPI Packages** | 14 (first publish) | -| **Updated PyPI Packages** | 9 (from v0.4.0) | -| **Total Packages** | 23 | - -**Packages on PyPI (existing - v0.4.0 → v0.5.0):** -- genkit, genkit-plugin-compat-oai, genkit-plugin-dev-local-vectorstore -- genkit-plugin-firebase, genkit-plugin-flask, genkit-plugin-google-cloud -- genkit-plugin-google-genai, genkit-plugin-ollama, genkit-plugin-vertex-ai - -**New Packages (first publish at v0.5.0):** -- genkit-plugin-anthropic, genkit-plugin-aws, genkit-plugin-amazon-bedrock -- genkit-plugin-cloudflare-workers-ai -- genkit-plugin-deepseek, genkit-plugin-evaluators, genkit-plugin-huggingface -- genkit-plugin-mcp, genkit-plugin-mistral, genkit-plugin-microsoft-foundry -- genkit-plugin-observability, genkit-plugin-xai - -#### Full Release Guide - -For detailed release instructions, see: -- `py/.github/PR_DESCRIPTION_0.5.0.md` - v0.5.0 PR description template -- `py/CHANGELOG.md` - Full changelog format - ### Version Consistency -All packages (core, plugins, and samples) must have the same version. Use these scripts: - -```bash -# Check version consistency -./bin/check_versions - -# Fix version mismatches -./bin/check_versions --fix -# or -./bin/bump_version 0.5.0 - -# Bump to next version -./bin/bump_version --minor # 0.5.0 -> 0.6.0 -./bin/bump_version --patch # 0.5.0 -> 0.5.1 -./bin/bump_version --major # 0.5.0 -> 1.0.0 -``` - -The `bump_version` script dynamically discovers all packages: -- `packages/genkit` (core) -- `plugins/*/` (all plugins) -- `samples/*/` (all samples) - -### Shell Script Linting - -All shell scripts in `bin/` and `py/bin/` must pass `shellcheck`. This is enforced -by `bin/lint` and `py/bin/release_check`. - -```bash -# Run shellcheck on all scripts -shellcheck bin/* py/bin/* - -# Install shellcheck if not present -brew install shellcheck # macOS -apt install shellcheck # Debian/Ubuntu -``` - -**Common shellcheck fixes:** -- Use `"${var}"` instead of `$var` for safer expansion -- Add `# shellcheck disable=SC2034` for intentionally unused variables -- Use `${var//search/replace}` instead of `echo "$var" | sed 's/search/replace/'` - -### Shell Script Standards - -All shell scripts must follow these standards: - -**1. Shebang Line (line 1):** -```bash -#!/usr/bin/env bash -``` -- Use `#!/usr/bin/env bash` for portability (not `#!/bin/bash`) -- Must be the **first line** of the file (before license header) - -**2. Strict Mode:** -```bash -set -euo pipefail -``` -- `-e`: Exit immediately on command failure -- `-u`: Exit on undefined variable usage -- `-o pipefail`: Exit on pipe failures - -**3. 
Verification Script:** -```bash -# Check all scripts for proper shebang and pipefail -for script in bin/* py/bin/*; do - if [ -f "$script" ] && file "$script" | grep -qE "shell|bash"; then - shebang=$(head -1 "$script") - if [[ "$shebang" != "#!/usr/bin/env bash" ]]; then - echo "❌ SHEBANG: $script" - fi - if ! grep -q "set -euo pipefail" "$script"; then - echo "❌ PIPEFAIL: $script" - fi - fi -done -``` - -**Exception:** `bin/install_cli` intentionally omits `pipefail` as it's a user-facing -install script that handles errors differently for better user experience. +See "Plugin Version Sync" in the Code Quality section and "Version Bumping" +above for version management details. ## Code Reviewer Preferences diff --git a/py/bin/fix_missing_test_docstrings.py b/py/bin/fix_missing_test_docstrings.py new file mode 100644 index 0000000000..524325bfd7 --- /dev/null +++ b/py/bin/fix_missing_test_docstrings.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Add missing docstrings to test methods and private functions (fixes D102/D103). + +Uses the AST to find methods/functions without docstrings and inserts a +docstring derived from the function name. The script is idempotent — running +it twice produces the same result. + +Usage: + python py/bin/fix_missing_test_docstrings.py [...] + +Examples: + # Fix all test files under releasekit tests + python py/bin/fix_missing_test_docstrings.py py/tools/releasekit/tests/ + + # Fix a single file + python py/bin/fix_missing_test_docstrings.py py/tools/releasekit/tests/backends/rk_pm_cargo_test.py +""" + +from __future__ import annotations + +import ast +import sys +from pathlib import Path + + +def _name_to_docstring(name: str) -> str: + """Convert a function/method name to a human-readable docstring. + + Examples: + test_build_dry_run -> "Test build dry run." + _fake_run_command -> "Fake run command." + test_returns_true_on_200 -> "Test returns true on 200." + """ + # Strip leading underscores + clean = name.lstrip('_') + # Replace underscores with spaces + words = clean.replace('_', ' ') + # Capitalize first letter, add period + if words: + words = words[0].upper() + words[1:] + if not words.endswith('.'): + words += '.' + return words + + +def _has_docstring(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool: + """Check if a function/method already has a docstring.""" + if not node.body: + return False + first = node.body[0] + if isinstance(first, ast.Expr) and isinstance(first.value, ast.Constant) and isinstance(first.value.value, str): + return True + return False + + +def _get_indent(source_lines: list[str], lineno: int) -> str: + """Get the indentation of the first statement in a function body. + + lineno is 1-indexed (from AST). + """ + # The body starts after the def line. Find the first non-empty body line. 
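+    # The caller passes node.body[0].lineno (1-indexed), so subtracting one
+    # below indexes the first body statement directly in source_lines.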
+ idx = lineno - 1 # Convert to 0-indexed + if idx < len(source_lines): + line = source_lines[idx] + return line[: len(line) - len(line.lstrip())] + return ' ' + + +def fix_file(path: Path) -> int: + """Add missing docstrings to functions/methods in a single file. + + Returns the number of docstrings added. + """ + source = path.read_text(encoding='utf-8') + tree = ast.parse(source) + lines = source.splitlines(keepends=True) + + # Collect insertion points (line number, indent, docstring text) + # We process in reverse order so line numbers stay valid after insertions. + insertions: list[tuple[int, str, str]] = [] + + for node in ast.walk(tree): + if not isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + continue + if _has_docstring(node): + continue + if not node.body: + continue + + # Get the line of the first body statement (1-indexed) + body_line = node.body[0].lineno + indent = _get_indent(lines, body_line) + docstring = _name_to_docstring(node.name) + insertions.append((body_line, indent, docstring)) + + if not insertions: + return 0 + + # Sort by line number descending so we can insert without shifting earlier indices + insertions.sort(key=lambda x: x[0], reverse=True) + + for body_line, indent, docstring in insertions: + idx = body_line - 1 # Convert to 0-indexed + docstring_line = f'{indent}"""{docstring}"""\n' + lines.insert(idx, docstring_line) + + path.write_text(''.join(lines), encoding='utf-8') + return len(insertions) + + +def main() -> None: + """Entry point.""" + if len(sys.argv) < 2: + print(f'Usage: {sys.argv[0]} [ ...]') # noqa: T201 + print(' can be a file or directory (recurses into *_test.py files)') # noqa: T201 + sys.exit(1) + + total = 0 + for arg in sys.argv[1:]: + p = Path(arg) + if p.is_file(): + files = [p] + elif p.is_dir(): + files = sorted(p.rglob('*_test.py')) + else: + print(f'Warning: {p} does not exist, skipping') # noqa: T201 + continue + + for f in files: + count = fix_file(f) + if count: + print(f' {f}: added {count} docstring(s)') # noqa: T201 + total += count + + print(f'Total: {total} docstring(s) added') # noqa: T201 + + +if __name__ == '__main__': + main() diff --git a/py/docs/WHY_MONOREPO.md b/py/docs/WHY_MONOREPO.md index 2291150cd5..00cc75b5d1 100644 --- a/py/docs/WHY_MONOREPO.md +++ b/py/docs/WHY_MONOREPO.md @@ -179,6 +179,14 @@ has dozens of edge cases that will bite you if you try to do it with shell scripts or manual processes. Every feature exists because we hit the bug it prevents. +Releasekit is **ecosystem-agnostic** — it supports Python (uv), JS +(pnpm), Go, Dart (pub), Java/Kotlin (Maven/Gradle), Clojure +(Leiningen/tools.deps), and Rust (Cargo). Each ecosystem has a +workspace backend that discovers packages, a package manager backend +that builds/publishes, and a registry backend that verifies +availability. Adding a new ecosystem means implementing three +protocols — no changes to the core orchestration. + ## The Release Process A single release involves: @@ -410,36 +418,34 @@ in production. Every module exists because a release failed without it. 
## Releasekit Architecture -``` -releasekit/ -├── preflight.py # 12 safety checks before anything starts -├── prepare.py # Version bumps, changelogs, release PR -├── release.py # Tag creation, platform releases -├── publisher.py # Build → pin → publish → verify pipeline -├── scheduler.py # Dependency-triggered concurrent dispatch -├── pin.py # Ephemeral pyproject.toml rewriting -├── graph.py # DAG construction, cycle detection, topo sort -├── state.py # Crash-safe per-package status tracking -├── tags.py # Per-package + umbrella tag management -├── changelog.py # Conventional commit → changelog sections -├── release_notes.py # Umbrella release notes generation -├── commitback.py # Post-release .dev0 version bumping -├── versioning.py # Semantic version computation -├── workspace.py # Package discovery and metadata parsing -├── config.py # TOML config parsing and validation -├── errors.py # Structured error codes with hints -├── ui.py # Rich progress UI for terminal output -├── doctor.py # Diagnostic self-checks -└── backends/ # Pluggable backends (git, uv, pnpm, gh, PyPI, npm) -``` - Every module is tested with fake backends (no network, no git, no -registry calls in tests). The entire test suite runs in seconds. +registry calls in tests). The backends are pluggable via protocols: +VCS (Git, Mercurial), PackageManager (uv, pnpm, Go, Dart, Maven, +Cargo), Registry (PyPI, npm, Go proxy, pub.dev, Maven Central, +crates.io), Forge (GitHub, GitLab, Bitbucket), and Workspace +(package discovery per ecosystem). **If you're tempted to replace releasekit with shell scripts, read Part II again first.** The edge cases are real, and they will find you in production at the worst possible time. +## Release Tool Invariants + +These are **hard requirements** — violations are P0 bugs. Tests live +in `tests/rk_invariants_test.py`. + +| Key | Invariant | One-liner | +|-----|-----------|----------| +| `INV-IDEMPOTENCY` | Idempotency | Re-running a command is always safe | +| `INV-CRASH-SAFETY` | Crash Safety | Interrupted releases resume without re-publishing | +| `INV-ATOMICITY` | Atomicity | Each publish fully succeeds or fully fails | +| `INV-DETERMINISM` | Determinism | Same inputs always produce same outputs | +| `INV-OBSERVABILITY` | Observability | Every action emits structured logs | +| `INV-DRY-RUN` | Dry-Run Fidelity | `--dry-run` exercises real code paths | +| `INV-GRACEFUL-DEGRADATION` | Graceful Degradation | Missing optional components degrade to no-ops | +| `INV-TOPO-ORDER` | Topological Correctness | Packages publish in dependency order | +| `INV-SUPPLY-CHAIN` | Supply Chain Integrity | Published artifacts are verified against checksums | + --- # A Note from the Team diff --git a/py/pyproject.toml b/py/pyproject.toml index ad8609db24..9d25d12d38 100644 --- a/py/pyproject.toml +++ b/py/pyproject.toml @@ -117,8 +117,11 @@ lint = [ # imports in tools/releasekit/. The tool has its own venv but the # type checkers run at workspace scope. 
"diagnostic>=3.0.0", + "opentelemetry-api>=1.20.0", + "opentelemetry-sdk>=1.20.0", "packaging>=24.0", "rich-argparse>=1.6.0", + "sigstore>=3.0.0", "tomlkit>=0.13.0", ] diff --git a/py/samples/provider-google-genai-vertexai-image/src/main.py b/py/samples/provider-google-genai-vertexai-image/src/main.py index a4212a4edd..6c37082245 100755 --- a/py/samples/provider-google-genai-vertexai-image/src/main.py +++ b/py/samples/provider-google-genai-vertexai-image/src/main.py @@ -104,7 +104,7 @@ async def main() -> None: return # pyrefly: ignore[missing-attribute] - MediaModel has url attribute media = result.message.content[0].root.media - media_url = media.url if media and hasattr(media, 'url') else '' + media_url: str = getattr(media, 'url', '') if media else '' if not media_url: return # Extract base64 data after the comma in "data:image/png;base64,..." diff --git a/py/samples/web-endpoints-hello/GEMINI.md b/py/samples/web-endpoints-hello/GEMINI.md index 7ec47e7551..8f2d32f7e6 100644 --- a/py/samples/web-endpoints-hello/GEMINI.md +++ b/py/samples/web-endpoints-hello/GEMINI.md @@ -319,6 +319,90 @@ See the module docstrings in each file for detailed rationale. ## Code Quality +- **Configurability Over Hardcoding**: All tools, scripts, and libraries MUST be + configurable rather than hardcoded. This is a hard design requirement that applies + to URLs, API endpoints, file paths, thresholds, timeouts, and any other value + that a user or CI environment might need to override. + + - **Never hardcode URLs** — use constructor parameters, config fields, environment + variables, or CLI flags. Every URL that appears as a string literal must also be + overridable (e.g. `base_url` parameter with a sensible default). + - **Expose constants as class attributes** — use `DEFAULT_BASE_URL` / `TEST_BASE_URL` + patterns so users can reference well-known values without string literals. + - **No magic constants in business logic** — extract thresholds, retry counts, + pool sizes, and timeouts into named constants or config fields with docstrings + explaining the default value. + - **Priority order** (highest wins): + `CLI flag > environment variable > config file > class/struct default` + + This principle ensures that every component can be tested against staging/local + services, used in air-gapped environments, and adapted to non-standard + infrastructure without code changes. + +- **No Kitchen-Sink `utils.py`**: Do not dump unrelated helpers into a single + `utils.py` file. Instead, organise shared utilities into focused modules + grouped by domain: + + ``` + utils/ + ├── __init__.py + ├── date.py # UTC date/time helpers + ├── packaging.py # PEP 503/508 name normalisation + └── text.py # String formatting helpers + ``` + + **Rules**: + - Each module in `utils/` must have a single, clear responsibility described + in its module docstring. + - If a helper is only used by one module, keep it private in that module + (prefixed with `_`). Only promote to `utils/` when a second consumer appears. + - Never create a bare `utils.py` at the package root — always use a `utils/` + package with sub-modules. + - Name the sub-module after the *domain* it serves (e.g. `date`, `packaging`, + `text`), not after the caller (e.g. ~~`prepare_helpers`~~). + +- **Fixer Scripts Over Shell Eval**: When fixing lint errors, formatting issues, + or performing bulk code transformations, **always write a dedicated fixer script** + instead of evaluating code snippets or one-liners at the shell. This is a hard + requirement. 
+ + - **Never `eval` or `exec` strings at the command line** to fix code. Shell + one-liners with `sed`, `awk`, `perl -pi -e`, or `python -c` are fragile, + unreviewable, and unreproducible. They also bypass linting and type checking. + - **Write a Python fixer script** (e.g. `py/bin/fix_*.py`) that uses the `ast` + module or `libcst` for syntax-aware transformations. Text-based regex fixes + are acceptable only for non-Python files (TOML, YAML, Markdown). + - **Prefer AST-based transforms** over regex for Python code. The `ast` module + can parse, inspect, and rewrite Python source without breaking syntax. Use + `ast.parse()` + `ast.NodeVisitor`/`ast.NodeTransformer` for structural changes. + Use `libcst` when you need to preserve comments and whitespace. + - **Use `ruff check --fix`** for auto-fixable lint rules before writing custom + fixers. Ruff can auto-fix many categories (unused imports, formatting, simple + refactors). Only write a custom fixer for issues Ruff cannot auto-fix. + - **Fixer scripts must be idempotent** — running them twice produces the same + result. This allows safe re-runs and CI integration. + - **Commit fixer scripts** to the repo (in `py/bin/`) so the team can re-run + them and review the transformation logic. + +- **Rust-Style Errors with Hints**: Every user-facing error MUST follow the Rust + compiler's diagnostic style: a **machine-readable error code**, a **human-readable + message**, and an actionable **hint** that tells the user (or an AI agent) exactly + how to fix the problem. + + **Rules**: + - Every custom exception raise MUST include a non-empty `hint` (or equivalent + guidance field). A raise site without a hint is a bug. + - The `hint` must be **actionable** — it tells the reader what to do, not just + what went wrong. Good: `"Run 'git fetch --unshallow' to fetch full history."` + Bad: `"The repository is shallow."` (that's the message, not a hint). + - Error codes should use a `PREFIX-NAMED-KEY` format (e.g. `RK-CONFIG-NOT-FOUND`, + `GK-PLUGIN-NOT-FOUND`). Define codes as enums, not raw strings. + + **Why hints matter**: Hints are the single most important part of an error for + both humans and AI agents. An AI reading a hint can self-correct without + needing to understand the full codebase. A human reading a hint can fix the + issue without searching docs. Treat a missing hint as a P1 bug. 
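+
+  A minimal sketch of the pattern (the `ErrorCode` enum and `AppError` class
+  below are illustrative names for this sample, not an existing API):
+
+  ```python
+  # Hypothetical example: ErrorCode and AppError are not real genkit APIs.
+  from enum import StrEnum
+
+
+  class ErrorCode(StrEnum):
+      """Machine-readable error codes in PREFIX-NAMED-KEY form."""
+
+      CONFIG_NOT_FOUND = 'APP-CONFIG-NOT-FOUND'
+
+
+  class AppError(Exception):
+      """Error carrying a code, a message, and an actionable hint."""
+
+      def __init__(self, *, code: ErrorCode, message: str, hint: str) -> None:
+          super().__init__(f'error[{code}]: {message}')
+          self.code = code
+          self.message = message
+          self.hint = hint
+
+
+  # At every raise site, supply all three fields; a missing hint is a bug.
+  raise AppError(
+      code=ErrorCode.CONFIG_NOT_FOUND,
+      message='No config.toml found in the project root.',
+      hint='Copy config.example.toml to config.toml and fill in the values.',
+  )
+  ```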
+ `pyproject.toml` includes full linter and type checker configs — they work both inside the monorepo and when the sample is copied out as a standalone project: diff --git a/py/samples/web-endpoints-hello/tests/telemetry_test.py b/py/samples/web-endpoints-hello/tests/telemetry_test.py index 82418b362c..0e3322412f 100644 --- a/py/samples/web-endpoints-hello/tests/telemetry_test.py +++ b/py/samples/web-endpoints-hello/tests/telemetry_test.py @@ -76,7 +76,7 @@ async def test_health_creates_trace_span(client: AsyncClient) -> None: span = health_spans[0] if span.attributes is None: pytest.fail("Span has no attributes") - attrs = dict(span.attributes) # ty: ignore[no-matching-overload] — attr type too broad for dict() + attrs = dict(span.attributes) method = attrs.get("http.method", attrs.get("http.request.method")) if method != "GET": pytest.fail(f"Expected GET method, got {method}") diff --git a/py/tools/releasekit/GEMINI.md b/py/tools/releasekit/GEMINI.md new file mode 100644 index 0000000000..8dce354e65 --- /dev/null +++ b/py/tools/releasekit/GEMINI.md @@ -0,0 +1,4193 @@ +# Python Development Guidelines + +## Code Quality & Linting + +* **MANDATORY: Pass `bin/lint`**: Before submitting any PR, you MUST run `./bin/lint` + from the repo root and ensure it passes with 0 errors. This is a hard requirement. + PRs with lint failures will not be accepted. The lint script runs: + + * Ruff (formatting and linting) + * Ty, Pyrefly, Pyright (type checking) + * PySentry (security vulnerability scanning) + * License checks (`bin/check_license`) + * Consistency checks (`py/bin/check_consistency`) + + **Automated Consistency Checks** (`py/bin/check_consistency`): + + | Check | Description | Status | + |-------|-------------|--------| + | Python version | All packages use `requires-python = ">=3.10"` | ✅ Automated | + | Plugin version sync | All plugin versions match core framework | ✅ Automated | + | Package naming | Directory names match package names | ✅ Automated | + | Workspace completeness | All packages in `[tool.uv.sources]` | ✅ Automated | + | Test file naming | Files use `*_test.py` format | ✅ Automated | + | README files | Plugins and samples have README.md | ✅ Automated | + | LICENSE files | Publishable packages have LICENSE | ✅ Automated | + | py.typed markers | PEP 561 type hint markers exist | ✅ Automated | + | Dependency resolution | `uv pip check` passes | ✅ Automated | + | In-function imports | Imports at top of file | ✅ Automated (warning) | + | Required metadata | pyproject.toml has required fields | ✅ Automated | + | Sample run.sh | Samples have run.sh scripts | ✅ Automated | + | Hardcoded secrets | No API keys in source code | ✅ Automated | + | Typos | No spelling errors (via `typos` tool) | ✅ Automated | + | `__all__` exports | Main package has `__all__` for IDE | ✅ Automated | + | Broad type ignores | No `# type: ignore` without codes | ✅ Automated | + | Python classifiers | All packages have 3.10-3.14 classifiers | ✅ Automated | + | Namespace `__init__.py` | Plugins must not have `__init__.py` in `genkit/` or `genkit/plugins/` | ✅ Automated | + | Model conformance specs | Model plugins have `model-conformance.yaml` + `conformance_entry.py` | ✅ Automated | + + **Release Checks** (`py/bin/release_check`): + + | Check | Description | Status | + |-------|-------------|--------| + | Package metadata | All required pyproject.toml fields | ✅ Automated | + | Build verification | Packages build successfully | ✅ Automated | + | Wheel contents | py.typed and LICENSE in wheels | ✅ Automated | + 
| Twine check | Package metadata valid for PyPI | ✅ Automated | + | Dependency issues | deptry check for missing deps | ✅ Automated | + | Type checking | ty, pyrefly, pyright pass | ✅ Automated | + | Code formatting | ruff format check | ✅ Automated | + | Linting | ruff check | ✅ Automated | + | Typos | Spelling errors | ✅ Automated | + | Unit tests | pytest passes | ✅ Automated | + | Security scan | bandit/pip-audit | ✅ Automated | + | Hardcoded secrets | No API keys in code | ✅ Automated | + | License headers | Apache 2.0 headers present | ✅ Automated | + | Dependency licenses | liccheck passes | ✅ Automated | + | CHANGELOG | Current version documented | ✅ Automated | +* **Type Checkers**: Three type checkers are configured: + + * **ty** (Astral/Ruff) - Blocking, must pass with zero errors (full workspace) + * **pyrefly** (Meta) - Blocking, must pass with zero errors (full workspace) + * **pyright** (Microsoft) - Blocking, must pass with zero errors (runs on `packages/` only) + + Treat warnings as errors—do not ignore them. All three checkers run in `bin/lint`. + + **Full Coverage Required**: All type checkers must pass on the entire codebase including: + + * Core framework (`packages/genkit/`) + * Plugins (`plugins/*/`) + * Samples (`samples/*/`) + * Tests (`**/tests/`, `**/*_test.py`) + + Do not exclude or ignore any part of the codebase from type checking. + + **ty configuration**: Module discovery is configured in `py/pyproject.toml` under + `[tool.ty.environment]`. When adding new packages/plugins/samples, add their source + paths to `environment.root`. + + **pyrefly and PEP 420 Namespace Packages**: The genkit plugins use PEP 420 namespace + packages (`genkit.plugins.*`) where intermediate directories (`genkit/` and `genkit/plugins/`) + don't have `__init__.py` files. This is intentional for allowing multiple packages to + contribute to the same namespace. However, pyrefly can't resolve these imports statically. + We configure `ignore-missing-imports = ["genkit.plugins.*"]` in `pyproject.toml` to suppress + false positive import errors. At runtime, these imports work correctly because Python's + import system handles PEP 420 namespace packages natively. This is the only acceptable + import-related suppression. +* **Pass All Tests**: Ensure all unit tests pass (`uv run pytest .`). +* **Tests Required**: All new code MUST have accompanying tests. No exceptions. + PRs without tests for new functionality will not be accepted. +* **Workspace Completeness**: All plugins and samples MUST be included in + `py/pyproject.toml` under `[tool.uv.sources]`. When adding a new plugin or + sample, add it to the sources section to ensure it's properly installed in + the workspace. **This is automatically checked by `py/bin/check_consistency`.** + Verify with `uv sync` that all packages resolve correctly. +* **Naming Consistency**: Package names MUST match their directory names. + **This is automatically checked by `py/bin/check_consistency`.** + * Plugins: `plugins/{name}/` → package name `genkit-plugin-{name}` + * Samples: `samples/{name}/` → package name `{name}` + * Use hyphens (`-`) not underscores (`_`) in package names +* **Dependency Verification**: All dependencies must resolve correctly. 
Run these + checks before submitting PRs: + ```bash + # Sync workspace and verify all packages install + uv sync + + # Check for missing or incompatible dependencies + uv pip check + + # Verify license compliance + ./bin/check_license + ``` + **Import-to-Dependency Completeness** *(common error)*: + + Every non-optional `from genkit.plugins. import ...` statement in a + package's source code **MUST** have a corresponding `genkit-plugin-` + entry in that package's `pyproject.toml` `dependencies` list. This is the + most common dependency error — the code imports a plugin but the + `pyproject.toml` doesn't declare it, causing `ModuleNotFoundError` at + runtime when the package is installed standalone. + + **Example of the bug** (real case from `provider-vertex-ai-model-garden`): + ```python + # src/main.py imports VertexAI from google_genai plugin + from genkit.plugins.google_genai import VertexAI # ← needs genkit-plugin-google-genai + from genkit.plugins.vertex_ai.model_garden import ModelGardenPlugin # ← needs genkit-plugin-vertex-ai + ``` + ```toml + # pyproject.toml was MISSING genkit-plugin-google-genai + dependencies = [ + "genkit", + "genkit-plugin-vertex-ai", # ✅ present + # "genkit-plugin-google-genai", # ❌ MISSING — causes ModuleNotFoundError + ] + ``` + + **Note**: Imports inside `try/except ImportError` blocks (for optional + platform auto-detection) are exempt from this rule. + + **Dependency Best Practices**: + * Add dependencies directly to the package that uses them, not transitively + * Each plugin's `pyproject.toml` should list all packages it imports + * Use version constraints (e.g., `>=1.0.0`) to allow flexibility + * Pin exact versions only when necessary for compatibility + * Remove unused dependencies to keep packages lean +* **Python Version Consistency**: All packages MUST specify `requires-python = ">=3.10"`. + **This is automatically checked by `py/bin/check_consistency`.** + The `.python-version` file specifies `3.12` for local development, but CI tests + against Python 3.10–3.14. Scripts using `uv run` should use `--active` flag to + respect the CI matrix Python version. +* **Plugin Version Sync**: All plugin versions stay in sync with the core framework + version. **This is automatically checked by `py/bin/check_consistency`.** + * Core framework and all plugins share the same version number + * Samples can have independent versions (typically `0.1.0`) + * Use semantic versioning (MAJOR.MINOR.PATCH) + * Bump versions together during releases +* **Production Ready**: The objective is to produce production-grade code. +* **Shift Left**: Employ a "shift left" strategy—catch errors early. +* **Configurability Over Hardcoding**: All tools, scripts, and libraries MUST be + configurable rather than hardcoded. This is a hard design requirement that applies + to URLs, registry endpoints, file paths, tool names, thresholds, timeouts, and + any other value that a user or CI environment might need to override. + + **Rules**: + * **Never hardcode URLs** — use constructor parameters, config fields, environment + variables, or CLI flags. Every URL that appears as a string literal must also be + overridable (e.g. `base_url` parameter with a sensible default). + * **Expose constants as class attributes** — use `DEFAULT_BASE_URL` / `TEST_BASE_URL` + patterns so users can reference well-known values without string literals. + * **CLI flags override config files** — when both a config file field and a CLI flag + exist for the same setting, the CLI flag takes precedence. 
+ * **Config files override defaults** — dataclass/struct defaults are the last + fallback. Config file values override them. CLI flags override config files. + * **Environment variables for CI** — settings that CI pipelines commonly override + (registry URLs, tokens, pool sizes, timeouts) should be readable from environment + variables when a CLI flag is impractical. + * **No magic constants in business logic** — extract thresholds, retry counts, + pool sizes, and timeouts into named constants or config fields with docstrings + explaining the default value. + + **Priority order** (highest wins): + ``` + CLI flag > environment variable > config file > class/struct default + ``` + + **Examples**: + ```python + # WRONG — hardcoded registry URL, not overridable + class MyRegistry: + def check(self, pkg: str) -> bool: + url = f"https://registry.example.com/api/{pkg}" # ❌ Hardcoded + ... + + # CORRECT — configurable with sensible default + well-known constant + class MyRegistry: + DEFAULT_BASE_URL: str = "https://registry.example.com" + TEST_BASE_URL: str = "http://localhost:8080" + + def __init__(self, *, base_url: str = DEFAULT_BASE_URL) -> None: + self._base_url = base_url.rstrip("/") + + def check(self, pkg: str) -> bool: + url = f"{self._base_url}/api/{pkg}" # ✅ Configurable + ... + ``` + + This principle ensures that every tool can be tested against staging/local + registries, used in air-gapped environments, and adapted to non-standard + infrastructure without code changes. +* **No Kitchen-Sink `utils.py`**: Do not dump unrelated helpers into a single + `utils.py` file. Instead, organise shared utilities into focused modules + grouped by domain: + + ``` + utils/ + ├── __init__.py + ├── date.py # UTC date/time helpers + ├── packaging.py # PEP 503/508 name normalisation + └── text.py # String formatting helpers + ``` + + **Rules**: + * Each module in `utils/` must have a single, clear responsibility described + in its module docstring. + * If a helper is only used by one module, keep it private in that module + (prefixed with `_`). Only promote to `utils/` when a second consumer appears. + * Never create a bare `utils.py` at the package root — always use a `utils/` + package with sub-modules. + * Name the sub-module after the *domain* it serves (e.g. `date`, `packaging`, + `text`), not after the caller (e.g. ~~`prepare_helpers`~~). +* **Fixer Scripts Over Shell Eval**: When fixing lint errors, formatting issues, + or performing bulk code transformations, **always write a dedicated fixer script** + instead of evaluating code snippets or one-liners at the shell. This is a hard + requirement. + + **Rules**: + * **Never `eval` or `exec` strings at the command line** to fix code. Shell + one-liners with `sed`, `awk`, `perl -pi -e`, or `python -c` are fragile, + unreviewable, and unreproducible. They also bypass linting and type checking. + * **Write a Python fixer script** (e.g. `py/bin/fix_*.py`) that uses the `ast` + module or `libcst` for syntax-aware transformations. Text-based regex fixes + are acceptable only for non-Python files (TOML, YAML, Markdown). + * **Prefer AST-based transforms** over regex for Python code. The `ast` module + can parse, inspect, and rewrite Python source without breaking syntax. Use + `ast.parse()` + `ast.NodeVisitor`/`ast.NodeTransformer` for structural changes. + Use `libcst` when you need to preserve comments and whitespace. + * **Use `ruff check --fix`** for auto-fixable lint rules before writing custom + fixers. 
Ruff can auto-fix many categories (unused imports, formatting, simple + refactors). Only write a custom fixer for issues Ruff cannot auto-fix. + * **Fixer scripts must be idempotent** — running them twice produces the same + result. This allows safe re-runs and CI integration. + * **Commit fixer scripts** to the repo (in `py/bin/`) so the team can re-run + them and review the transformation logic. + + **Example — adding missing docstrings to test methods**: + ```python + #!/usr/bin/env python3 + """Add missing docstrings to test methods (fixes D102).""" + import ast + import sys + from pathlib import Path + + def fix_file(path: Path) -> int: + source = path.read_text(encoding='utf-8') + tree = ast.parse(source) + # ... walk tree, find methods without docstrings, insert them ... + path.write_text(new_source, encoding='utf-8') + return count + + for p in Path(sys.argv[1]).rglob('*_test.py'): + fix_file(p) + ``` + + **Why this matters**: Shell one-liners are invisible to code review, cannot be + tested, and often introduce subtle bugs (wrong quoting, partial matches, broken + indentation). A committed fixer script is reviewable, testable, and documents + the transformation for future maintainers. + +* **Rust-Style Errors with Hints**: Every error raised in the codebase MUST follow + the Rust compiler's diagnostic style: a **machine-readable error code**, a + **human-readable message**, and an actionable **hint** that tells the user (or + an AI agent) exactly how to fix the problem. This applies to both errors and + warnings. + + **Rules**: + * Every `ReleaseKitError` and `ReleaseKitWarning` MUST include a non-empty + `hint=` parameter. A raise site without a hint is a bug. + * The `hint` must be **actionable** — it tells the reader what to do, not just + what went wrong. Good: `"Run 'git fetch --unshallow' to fetch full history."` + Bad: `"The repository is shallow."` (that's the message, not a hint). + * Error codes use the `RK-NAMED-KEY` format (e.g. `RK-CONFIG-NOT-FOUND`). + Define new codes in `errors.py` → `ErrorCode` enum. + * Use `render_error()` / `render_warning()` from `errors.py` for CLI output. + These produce Rust-style formatted diagnostics with optional Rich coloring. + + **Output format** (rendered by `render_error`): + ``` + error[RK-CONFIG-NOT-FOUND]: No releasekit.toml found in /repo. + | + = hint: Run 'releasekit init' to generate a default configuration. + ``` + + **Code pattern**: + ```python + from releasekit.errors import E, ReleaseKitError + + raise ReleaseKitError( + code=E.CONFIG_NOT_FOUND, + message='No releasekit.toml found in /repo.', + hint="Run 'releasekit init' to generate a default configuration.", + ) + ``` + + **Why hints matter**: Hints are the single most important part of an error for + both humans and AI agents. An AI reading a hint can self-correct without + needing to understand the full codebase. A human reading a hint can fix the + issue without searching docs. Treat a missing hint as a P1 bug. +* **Strict Typing**: Strict type checking is required. Do not use `Any` unless + absolutely necessary and documented. +* **Security & Async Best Practices**: Ruff is configured with security (S), async (ASYNC), + and print (T20) rules. These catch common production issues: + + * **S rules (Bandit)**: SQL injection, hardcoded secrets, insecure hashing, etc. 
+ * **ASYNC rules**: Blocking calls in async functions (use `httpx.AsyncClient` not + `urllib.request`, use `aiofiles` not `open()` in async code) + * **T20 rules**: No `print()` statements in production code (use `logging` or `structlog`) + + **Async I/O Best Practices**: + + * Use `httpx.AsyncClient` for HTTP requests in async functions + * Use `aiofiles` for file I/O in async functions + * Never use blocking `urllib.request`, `requests`, or `open()` in async code + * If you must use blocking I/O, run it in a thread with `anyio.to_thread.run_sync()` + + **Example - Async HTTP**: + + ```python + # WRONG - blocks the event loop + async def fetch_data(url: str) -> bytes: + with urllib.request.urlopen(url) as response: # ❌ Blocking! + return response.read() + + # CORRECT - non-blocking + async def fetch_data(url: str) -> bytes: + async with httpx.AsyncClient() as client: + response = await client.get(url) + return response.content + ``` + + **Example - Async File I/O**: + + ```python + # WRONG - blocks the event loop + async def read_file(path: str) -> str: + with open(path) as f: # ❌ Blocking! + return f.read() + + # CORRECT - non-blocking + async def read_file(path: str) -> str: + async with aiofiles.open(path, encoding='utf-8') as f: + return await f.read() + ``` + + **Blocking I/O Audit Checklist**: + + When writing or reviewing async code, check for these common sources of + event-loop blocking. Each pattern looks innocent but can stall the event + loop for 50-500ms: + + | Pattern | Where it hides | Fix | + |---------|---------------|-----| + | `credentials.refresh(Request())` | Google Cloud auth, plugin init | `await asyncio.to_thread(credentials.refresh, req)` | + | `boto3.client(...)` / `client.invoke(...)` | AWS SDK calls | Use `aioboto3` with `async with session.client(...)` | + | `requests.get(url)` | Third-party HTTP in async code | Use `httpx.AsyncClient` or `get_cached_client()` | + | `pathlib.Path.open()` / `open()` | File reads/writes in async methods | Use `aiofiles.open()` | + | `json.load(open(...))` | Loading config/data in async code | `aiofiles.open()` + `json.loads(await f.read())` | + | `os.scandir()` / `os.listdir()` | Directory scanning | `await asyncio.to_thread(os.scandir, path)` | + | `subprocess.run()` | Shelling out from async code | `asyncio.create_subprocess_exec()` | + | `time.sleep(n)` | Delays in async code | `await asyncio.sleep(n)` | + + **Detection strategy**: Search for these patterns in `async def` functions: + + ```bash + # Find sync file I/O in async functions + rg -n 'open\(' --glob '*.py' | rg -v 'aiofiles' + + # Find sync HTTP in async code + rg -n 'requests\.(get|post|put)' --glob '*.py' + rg -n 'httpx\.Client\(\)' --glob '*.py' + + # Find blocking credential refresh + rg -n 'credentials\.refresh' --glob '*.py' + + # Find sync subprocess calls + rg -n 'subprocess\.(run|call|check_output)' --glob '*.py' + ``` + + **When blocking I/O is acceptable**: + + * **Startup-only code** (e.g., `load_prompt_folder()` reading small `.prompt` + files): If the I/O happens once during initialization with small files, + the latency is negligible (~1ms for a few KB). Document the choice. + * **OpenTelemetry exporters**: The OTEL SDK calls `export()` from its own + background thread via `BatchSpanProcessor`, so sync HTTP there is by design. + * **`atexit` handlers**: These run during interpreter shutdown when the event + loop is already closed. Sync I/O is the only option. + * **Sync tool functions**: Genkit's `@ai.tool()` can wrap sync functions. 
+ The framework handles thread offloading. However, prefer async tools for + network-bound operations. + + **CRITICAL: Per-Event-Loop HTTP Client Caching**: + + When making multiple HTTP requests in async code, **do NOT create a new + `httpx.AsyncClient` for every request**. This has two problems: + + 1. **Performance overhead**: Each new client requires connection setup, SSL + handshake, etc. + 2. **Event loop binding**: `httpx.AsyncClient` instances are bound to the + event loop they were created in. Reusing a client across different event + loops causes "bound to different event loop" errors. + + **Use the shared `get_cached_client()` utility** from `genkit.core.http_client`: + + ```python + from genkit.core.http_client import get_cached_client + + # WRONG - creates new client per request (connection overhead) + async def call_api(url: str) -> dict: + async with httpx.AsyncClient() as client: + response = await client.get(url) + return response.json() + + # WRONG - stores client at init time (event loop binding issues) + class MyPlugin: + def __init__(self): + self._client = httpx.AsyncClient() # ❌ Bound to current event loop! + + async def call_api(self, url: str) -> dict: + response = await self._client.get(url) # May fail in different loop + return response.json() + + # CORRECT - uses per-event-loop cached client + async def call_api(url: str, token: str) -> dict: + # For APIs with expiring tokens, pass auth headers per-request + client = get_cached_client( + cache_key='my-api', + timeout=60.0, + ) + response = await client.get(url, headers={'Authorization': f'Bearer {token}'}) + return response.json() + + # CORRECT - for static auth (API keys that don't expire) + async def call_api_static_auth(url: str) -> dict: + client = get_cached_client( + cache_key='my-plugin/api', + headers={ + 'Authorization': f'Bearer {API_KEY}', + 'Content-Type': 'application/json', + }, + timeout=60.0, + ) + response = await client.get(url) + return response.json() + ``` + + **Key patterns**: + + * **Use unique `cache_key`** for each distinct client configuration (e.g., + `'vertex-ai-reranker'`, `'cloudflare-workers-ai/account123'`) + * **Pass expiring auth per-request**: For Google Cloud, Azure, etc. where + tokens expire, pass auth headers in the request, not in `get_cached_client()` + * **Static auth in client**: For Cloudflare, OpenAI, etc. where API keys + don't expire, include auth headers in `get_cached_client()` + * **WeakKeyDictionary cleanup**: The cache automatically cleans up clients + when their event loop is garbage collected + * **Testing**: Mock `get_cached_client` instead of `httpx.AsyncClient`: + ```python + @patch('my_module.get_cached_client') + async def test_api_call(mock_get_client): + mock_client = AsyncMock() + mock_client.post = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + result = await my_api_call() + ``` + * **Related**: [#4420](https://github.com/firebase/genkit/issues/4420) +* **Security Vulnerability Checks**: Beyond Ruff's S rules, the codebase enforces + additional security invariants. ReleaseKit has an automated security test suite + (`py/tools/releasekit/tests/rk_security_test.py`) that demonstrates the pattern. 
+ Apply these checks to all Python code in the repository: + + **Automated Checks (enforced in CI via test suites)**: + + | # | Check | What It Catches | Severity | + |---|-------|-----------------|----------| + | 1 | No `shell=True` | Command injection via subprocess | Critical | + | 2 | No `pickle`/`yaml.load`/`eval`/`exec` | Arbitrary code execution via deserialization | Critical | + | 3 | No hardcoded secrets | Literal tokens, AWS keys, GitHub PATs in source | Critical | + | 4 | No `verify=False` / `CERT_NONE` | TLS certificate verification bypass | Critical | + | 5 | `NamedTemporaryFile(delete=False)` in `try/finally` | Temp file leak on exception | High | + | 6 | No bare `except:` | Swallows `KeyboardInterrupt`/`SystemExit` | Medium | + | 7 | API backends define `__repr__` | Credential leak in tracebacks/logs | High | + | 8 | Lock files use `O_CREAT\|O_EXCL` | TOCTOU race condition | High | + | 9 | No `http://` URLs in runtime code | Plaintext traffic (no TLS) | Medium | + | 10 | State files use `mkstemp` + `os.replace` | Crash corruption on partial writes | High | + | 11 | `resolve()` on discovered paths | Symlink traversal attacks | Medium | + + **Manual Review Checklist** (for PR reviews): + + | Category | What to Look For | Fix | + |----------|-----------------|-----| + | TOCTOU races | Check-then-act on files without atomic ops | `O_CREAT\|O_EXCL`, `mkstemp` + `os.replace` | + | Log injection | User data in structlog event names | Literals for event names; user data in kwargs | + | Path traversal | `Path(user_input)` without validation | `.resolve()` + verify under expected root | + | Credential logging | Objects with tokens in `log.*()` calls | `__repr__` that redacts sensitive fields | + | Subprocess args | User input in command lists | Validate inputs; never `shell=True` | + | Temp file cleanup | `NamedTemporaryFile(delete=False)` | Wrap in `try/finally` with `os.unlink` | + | Atomic writes | `write_text()` for state/config files | `mkstemp` + `os.write` + `os.replace` | + | Exception swallowing | `except Exception` hiding real errors | Log exception; re-raise if not recoverable | + | ReDoS | Regex with nested quantifiers on untrusted input | Avoid catastrophic backtracking patterns | + +* **Error Suppression Policy**: Avoid ignoring warnings from the type checker + (`# type: ignore`, `# pyrefly: ignore`, etc.) or linter (`# noqa`) unless there is + a compelling, documented reason. + * **Try to fix first**: Before suppressing, try to rework the code to avoid the + warning entirely. Use explicit type annotations, asserts for type narrowing, + local variables to capture narrowed types in closures, or refactor the logic. + * **Acceptable suppressions**: Only suppress when the warning is due to: + * Type checker limitations (e.g., StrEnum narrowing, Self type compatibility) + * External library type stub issues (e.g., uvicorn, OpenTelemetry) + * Intentional design choices (e.g., Pydantic v1 compatibility, covariant overrides) + * False positives (e.g., `S105` for enum values that look like passwords) + * Intentional behavior (e.g., `S110` for silent exception handling in parsers) + * **Minimize surface area**: Suppress on the specific line, not globally in config. + **NEVER use per-file-ignores for security rules** - always use line-level `# noqa`. + * **Always add a comment**: Explain why the suppression is needed. + * **Be specific**: Use the exact error code (e.g., `# noqa: S105 - enum value, not a password` + not just `# noqa`). 
+ * **Place `# noqa` on the exact line Ruff flags**: Ruff reports errors on the + specific line containing the violation, not the statement's opening line. For + multi-line calls, a `# noqa` comment on the wrong line is silently ignored. + + ```python + # WRONG — S607 fires on line 2 (the list literal), noqa on line 1 is ignored + proc = subprocess.run( # noqa: S603, S607 + ['uv', 'lock', '--check'], # ← Ruff flags THIS line for S607 + ... + ) + + # CORRECT — each noqa on the line Ruff actually flags + proc = subprocess.run( # noqa: S603 - intentional subprocess call + ['uv', 'lock', '--check'], # noqa: S607 - uv is a known tool + ... + ) + ``` + * **Examples**: + ```python + # Type checker suppression + # pyrefly: ignore[unexpected-keyword] - Pydantic populate_by_name=True allows schema_ + schema_=options.output.json_schema if options.output else None, + + # Security false positive - enum value looks like password + PASS_ = 'PASS' # noqa: S105 - enum value, not a password + + # Intentional silent exception handling + except Exception: # noqa: S110 - intentionally silent, parsing partial JSON + pass + + # Print in atexit handler where logger is unavailable + print(f'Removing file: {path}') # noqa: T201 - atexit handler, logger unavailable + ``` + * **Optional Dependencies**: For optional dependencies used in typing (e.g., `litestar`, + `starlette`) that type checkers can't resolve, **do NOT use inline ignore comments**. + Instead, add the dependency to the `lint` dependency group in `pyproject.toml`: + ```toml + # In pyproject.toml [project.optional-dependencies] + lint = [ + # ... other lint deps ... + "litestar>=2.0.0", # For web/typing.py type resolution + ] + ``` + This ensures type checkers can resolve the imports during CI while keeping the + package optional for runtime. +* **Import Placement**: All imports must be at the top of the file, outside any + function definitions. This is a strict Python convention that ensures: + + * Predictable module loading behavior + * Easier code review and maintenance + * Proper type checking and tooling support + + **Correct (Top-Level Imports)**: + + ```python + import asyncio + import json + import os + import random + import tempfile + from collections.abc import Callable + + from pydantic import BaseModel, Field + + from genkit.types import Media, MediaPart, Part, TextPart + + + async def main() -> None: + """Entry point - uses imports from top of file.""" + await asyncio.Event().wait() + + + def process_data(data: dict) -> str: + """Uses json from top-level import.""" + return json.dumps(data) + ``` + + **Incorrect (In-Function Imports)**: + + ```python + async def main() -> None: + """WRONG: Import inside function.""" + import asyncio # ❌ Should be at top of file + + await asyncio.Event().wait() + + + def describe_image(url: str) -> Part: + """WRONG: Import inside function.""" + from genkit.types import MediaPart # ❌ Should be at top of file + + return MediaPart(media=Media(url=url)) + ``` + + **Note**: There are NO legitimate use cases for in-function imports in this + codebase. The only traditionally acceptable reasons (circular imports, optional + dependencies, heavy import cost) do not apply here because: + + * Circular imports should be resolved through proper module design + * All dependencies in this codebase are mandatory + * Standard library imports are negligible cost + + +## Shell Scripts Reference + +The repository provides shell scripts in two locations: `bin/` for repository-wide +tools and `py/bin/` for Python-specific tools. 
+ +### Repository-Wide Scripts (`bin/`) + +Development workflow scripts at the repository root: + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ Development Workflow │ +│ │ +│ Developer │ +│ │ │ +│ ├──► bin/setup ──────► Install all tools (Go, Node, Python, Rust) │ +│ │ │ +│ ├──► bin/fmt ────────► Format code (TOML, Python, Go, TypeScript) │ +│ │ │ │ +│ │ ├──► bin/add_license ───► Add Apache 2.0 headers │ +│ │ └──► bin/format_toml_files ► Format pyproject.toml │ +│ │ │ +│ ├──► bin/lint ───────► Run all linters and type checkers │ +│ │ │ │ +│ │ └──► bin/check_license ──► Verify license headers │ +│ │ │ +│ └──► bin/killports ──► Kill processes on specific ports │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +| Script | Purpose | Usage | +|--------|---------|-------| +| `bin/setup` | Install all development tools and dependencies | `./bin/setup -a eng` (full) or `./bin/setup -a ci` (CI) | +| `bin/fmt` | Format all code (TOML, Python, Go, TS) | `./bin/fmt` | +| `bin/lint` | Run all linters and type checkers | `./bin/lint` (from repo root) | +| `bin/add_license` | Add Apache 2.0 license headers to files | `./bin/add_license` | +| `bin/check_license` | Verify license headers and compliance | `./bin/check_license` | +| `bin/format_toml_files` | Format all pyproject.toml files | `./bin/format_toml_files` | +| `bin/golang` | Run commands with specific Go version | `./bin/golang 1.22 test ./...` | +| `bin/run_go_tests` | Run Go tests | `./bin/run_go_tests` | +| `bin/killports` | Kill processes on TCP ports | `./bin/killports 3100..3105 8080` | +| `bin/update_deps` | Update all dependencies | `./bin/update_deps` | +| `bin/install_cli` | Install Genkit CLI binary | `curl -sL cli.genkit.dev \| bash` | + +### Python Scripts (`py/bin/`) + +Python-specific development and release scripts: + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ Python Development │ +│ │ +│ Developer │ +│ │ │ +│ ├──► py/bin/run_sample ────────► Interactive sample runner │ +│ │ │ +│ ├──► py/bin/run_python_tests ──► Run tests (all Python versions) │ +│ │ │ +│ └──► py/bin/check_consistency ─► Workspace consistency checks │ +│ │ +│ Release Manager │ +│ │ │ +│ ├──► py/bin/bump_version ──────► Bump version in all packages │ +│ │ │ +│ ├──► py/bin/release_check ─────► Pre-release validation │ +│ │ │ +│ ├──► py/bin/build_dists ───────► Build wheel/sdist packages │ +│ │ │ +│ ├──► py/bin/create_release ────► Create GitHub release │ +│ │ │ +│ └──► py/bin/publish_pypi.sh ───► Publish to PyPI │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +| Script | Purpose | Usage | +|--------|---------|-------| +| **Development** | | | +| `py/bin/run_sample` | Interactive sample runner with fzf/gum | `py/bin/run_sample [sample-name]` | +| `py/bin/test_sample_flows` | Test flows in a sample | `py/bin/test_sample_flows [sample-name]` | +| `py/bin/run_python_tests` | Run tests across Python versions | `py/bin/run_python_tests` | +| `py/bin/watch_python_tests` | Watch mode for tests | `py/bin/watch_python_tests` | +| `py/bin/check_consistency` | Workspace consistency checks | `py/bin/check_consistency` | +| `py/bin/check_versions` | Check version consistency | `py/bin/check_versions` | +| `py/bin/cleanup` | Clean build artifacts | `py/bin/cleanup` | +| **Code Generation** | | | +| `py/bin/generate_schema_typing` | Generate typing.py from JSON schema | `py/bin/generate_schema_typing` | +| 
**Release** | | | +| `py/bin/bump_version` | Bump version in all packages | `py/bin/bump_version 0.6.0` | +| `py/bin/release_check` | Pre-release validation suite | `py/bin/release_check` | +| `py/bin/validate_release_docs` | Validate release documentation | `py/bin/validate_release_docs` | +| `py/bin/build_dists` | Build wheel and sdist packages | `py/bin/build_dists` | +| `py/bin/create_release` | Create GitHub release | `py/bin/create_release` | +| `py/bin/publish_pypi.sh` | Publish to PyPI | `py/bin/publish_pypi.sh` | +| **Security** | | | +| `py/bin/run_python_security_checks` | Run security scanners | `py/bin/run_python_security_checks` | + +## Generated Files & Data Model + +* **Do Not Edit typing.py**: `py/packages/genkit/src/genkit/core/typing.py` + is an auto-generated file. **DO NOT MODIFY IT DIRECTLY.** +* **Generator/Sanitizer**: Any necessary transformations to the core types must be + applied to the generator script or the schema sanitizer. +* **Canonical Parity**: The data model MUST be identical to the JSON schema + defined in the JavaScript (canonical) implementation. + +## API & Behavior Parity + +* **JS Canonical Conformance**: The Python implementation MUST be identical + in API structure and runtime behavior to the JavaScript (canonical) + implementation. + +## Detailed Coding Guidelines + +### Target Environment + +* **Python Version**: Target Python 3.12 or newer. +* **Environment**: Use `uv` for packaging and environment management. + +### Typing & Style + +* **Syntax**: + * Use `|` for union types instead of `Union`. + * Use `| None` instead of `Optional`. + * Use lowercase `list`, `dict` for type hints (avoid `List`, `Dict`). + * Use modern generics (PEP 585, 695). + * Use the `type` keyword for type aliases. +* **Imports**: Import types like `Callable`, `Awaitable` from `collections.abc`, + not standard library `typing`. +* **Enums**: Use `StrEnum` instead of `(str, Enum)`. +* **Strictness**: Apply type hints strictly, including `-> None` for void functions. +* **Design**: + * Code against interfaces, not implementations. + * Use the adapter pattern for optional implementations. +* **Comments**: + * Use proper punctuation. + * Avoid comments explaining obvious code. + * Use `TODO: Fix this later.` format for stubs. + * **Do not add section marker comments** (e.g., `# ============` banners). + Keep code clean and let structure speak for itself. +* Ensure that `bin/lint` passes without errors. + +### Docstrings + +* **Format**: Write comprehensive Google-style docstrings for modules, classes, + and functions. + +* **Test Files**: All public classes, methods, and functions in test files MUST + have docstrings. This includes: + * **Test classes** (`class TestFoo:`) — describe what is being tested + * **Test methods** (`def test_bar(self):`) — describe what the test verifies + * **Fixtures** (`@pytest.fixture def bb():`) — describe what the fixture provides + * **Helper functions** (`def make_packages():`) — describe what the helper does + + Ruff enforces D101 (missing class docstring), D102 (missing method docstring), + and D103 (missing function docstring) on all public names. A name is public if + it does not start with an underscore. Prefix helpers with `_` if they are + internal to the test module and don't need a docstring. + +* **Content**: + * **Explain Concepts**: Explain the terminology and concepts used in the + code to someone unfamiliar with the code so that first timers can easily + understand these ideas. 
+ * **Visuals**: Prefer using tabular format and ascii diagrams in the + docstrings to break down complex concepts or list terminology. + * **ELI5 (Explain Like I'm 5)**: Include ELI5 documentation to help newcomers + quickly understand what a module does without reading all the code. + + **Requirements by module type:** + + | Module Type | Concepts Table | Data Flow Diagram | + |-------------|----------------|-------------------| + | **Plugins** (`plugins/*/`) | Required | Required | + | **Core packages** (`packages/*/`) | Required | Required for complex modules | + | **Samples** (`samples/*/`) | Required | Only for complex samples\* | + + \*Complex samples include: RAG/vector search demos, multi-step pipelines, + telemetry demos, tool interrupts, multi-server setups, etc. + + **1. Concepts Table** - Required for all modules: + + ``` + Key Concepts (ELI5):: + + ┌─────────────────────┬────────────────────────────────────────────────┐ + │ Concept │ ELI5 Explanation │ + ├─────────────────────┼────────────────────────────────────────────────┤ + │ Span │ A "timer" that records how long something │ + │ │ took. Like a stopwatch for one task. │ + ├─────────────────────┼────────────────────────────────────────────────┤ + │ Trace │ A collection of spans showing a request's │ + │ │ journey. Like breadcrumbs through your code. │ + ├─────────────────────┼────────────────────────────────────────────────┤ + │ Exporter │ Ships your traces somewhere (X-Ray, Jaeger). │ + │ │ Like a postal service for telemetry data. │ + ├─────────────────────┼────────────────────────────────────────────────┤ + │ Propagator │ Passes trace IDs between services. Like a │ + │ │ relay baton in a race. │ + ├─────────────────────┼────────────────────────────────────────────────┤ + │ Sampler │ Decides which traces to keep. Like a bouncer │ + │ │ at a club deciding who gets in. │ + └─────────────────────┴────────────────────────────────────────────────┘ + ``` + + **2. Data Flow Diagram** - Required for plugins, optional for simple samples: + + ``` + Data Flow:: + + User Request + │ + ▼ + ┌─────────┐ ┌─────────┐ ┌─────────┐ + │ Flow A │ ──▶ │ Model │ ──▶ │ Tool │ + │ (span) │ │ (span) │ │ (span) │ + └─────────┘ └─────────┘ └─────────┘ + │ │ │ + └───────────────┼───────────────┘ + ▼ + ┌─────────────┐ + │ Exporter │ ──▶ AWS X-Ray / GCP Trace + └─────────────┘ + ``` + + **Guidelines for ELI5 content:** + + * Use analogies from everyday life (mailman, bouncer, stopwatch, etc.) + * Keep explanations to 1-2 lines per concept + * Focus on the "what" and "why", not implementation details + * Use box-drawing characters for professional appearance + +* **Required Sections**: + * **Overview**: One-liner description followed by rationale. + * **Key Operations**: Purpose of the component. + * **Args/Attributes**: Required for callables/classes. + * **Returns**: Required for callables. + * **Examples**: Required for user-facing API. + * **Caveats**: Known limitations or edge cases. + * **Implementation Notes & Edge Cases**: For complex modules (especially plugins), + document implementation details that differ from typical patterns or other + similar implementations. Explain both **why** the edge case exists and **what** + the solution is. + +* **References**: + * Please use the descriptions from genkit.dev and + github.com/genkit-ai/docsite as the source of truth for the API and + concepts. + * When you are not sure about the API or concepts, please refer to the + JavaScript implementation for the same. 
+ +* Keep examples in documentation and docstrings simple. + +* Add links to relevant documentation on the Web or elsewhere + in the relevent places in docstrings. + +### Core Framework Patterns + +**Action Input Validation (Gotcha)** + +When implementing low-level action execution (like `arun_raw`), **always check if `raw_input` is `None`** before passing it to Pydantic's `validate_python()`. + +* **The Problem**: `validate_python(None)` raises a generic, cryptic `ValidationError` ("Input should be a valid dictionary") instead of telling you the input is missing. +* **The Fix**: Explicitly check for `None` and raise `GenkitError(status='INVALID_ARGUMENT')`. + +```python +# WRONG - raises cryptic ValidationError on None +input_action = self._input_type.validate_python(raw_input) + +# CORRECT - raises clear GenkitError +if self._input_type is not None: + if raw_input is None: + raise GenkitError( + message=f"Action '{self.name}' requires input.", + status='INVALID_ARGUMENT' + ) + input_action = self._input_type.validate_python(raw_input) +``` + +* This is critical for the Dev UI, which sends `None` payload when the user clicks "Run" without providing JSON input. + +### Documentation Best Practices + +* Add ASCII diagrams to illustrate relationships, flows, and concepts. + +* **Plugin Architecture Diagrams**: Every plugin MUST include an ASCII architecture + diagram in its module docstring (typically in `__init__.py` or `typing.py`). + This helps developers understand the plugin structure at a glance: + + ``` + ┌─────────────────────────────────────────────────────────────────────────┐ + │ Plugin Name │ + ├─────────────────────────────────────────────────────────────────────────┤ + │ Plugin Entry Point (__init__.py) │ + │ ├── plugin_factory() - Plugin factory function │ + │ ├── Model References (model_a, model_b, ...) │ + │ └── Helper Functions (name_helper, config_helper, ...) │ + ├─────────────────────────────────────────────────────────────────────────┤ + │ typing.py - Type-Safe Configuration Classes │ + │ ├── BaseConfig (base configuration) │ + │ ├── ProviderAConfig, ProviderBConfig, ... │ + │ └── Provider-specific enums and types │ + ├─────────────────────────────────────────────────────────────────────────┤ + │ plugin.py - Plugin Implementation │ + │ ├── PluginClass (registers models/embedders/tools) │ + │ └── Configuration and client initialization │ + ├─────────────────────────────────────────────────────────────────────────┤ + │ models/model.py - Model Implementation │ + │ ├── ModelClass (API integration) │ + │ ├── Request/response conversion │ + │ └── Streaming support │ + ├─────────────────────────────────────────────────────────────────────────┤ + │ models/model_info.py - Model Registry (if applicable) │ + │ ├── SUPPORTED_MODELS │ + │ └── SUPPORTED_EMBEDDING_MODELS │ + └─────────────────────────────────────────────────────────────────────────┘ + ``` + + **Guidelines for architecture diagrams**: + + * Use box-drawing characters (`┌ ┐ └ ┘ ─ │ ├ ┤ ┬ ┴ ┼`) for clean appearance + * Show file/module organization and their responsibilities + * Highlight key classes, functions, and exports + * Include model registries and configuration classes + * Keep the diagram updated when plugin structure changes + +* Always update module docstrings and function docstrings when updating code + to reflect updated reality of any file you add or modify. + +* Scan documentation for every module you edit and keep it up-to-date. + +* In sample code, always add instructions about testing the demo. 
+ +* **Document Edge Cases in Module Docstrings**: When a module handles edge cases + differently from typical patterns or other similar implementations, document + these in a dedicated "Implementation Notes & Edge Cases" section. Include: + + * **Why** the edge case exists (API limitations, platform differences, etc.) + * **What** the solution is (the implementation approach) + * **Comparison** with how other similar systems handle it (if relevant) + + Example from the AWS Bedrock plugin module docstring: + + ```python + """AWS Bedrock model implementation for Genkit. + + ... + + Implementation Notes & Edge Cases + --------------------------------- + + **Media URL Fetching (Bedrock-Specific Requirement)** + + Unlike other AI providers (Anthropic, OpenAI, Google GenAI, xAI) that accept + media URLs directly in their APIs and fetch the content server-side, AWS + Bedrock's Converse API **only accepts inline bytes**. + + This means we must fetch media content client-side before sending to Bedrock:: + + # Other providers (e.g., Anthropic): + {'type': 'url', 'url': 'https://example.com/image.jpg'} # API fetches it + + # AWS Bedrock requires: + {'image': {'format': 'jpeg', 'source': {'bytes': b'...actual bytes...'}}} + + We use ``httpx.AsyncClient`` for true async HTTP requests. This approach: + + - Uses httpx which is already a genkit core dependency + - True async I/O (no thread pool needed) + - Doesn't block the event loop during network I/O + + **JSON Output Mode (Prompt Engineering)** + + The Bedrock Converse API doesn't have native JSON mode. When JSON output is + requested, we inject instructions into the system prompt to guide the model. + """ + ``` + + This helps future maintainers understand non-obvious implementation choices + and prevents accidental regressions when the code is modified. + +### Implementation + +* Always add unit tests to improve coverage. + +* When there is a conflict between the JavaScript implementation and the + Python implementation, please refer to the JavaScript implementation for + the same. + +* When aiming to achieve parity the API and behavior should be identical to the + JS canonical implementation. + +* Always add/update samples to demonstrate the usage of the API or + functionality. + +* Use default input values for flows and actions to make them easier to use + in the DevUI so that bug bashes can be faster and more effective. + +* Support hot reloading in samples by using the `watchdog` library that + exposes a `watchmedo` command line tool. See other samples for example. + Since we want to reload examples when data files such as `.prompt` or `.pdf` + or `.json` change, please include them in the watched patterns whenever + required. + +* Add a `run.sh` script to samples that can be used to run the sample. + The script should also perform any setup required for the sample, such as + installing dependencies or setting up environment variables. 
+ +* **IMPORTANT**: The `run.sh` script MUST use this exact command structure: + + ```bash + genkit start -- \ + uv tool run --from watchdog watchmedo auto-restart \ + -d src \ + -d ../../packages \ + -d ../../plugins \ + -p '*.py;*.prompt;*.json' \ + -R \ + -- uv run src/main.py "$@" + ``` + + Key points: + + * `genkit start` must be OUTSIDE watchmedo (starts once and stays running) + * watchmedo only restarts the Python script, NOT the genkit server + * Use `uv tool run --from watchdog watchmedo` (not `uv run watchmedo`) + * Watch `../../packages` and `../../plugins` to reload on core library changes + * Use `-p '*.py;*.prompt;*.json'` pattern to watch relevant file types + + **Wrong** (causes continuous restart loop): + + ```bash + uv run watchmedo auto-restart ... -- uv run genkit start -- python src/main.py + ``` + +* Please keep the `README.md` file for each sample up to date with the `run.sh` + script. + +* In the samples, explain the whys, hows, and whats of the sample in the module + docstring so the user learns more about the feature being demonstrated. + Also explain how to test the sample. + +* Prompt for API keys and other configuration required for the sample + in Python. + +* When creating shell scripts using bash, please use `#!/usr/bin/env bash` as + the shebang line and `set -euo pipefail`. + +* Avoid mentioning sample specific stuff in core framework or plugin code. + +* Always check for missing dependencies in pyproject.toml for each sample + and add them if we're using them. + +* When working on model provider plugins such as Google Genai or Anthropic, + ensure that model-spec.md is followed. + +* Update the roadmap.md file as and when features are implemented. + +* When a plugin such as a model provider is updated or changes, please also + update relevant documentation and samples. **This is mandatory — every plugin + change MUST include a sample audit:** + * Check if any sample under `py/samples/` uses the updated plugin. + * If new models or features were added, add demo flows to the appropriate + sample (e.g., `media-models-demo` for new media models, `compat-oai-hello` + for OpenAI models). + * Update `README.md` files in affected samples. + * Update the conformance test specs under `py/tests/conformance/` if model + capabilities changed. + +* Try to make running the sample flows a one-click operation by always defining + default input values. + +* **IMPORTANT**: For default values to appear in the Dev UI input fields, use + a `pydantic.BaseModel` for flow input (preferred) or `Annotated` with + `pydantic.Field`: + + **Preferred** (BaseModel - defaults always show in Dev UI): + + ```python + from pydantic import BaseModel, Field + + class MyFlowInput(BaseModel): + prompt: str = Field(default='Hello world', description='User prompt') + + @ai.flow() + async def my_flow(input: MyFlowInput) -> str: + return await ai.generate(prompt=input.prompt) + ``` + + **Alternative** (Annotated - may work for simple types): + + ```python + from typing import Annotated + from pydantic import Field + + @ai.flow() + async def my_flow( + prompt: Annotated[str, Field(default='Hello world')] = 'Hello world', + ) -> str: + ... + ``` + + **Wrong** (defaults won't show in Dev UI): + + ```python + @ai.flow() + async def my_flow(prompt: str = 'Hello world') -> str: + ... 
+ ``` + +* **Sample Media URLs**: When samples need to reference an image URL (e.g., for + multimodal/vision demonstrations), use this public domain image from Wikimedia: + + ``` + https://upload.wikimedia.org/wikipedia/commons/1/13/Cute_kitten.jpg + ``` + + This ensures: + + * Consistent testing across all samples + * No licensing concerns (public domain) + * Reliable availability (Wikimedia infrastructure) + * Known working URL that has been tested with various providers + +* **Rich Tracebacks**: Use `rich` for beautiful, Rust-like colored exception + messages in samples. Add to imports and call after all imports: + + ```python + from rich.traceback import install as install_rich_traceback + + # After all imports, before any code: + install_rich_traceback(show_locals=True, width=120, extra_lines=3) + ``` + + Add `"rich>=13.0.0"` to the sample's `pyproject.toml` dependencies. + +* **Sample Entry Points**: All samples MUST use `ai.run_main()` to start + the Genkit server and enable the DevUI. This is the only supported way + to run samples: + + ```python + import asyncio + + async def main(): + # ... + await asyncio.Event().wait() + + # At the bottom of main.py + if __name__ == '__main__': + + ai.run_main(main()) + ``` + + This pattern ensures: + + * The DevUI starts at http://localhost:4000 + * Hot reloading works correctly with watchmedo + * Flows are properly registered with the reflection API + +### Plugin Development + +When developing Genkit plugins, follow these additional guidelines: + +* **Environment Variable Naming**: Use the **provider's official environment + variable names** wherever they exist. This makes the plugin feel native + to users already familiar with the provider's tooling. + + **CRITICAL**: Before implementing any plugin, research the provider's official + documentation to find their standard environment variable names. 
Using the + exact same names ensures: + + * Users can reuse existing credentials without reconfiguration + * Documentation and tutorials from the provider work seamlessly + * The plugin integrates naturally with the provider's ecosystem + + **Official Environment Variables by Provider**: + + | Provider | Official Env Vars | Source Documentation | + |----------|-------------------|---------------------| + | **AWS** | `AWS_REGION`, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `AWS_DEFAULT_REGION` | [AWS SDK Environment Variables](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html) | + | **Google Cloud** | `GOOGLE_CLOUD_PROJECT`, `GOOGLE_APPLICATION_CREDENTIALS`, `GCLOUD_PROJECT` | [GCP Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) | + | **Azure** | `APPLICATIONINSIGHTS_CONNECTION_STRING`, `AZURE_TENANT_ID`, `AZURE_CLIENT_ID` | [Azure Monitor OpenTelemetry](https://learn.microsoft.com/azure/azure-monitor/app/opentelemetry-configuration) | + | **OpenAI** | `OPENAI_API_KEY`, `OPENAI_ORG_ID` | [OpenAI API Reference](https://platform.openai.com/docs/api-reference/authentication) | + | **Anthropic** | `ANTHROPIC_API_KEY` | [Anthropic API Reference](https://docs.anthropic.com/en/api/getting-started) | + | **Cloudflare** | `CLOUDFLARE_ACCOUNT_ID`, `CLOUDFLARE_API_TOKEN` | [Cloudflare API Tokens](https://developers.cloudflare.com/fundamentals/api/get-started/create-token/) | + | **Sentry** | `SENTRY_DSN`, `SENTRY_ENVIRONMENT`, `SENTRY_RELEASE` | [Sentry Configuration Options](https://docs.sentry.io/platforms/python/configuration/options/) | + | **Honeycomb** | `HONEYCOMB_API_KEY`, `HONEYCOMB_DATASET`, `HONEYCOMB_API_ENDPOINT` | [Honeycomb API Keys](https://docs.honeycomb.io/configure/environments/manage-api-keys/) | + | **Datadog** | `DD_API_KEY`, `DD_SITE`, `DD_APP_KEY` | [Datadog Agent Environment Variables](https://docs.datadoghq.com/agent/guide/environment-variables/) | + | **Axiom** | `AXIOM_TOKEN`, `AXIOM_DATASET`, `AXIOM_ORG_ID` | [Axiom API Tokens](https://axiom.co/docs/reference/tokens) | + | **Grafana Cloud** | `GRAFANA_OTLP_ENDPOINT`, `GRAFANA_USER_ID`, `GRAFANA_API_KEY`\* | [Grafana Cloud OTLP](https://grafana.com/docs/grafana-cloud/monitor-applications/application-observability/setup/collector/opentelemetry-collector/) | + | **OpenTelemetry** | `OTEL_EXPORTER_OTLP_ENDPOINT`, `OTEL_EXPORTER_OTLP_HEADERS`, `OTEL_SERVICE_NAME` | [OTel SDK Environment Variables](https://opentelemetry.io/docs/specs/otel/configuration/sdk-environment-variables/) | + | **Mistral AI** | `MISTRAL_API_KEY` | [Mistral AI Clients](https://docs.mistral.ai/getting-started/clients/) | + | **Hugging Face** | `HF_TOKEN`, `HF_HOME` | [Hugging Face Hub Documentation](https://huggingface.co/docs/huggingface_hub/en/package_reference/environment_variables) | + | **xAI** | `XAI_API_KEY` | [xAI API Documentation](https://docs.x.ai/api) | + | **DeepSeek** | `DEEPSEEK_API_KEY` | [DeepSeek API Documentation](https://api-docs.deepseek.com/) | + | **OpenRouter** | `OPENROUTER_API_KEY` | [OpenRouter API Keys](https://openrouter.ai/docs/api-keys) | + + \*Grafana Cloud uses standard OTel env vars with Basic auth. The `GRAFANA_*` vars are + Genkit-specific for convenience. The plugin encodes `GRAFANA_USER_ID:GRAFANA_API_KEY` + as Base64 for the `Authorization: Basic` header. + + **When to Create Custom Environment Variables**: + + Only create custom env vars when the provider doesn't have an official + standard for that specific configuration. 
When you must create custom vars:
+
+  1. Use a consistent prefix (e.g., `GENKIT_`, `CF_` for Cloudflare-specific)
+  2. Document clearly that this is a Genkit-specific variable
+  3. Follow the naming pattern: `PREFIX_RESOURCE_ATTRIBUTE`
+     * Example: `CF_OTLP_ENDPOINT` (Cloudflare OTLP endpoint, not standard CF var)
+
+  **Verification Checklist**:
+
+  * \[ ] Searched provider's official documentation for env var names
+  * \[ ] Verified env var names match exactly (case-sensitive)
+  * \[ ] Documented the source URL for each env var in code comments
+  * \[ ] Tested that existing provider credentials work without changes
+
+* **Model Configuration Parameters**: Support **exhaustive model configuration**
+  so users can access all provider-specific features through the DevUI:
+
+  1. **Research provider documentation**: Before implementing a model plugin,
+     thoroughly review the provider's API documentation to enumerate ALL
+     available parameters.
+
+  2. **Support all generation parameters**: Include every parameter the model
+     supports, not just common ones like temperature and max\_tokens:
+
+     ```python
+     # Good: Exhaustive model config (Anthropic example)
+     class AnthropicModelConfig(BaseModel):
+         temperature: float | None = None
+         max_tokens: int | None = None
+         top_p: float | None = None
+         top_k: int | None = None
+         stop_sequences: list[str] | None = None
+         # Provider-specific parameters
+         thinking: ThinkingConfig | None = None  # Extended thinking
+         system: str | None = None  # System prompt override
+         metadata: dict[str, Any] | None = None  # Request metadata
+
+     # Bad: Only basic parameters
+     class AnthropicModelConfig(BaseModel):
+         temperature: float | None = None
+         max_tokens: int | None = None
+     ```
+
+  3. **Document provider-specific features**: Add docstrings explaining
+     provider-specific parameters that may not be self-explanatory:
+
+     ```python
+     class BedrockModelConfig(BaseModel):
+         """AWS Bedrock model configuration.
+
+         Attributes:
+             guardrailIdentifier: ID of a Bedrock Guardrail to apply.
+             guardrailVersion: Version of the guardrail (default: "DRAFT").
+             performanceConfig: Controls latency optimization settings.
+         """
+         guardrailIdentifier: str | None = None
+         guardrailVersion: str | None = None
+         performanceConfig: PerformanceConfiguration | None = None
+     ```
+
+  4. **Maintain a model capability registry**: For plugins with multiple models,
+     track which features each model supports:
+
+     ```python
+     SUPPORTED_MODELS: dict[str, ModelInfo] = {
+         'claude-3-5-sonnet': ModelInfo(
+             supports=Supports(
+                 multiturn=True,
+                 tools=True,
+                 media=True,
+                 systemRole=True,
+                 output=['text', 'json'],
+             ),
+             max_output_tokens=8192,
+         ),
+     }
+     ```
+
+* **Telemetry Plugin Conventions**: For telemetry/observability plugins:
+
+  1. **Entry point function naming**: Use `add_<provider>_telemetry()`:
+     * `add_aws_telemetry()`
+     * `add_azure_telemetry()`
+     * `add_cf_telemetry()`
+
+  2. **PII redaction default**: Always default to redacting model inputs/outputs:
+     ```python
+     def add_azure_telemetry(
+         log_input_and_output: bool = False,  # Safe default
+     ) -> None:
+     ```
+
+  3. **Environment resolution order**: Check parameters first, then env vars:
+     ```python
+     def _resolve_connection_string(conn_str: str | None = None) -> str | None:
+         if conn_str:
+             return conn_str
+         return os.environ.get('APPLICATIONINSIGHTS_CONNECTION_STRING')
+     ```
+
+### Avoiding Hardcoding
+
+Avoid hardcoding region-specific values, URLs, or other configuration that varies by
+deployment environment. 
This makes the code more portable and user-friendly globally. + +* **Environment Variables First**: Always check environment variables before falling back + to defaults. Prefer raising clear errors over silently using defaults that may not work + for all users. + + ```python + # Good: Clear error if not configured + region = os.environ.get('AWS_REGION') or os.environ.get('AWS_DEFAULT_REGION') + if region is None: + raise ValueError('AWS region is required. Set AWS_REGION environment variable.') + + # Bad: Silent default that only works in US + region = os.environ.get('AWS_REGION', 'us-east-1') + ``` + +* **Named Constants**: Extract hardcoded values into named constants at module level. + This makes them discoverable and documents their purpose. + + ```python + # Good: Named constant with clear purpose + DEFAULT_OLLAMA_SERVER_URL = 'http://127.0.0.1:11434' + + class OllamaPlugin: + def __init__(self, server_url: str | None = None): + self.server_url = server_url or DEFAULT_OLLAMA_SERVER_URL + + # Bad: Inline hardcoded value + class OllamaPlugin: + def __init__(self, server_url: str = 'http://127.0.0.1:11434'): + ... + ``` + +* **Region-Agnostic Helpers**: For cloud services with regional endpoints, provide helper + functions that auto-detect the region instead of hardcoding a specific region. + + ```python + # Good: Helper that detects region from environment + def get_inference_profile_prefix(region: str | None = None) -> str: + if region is None: + region = os.environ.get('AWS_REGION') + if region is None: + raise ValueError('Region is required.') + # Map region to prefix... + + # Bad: Hardcoded US default + def get_inference_profile_prefix(region: str = 'us-east-1') -> str: + ... + ``` + +* **Documentation Examples**: In documentation and docstrings, use placeholder values + that are clearly examples, not real values users might accidentally copy. + + ```python + # Good: Clear placeholder + endpoint='https://your-resource.openai.azure.com/' + + # Bad: Looks like it might work + endpoint='https://eastus.api.example.com/' + ``` + +* **What IS Acceptable to Hardcode**: + * Official API endpoints that don't vary (e.g., `https://api.deepseek.com`) + * Default ports for local services (e.g., `11434` for Ollama) + * AWS/cloud service names (e.g., `'bedrock-runtime'`) + * Factual values from documentation (e.g., embedding dimensions) + +### Packaging (PEP 420 Namespace Packages) + +Genkit plugins use **PEP 420 implicit namespace packages** to allow multiple packages +to contribute to the `genkit.plugins.*` namespace. This requires special care in +build configuration. + +#### Directory Structure + +``` +plugins/ +├── anthropic/ +│ ├── pyproject.toml +│ └── src/ +│ └── genkit/ # NO __init__.py (namespace) +│ └── plugins/ # NO __init__.py (namespace) +│ └── anthropic/ # HAS __init__.py (regular package) +│ ├── __init__.py +│ ├── models.py +│ └── py.typed +``` + +**CRITICAL**: The `genkit/` and `genkit/plugins/` directories must NOT have +`__init__.py` files. Only the final plugin directory (e.g., `anthropic/`) should +have `__init__.py`. + +#### Hatch Wheel Configuration + +For PEP 420 namespace packages, use `only-include` to specify exactly which +directory to package: + +```toml +[build-system] +build-backend = "hatchling.build" +requires = ["hatchling"] + +[tool.hatch.build.targets.wheel] +only-include = ["src/genkit/plugins/"] +sources = ["src"] +``` + +**Why `sources = ["src"]` is Required:** + +The `sources` key tells hatch to rewrite paths by stripping the `src/` directory prefix. 
+
+Without it, the wheel would have paths like `src/genkit/plugins/...` instead of
+`genkit/plugins/...`, which would break Python imports at runtime.
+
+| With `sources = ["src"]` | Without `sources` |
+|--------------------------|-------------------|
+| ✅ `genkit/plugins/anthropic/__init__.py` | ❌ `src/genkit/plugins/anthropic/__init__.py` |
+| `from genkit.plugins.anthropic import ...` works | Import fails |
+
+**Why `only-include` instead of `packages`:**
+
+Using `packages = ["src/genkit", "src/genkit/plugins"]` causes hatch to traverse
+both paths, including the same files twice. This creates wheels with duplicate
+entries that PyPI rejects with:
+
+```
+400 Invalid distribution file. ZIP archive not accepted:
+Duplicate filename in local headers.
+```
+
+**Configuration Examples:**
+
+| Plugin Directory | `only-include` Value |
+|------------------|---------------------|
+| `plugins/anthropic/` | `["src/genkit/plugins/anthropic"]` |
+| `plugins/google-genai/` | `["src/genkit/plugins/google_genai"]` |
+| `plugins/vertex-ai/` | `["src/genkit/plugins/vertex_ai"]` |
+| `plugins/amazon-bedrock/` | `["src/genkit/plugins/amazon_bedrock"]` |
+
+Note: Internal Python directory names use underscores (`google_genai`), while
+the plugin directory uses hyphens (`google-genai`).
+
+#### Verifying Wheel Contents
+
+Always verify wheels don't have duplicates before publishing:
+
+```bash
+# Build the package
+uv build --package genkit-plugin-<plugin-name>
+
+# Check for duplicates (should show each file only once)
+unzip -l dist/genkit_plugin_*-py3-none-any.whl
+
+# Look for duplicate warnings during build
+# BAD: "UserWarning: Duplicate name: 'genkit/plugins/...'"
+# GOOD: No warnings, clean build
+```
+
+#### Common Build Errors
+
+| Error | Cause | Solution |
+|-------|-------|----------|
+| `Duplicate filename in local headers` | Files included twice in wheel | Use `only-include` instead of `packages` |
+| Empty wheel (no Python files) | Wrong `only-include` path | Verify path matches actual directory structure |
+| `ModuleNotFoundError` at runtime | Missing `__init__.py` in plugin dir | Add `__init__.py` to the final plugin directory |
+
+### Formatting
+
+* **Tool**: Format code using `ruff` (or `bin/fmt`).
+* **Line Length**: Max 120 characters.
+* **Strings**: Wrap long lines and strings appropriately.
+* **Config**: Refer to `.editorconfig` or `pyproject.toml` for rules.
+
+### Testing
+
+* **Framework**: Use `pytest` and `unittest`.
+* **Scope**: Write comprehensive unit tests.
+* **Documentation**: Add docstrings to test modules/functions explaining their scope.
+* **Execution**: Run via `uv run pytest .`.
+* **Porting**: Maintain 1:1 logic parity accurately if porting tests.
+  Do not invent behavior.
+* **Fixes**: Fix underlying code issues rather than special-casing tests.
+* **Test File Naming**: Test files **MUST** have unique names across the entire
+  repository to avoid pytest module collection conflicts. Use the format
+  `{plugin_name}_{component}_test.py`:
+
+  | Plugin | Test File | Status |
+  |--------|-----------|--------|
+  | `cloud-sql-pg` | `cloud_sql_pg_engine_test.py` | ✅ Correct |
+  | `cloud-sql-pg` | `engine_test.py` | ❌ Wrong (conflicts with other plugins) |
+  | `chroma` | `chroma_retriever_test.py` | ✅ Correct |
+  | `checks` | `checks_evaluator_test.py` | ✅ Correct |
+
+  **Requirements:**
+
+  * Prefix test files with the plugin/package name, replacing any hyphens (`-`) with underscores (`_`).
+ * Use the `foo_test.py` suffix format (not `test_foo.py`) + * Do **NOT** add `__init__.py` to `tests/` directories (causes module conflicts) + * Place tests in `plugins/{name}/tests/` or `packages/{name}/tests/` + +### Test Coverage + +**All new code must have test coverage.** Tests are essential for maintaining code +quality, preventing regressions, and enabling confident refactoring. + +#### Coverage Requirements + +| Component Type | Minimum Coverage | Notes | +|----------------|------------------|-------| +| **Core packages** | 80%+ | Critical path code | +| **Plugins** | 70%+ | Model/embedder/telemetry plugins | +| **Utilities** | 90%+ | Helper functions, converters | +| **New features** | 100% of new lines | All new code paths tested | + +#### Running Coverage + +```bash +# Run tests with coverage report +cd py +uv run pytest --cov=packages --cov=plugins --cov-report=term-missing + +# Generate HTML coverage report +uv run pytest --cov=packages --cov=plugins --cov-report=html +# Open htmlcov/index.html in browser + +# Check coverage for a specific plugin +uv run pytest plugins/mistral/tests/ --cov=plugins/mistral/src --cov-report=term-missing +``` + +#### What to Test + +1. **Happy Path**: Normal operation with valid inputs +2. **Edge Cases**: Empty inputs, boundary values, None handling +3. **Error Handling**: Invalid inputs, API errors, network failures +4. **Type Conversions**: Message/tool/response format conversions +5. **Streaming**: Both streaming and non-streaming code paths +6. **Configuration**: Different config options and their effects + +#### Test Structure for Plugins + +Each plugin should have tests covering: + +``` +plugins/{name}/tests/ +├── {name}_plugin_test.py # Plugin initialization, registration +├── {name}_models_test.py # Model generate(), streaming +├── {name}_embedders_test.py # Embedder functionality (if applicable) +└── conftest.py # Shared fixtures (optional) +``` + +#### Mocking External Services + +* **Always mock external API calls** - Tests must not make real network requests +* Use `unittest.mock.patch` or `pytest-mock` for mocking +* Mock at the HTTP client level or SDK client level +* Provide realistic mock responses based on actual API documentation + +```python +from unittest.mock import AsyncMock, patch + +@patch('genkit.plugins.mistral.models.Mistral') +async def test_generate(mock_client_class): + mock_client = AsyncMock() + mock_client_class.return_value = mock_client + mock_client.chat.complete_async.return_value = mock_response + # ... test code +``` + +#### Coverage Exceptions + +Some code may be excluded from coverage requirements: + +* `# pragma: no cover` - Use sparingly for truly untestable code +* Type stubs and protocol definitions +* Abstract base class methods (tested via implementations) +* Debug/development-only code paths + +### Logging + +* **Library**: Use `structlog` exclusively for all logging. **Do NOT use the + standard library `logging` module** (`import logging`) in any new code. + Existing code using stdlib `logging` should be migrated to structlog when + touched. + +* **Helper**: Use `genkit.core.logging.get_logger(__name__)` to obtain a + properly typed structlog logger. 
This is a thin wrapper around + `structlog.get_logger()` that returns a typed `Logger` instance: + + ```python + from genkit.core.logging import get_logger + + logger = get_logger(__name__) + + # Sync logging + logger.info('Model registered', model_name=name, plugin='anthropic') + logger.debug('Request payload', payload=payload) + logger.warning('Deprecated config', key=key) + + # Async logging (inside coroutines) + await logger.ainfo('Generation complete', tokens=usage.total_tokens) + await logger.adebug('Streaming chunk', index=i) + ``` + +* **Async**: Use `await logger.ainfo(...)`, `await logger.adebug(...)`, etc. + within coroutines. Never use the sync variants (`logger.info(...)`) inside + `async def` functions — structlog's async methods ensure proper event loop + integration. + +* **Format**: Use structured key-value pairs, not f-strings: + + ```python + # WRONG - f-string logging + logger.info(f'Processing model {model_name} with {num_tokens} tokens') + + # CORRECT - structured key-value logging + logger.info('Processing model', model_name=model_name, num_tokens=num_tokens) + ``` + +* **Known Violations**: The following plugins still use stdlib `logging` and + should be migrated to `genkit.core.logging.get_logger()` when next modified: + + | Plugin | Files | + |--------|-------| + | `anthropic` | `models.py`, `utils.py` | + | `checks` | `plugin.py`, `middleware.py`, `guardrails.py`, `evaluation.py` | + | `deepseek` | `models.py`, tests | + | `google-cloud` | `telemetry/tracing.py` | + +### Licensing + +Include the Apache 2.0 license header at the top of each file (update year as needed): + +```python +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +``` + +## Dependency Management + +When updating dependencies for the Python SDK, ensure consistency across both files: + +1. **`py/pyproject.toml`** - Workspace-level dependencies (pinned versions with `==`) +2. **`py/packages/genkit/pyproject.toml`** - Package-level dependencies (minimum versions with `>=`) + +Both files must be updated together to avoid inconsistencies where developers test +against one version but users of the published `genkit` package might install a +different version. + +After updating dependencies, regenerate the lock file: + +```bash +# Run from the repository root +cd py && uv lock +``` + +## Git Commit Message Guidelines + +This project uses [Conventional Commits](https://www.conventionalcommits.org/) for +all commit messages. This enables automated changelog generation and semantic +versioning via release-please. 
+
+### Format
+
+```
+<type>(<scope>): <description>
+
+[optional body]
+
+[optional footer(s)]
+```
+
+### Commit Types
+
+| Type | Description | Example |
+|------|-------------|---------|
+| `feat` | New feature | `feat(py/plugins/aws): add X-Ray telemetry` |
+| `fix` | Bug fix | `fix(py): resolve import order issue` |
+| `docs` | Documentation only | `docs(py): update plugin README` |
+| `style` | Code style (formatting) | `style(py): run ruff format` |
+| `refactor` | Code refactoring | `refactor(py): extract helper function` |
+| `perf` | Performance improvement | `perf(py): optimize model streaming` |
+| `test` | Adding/updating tests | `test(py): add bedrock model tests` |
+| `chore` | Maintenance tasks | `chore(py): update dependencies` |
+
+### Scopes
+
+For Python code, use these scopes:
+
+| Scope | When to use |
+|-------|-------------|
+| `py` | General Python SDK changes |
+| `py/plugins/<name>` | Specific plugin changes |
+| `py/samples` | Sample application changes |
+| `py/core` | Core framework changes |
+
+### Breaking Changes
+
+**IMPORTANT**: Use `!` after the type/scope to indicate breaking changes:
+
+```
+feat(py)!: rename generate() to invoke()
+
+BREAKING CHANGE: The `generate()` method has been renamed to `invoke()`.
+Existing code using `ai.generate()` must be updated to `ai.invoke()`.
+```
+
+Or in the footer:
+
+```
+refactor(py): restructure plugin API
+
+BREAKING CHANGE: Plugin initialization now requires explicit configuration.
+```
+
+### Guidelines
+
+* Draft a plain-text commit message after you're done with changes.
+* Do not include absolute file paths as links in commit messages.
+* Since git treats lines starting with `#` as comments, avoid Markdown headings
+  and keep the message in a simpler plain-text format.
+* Add a rationale paragraph explaining the **why** and the **what** before
+  listing all the changes.
+* For scope, refer to release-please configuration if available.
+* Keep the subject line short and simple.
+
+## Pull Request Description Guidelines
+
+All Python PRs must include comprehensive descriptions following these standards.
+Well-documented PRs enable faster reviews and better knowledge transfer.
+
+### Required Sections
+
+Every PR description MUST include:
+
+1. **Summary** - One-paragraph overview of what the PR does and why
+2. **Changes** - Bullet list of specific modifications
+3. **Test Plan** - How the changes were verified
+
+### Prefer Tables for Information
+
+Use markdown tables whenever presenting structured information such as:
+
+* Configuration options and their defaults
+* API parameter lists
+* Comparison of before/after behavior
+* File changes summary
+
+Tables are easier to scan and review than prose or bullet lists.
+
+### Dependency Update PRs
+
+When updating `py/uv.lock` (e.g., via `uv sync` or `uv lock --upgrade`), the PR description
+MUST include a table of all upgraded packages:
+
+| Package | Old Version | New Version |
+|---------|-------------|-------------|
+| anthropic | 0.77.0 | 0.78.0 |
+| ruff | 0.14.14 | 0.15.0 |
+| ... | ... | ... |
+
+To generate this table, use:
+
+```bash
+git diff HEAD~1 HEAD -- py/uv.lock | grep -B10 "^-version = " | grep "^.name = " | sed 's/^.name = "\(.*\)"/\1/' | sort -u
+```
+
+Then cross-reference with the version changes. This helps reviewers quickly assess the
+scope and risk of dependency updates.
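+
+If you want to automate the cross-referencing step, the one-liner above can be
+expanded into a small script. The sketch below is illustrative only, not a
+checked-in tool: it assumes the standard `uv.lock` layout where each `[[package]]`
+entry has adjacent `name = "..."` and `version = "..."` lines, and the `HEAD~1`
+range and `py/uv.lock` path are just examples.
+
+```python
+"""Rough sketch: emit the old/new version table from a uv.lock diff."""
+
+import re
+import subprocess
+
+diff = subprocess.run(
+    ['git', 'diff', 'HEAD~1', 'HEAD', '--', 'py/uv.lock'],
+    capture_output=True,
+    text=True,
+    check=True,
+).stdout
+
+current_name: str | None = None
+changes: dict[str, list[str | None]] = {}  # package -> [old, new]
+
+for line in diff.splitlines():
+    # Strip the leading diff marker ('+', '-', or ' ') if present.
+    body = line[1:] if line[:1] in '+- ' else line
+    if match := re.match(r'\s*name = "(.+)"', body):
+        current_name = match.group(1)
+    elif current_name and (match := re.match(r'\s*version = "(.+)"', body)):
+        # '-' lines carry the old version, '+' lines the new one.
+        slot = 0 if line.startswith('-') else 1 if line.startswith('+') else None
+        if slot is not None:
+            changes.setdefault(current_name, [None, None])[slot] = match.group(1)
+
+print('| Package | Old Version | New Version |')
+print('|---------|-------------|-------------|')
+for package, (old, new) in sorted(changes.items()):
+    if old and new and old != new:
+        print(f'| {package} | {old} | {new} |')
+```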
+ +### Architecture Diagrams + +For PRs that add new plugins or modify system architecture, include ASCII diagrams: + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ PLUGIN ARCHITECTURE │ +│ │ +│ Your Genkit App │ +│ │ │ +│ │ (1) Initialize Plugin │ +│ ▼ │ +│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ PluginClass │────▶│ Provider │────▶│ SpanProcessor │ │ +│ │ (Manager) │ │ (Config) │ │ (Export) │ │ +│ └─────────────────┘ └─────────────────┘ └────────┬────────┘ │ +│ │ │ +│ (2) Process data │ │ +│ ▼ │ +│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Logger │────▶│ Trace ID │────▶│ HTTP/OTLP │ │ +│ │ (Structured) │ │ Injection │ │ (Auth) │ │ +│ └─────────────────┘ └─────────────────┘ └────────┬────────┘ │ +│ │ │ +│ (3) Export to backend │ │ +│ ▼ │ +│ ┌─────────────────┐ │ +│ │ Cloud Service │ │ +│ │ (X-Ray, etc.) │ │ +│ └─────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +### Data Flow Diagrams + +For PRs involving data processing or multi-step operations, include ASCII data +flow diagrams. See the "Docstrings > Data Flow Diagram" section above for the +standard format and examples. + +### PR Template Examples + +**Feature PR (New Plugin)**: + +```markdown +## Summary + +This PR introduces the **AWS Telemetry Plugin** (`py/plugins/aws/`) for exporting +Genkit telemetry to AWS X-Ray and CloudWatch. + +### Plugin Features + +- **AWS X-Ray Integration**: Distributed tracing with automatic trace ID generation +- **CloudWatch Logs**: Structured logging with X-Ray trace correlation +- **SigV4 Authentication**: Secure OTLP export using AWS credentials + +[Architecture diagram here] + +## Changes + +### New Files +- `py/plugins/aws/` - Complete AWS telemetry plugin with tests +- `py/samples/aws-hello/` - Sample demonstrating AWS telemetry + +### Updated Files +- `py/GEMINI.md` - Documentation requirements +- All plugin `__init__.py` files - Added ELI5 concepts tables + +## Test Plan + +- [x] All existing tests pass (`bin/lint`) +- [x] New plugin tests pass (`py/plugins/aws/tests/aws_telemetry_test.py`) +- [ ] Manual testing with AWS credentials +``` + +**Fix/Refactor PR**: + +```markdown +## Summary + +Clean up in-function imports to follow PEP 8 conventions. Moves all imports +to the top of files for better code quality and tooling support. + +## Rationale + +Python's PEP 8 style guide recommends placing all imports at the top of the +module. In-function imports can: +- Make dependencies harder to discover +- Cause subtle performance issues from repeated import lookups +- Reduce code readability and tooling support + +## Changes + +### Plugins +- **amazon-bedrock**: Cleaned up `plugin.py` imports +- **google-cloud**: Cleaned up `telemetry/metrics.py` imports + +### Samples +Moved in-function imports to top of file: +- **anthropic-hello**: `random`, `genkit.types` imports +- **amazon-bedrock-hello**: `asyncio`, `random`, `genkit.types` imports +- [additional samples...] + +## Test Plan + +- [x] `bin/lint` passes locally +- [x] No functional behavior changes (import reorganization only) +``` + +### Documentation PR + +```markdown +## Summary + +This PR adds comprehensive planning documentation for [Topic] and updates +plugin categorization guides. 
+ +## Changes + +### Updated Documentation +- **py/plugins/README.md** - Updated categorization guide + +## Test Plan + +- [x] Documentation integrity check +- [x] All relative links verified +- [x] Markdown linting passes +``` + +### Checklist Requirements + +Every PR should address: + +* \[ ] **Code Quality**: `bin/lint` passes with zero errors +* \[ ] **Type Safety**: All type checkers pass (ty, pyrefly, pyright) +* \[ ] **Tests**: Unit tests added/updated as needed +* \[ ] **Documentation**: Docstrings and README files updated +* \[ ] **Samples**: Demo code updated if applicable + +### Automated Code Review Workflow + +After addressing all CI checks and reviewer comments, trigger Gemini code review +by posting a single-line comment on the PR: + +``` +/gemini review +``` + +**Iterative Review Process:** + +1. Address all existing review comments and fix CI failures +2. Push changes to the PR branch +3. Post `/gemini review` comment to trigger automated review +4. Wait for Gemini's review comments (typically 1-3 minutes) +5. Address any new comments raised by Gemini +6. **Resolve addressed comments** - Reply to each comment explaining the fix, then resolve + the conversation (unless the discussion is open-ended and requires more thought) +7. Repeat steps 2-6 up to 3 times until no new comments are received +8. Once clean, request human reviewer approval + +**Best Practices:** + +| Practice | Description | +|----------|-------------| +| Fix before review | Always fix known issues before requesting review | +| Batch fixes | Combine multiple fixes into one push to reduce review cycles | +| Address all comments | Don't leave unresolved comments from previous reviews | +| Resolve conversations | Reply with fix explanation and resolve unless discussion is ongoing | +| Document decisions | If intentionally not addressing a comment, explain why | + +### Splitting Large Branches into Multiple PRs + +When splitting a feature branch with multiple commits into independent PRs: + +1. **Squash before splitting** - Use `git merge --squash` to consolidate commits into + a single commit before creating new branches. This avoids problems with: + - Commit ordering issues (commits appearing in wrong order across PRs) + - Interdependent changes that span multiple commits + - Cherry-pick conflicts when commits depend on earlier changes + +2. **Create independent branches** - For each logical unit of work: + ```bash + # From main, create a new branch + git checkout main && git checkout -b feature/part-1 + + # Selectively checkout files from the squashed branch + git checkout squashed-branch -- path/to/files + + # Commit and push + git commit -m "feat: description of part 1" + git push -u origin HEAD + ``` + +3. 
**Order PRs by dependency** - If PRs have dependencies: + - Create the base PR first (e.g., shared utilities) + - Stack dependent PRs on top, or wait for the base to merge + - Document dependencies in PR descriptions + +**Common Gemini Review Feedback:** + +| Category | Examples | How to Address | +|----------|----------|----------------| +| Type safety | Missing return types, `Any` usage | Add explicit type annotations | +| Error handling | Unhandled exceptions, missing try/except | Add proper error handling with specific exceptions | +| Code duplication | Similar logic in multiple places | Extract into helper functions | +| Documentation | Missing docstrings, unclear comments | Add comprehensive docstrings | +| Test coverage | Missing edge cases, untested paths | Add tests for identified gaps | + +## Plugin Verification Against Provider Documentation + +When implementing or reviewing plugins, always cross-check against the provider's +official documentation. This ensures accuracy and prevents integration issues. + +### Verification Checklist + +For each plugin, verify: + +1. **Environment Variables**: Match provider's official names exactly (case-sensitive) +2. **Model Names/IDs**: Use exact model identifiers from provider's API docs +3. **API Parameters**: Parameter names, types, and valid ranges match docs +4. **Authentication**: Use provider's recommended auth mechanism and headers +5. **Endpoints**: URLs match provider's documented endpoints + +### Model Catalog Accuracy (Mandatory) + +**CRITICAL: Never invent model names or IDs.** Every model ID in a plugin's catalog +MUST be verified against the provider's official API documentation before being added. + +#### Verification Steps + +1. **Check the provider's official model page** (see Provider Documentation Links below) +2. **Confirm the exact API model ID string** — not the marketing name, but the string + you pass to the API (e.g., `claude-opus-4-6-20260205`, not "Claude Opus 4.6") +3. **Verify the model is GA (Generally Available)** — do not add models that are only + announced, in private preview, or behind waitlists +4. **Confirm capabilities** — check if the model supports vision, tools, system role, + structured output, etc. from the official docs +5. 
**Use date-suffixed IDs as versions** — store the alias (e.g., `claude-opus-4-6`) + as the key and the dated ID (e.g., `claude-opus-4-6-20260205`) in `versions=[]` + +#### Provider API Model Pages + +| Provider | Where to verify model IDs | +|----------|---------------------------| +| Anthropic | https://docs.anthropic.com/en/docs/about-claude/models | +| OpenAI | https://platform.openai.com/docs/models | +| xAI | https://docs.x.ai/docs/models | +| Mistral | https://docs.mistral.ai/getting-started/models/models_overview/ | +| DeepSeek | https://api-docs.deepseek.com/quick_start/pricing | +| HuggingFace | https://huggingface.co/docs/api-inference/ | +| AWS Bedrock | https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html | +| Azure/Foundry | https://ai.azure.com/catalog/models | +| Cloudflare | https://developers.cloudflare.com/workers-ai/models/ | + +### Common Issues Found During Verification + +| Issue Type | Example | How to Fix | +|------------|---------|------------| +| **Wrong model prefix** | `@cf/mistral/...` vs `@hf/mistral/...` | Check provider's model catalog for exact prefixes | +| **Outdated model names** | Using deprecated model IDs | Review provider's current model list | +| **Custom env var names** | `MY_API_KEY` vs `PROVIDER_API_KEY` | Use provider's official env var names | +| **Incorrect auth headers** | Wrong header name or format | Check provider's authentication docs | +| **Missing model capabilities** | Not supporting vision for multimodal models | Review model capabilities in provider docs | + +### Provider Documentation Links + +Keep these bookmarked for verification: + +| Provider | Documentation | Key Pages | +|----------|---------------|-----------| +| **Anthropic** | [docs.anthropic.com](https://docs.anthropic.com/) | [Models](https://docs.anthropic.com/en/docs/about-claude/models), [API Reference](https://docs.anthropic.com/en/api/) | +| **Google AI** | [ai.google.dev](https://ai.google.dev/) | [Gemini Models](https://ai.google.dev/gemini-api/docs/models/gemini), [API Reference](https://ai.google.dev/api/) | +| **AWS Bedrock** | [docs.aws.amazon.com/bedrock](https://docs.aws.amazon.com/bedrock/) | [Model IDs](https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html), [Converse API](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html) | +| **Azure OpenAI** | [learn.microsoft.com](https://learn.microsoft.com/azure/ai-services/openai/) | [Models](https://learn.microsoft.com/azure/ai-services/openai/concepts/models), [API Reference](https://learn.microsoft.com/azure/ai-services/openai/reference) | +| **xAI** | [docs.x.ai](https://docs.x.ai/) | [Models](https://docs.x.ai/docs/models), [API Reference](https://docs.x.ai/api) | +| **DeepSeek** | [api-docs.deepseek.com](https://api-docs.deepseek.com/) | [Models](https://api-docs.deepseek.com/quick_start/pricing), [API Reference](https://api-docs.deepseek.com/api/create-chat-completion) | +| **Cloudflare AI** | [developers.cloudflare.com/workers-ai](https://developers.cloudflare.com/workers-ai/) | [Models](https://developers.cloudflare.com/workers-ai/models/), [API Reference](https://developers.cloudflare.com/workers-ai/configuration/open-ai-compatibility/) | +| **Ollama** | [github.com/ollama/ollama](https://github.com/ollama/ollama) | [API Docs](https://github.com/ollama/ollama/blob/main/docs/api.md), [Models](https://ollama.com/library) | +| **Sentry** | [docs.sentry.io](https://docs.sentry.io/) | [OTLP](https://docs.sentry.io/concepts/otlp/), 
[Configuration](https://docs.sentry.io/platforms/python/configuration/options/) | +| **Honeycomb** | [docs.honeycomb.io](https://docs.honeycomb.io/) | [API Keys](https://docs.honeycomb.io/configure/environments/manage-api-keys/), [OpenTelemetry](https://docs.honeycomb.io/send-data/opentelemetry/) | +| **Datadog** | [docs.datadoghq.com](https://docs.datadoghq.com/) | [OTLP Ingest](https://docs.datadoghq.com/opentelemetry/setup/otlp_ingest/), [API Keys](https://docs.datadoghq.com/account_management/api-app-keys/) | +| **Grafana Cloud** | [grafana.com/docs](https://grafana.com/docs/grafana-cloud/) | [OTLP Setup](https://grafana.com/docs/grafana-cloud/send-data/otlp/send-data-otlp/), [Authentication](https://grafana.com/docs/grafana-cloud/account-management/authentication-and-permissions/) | +| **Axiom** | [axiom.co/docs](https://axiom.co/docs/) | [OpenTelemetry](https://axiom.co/docs/send-data/opentelemetry), [API Tokens](https://axiom.co/docs/reference/tokens) | +| **Mistral AI** | [docs.mistral.ai](https://docs.mistral.ai/) | [Models](https://docs.mistral.ai/getting-started/models/models_overview/), [API Reference](https://docs.mistral.ai/api/) | +| **Hugging Face** | [huggingface.co/docs](https://huggingface.co/docs/api-inference/) | [Inference API](https://huggingface.co/docs/api-inference/), [Inference Providers](https://huggingface.co/docs/inference-providers/) | + +### URL Verification + +**All URLs in documentation and code must be verified to work.** Broken links degrade +developer experience and erode trust in the documentation. + +#### Verification Requirements + +1. **Before Adding URLs**: Verify the URL returns HTTP 200 and shows expected content +2. **During Code Review**: Check that all new/modified URLs are accessible +3. **Periodic Audits**: Run URL checks on documentation files periodically + +#### How to Check URLs + +Extract and test URLs from the codebase: + +```bash +# Extract unique URLs from Python source and docs +cd py +grep -roh 'https://[^[:space:])\"'"'"'`>]*' plugins/ samples/ packages/ *.md \ + | sort -u | grep -v '{' | grep -v '\[' | grep -v 'example\.com' + +# Test a specific URL +curl -s -o /dev/null -w "%{http_code}" -L --max-time 10 "https://docs.mistral.ai/" +``` + +#### Common URL Issues + +| Issue | Example | Fix | +|-------|---------|-----| +| **Trailing punctuation** | `https://api.example.com.` | Remove trailing `.` | +| **Outdated paths** | `/v1/` changed to `/v2/` | Update to current path | +| **Moved documentation** | Provider reorganized docs | Find new canonical URL | +| **Regional endpoints** | `api.eu1.` vs `api.` | Use correct regional URL | + +#### URLs That Don't Need Verification + +* Placeholder URLs: `https://your-endpoint.com`, `https://example.com` +* Template URLs with variables: `https://{region}.api.com` +* Test/mock URLs in test files + +### Telemetry Plugin Authentication Patterns + +Different observability backends use different authentication mechanisms: + +| Backend | Auth Type | Header Format | Example | +|---------|-----------|---------------|---------| +| **Sentry** | Custom | `x-sentry-auth: sentry sentry_key={key}` | Parse DSN to extract key | +| **Honeycomb** | Custom | `x-honeycomb-team: {api_key}` | Direct API key | +| **Datadog** | Custom | `DD-API-KEY: {api_key}` | Direct API key | +| **Grafana Cloud** | Basic Auth | `Authorization: Basic {base64(user:key)}` | Encode user\_id:api\_key | +| **Axiom** | Bearer | `Authorization: Bearer {token}` | Direct token | +| **Azure Monitor** | Connection String | N/A (SDK handles) 
| Use official SDK | +| **Generic OTLP** | Bearer | `Authorization: Bearer {token}` | Standard OTLP | + +### Model Provider Plugin Patterns + +When implementing model provider plugins: + +1. **Use dynamic model discovery** when possible (Google GenAI, Vertex AI) +2. **Maintain a `SUPPORTED_MODELS` registry** for static model lists +3. **Document model capabilities** accurately (vision, tools, JSON mode) +4. **Support all provider-specific parameters** (reasoning\_effort, thinking, etc.) +5. **Handle model-specific restrictions** (e.g., grok-4 doesn't support frequency\_penalty) + +### Python Version Compatibility + +When using features from newer Python versions: + +1. **StrEnum** (Python 3.11+): Use conditional import with `strenum` package fallback + + ```python + import sys + + if sys.version_info >= (3, 11): + from enum import StrEnum + else: + from strenum import StrEnum + ``` + +2. **Check `requires-python`**: Ensure all `pyproject.toml` files specify `>=3.10` + to maintain compatibility with CI/CD pipelines running Python 3.10 or 3.11 + +3. **Type hints**: Use `from __future__ import annotations` for forward references + in Python 3.10 compatibility + +## Session Learning Documentation + +Document new learnings, patterns, and gotchas at the end of each development +session. Add to existing sections when possible; create new subsections only +when the topic is genuinely new. + +### Session Learnings (2026-02-01): Mistral AI and Hugging Face Plugins + +#### New Provider Environment Variables + +| Provider | Variable | Documentation | +|----------|----------|---------------| +| Mistral AI | `MISTRAL_API_KEY` | [Mistral Console](https://console.mistral.ai/) | +| Hugging Face | `HF_TOKEN` | [HF Tokens](https://huggingface.co/settings/tokens) | + +#### Mistral SDK Patterns + +* **Streaming Response**: Mistral SDK's `CompletionChunk` has `choices` directly on the chunk object, NOT `chunk.data.choices`. The streaming API returns chunks directly: + + ```python + async for chunk in stream: + if chunk.choices: # NOT chunk.data.choices + choice = chunk.choices[0] + ``` + +* **Content Types**: Mistral response content can be `str` or `list[TextChunk | ...]`. Handle both: + + ```python + if isinstance(msg_content, str): + content.append(Part(root=TextPart(text=msg_content))) + elif isinstance(msg_content, list): + for chunk in msg_content: + if isinstance(chunk, TextChunk): + content.append(Part(root=TextPart(text=chunk.text))) + ``` + +#### Hugging Face SDK Patterns + +* **InferenceClient**: Use `huggingface_hub.InferenceClient` for chat completions +* **Inference Providers**: Support 17+ providers (Cerebras, Groq, Together) via `provider` parameter +* **Model IDs**: Use full HF model IDs like `meta-llama/Llama-3.3-70B-Instruct` + +#### Type Annotation Patterns + +* **Config dictionaries**: When passing config to `ai.generate()`, explicitly type as `dict[str, object]`: + + ```python + configs: dict[str, dict[str, object]] = { + 'creative': {'temperature': 0.9, 'max_tokens': 200}, + } + config: dict[str, object] = configs.get(task, configs['creative']) + ``` + +* **StreamingCallback**: `ctx.send_chunk` is synchronous (`Callable[[object], None]`), NOT async. 
Do NOT use `await`: + + ```python + # Correct + ctx.send_chunk(GenerateResponseChunk(...)) + + # Wrong - send_chunk is not async + await ctx.send_chunk(GenerateResponseChunk(...)) + ``` + +#### Genkit Type Patterns + +* **Usage**: Use `GenerationUsage` directly, not `Usage` wrapper: + + ```python + usage = GenerationUsage( + input_tokens=response.usage.prompt_tokens, + output_tokens=response.usage.completion_tokens, + ) + ``` + +* **FinishReason**: Use `FinishReason` enum, not string literals: + + ```python + finish_reason = FinishReason.STOP # NOT 'stop' + ``` + +#### Tool Calling Implementation Patterns + +* **Mistral Tool Definition**: Use `Function` class from `mistralai.models`, not dict: + + ```python + from mistralai.models import Function, Tool + + Tool( + type='function', + function=Function( + name=tool.name, + description=tool.description or '', + parameters=parameters, + ), + ) + ``` + +* **Tool Call Arguments**: Mistral SDK may return arguments as `str` or `dict`. Handle both: + + ```python + func_args = tool_call.function.arguments + if isinstance(func_args, str): + try: + args = json.loads(func_args) + except json.JSONDecodeError: + args = func_args + elif isinstance(func_args, dict): + args = func_args + ``` + +* **Streaming Tool Calls**: Track tool calls by index during streaming. Ensure index is `int`: + + ```python + idx: int = tool_call.index if hasattr(tool_call, 'index') and tool_call.index is not None else 0 + ``` + +* **Tool Response Messages**: Use `ToolMessage` for Mistral, dict with `role: 'tool'` for HF: + + ```python + # Mistral + ToolMessage(tool_call_id=ref, name=name, content=output_str) + + # Hugging Face + {'role': 'tool', 'tool_call_id': ref, 'content': output_str} + ``` + +#### Structured Output Implementation + +* **Mistral JSON Mode**: Use `response_format` parameter with `json_schema` type: + + ```python + params['response_format'] = { + 'type': 'json_schema', + 'json_schema': { + 'name': output.schema.get('title', 'Response'), + 'schema': output.schema, + 'strict': True, + }, + } + ``` + +* **Hugging Face JSON Mode**: Use `response_format` with `type: 'json'`: + + ```python + params['response_format'] = { + 'type': 'json', + 'value': output.schema, # Optional schema + } + ``` + +#### License Check Configuration + +* **Unknown Licenses**: When `liccheck` reports a package as "UNKNOWN", verify the actual + license and add to `[tool.liccheck.authorized_packages]` in `py/pyproject.toml`: + + ```toml + [tool.liccheck.authorized_packages] + mistralai = ">=1.9.11" # Apache-2.0 "https://github.com/mistralai/client-python/blob/main/LICENSE" + ``` + +* **Ignore Patterns**: Add generated directories to `bin/check_license` and `bin/add_license`: + + ```bash + -ignore '**/.tox/**/*' \ + -ignore '**/.nox/**/*' \ + ``` + +### Session Learnings (2026-02-07): OpenTelemetry ReadableSpan Wrapper Pitfall + +**Issue:** When wrapping OpenTelemetry's `ReadableSpan` without calling +`super().__init__()`, the OTLP trace encoder crashes with `AttributeError` +on `dropped_attributes`, `dropped_events`, or `dropped_links`. + +**Root Cause:** The base `ReadableSpan` class defines these properties to access +private instance variables (`_attributes`, `_events`, `_links`) that are only +initialized by `ReadableSpan.__init__()`. If your wrapper skips `super().__init__()` +(intentionally, to avoid duplicating span state), those fields are missing. 
+ +**Fix Pattern:** Override all `dropped_*` properties to delegate to the wrapped span: + +```python +class MySpanWrapper(ReadableSpan): + def __init__(self, span: ReadableSpan, ...) -> None: + # Intentionally skipping super().__init__() + self._span = span + + @property + def dropped_attributes(self) -> int: + return self._span.dropped_attributes + + @property + def dropped_events(self) -> int: + return self._span.dropped_events + + @property + def dropped_links(self) -> int: + return self._span.dropped_links +``` + +**Testing:** Use `pytest.mark.parametrize` to test all three properties in a +single test function to reduce duplication (per code review feedback). + +**Reference:** PR #4494, Issue #4493. + +### Session Learnings (2026-02-10): Code Review Patterns from releasekit PR #4550 + +Code review feedback from PR #4550 surfaced several reusable patterns: + +#### 1. Never Duplicate Defaults + +When a dataclass defines field defaults, the factory function that constructs +it should **not** re-specify them. Use `**kwargs` unpacking to let the +dataclass own its defaults: + +```python +# BAD — defaults duplicated between dataclass and factory +@dataclass(frozen=True) +class Config: + tag_format: str = '{name}-v{version}' + +def load_config(raw: dict) -> Config: + return Config(tag_format=raw.get('tag_format', '{name}-v{version}')) # duplicated! + +# GOOD — dataclass is the single source of truth +def load_config(raw: dict) -> Config: + return Config(**raw) # dataclass handles missing keys with its own defaults +``` + +#### 2. Extract Allowed Values as Module-Level Constants + +Enum-like validation values should be `frozenset` constants at module level, +not inline literals inside validation functions: + +```python +# BAD — allowed values hidden inside function +def _validate_publish_from(value: str) -> None: + allowed = {'local', 'ci'} # not discoverable + if value not in allowed: ... + +# GOOD — discoverable, reusable, testable +ALLOWED_PUBLISH_FROM: frozenset[str] = frozenset({'local', 'ci'}) + +def _validate_publish_from(value: str) -> None: + if value not in ALLOWED_PUBLISH_FROM: ... +``` + +#### 3. Wrap All File I/O in try/except + +Every `read_text()`, `write_text()`, or `open()` call should be wrapped +with `try/except OSError` to produce a structured error instead of an +unhandled traceback: + +```python +# BAD — unprotected I/O +text = path.read_text(encoding='utf-8') + +# GOOD — consistent error handling +try: + text = path.read_text(encoding='utf-8') +except OSError as exc: + raise ValueError(f'Failed to read {path}: {exc}') from exc + # In releasekit: raise ReleaseKitError(code=E.PARSE_ERROR, ...) from exc +``` + +#### 4. Validate Collection Item Types + +When validating a config value is a `list`, also validate that the items +inside the list are the expected type: + +```python +# BAD — only checks container type +if not isinstance(value, list): raise ... +# A list of ints would pass silently + +# GOOD — also checks item types +if not isinstance(value, list): raise ... +for item in value: + if not isinstance(item, str): + raise TypeError(f"items must be strings, got {type(item).__name__}") +``` + +#### 5. Separate Path Globs from Name Globs + +Workspace excludes (path globs like `samples/*`) and config excludes (name +globs like `sample-*`) operate in different namespaces and must never be mixed +into a single list. Apply them in independent filter stages. + +#### 6. 
Test File Basename Uniqueness + +The `check_consistency` script (check 19/20) enforces unique test file +basenames across the entire workspace. When adding tests to tools or samples, +prefix with a unique identifier: + +``` +# BAD — collides with samples/web-endpoints-hello/tests/config_test.py +tools/releasekit/tests/config_test.py + +# GOOD — unique basename +tools/releasekit/tests/rk_config_test.py +``` + +#### 7. Use Named Error Codes + +Prefer human-readable error codes (`RK-CONFIG-NOT-FOUND`) over numeric +ones (`RK-0001`). Named codes are self-documenting in logs and error +messages without requiring a lookup table. + +#### 8. Use `packaging` for PEP 508 Parsing + +Never manually parse dependency specifiers by splitting on operators. +Use `packaging.requirements.Requirement` which handles all valid PEP 508 +strings correctly (extras, markers, version constraints). For a fallback, +use `re.split(r'[<>=!~,;\[]', spec, maxsplit=1)` — always pass `maxsplit` +as a keyword argument (Ruff B034 requires this to avoid positional +argument confusion). + +#### 9. Use `assert` Over `if/pytest.fail` in Tests + +Tests should use idiomatic `assert` statements, not `if/pytest.fail()`: + +```python +# BAD — verbose and non-standard +if len(graph) != 0: + pytest.fail(f'Expected empty graph, got {len(graph)}') + +# GOOD — idiomatic pytest +assert len(graph) == 0, f'Expected empty graph, got {len(graph)}' +``` + +**Caution**: When batch-converting `pytest.fail` to `assert` via sed/regex, +the closing `)` from `pytest.fail(...)` can corrupt f-string expressions. +Always re-run lint and tests after automated refactors. + +#### 10. Check Dict Key Existence, Not Value Truthiness + +When validating whether a config key exists, check `key not in dict` +rather than `not dict.get(key)`. An empty value (e.g., `[]`) is valid +config and should not be treated as missing: + +```python +# BAD — empty list raises "unknown group" error +patterns = groups.get(name, []) +if not patterns: + raise Error("Unknown group") + +# GOOD — distinguishes missing from empty +if name not in groups: + raise Error("Unknown group") +patterns = groups[name] # may be [], which is valid +``` + +#### 11. Keyword Arguments for Ambiguous Positional Parameters + +Ruff B034 flags `re.split`, `re.sub`, etc. when positional arguments +could be confused (e.g., `maxsplit` vs `flags`). Always use keyword +arguments for clarity: + +```python +# BAD — Ruff B034 error +re.split(r'[<>=]', spec, 1) + +# GOOD +re.split(r'[<>=]', spec, maxsplit=1) +``` + +#### 12. Automated Refactors Need Manual Verification + +Batch find-and-replace (sed, regex scripts) can introduce subtle bugs: + +- **Broken f-strings**: `pytest.fail(f'got {len(x)}')` → the closing `)` + can end up inside the f-string expression as `{len(x}')`. +- **Missing variable assignments**: removing a multi-line `if/pytest.fail` + block can accidentally delete the variable assignment above it. +- **Always re-run** `ruff check`, `ruff format`, and `pytest` after any + automated refactor. Never trust the script output alone. + +**Reference:** PR #4550. + +### Session Learnings (2026-02-10): Code Review Patterns from releasekit PR #4555 + +#### 13. Signal Handlers Must Use SIG_DFL + os.kill, Not default_int_handler + +`signal.default_int_handler` is only valid for SIGINT (raises +`KeyboardInterrupt`) and doesn't accept the expected arguments. 
For +general signal cleanup (SIGTERM/SIGINT): + +```python +# BAD — only works for SIGINT, wrong argument types +signal.default_int_handler(signum, frame) + +# GOOD — works for any signal +signal.signal(signum, signal.SIG_DFL) +os.kill(os.getpid(), signum) +``` + +#### 14. Extract Shared Parsing Logic into Helper Functions (DRY) + +When the same parsing logic appears for both regular and optional +dependencies (or any parallel structures), extract it into a helper: + +```python +# BAD — duplicated dep name extraction in two loops +for i, dep in enumerate(deps): + bare_name = dep.split('[')[0].split('>')[0]... # fragile, duplicated + +# GOOD — helper + packaging.Requirement +def _extract_dep_name(dep_spec: str) -> str: + try: + return Requirement(dep_spec).name + except InvalidRequirement: + return re.split(r'[<>=!~,;\[]', dep_spec, maxsplit=1)[0].strip() + +def _pin_dep_list(deps, version_map) -> int: + ... # single implementation, called for both dep lists +``` + +#### 15. Fail Fast on Required Fields in Serialized Data + +When loading JSON/TOML for CI handoff, required fields must fail fast +with a clear error, not silently default to empty strings: + +```python +# BAD — silent default hides missing data in downstream CI +git_sha = data.get('git_sha', '') + +# GOOD — fail fast with documented ValueError +try: + git_sha = data['git_sha'] +except KeyError as exc: + raise ValueError(f'Missing required field: git_sha') from exc +``` + +#### 16. Remove Dead Code Before Submitting + +Unused variables and unreachable code paths should be caught during +self-review. Tools like `ruff` catch unused imports, but unused local +variables assigned in loops require manual inspection. + +#### 17. Narrow Exception Types in Catch Blocks + +Catching `Exception` masks `KeyboardInterrupt`, `SystemExit`, and +unexpected programming errors. Always catch the most specific type: + +```python +# BAD — catches KeyboardInterrupt, SystemExit, etc. +except Exception: + logger.warning('operation failed') + +# GOOD — catches only expected failure modes +except OSError: + logger.warning('operation failed') +``` + +**Reference:** PR #4555. + +#### 18. Scope Commits Per-Package via `vcs.log(paths=...)` + +When computing version bumps in a monorepo, each package must only see +commits that touched *its own files*. Fetching all commits globally and +then trying to map them via `diff_files` is error-prone. Instead, use +the VCS backend's `paths` filter: + +```python +# BAD — associates ALL commits with any package that has changes +all_commits = vcs.log(format='%H %s') +for pkg in packages: + changed = vcs.diff_files(since_tag=tag) + # Tries to match commits to files — misses per-commit scoping + +# GOOD — per-package log query with path filtering +for pkg in packages: + log_lines = vcs.log(since_tag=tag, paths=[str(pkg.path)]) + # Only commits that touched files in pkg.path are returned +``` + +#### 19. Use `shutil.move` for Atomic File Restore + +When restoring from a backup file, `shutil.copy2()` + `unlink()` is +two operations that can leave orphaned backups. `shutil.move()` is +atomic on POSIX same-filesystem (uses `rename(2)`): + +```python +# BAD — non-atomic: if unlink fails, backup is orphaned +shutil.copy2(backup_path, target_path) +backup_path.unlink() + +# GOOD — atomic on same filesystem +shutil.move(backup_path, target_path) +``` + +#### 20. 
Test Orchestration Functions with Fake Backends
+
+Functions like `compute_bumps` that orchestrate multiple subsystems (VCS,
+package discovery, commit parsing) need integration tests with fake
+backends. A `FakeVCS` that maps paths to log output catches scoping bugs
+that unit tests on individual helpers miss.
+
+**Reference:** PR #4555.
+
+## Release Process
+
+### Automated Release Scripts
+
+The following scripts automate the release process:
+
+| Script | Description |
+|--------|-------------|
+| `py/bin/release_check` | Comprehensive release readiness validation |
+| `py/bin/bump_version` | Bump version across all packages |
+| `py/bin/check_consistency` | Verify workspace consistency |
+| `py/bin/fix_package_metadata.py` | Add missing package metadata |
+
+### Pre-Release Checklist
+
+Before releasing, run the release check script:
+
+```bash
+# Full release readiness check
+./py/bin/release_check
+
+# With verbose output
+./py/bin/release_check --verbose
+
+# CI mode (optimized for CI pipelines)
+./py/bin/release_check --ci
+
+# Skip tests (when tests are run separately in CI)
+./py/bin/release_check --skip-tests
+```
+
+The release check validates:
+
+1. **Package Metadata**: All packages have required fields (name, version, description, license, authors, classifiers)
+2. **Build Verification**: Lock file is current, dependencies resolve, packages build successfully
+3. **Code Quality**: Type checking, formatting, linting all pass
+4. **Tests**: All unit tests pass (can be skipped with `--skip-tests`)
+5. **Security & Compliance**: No vulnerabilities, licenses are approved
+6. **Documentation**: README files exist, CHANGELOG has current version entry
+
+> **Note**: The CI workflow runs release checks on every PR to ensure every commit
+> is release-worthy. This catches issues early and ensures consistent quality.
+
+### Version Bumping
+
+Use the version bump script to update all packages simultaneously:
+
+```bash
+# Bump to specific version
+./py/bin/bump_version 0.5.0
+
+# Bump minor version (0.4.0 -> 0.5.0)
+./py/bin/bump_version --minor
+
+# Bump patch version (0.4.0 -> 0.4.1)
+./py/bin/bump_version --patch
+
+# Bump major version (0.4.0 -> 1.0.0)
+./py/bin/bump_version --major
+
+# Dry run (preview changes)
+./py/bin/bump_version --minor --dry-run
+```
+
+### Release Steps
+
+1. **Update CHANGELOG.md** with release notes
+2. **Bump version**: `./py/bin/bump_version <version>`
+3. **Run release check**: `./py/bin/release_check`
+4. **Commit changes**: `git commit -am "chore: release v<version>"`
+5. **Create tag**: `git tag -a py-v<version> -m "Python SDK v<version>"`
+6. **Push**: `git push && git push --tags`
+7. 
**Build and publish**: `./py/bin/build_dists && ./py/bin/publish_dists` + +### Package Metadata Requirements + +All publishable packages (core and plugins) MUST have: + +| Field | Required | Description | +|-------|----------|-------------| +| `name` | Yes | Package name (e.g., `genkit-plugin-anthropic`) | +| `version` | Yes | Semantic version matching core framework | +| `description` | Yes | Short description of the package | +| `license` | Yes | `Apache-2.0` | +| `requires-python` | Yes | `>=3.10` | +| `authors` | Yes | `[{ name = "Google" }]` | +| `classifiers` | Yes | PyPI classifiers for discoverability | +| `keywords` | Recommended | Search keywords for PyPI | +| `readme` | Recommended | `README.md` | +| `[project.urls]` | Recommended | Links to docs, repo, issues | + +### Required Files + +Each publishable package directory MUST contain: + +| File | Required | Description | +|------|----------|-------------| +| `pyproject.toml` | Yes | Package configuration and metadata | +| `LICENSE` | Yes | Apache 2.0 license file (copy from `py/LICENSE`) | +| `README.md` | Recommended | Package documentation | +| `src/` | Yes | Source code directory | +| `src/.../py.typed` | Yes | PEP 561 type hint marker file | + +To copy LICENSE files to all packages: + +```bash +# Copy LICENSE to core package +cp py/LICENSE py/packages/genkit/LICENSE + +# Copy LICENSE to all plugins +for d in py/plugins/*/; do cp py/LICENSE "$d/LICENSE"; done +``` + +### PEP 561 Type Hints (py.typed) + +All packages MUST include a `py.typed` marker file to indicate they support type hints. +This enables type checkers like mypy, pyright, and IDE autocompletion to use the package's +type annotations. + +```bash +# Add py.typed to core package +touch py/packages/genkit/src/genkit/py.typed + +# Add py.typed to all plugins +for d in py/plugins/*/src/genkit/plugins/*/; do touch "$d/py.typed"; done +``` + +### Required Classifiers + +All packages MUST include these PyPI classifiers: + +```toml +classifiers = [ + "Development Status :: 3 - Alpha", # or 4 - Beta, 5 - Production/Stable + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Typing :: Typed", # Required for typed packages + "Topic :: Scientific/Engineering :: Artificial Intelligence", +] +``` + +### Session Learnings (2026-02-04): Release PRs and Changelogs + +When drafting release PRs and changelogs, follow these guidelines to create +comprehensive, contributor-friendly release documentation. 
+ +#### Release PR Checklist + +Use this checklist when drafting a release PR: + +| Step | Task | Command/Details | +|------|------|-----------------| +| 1 | **Count commits** | `git log "genkit-python@PREV"..HEAD --oneline -- py/ \| wc -l` | +| 2 | **Count file changes** | `git diff --stat "genkit-python@PREV"..HEAD -- py/ \| tail -1` | +| 3 | **List contributors** | `git log "genkit-python@PREV"..HEAD --pretty=format:"%an" -- py/ \| sort \| uniq -c \| sort -rn` | +| 4 | **Get PR counts** | `gh pr list --state merged --search "label:python merged:>=DATE" --json author --limit 200 \| jq ...` | +| 5 | **Map git names to GitHub** | `gh pr list --json author --limit 200 \| jq '.[].author \| "\(.name) -> @\(.login)"'` | +| 6 | **Get each contributor's commits** | `git log --pretty=format:"%s" --author="Name" -- py/ \| head -30` | +| 7 | **Check external repos** (e.g., dotprompt) | Review GitHub contributors page or clone and run git log | +| 8 | **Create CHANGELOG.md section** | Follow Keep a Changelog format with Impact Summary | +| 9 | **Create PR_DESCRIPTION_X.Y.Z.md** | Put in `.github/` directory | +| 10 | **Add contributor tables** | Include GitHub links, PR/commit counts, exhaustive contributions | +| 11 | **Categorize contributions** | Use bold categories: **Core**, **Plugins**, **Fixes**, etc. | +| 12 | **Include PR numbers** | Add (#1234) for each major contribution | +| 13 | **Add dotprompt table** | Same format as main table with PRs, Commits, Key Contributions | +| 14 | **Create blog article** | Optional: draft in PR description or external blog | +| 15 | **Verify code examples** | Test all code snippets match actual API patterns | +| 16 | **Run release validation** | `./bin/validate_release_docs` (see below) | +| 17 | **Commit with --no-verify** | `git commit --no-verify -m "docs(py): ..."` | +| 18 | **Push with --no-verify** | `git push --no-verify` | +| 19 | **Update PR on GitHub** | `gh pr edit --body-file py/.github/PR_DESCRIPTION_X.Y.Z.md` | + +#### Automated Release Documentation Validation + +Run `py/bin/validate_release_docs` before finalizing release documentation. It +checks branding ("Genkit" not "Firebase Genkit"), non-existent plugin names, +unshipped feature references, blog code syntax, contributor link formatting, +and import validity. + +#### Key Principles + +1. **Exhaustive contributions**: List every significant feature, fix, and improvement +2. **Clickable GitHub links**: Format as `[@username](https://github.com/username)` +3. **Real names when different**: Show as `@MengqinShen (Elisa Shen)` +4. **Categorize by type**: Use bold headers like **Core**, **Plugins**, **Type Safety** +5. **Include PR numbers**: Every major item should have `(#1234)` +6. **Match table formats**: External repo tables should have same columns as main table +7. **Cross-check repositories**: Check both firebase/genkit and google/dotprompt for Python work +8. **Use --no-verify**: For documentation-only changes, skip hooks for faster iteration +9. **Consider a blog article**: Major releases may warrant a blog article +10. **Branding**: Use "Genkit" not "Firebase Genkit" (rebranded as of 2025) + +#### Blog Article Guidelines + +Major releases may include a blog article (e.g. in the PR description or an external blog). + +**Required Sections:** +1. **Headline**: "Genkit Python SDK X.Y.Z: [Catchy Subtitle]" +2. **Stats paragraph**: Commits, files changed, contributors, PRs +3. **What's New**: Plugin expansion, architecture changes, new features +4. 
**Code Examples**: Accurate, tested examples (see below) +5. **Critical Fixes & Security**: Important bug fixes +6. **Developer Experience**: Tooling improvements +7. **Plugin Tables**: All available plugins with status +8. **Get Started**: Installation and quick start +9. **Contributors**: Acknowledgment table +10. **What's Next**: Roadmap items +11. **Get Involved**: Community links + +**Code Example Accuracy** — verify ALL examples match the actual API: + +| Pattern | Correct | Wrong | +|---------|---------|-------| +| Text response | `response.text` | `response.text()` | +| Structured output | `output=Output(schema=Model)` | `output_schema=Model` | +| Dynamic tools | `ai.dynamic_tool(name, fn, description=...)` | `@ai.action_provider()` | +| Main function | `ai.run_main(main())` | `asyncio.run(main())` | +| Genkit init | Module-level `ai = Genkit(...)` | Inside `async def main()` | +| Imports | `from genkit.ai import Genkit, Output` | `from genkit import Genkit` | + +Cross-check against actual samples: `grep -r "pattern" py/samples/*/src/main.py`. +Only document shipped features — never reference DAP, MCP, etc. unless actually shipped. + +**Verify plugin names exist before documenting:** + +CRITICAL: Always verify plugin names against actual packages before including them in +release documentation. Non-existent plugins will confuse users. + +```bash +# List all actual plugin package names +grep "^name = " py/plugins/*/pyproject.toml | sort + +# Verify a specific plugin exists +ls -la py/plugins//pyproject.toml +``` + +Common mistakes to avoid: +- `genkit-plugin-aim` does NOT exist (use `genkit-plugin-firebase` or `genkit-plugin-observability`) +- `genkit-plugin-firestore` does NOT exist (it's `genkit-plugin-firebase`) +- Always double-check plugin names match directory names (with `genkit-plugin-` prefix) + +#### CHANGELOG.md Structure + +Follow [Keep a Changelog](https://keepachangelog.com/) format with these sections: + +```markdown +## [X.Y.Z] - YYYY-MM-DD + +### Impact Summary +| Category | Description | +|----------|-------------| +| **New Capabilities** | Brief summary | +| **Critical Fixes** | Brief summary | +| **Performance** | Brief summary | +| **Breaking Changes** | Brief summary | + +### Added +- **Category Name** - Feature description + +### Changed +- **Category Name** - Change description + +### Fixed +- **Category Name** - Fix description + +### Security +- **Category Name** - Security fix description + +### Performance +- **Per-Event-Loop HTTP Client Caching** - Performance improvement description + +### Deprecated +- Item being deprecated + +### Contributors +... (see contributor section below) +``` + +#### Gathering Release Statistics + +```bash +# Commits, contributors, and file changes since last release +git log "genkit-python@PREV"..HEAD --oneline -- py/ | wc -l +git log "genkit-python@PREV"..HEAD --pretty=format:"%an" -- py/ | sort | uniq -c | sort -rn +git diff --stat "genkit-python@PREV"..HEAD -- py/ | tail -1 + +# Map git names to GitHub handles (requires gh CLI) +gh pr list --state merged --search "label:python" --json author --limit 200 \ + | jq -r '.[].author | "\(.name) -> @\(.login)"' | sort -u +``` + +#### PR Description & Contributors + +Create `.github/PR_DESCRIPTION_X.Y.Z.md` for each major release. 
Required sections: + +| Section | Purpose | +|---------|---------| +| **Impact Summary** | Quick overview table with categories | +| **Critical Fixes** | Race conditions, thread safety, security (with PR #s) | +| **Breaking Changes** | Migration guide with before/after examples | +| **Contributors** | Table with PRs, commits, and key contributions | + +Contributor table format — use clickable GitHub links: + +```markdown +| Contributor | PRs | Commits | Key Contributions | +|-------------|-----|---------|-------------------| +| [**@user**](https://github.com/user) | 91 | 93 | Core framework, plugins | +``` + +- Only include contributors with commits under `py/` +- For cross-name contributors: `@GitHubName (Real Name)` +- For external repos (e.g., dotprompt), add a separate table with same columns +- Use `--no-verify` for documentation-only commits/pushes +- Update PR body: `gh pr edit --body-file py/.github/PR_DESCRIPTION_X.Y.Z.md` + +### Release Publishing Process + +After the release PR is merged, follow these steps to complete the release. + +#### Step 1: Merge the Release PR + +```bash +# Merge via GitHub UI or CLI +gh pr merge --squash +``` + +#### Step 2: Create the Release Tag + +```bash +# Ensure you're on main with latest changes +git checkout main +git pull origin main + +# Create an annotated tag for the release +git tag -a py/vX.Y.Z -m "Genkit Python SDK vX.Y.Z + +See CHANGELOG.md for full release notes." + +# Push the tag +git push origin py/vX.Y.Z +``` + +#### Step 3: Create GitHub Release + +Use the PR description as the release body with all contributors mentioned: + +```bash +# Create release using the PR description file +gh release create py/vX.Y.Z \ + --title "Genkit Python SDK vX.Y.Z" \ + --notes-file py/.github/PR_DESCRIPTION_X.Y.Z.md +``` + +**Important:** The GitHub release should include: +- Full contributor tables with GitHub links +- Impact summary +- What's new section +- Critical fixes and security +- Breaking changes (if any) + +#### Step 4: Publish to PyPI + +Use the publish workflow with the "all" option: + +1. Go to **Actions** → **Publish Python Package** +2. Click **Run workflow** +3. Select `publish_scope: all` +4. Click **Run workflow** + +This publishes all 23 packages in parallel: + +| Package Category | Packages | +|------------------|----------| +| **Core** | `genkit` | +| **Model Providers** | `genkit-plugin-anthropic`, `genkit-plugin-amazon-bedrock`, `genkit-plugin-cloudflare-workers-ai`, `genkit-plugin-deepseek`, `genkit-plugin-google-genai`, `genkit-plugin-huggingface`, `genkit-plugin-mistral`, `genkit-plugin-microsoft-foundry`, `genkit-plugin-ollama`, `genkit-plugin-vertex-ai`, `genkit-plugin-xai` | +| **Telemetry** | `genkit-plugin-aws`, `genkit-plugin-cloudflare-workers-ai`, `genkit-plugin-google-cloud`, `genkit-plugin-observability` (Azure telemetry is included in `genkit-plugin-microsoft-foundry`) | +| **Data/Retrieval** | `genkit-plugin-dev-local-vectorstore`, `genkit-plugin-evaluators`, `genkit-plugin-firebase` | +| **Other** | `genkit-plugin-flask`, `genkit-plugin-compat-oai`, `genkit-plugin-mcp` | + +For single package publish (e.g., hotfix): +1. Select `publish_scope: single` +2. Select appropriate `project_type` (packages/plugins) +3. 
Select the specific `project_name` + +#### Step 5: Verify Publication + +```bash +# Check versions on PyPI +pip index versions genkit +pip index versions genkit-plugin-google-genai + +# Test installation +python -m venv /tmp/genkit-test +source /tmp/genkit-test/bin/activate +pip install genkit genkit-plugin-google-genai +python -c "from genkit.ai import Genkit; print('Success!')" +``` + +### Version Consistency + +See "Plugin Version Sync" in the Code Quality section and "Version Bumping" +above for version management details. + +## Code Reviewer Preferences + +These preferences were distilled from reviewer feedback on Python PRs and should +be followed to minimize review round-trips. + +### DRY (Don't Repeat Yourself) + +* **Eliminate duplicated logic aggressively.** If the same pattern appears more + than once (even twice), extract it into a helper function or use data-driven + lookup tables (`dict`, `enum`). +* **Prefer data-driven patterns over repeated conditionals.** Instead of: + ```python + if 'image' in name: + do_thing(ImageModel, IMAGE_REGISTRY) + elif 'tts' in name: + do_thing(TTSModel, TTS_REGISTRY) + elif 'stt' in name: + do_thing(STTModel, STT_REGISTRY) + ``` + Use a lookup table: + ```python + _CONFIG: dict[ModelType, tuple[type[Model], dict[str, ModelInfo]]] = { + ModelType.IMAGE: (ImageModel, IMAGE_REGISTRY), + ModelType.TTS: (TTSModel, TTS_REGISTRY), + ModelType.STT: (STTModel, STT_REGISTRY), + } + config = _CONFIG.get(model_type) + if config: + do_thing(*config) + ``` +* **Shared utility functions across sibling modules.** When two modules + (e.g., `audio.py` and `image.py`) have identical helper functions, consolidate + into one and import from the other. Re-export with an alias if needed for + backward compatibility. +* **Extract common logic into utility functions that can be tested + independently and exhaustively.** Data URI parsing, config extraction, + media extraction, and similar reusable patterns should live in a `utils.py` + module with comprehensive unit tests covering edge cases (malformed input, + empty strings, missing fields, etc.). This improves coverage and makes the + correct behavior verifiable without mocking external APIs. +* **Extract shared info dict builders.** When the same metadata serialization + logic (e.g., `model_info.model_dump()` with fallback) appears in both Action + creation and ActionMetadata creation, extract a single helper like + `_get_multimodal_info_dict(name, model_type, registry)` and call it from both. +* **Re-assert `isinstance` after `next()` for type narrowing.** Type checkers + can't track narrowing inside generator expressions. After `next()`, re-assert + `isinstance(part.root, MediaPart)` locally so the checker can narrow: + ```python + part_with_media = next( + (p for p in content if isinstance(p.root, MediaPart)), + None, + ) + if part_with_media: + # Re-assert to help type checkers narrow the type of part_with_media.root + assert isinstance(part_with_media.root, MediaPart) + # Now the type checker knows part_with_media.root is a MediaPart + url = part_with_media.root.media.url + ``` +* **Hoist constant lookup tables to module level.** Don't recreate `dict` + literals inside functions on every call. Define them once at module scope: + ```python + # Module level — created once. 
+ _CONTENT_TYPE_TO_EXTENSION: dict[str, str] = {'audio/mpeg': 'mp3', ...} + + def _to_stt_params(...): + ext = _CONTENT_TYPE_TO_EXTENSION.get(content_type, 'mp3') + ``` +### Type Safety and Redundancy + +* **Remove redundant `str()` casts after `isinstance` checks.** If you guard + with `isinstance(part.root, TextPart)`, then `part.root.text` is already `str`. + Don't wrap it in `str()` again. +* **Remove unnecessary fallbacks on required fields.** If a Pydantic model field + is required (not `Optional`), don't write `str(field or '')` — just use `field` + directly. +* **Use `isinstance` over `hasattr` for type checks.** Prefer + `isinstance(part.root, TextPart)` over `hasattr(part.root, 'text')` for + type-safe attribute access. + +### Pythonic Idioms + +* **Use `next()` with generators for find-first patterns.** Instead of a loop + with `break`: + ```python + # Don't do this: + result = None + for item in collection: + if condition(item): + result = item + break + + # Do this: + result = next((item for item in collection if condition(item)), None) + ``` +* **Use `split(',', 1)` over `index(',')` for parsing.** `split` with `maxsplit` + is more robust and Pythonic for separating a string into parts: + ```python + # Don't do this: + data = s[s.index(',') + 1:] + + # Do this: + _, data = s.split(',', 1) + ``` + +### Async/Sync Correctness + +* **Don't mark functions `async` if they contain only synchronous code.** A + function that only does `response.read()`, `base64.b64encode()`, etc., should + be a regular `def`, not `async def`. +* **Use `AsyncOpenAI` consistently for async code paths.** New async model + classes should use `AsyncOpenAI`, not the synchronous `OpenAI` client. +* **Never use sync clients for network calls inside `async` methods.** If + `list_actions` or `resolve` is `async def`, all network calls within it must + use the async client and `await`. A sync call like + `self._openai_client.models.list()` blocks the event loop. +* **Update tests when switching sync→async.** When changing a method to use + the async client, update the corresponding test mocks to use `AsyncMock` + and patch the `_async_client` attribute instead of the sync one. + +### Threading, Asyncio & Event-Loop Audit Checklist + +When reviewing code that involves concurrency, locks, or shared mutable state, +check for every issue in this list. These are real bugs found during audits — +not theoretical concerns. + +#### Lock type mismatches + +* **Never use `threading.Lock` or `threading.RLock` in async code.** These + block the *entire* event loop thread while held. Use `asyncio.Lock` instead. + This applies to locks you create *and* to locks inside third-party libraries + you call (e.g. `pybreaker` uses a `threading.RLock` internally). + ```python + # BAD — blocks event loop + self._lock = threading.Lock() + with self._lock: + ... + + # GOOD — cooperatively yields + self._lock = asyncio.Lock() + async with self._lock: + ... + ``` +* **Be wary of third-party libraries that use threading locks internally.** + If you wrap a sync library for async use and access its internals (private + `_lock`, `_state_storage`, etc.), you inherit its threading-lock problem. + This is a reason to prefer custom async-native implementations over + wrapping sync-only libraries — see the OSS evaluation notes below. 
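+
+If a sync-only dependency (with its own internal `threading.Lock`) must be used
+anyway, the usual mitigation (see "Blocking the event loop" below) is to run the
+whole call in a worker thread instead of acquiring its lock on the event loop
+thread. A minimal sketch with a stand-in client; `SyncQuotaClient` is
+hypothetical, not a real library:
+
+```python
+import asyncio
+import threading
+import time
+
+
+class SyncQuotaClient:
+    """Stand-in for a sync-only dependency that locks internally."""
+
+    def __init__(self) -> None:
+        self._lock = threading.Lock()  # would stall the loop if held on it
+
+    def fetch(self) -> str:
+        with self._lock:
+            time.sleep(0.05)  # simulated blocking network call
+            return 'ok'
+
+
+client = SyncQuotaClient()
+
+
+async def fetch_quota() -> str:
+    # Offload the whole blocking call, lock acquisition included, to a worker
+    # thread so the event loop keeps servicing other coroutines.
+    return await asyncio.to_thread(client.fetch)
+```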
+
+#### Time functions
+
+* **Use `time.monotonic()` for interval/duration measurement, not
+  `time.time()` or `datetime.now()`.** Wall-clock time is subject to NTP
+  corrections that can jump forward or backward, breaking timeouts, TTLs,
+  and retry-after calculations. `time.monotonic()` only moves forward.
+  ```python
+  # BAD — subject to NTP jumps
+  start = time.time()
+  elapsed = time.time() - start
+
+  # BAD — same problem, datetime edition
+  opened_at = datetime.now(timezone.utc)
+  elapsed = datetime.now(timezone.utc) - opened_at
+
+  # GOOD — monotonically increasing
+  start = time.monotonic()
+  elapsed = time.monotonic() - start
+  ```
+* **Call time functions once and reuse the value** when the same timestamp
+  is needed in multiple expressions. Two calls can return different values.
+* **Clamp computed `retry_after` values** to a reasonable range (e.g.
+  `[0, 3600]`) to guard against anomalous clock behavior.
+
+#### Race conditions (TOCTOU)
+
+* **Check-then-act on shared state must be atomic.** If you check a condition
+  (e.g. cache miss) and then act on it (execute expensive call + store result),
+  the two steps must be inside the same lock acquisition or protected by
+  per-key coalescing. Otherwise concurrent coroutines all see the same "miss"
+  and duplicate the work (cache stampede / thundering herd).
+  ```python
+  # BAD — stampede window between check and set
+  async with self._lock:
+      entry = self._store.get(key)
+      if entry is not None:
+          return entry.value
+  result = await expensive_call()  # N coroutines all reach here
+  async with self._lock:
+      self._store[key] = result
+
+  # GOOD — per-key lock prevents concurrent duplicate calls
+  async with self._get_key_lock(key):
+      async with self._store_lock:
+          entry = self._store.get(key)
+          if entry is not None:
+              return entry.value
+      result = await expensive_call()  # only 1 coroutine per key
+      async with self._store_lock:
+          self._store[key] = result
+  ```
+* **Half-open circuit breaker probes** must be gated so only one probe
+  coroutine is in flight. Without a flag or counter checked inside the lock,
+  the lock is released before `await fn()`, and multiple coroutines can all
+  transition to half-open and probe simultaneously.
+* **`exists()` + `delete()` is a TOCTOU race.** The key can expire or be
+  deleted between the two calls. Prefer a single `delete()` call.
+
+#### Blocking the event loop
+
+* **Any synchronous library call that does network I/O will block the event
+  loop.** This includes: Redis clients, Memcached clients, database drivers,
+  HTTP clients, file I/O, and DNS resolution. Wrap in `asyncio.to_thread()`
+  or use an async-native library.
+* **Sub-microsecond sync calls are acceptable** (dict lookups, counter
+  increments, in-memory data structure operations) — wrapping them in
+  `to_thread()` would add more overhead than the call itself.
+
+#### Counter and stat safety
+
+* **Integer `+=` between `await` points is safe** in single-threaded asyncio
+  (coroutines only switch at `await` points, so no other coroutine can
+  interleave between the read and the write). But this assumption is fragile:
+  - Document it explicitly in docstrings.
+  - If the counter is mutated near `await` calls, move it inside a lock
+    (see the sketch after this list).
+  - If the code might run from multiple threads (e.g. `to_thread()`), use
+    a `threading.Lock` or `asyncio.Lock` as appropriate.
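+
+To make the locked variant concrete, a minimal sketch of a counter holder whose
+updates happen near `await` points; the class and field names are illustrative,
+not an existing Genkit API:
+
+```python
+import asyncio
+from collections.abc import Awaitable, Callable
+
+
+class RequestStats:
+    """Counters mutated close to await points, so updates go through a lock."""
+
+    def __init__(self) -> None:
+        self._lock = asyncio.Lock()
+        self.started = 0
+        self.completed = 0
+
+    async def record(self, handler: Callable[[], Awaitable[None]]) -> None:
+        async with self._lock:
+            self.started += 1
+        try:
+            await handler()  # suspension point between the two updates
+        finally:
+            async with self._lock:
+                self.completed += 1
+```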
+ +#### OSS library evaluation: when custom is better + +* **Prefer custom async-native implementations** over wrapping sync-only + libraries when the wrapper must access private internals, reimplement half + the library's logic, or ends up the same line count. Specific examples: + - `pybreaker` — sync-only, uses `threading.RLock`, requires accessing + `_lock`, `_state_storage`, `_handle_error`, `_handle_success` (all + private). Uses `datetime.now()` instead of `time.monotonic()`. + - `aiocache.SimpleMemoryCache` — no LRU eviction, no stampede prevention, + weak type hints (`Any` return). Custom `OrderedDict` cache is fewer lines. + - `limits` — sync-only API, uses `time.time()`, fixed-window algorithm + allows boundary bursts. Custom token bucket is ~25 lines. +* **Prefer OSS when the library is genuinely async, well-typed, and provides + functionality you'd otherwise have to maintain.** Example: `secure` for + OWASP security headers — it tracks evolving browser standards and header + deprecations (e.g. X-XSS-Protection removal) that are tedious to follow + manually. + +### Configuration and State + +* **Never mutate `request.config` in-place.** Always `.copy()` the config dict + before calling `.pop()` on it to avoid side effects on callers: + ```python + # Don't do this: + config = request.config # mutates caller's dict! + model = config.pop('version', None) + + # Do this: + config = request.config.copy() + model = config.pop('version', None) + ``` + +### Metadata and Fallbacks + +* **Always provide default metadata for dynamically discovered models.** When a + model isn't found in a registry, provide sensible defaults (label, supports) + rather than returning an empty dict. This ensures all resolved actions have + usable metadata. + +### Testing Style + +* **Use `assert` statements, not `if: pytest.fail()`.** For consistency: + ```python + # Don't do this: + if not isinstance(part, MediaPart): + pytest.fail('Expected MediaPart') + + # Do this: + assert isinstance(part, MediaPart) + ``` +* **Add `assert obj is not None` before accessing optional attributes.** This + satisfies type checker null-safety checks and serves as documentation: + ```python + got = some_function() + assert got.message is not None + part = got.message.content[0].root + ``` + +### Exports and Organization + +* **Keep `__all__` lists sorted alphabetically (case-sensitive).** Uppercase + names sort before lowercase (e.g., `'OpenAIModel'` before `'get_default_model_info'`). + This makes diffs cleaner and items easier to find. + +### Module Dependencies + +* **Never import between sibling model modules.** If `image.py` and `audio.py` + share utility functions, move the shared code to a `utils.py` in the same + package. This avoids creating fragile coupling between otherwise independent + model implementations. + +### Input Validation and Robustness + +* **Validate data URI schemes explicitly.** Don't rely on heuristics like + `',' in url` to detect data URIs. Check for known prefixes: + ```python + # Don't do this: + if ',' in url: + _, data = url.split(',', 1) + else: + data = url # Could be https://... 
which will crash b64decode + + # Do this: + if url.startswith('data:'): + _, data = url.split(',', 1) + result = base64.b64decode(data) + elif url.startswith(('http://', 'https://')): + raise ValueError('Remote URLs not supported; provide a data URI') + else: + result = base64.b64decode(url) # raw base64 fallback + ``` +* **Wrap decode calls in try/except.** `base64.b64decode` and `split` can fail + on malformed input. Wrap with descriptive `ValueError`: + ```python + try: + _, b64_data = media_url.split(',', 1) + audio_bytes = base64.b64decode(b64_data) + except (ValueError, TypeError) as e: + raise ValueError('Invalid data URI format') from e + ``` +* **Use `TypeAlias` for complex type annotations.** When a type hint is long + or repeated, extract a `TypeAlias` for readability: + ```python + from typing import TypeAlias + + _MultimodalModel: TypeAlias = OpenAIImageModel | OpenAITTSModel | OpenAISTTModel + _MultimodalModelConfig: TypeAlias = tuple[type[_MultimodalModel], dict[str, ModelInfo]] + + _MULTIMODAL_CONFIG: dict[_ModelType, _MultimodalModelConfig] = { ... } + ``` +* **Use raising helpers and non-raising helpers.** When the same extraction + logic is needed in both required and optional contexts, split into two + functions — one that returns `None` on failure, and a strict wrapper: + ```python + def _find_text(request) -> str | None: + """Non-raising: returns None if not found.""" + ... + + def _extract_text(request) -> str: + """Raising: delegates to _find_text, raises ValueError on None.""" + text = _find_text(request) + if text is not None: + return text + raise ValueError('No text content found') + ``` + +### Defensive Action Resolution + +* **Guard symmetrically against misrouted action types in `resolve()`.** Apply + `_classify_model` checks in both directions — prevent embedders from being + resolved as models AND prevent non-embedders from being resolved as embedders: + ```python + if action_type == ActionKind.EMBEDDER: + if _classify_model(name) != _ModelType.EMBEDDER: + return None # Not an embedder name. + return self._create_embedder_action(name) + + if action_type == ActionKind.MODEL: + model_type = _classify_model(name) + if model_type == _ModelType.EMBEDDER: + return None # Embedder name shouldn't create a model action. + ... + ``` + +### Code Simplification + +* **Collapse multi-branch conditionals into single expressions.** When multiple + branches assign a value and fall through to a default: + ```python + # Don't do this: + if custom_format: + params['response_format'] = custom_format + elif output_format == 'json': + params['response_format'] = 'json' + elif output_format == 'text': + params['response_format'] = 'text' + else: + params.setdefault('response_format', 'text') + + # Do this: + response_format = config.pop('response_format', None) + if not response_format and request.output and request.output.format in ('json', 'text'): + response_format = request.output.format + params['response_format'] = response_format or 'text' + ``` +* **Separate find-first from processing.** When using `next()` to find an + element and then processing it, keep both steps distinct. Don't combine + complex processing logic inside the generator expression: + ```python + # Do this: + part = next( + (p for p in content if isinstance(p.root, MediaPart) and p.root.media), + None, + ) + if not part: + raise ValueError('No media found') + media = part.root.media + # ... process media ... 
+ ``` +* **Avoid `continue` in loops when a simple conditional suffices.** Compute + the value first, then conditionally use it: + ```python + # Don't do this: + for image in images: + if image.url: + url = image.url + elif image.b64_json: + url = f'data:...;base64,{image.b64_json}' + else: + continue + content.append(...) + + # Do this: + for image in images: + url = image.url + if not url and image.b64_json: + url = f'data:...;base64,{image.b64_json}' + if url: + content.append(...) + ``` + +### Security Design & Production Hardening + +When building samples, plugins, or services in this repository, follow these +security design principles. These are not theoretical guidelines — they come +from real issues found during audits of the `web-endpoints-hello` sample and +apply broadly to any Python service that uses Genkit. + +#### Secure-by-default philosophy + +* **Every default must be the restrictive option.** If someone deploys with + zero configuration, the system should be locked down. Development + convenience (Swagger UI, open CORS, colored logs, gRPC reflection) requires + explicit opt-in. + ```python + # BAD — open by default, must remember to close + cors_allowed_origins: str = "*" + debug: bool = True + + # GOOD — closed by default, must opt in + cors_allowed_origins: str = "" # same-origin only + debug: bool = False # no Swagger, no reflection + ``` +* **When adding a new setting, ask:** "If someone forgets to configure this, + should the system be open or closed?" Always choose closed. +* **Log a warning for insecure configurations** at startup so operators notice + immediately. Don't silently accept an insecure state. + ```python + # GOOD — warn when host-header validation is disabled in production + if not trusted_hosts and not debug: + logger.warning( + "No TRUSTED_HOSTS configured — Host-header validation is disabled." + ) + ``` + +#### Debug mode gating + +* **Gate all development-only features behind a single `debug` flag.** + This includes: API documentation (Swagger UI, ReDoc, OpenAPI schema), gRPC + reflection, relaxed Content-Security-Policy, verbose error responses, + wildcard CORS fallbacks, and colored console log output. + ```python + # GOOD — single flag controls all dev features + app = FastAPI( + docs_url="/docs" if debug else None, + redoc_url="/redoc" if debug else None, + openapi_url="/openapi.json" if debug else None, + ) + ``` +* **Never expose API schema in production.** Swagger UI, ReDoc, `/openapi.json`, + and gRPC reflection all reveal the full API surface. Disable them when + `debug=False`. +* **Use `--debug` as the CLI flag** and `DEBUG` as the env var. The `run.sh` + dev script should pass `--debug` automatically; production entry points + (gunicorn, Kubernetes manifests, Cloud Run) should never set it. + +#### Content-Security-Policy + +* **Production CSP should be `default-src none`** for API-only servers. This + blocks all resource loading (scripts, styles, images, fonts, frames). +* **Debug CSP must explicitly allowlist CDN origins** for Swagger UI (e.g. + `cdn.jsdelivr.net` for JS/CSS, `fastapi.tiangolo.com` for the favicon). + Never use `unsafe-eval`. +* **Use the `secure` library** rather than hand-crafting header values. It + tracks evolving OWASP recommendations (e.g. it dropped `X-XSS-Protection` + before most people noticed the deprecation). + +#### CORS + +* **Default to same-origin (empty allowlist)**, not wildcard. Wildcard CORS + allows any website to make cross-origin requests to your API. 
+ ```python + # BAD — any website can call your API + cors_allowed_origins: str = "*" + + # GOOD — deny cross-origin by default + cors_allowed_origins: str = "" + ``` +* **In debug mode, fall back to `["*"]`** when no origins are configured so + Swagger UI and local dev tools work without manual config. +* **Use explicit `allow_headers` lists**, not `["*"]`. Wildcard allowed headers + let arbitrary custom headers through CORS preflight, enabling cache + poisoning or header injection attacks. + ```python + # BAD — any header allowed + allow_headers=["*"] + + # GOOD — only headers the API actually uses + allow_headers=["Content-Type", "Authorization", "X-Request-ID"] + ``` + +#### Rate limiting + +* **Apply rate limits at both REST and gRPC layers.** They share the same + algorithm (token bucket per client IP / peer) but are independent middleware. +* **Exempt health check paths** (`/health`, `/healthz`, `/ready`, `/readyz`) + from rate limiting so orchestration platforms can always probe. +* **Include `Retry-After` in 429 responses** so well-behaved clients know when + to retry. +* **Use `time.monotonic()` for token bucket timing**, not `time.time()`. See + the "Threading, Asyncio & Event-Loop Audit Checklist" above. + +#### Request body limits + +* **Enforce body size limits before parsing.** Use an ASGI middleware that + checks `Content-Length` before the framework reads the body. This prevents + memory exhaustion from oversized payloads. +* **Apply the same limit to gRPC** via `grpc.max_receive_message_length`. +* **Default to 1 MB** (1,048,576 bytes). LLM API requests are typically text, + not file uploads. + +#### Input validation + +* **Use Pydantic `Field` constraints on every input model** — `max_length`, + `min_length`, `ge`, `le`, `pattern`. This rejects malformed input before + it reaches any flow or LLM call. +* **Use `pattern` for freeform string fields** that should be constrained + (e.g. programming language names: `^[a-zA-Z#+]+$`). +* **Sanitize text before passing to the LLM** — `strip()` whitespace and + truncate to a reasonable maximum. This is a second line of defense after + Pydantic validation. + +#### ASGI middleware stack order + +* **Apply middleware inside-out** in `apply_security_middleware()`. The + request-flow order is: + + ``` + AccessLog → GZip → CORS → TrustedHost → Timeout → MaxBodySize + → ExceptionHandler → SecurityHeaders → RequestId → App + ``` + + The response passes through the same layers in reverse. + +#### Security headers (OWASP) + +* **Use pure ASGI middleware**, not framework-specific mechanisms. This ensures + headers are applied identically across FastAPI, Litestar, Quart, or any + future framework. +* **Mandatory headers** for every HTTP response: + + | Header | Value | Purpose | + |--------|-------|---------| + | `Content-Security-Policy` | `default-src none` | Block resource loading | + | `X-Content-Type-Options` | `nosniff` | Prevent MIME-sniffing | + | `X-Frame-Options` | `DENY` | Block clickjacking | + | `Referrer-Policy` | `strict-origin-when-cross-origin` | Limit referrer leakage | + | `Permissions-Policy` | `geolocation=(), camera=(), microphone=()` | Disable browser APIs | + | `Cross-Origin-Opener-Policy` | `same-origin` | Isolate browsing context | + +* **Add HSTS conditionally** — only when the request arrived over HTTPS. + Sending `Strict-Transport-Security` over plaintext HTTP is meaningless and + can confuse testing. 
+* **Omit `X-XSS-Protection`** — the browser XSS auditor it controlled was + removed from all modern browsers, and setting it can introduce XSS in + older browsers (OWASP recommendation since 2023). + +#### Request ID / correlation + +* **Generate or propagate `X-Request-ID` on every request.** If the client + sends one (e.g. from a load balancer), reuse it for end-to-end tracing. + Otherwise, generate a UUID4. +* **Bind the ID to structlog context vars** so every log line includes + `request_id` without manual passing. +* **Echo the ID in the response header** for client-side correlation. + +#### Trusted host validation + +* **Validate the `Host` header** when running behind a reverse proxy. Without + this, host-header poisoning can cause cache poisoning, password-reset + hijacking, and SSRF. +* **Log a warning at startup** if `TRUSTED_HOSTS` is empty in production + mode so operators notice immediately. + +#### Structured logging & secret masking + +* **Default to JSON log format** in production. Colored console output is + human-friendly but breaks log aggregation pipelines (CloudWatch, Stackdriver, + Datadog). +* **Override to `console` in `local.env`** for development. +* **Include `request_id` in every log entry** (via structlog context vars). +* **Never log secrets.** Use a structlog processor to automatically redact + API keys, tokens, passwords, and DSNs from log output. Match patterns like + `AIza...`, `Bearer ...`, `token=...`, `password=...`, and any field whose + name contains `key`, `secret`, `token`, `password`, `credential`, or `dsn`. + Show only the first 4 and last 2 characters (e.g. `AI****Qw`). + +#### HTTP access logging + +* **Log every request** with method, path, status code, and duration. This is + essential for observability and debugging latency issues. +* **Place the access log middleware outermost** so timing includes all + middleware layers (security checks, compression, etc.). + +#### Per-request timeout + +* **Enforce a per-request timeout** via ASGI middleware. If a handler exceeds + the configured timeout, return 504 Gateway Timeout immediately instead of + letting it hang indefinitely. +* **Make the timeout configurable** via `REQUEST_TIMEOUT` env var and CLI flag. + Default to a generous value (120s) for LLM calls. + +#### Global exception handler + +* **Catch unhandled exceptions in middleware** and return a consistent JSON + error body (`{"error": "Internal Server Error", "detail": "..."}`). +* **Never expose stack traces to clients in production.** Log the full + traceback server-side (via structlog / Sentry) for debugging. +* **In debug mode**, include the traceback in the response for developer + convenience. + +#### Server header suppression + +* **Remove the `Server` response header** to prevent version fingerprinting. + ASGI servers (uvicorn, granian, hypercorn) emit `Server: ...` by default, + which reveals the server software and version to attackers. + +#### Cache-Control + +* **Set `Cache-Control: no-store`** on all API responses. This prevents + intermediaries (CDNs, proxies) and browsers from caching sensitive API + responses. + +#### GZip response compression + +* **Compress responses above a configurable threshold** (default: 500 bytes) + using `GZipMiddleware`. This reduces bandwidth for JSON-heavy API responses. +* **Make the minimum size configurable** via `GZIP_MIN_SIZE` env var and CLI. + +#### Graceful shutdown + +* **Handle SIGTERM with a configurable grace period.** Cloud Run sends SIGTERM + and gives 10s by default. 
Kubernetes may give 30s. +* **Drain in-flight requests** before exiting. For gRPC, use + `server.stop(grace=N)`. For ASGI servers, rely on the server's native + shutdown signal handling. + +#### Connection tuning + +* **Set keep-alive timeout above the load balancer's idle timeout.** If the LB + has a 60s idle timeout (typical for Cloud Run, ALB), set the server's + keep-alive to 75s. Otherwise the server closes the connection while the LB + thinks it's still alive, causing 502s. +* **Set explicit LLM API timeouts.** The default should be generous (120s) but + not infinite. Without a timeout, a hung LLM call ties up a worker forever. +* **Cap connection pool size** to prevent unbounded outbound connections (e.g. + 100 max connections, 20 keepalive). + +#### Circuit breaker + +* **Use async-native circuit breakers** (not sync wrappers like `pybreaker` + that use `threading.RLock` — see the async/event-loop checklist above). +* **States**: Closed (normal) → Open (fail fast) → Half-open (probe). +* **Use `time.monotonic()`** for recovery timeout measurement. +* **Gate half-open probes** so only one coroutine probes at a time (prevent + stampede on recovery). + +#### Response cache + +* **Use per-key request coalescing** to prevent cache stampedes. Without it, + N concurrent requests for the same key all trigger N expensive LLM calls + (thundering herd). +* **Use `asyncio.Lock` per cache key**, not a single global lock (which + serializes all cache operations). +* **Use `time.monotonic()` for TTL**, not `time.time()`. +* **Hash cache keys with SHA-256** for fixed-length, collision-resistant keys. + +#### Container security + +* **Use distroless base images** (`gcr.io/distroless/python3-debian13:nonroot`): + - No shell — cannot `exec` into the container + - No package manager — no `apt install` attack vector + - No `setuid` binaries + - Runs as uid 65534 (`nonroot`) + - ~50 MB (vs ~150 MB for `python:3.13-slim`) +* **Multi-stage builds** — install dependencies in a builder stage, copy only + the virtual environment and source code to the final distroless stage. +* **Pin base image digests** in production Containerfiles to prevent supply + chain attacks from tag mutations. +* **Never copy `.env` files or secrets into container images.** Pass secrets + via environment variables or a secrets manager at runtime. + +#### Dependency auditing + +* **Run `pip-audit` in CI** to check for known CVEs in dependencies. +* **Run `pysentry-rs`** against frozen (exact) dependency versions, not version + ranges from `pyproject.toml`. Version ranges can report false positives for + vulnerabilities fixed in later versions. + ```bash + # BAD — false positives from minimum version ranges + pysentry-rs pyproject.toml + + # GOOD — audit exact installed versions + uv pip freeze > /tmp/requirements.txt + pysentry-rs /tmp/requirements.txt + ``` +* **Run `liccheck`** to verify all dependencies use approved licenses (Apache-2.0, + MIT, BSD, PSF, ISC, MPL-2.0). Add exceptions for packages with unknown + metadata to `[tool.liccheck.authorized_packages]` in `pyproject.toml`. +* **Run `addlicense`** to verify all source files have the correct license header. + +#### Platform telemetry auto-detection + +* **Auto-detect cloud platform at startup** by checking environment variables + set by the platform (e.g. `K_SERVICE` for Cloud Run, `AWS_EXECUTION_ENV` + for ECS). 
+* **Don't trigger on ambiguous signals.** `GOOGLE_CLOUD_PROJECT` is set on
+  most developer machines for `gcloud` CLI use — it doesn't mean the app is
+  running on GCP. Require a stronger signal (`K_SERVICE`, `GCE_METADATA_HOST`)
+  or an explicit opt-in (`GENKIT_TELEMETRY_GCP=1`).
+* **Guard all platform plugin imports with `try/except ImportError`** since
+  they are optional dependencies. Log a warning (not an error) if the plugin
+  is not installed.
+
+#### Sentry integration
+
+* **Only activate when `SENTRY_DSN` is set** (no DSN = completely disabled).
+* **Set `send_default_pii=False`** to strip personally identifiable information.
+* **Auto-detect the active framework** (FastAPI, Litestar, Quart) and enable
+  the matching Sentry integration. Don't require the operator to configure it.
+* **Include gRPC integration** so both REST and gRPC errors are captured.
+
+#### Error tracking and responses
+
+* **Never expose stack traces to clients in production.** Framework default
+  error handlers may include tracebacks in HTML responses. Use middleware or
+  exception handlers to return consistent JSON error bodies.
+* **Consistent error format** for all error paths:
+  ```json
+  {"error": "Short Error Name", "detail": "Human-readable explanation"}
+  ```
+* **Log the full traceback server-side** (via structlog / Sentry) for debugging.
+
+#### Health check endpoints
+
+* **Provide both `/health` (liveness) and `/ready` (readiness)** probes.
+* **Keep them lightweight** — don't call the LLM API or do expensive work.
+* **Exempt them from rate limiting** so orchestration platforms can always probe.
+* **Return minimal JSON** (`{"status": "ok"}`) — don't expose internal state,
+  version numbers, or configuration in health responses.
+
+#### Environment variable conventions
+
+* **Use `SCREAMING_SNAKE_CASE`** for all environment variables.
+* **Use pydantic-settings `BaseSettings`** to load from env vars and `.env`
+  files with type validation.
+* **Support `.env` file layering**: `.env` (shared defaults) → `.<env>.env`
+  (environment-specific overrides, e.g. `.local.env`, `.staging.env`).
+* **Gitignore all `.env` files** (`**/*.env`) to prevent secret leakage.
+  Commit only the `local.env.example` template.
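+
+A minimal sketch of the layered-settings pattern above, assuming
+pydantic-settings v2; the class name and fields are illustrative (they map to
+env vars described in this document), not the actual settings model:
+
+```python
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class ServerSettings(BaseSettings):
+    """Illustrative settings model loaded from env vars and layered .env files."""
+
+    model_config = SettingsConfigDict(
+        env_file=('.env', '.local.env'),  # later files override earlier ones
+        env_file_encoding='utf-8',
+        extra='ignore',
+    )
+
+    debug: bool = False                 # DEBUG
+    cors_allowed_origins: str = ''      # CORS_ALLOWED_ORIGINS (deny by default)
+    request_timeout: float = 120.0      # REQUEST_TIMEOUT (seconds)
+    log_format: str = 'json'            # LOG_FORMAT ("json" in production)
+```
+
+Environment variables take precedence over `.env` file values, so a production
+deployment can override any of these without editing files.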
+ +#### Production hardening checklist + +When reviewing a sample or service for production readiness, verify each item: + +| Check | What to verify | +|-------|---------------| +| `DEBUG=false` | Swagger UI, gRPC reflection, relaxed CSP all disabled | +| CORS locked down | `CORS_ALLOWED_ORIGINS` is not `*` (or empty for same-origin) | +| Trusted hosts set | `TRUSTED_HOSTS` configured for the deployment domain | +| Rate limits tuned | `RATE_LIMIT_DEFAULT` appropriate for expected traffic | +| Body size limit | `MAX_BODY_SIZE` set for the expected payload sizes | +| Request timeout | `REQUEST_TIMEOUT` set appropriately (default: 120s) | +| Secret masking | Log processor redacts API keys, tokens, passwords, DSNs | +| Access logging | Every request logged with method, path, status, duration | +| Exception handler | Global middleware returns JSON 500; no tracebacks to clients | +| Server header removed | `Server` response header suppressed (no version fingerprinting) | +| Cache-Control | `no-store` on all API responses | +| GZip compression | `GZIP_MIN_SIZE` tuned for response payload sizes | +| HSTS enabled | `HSTS_MAX_AGE` set; only sent over HTTPS | +| Log format | `LOG_FORMAT=json` for structured log aggregation | +| Secrets managed | No `.env` files in production; use secrets manager | +| TLS termination | HTTPS via load balancer or reverse proxy | +| Error tracking | `SENTRY_DSN` set (or equivalent monitoring) | +| Container hardened | Distroless, nonroot, no shell, no secrets baked in | +| Dependencies audited | `pip-audit` and `liccheck` pass in CI | +| Telemetry configured | Platform telemetry or OTLP endpoint set | +| Graceful shutdown | `SHUTDOWN_GRACE` appropriate for the platform | +| Keep-alive tuned | Server keep-alive > load balancer idle timeout | + +## GitHub Actions Security + +### Avoid `eval` in Shell Steps + +Never use `eval "$CMD"` to run dynamically-constructed commands in GitHub +Actions `run:` steps. Free-form inputs (like `extra-args`) can inject +arbitrary commands. + +**Use bash arrays** to build and execute commands: + +```yaml +# WRONG — eval enables injection from free-form inputs +CMD="uv run releasekit ${{ inputs.command }}" +if [[ -n "${{ inputs.extra-args }}" ]]; then + CMD="$CMD ${{ inputs.extra-args }}" +fi +eval "$CMD" + +# CORRECT — array execution prevents injection +cmd_array=(uv run releasekit ${{ inputs.command }}) +if [[ -n "${{ inputs.extra-args }}" ]]; then + read -ra extra <<< "${{ inputs.extra-args }}" + cmd_array+=("${extra[@]}") +fi +"${cmd_array[@]}" +``` + +Key rules: + +* **Build commands as arrays**, not strings +* **Execute with `"${cmd_array[@]}"`**, not `eval` +* **Quote all `${{ inputs.* }}`** references in array additions +* **Use `read -ra`** to safely split free-form inputs into array elements +* **Capture output** with `$("${cmd_array[@]}")`, not `$(eval "$CMD")` + +### Pin Dependencies with Version Constraints + +Always pin dependencies with `>=` version constraints, especially for +packages with known CVEs. This ensures CI and production use the patched +version: + +```toml +# WRONG — allows any version, including vulnerable ones +dependencies = ["pillow"] + +# CORRECT — pins to patched version (GHSA-cfh3-3jmp-rvhc) +dependencies = ["pillow>=12.1.1"] +``` + +After pinning, always run `uv lock` to regenerate the lockfile. + +## ReleaseKit-Specific Guidelines + +The sections below apply specifically to the `releasekit` codebase. 
+ +### Release Tool Invariants + +A release management tool operates in a high-stakes environment where +failures can publish broken packages, skip packages silently, or leave +the repository in an inconsistent state. The following invariants are +**hard requirements** — every command, backend, and orchestrator must +uphold them. Violations are treated as P0 bugs. + +| Key | Invariant | One-liner | +|-----|-----------|----------| +| `INV-IDEMPOTENCY` | Idempotency | Re-running a command is always safe | +| `INV-CRASH-SAFETY` | Crash Safety / Resume | Interrupted releases resume without re-publishing | +| `INV-ATOMICITY` | Atomicity | Each publish fully succeeds or fully fails | +| `INV-DETERMINISM` | Determinism | Same inputs always produce same outputs | +| `INV-OBSERVABILITY` | Observability | Every action emits structured logs | +| `INV-DRY-RUN` | Dry-Run Fidelity | `--dry-run` exercises real code paths | +| `INV-GRACEFUL-DEGRADATION` | Graceful Degradation | Missing optional components degrade to no-ops | +| `INV-TOPO-ORDER` | Topological Correctness | Packages publish in dependency order | +| `INV-SUPPLY-CHAIN` | Supply Chain Integrity | Published artifacts are verified against checksums | + +Tests: `tests/rk_invariants_test.py` — each test method is prefixed with +the invariant key (e.g. `test_inv_idempotency_tags_skip_existing`). + +#### 1. Idempotency (`INV-IDEMPOTENCY`) + +Every command must be safe to run multiple times with the same inputs +and produce the same outcome. This is the single most important property +of a release tool because network failures, CI timeouts, and human +errors make re-runs inevitable. + +| Component | Idempotency mechanism | +|-----------|----------------------| +| `releasekit init` | Detects existing config; safe to re-run | +| `releasekit prepare` | Updates existing Release PR if one is open | +| `releasekit release` (tags) | Skips tags that already exist (`tag_exists` check) | +| `releasekit publish` | State file tracks per-package status; resumes from last successful package | +| Changelog generation | Skips if version header already present in `CHANGELOG.md` | +| Version rewriting | Regex-based rewrite is a no-op if version already matches | + +**Design rule**: Every mutating operation must check whether its effect +already exists before acting. "Already done" is a success, not an error. + +#### 2. Crash Safety / Resume (`INV-CRASH-SAFETY`) + +An interrupted release must be resumable without re-publishing packages +that already succeeded. This is enforced by: + +- **Atomic state persistence**: `RunState.save()` uses `mkstemp` + + `os.replace` so a crash mid-write never corrupts the state file. +- **SHA validation**: `RunState.validate_sha()` rejects resume if HEAD + has changed (different HEAD = different versions possible). +- **Per-package status tracking**: Each package transitions through + `pending → building → publishing → verifying → published/failed`. + On resume, packages in `published` state are skipped. +- **Ephemeral pin restoration**: `ephemeral_pin()` context manager + restores original dependency versions even if the publish fails. + +#### 3. Atomicity (`INV-ATOMICITY`) + +Each package publish is an atomic unit — it either fully succeeds +(build + upload + verify) or fully fails. Partial states (e.g., uploaded +but not verified) are recorded as `FAILED` so they can be retried. + +State files, lock files, and config files use atomic write patterns +(`mkstemp` + `os.replace`, `O_CREAT|O_EXCL`) to prevent corruption. + +#### 4. 
Determinism (`INV-DETERMINISM`) + +Given the same git history, config, and registry state, releasekit must +compute the same version bumps, dependency graph, and execution plan. +No randomness, no timestamp-dependent logic in version computation. + +#### 5. Observability (`INV-OBSERVABILITY`) + +Every significant action must be logged with structured fields (via +`structlog`) so that failures can be diagnosed from logs alone. Key +events: `tag_created`, `tag_exists_skip`, `package_published`, +`publish_nothing_to_do`, `state_saved`, `state_loaded`. + +Optional OpenTelemetry tracing provides span-level visibility into +the publish pipeline without requiring OTel as a hard dependency. + +#### 6. Dry-Run Fidelity (`INV-DRY-RUN`) + +`--dry-run` must exercise the same code paths as a real run, stopping +only at the actual mutation point (tag push, registry upload, file +write). This ensures dry-run output is a reliable predictor of what +a real run will do. + +#### 7. Graceful Degradation (`INV-GRACEFUL-DEGRADATION`) + +Missing optional components (forge backend, OTel, UI observer) must +not cause failures. The tool uses no-op implementations +(`NullProgressUI`, `NoOpSpan`, forge=None checks) to degrade silently. + +#### 8. Topological Correctness (`INV-TOPO-ORDER`) + +Packages are published in dependency order. A package is never +published before all of its internal dependencies have been published +and verified on the registry. The scheduler enforces this via +dependency-triggered dispatch, not level-based batching. + +#### 9. Supply Chain Integrity (`INV-SUPPLY-CHAIN`) + +Published artifacts are verified against locally-built checksums +(`verify_checksums`). Registry polling confirms availability before +declaring success. Smoke tests optionally validate install-ability. 
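+
+Both the crash-safety and atomicity invariants rest on the atomic write
+pattern named above (`mkstemp` + `os.replace`). A minimal sketch of that
+pattern, not the actual `RunState.save()` implementation:
+
+```python
+import json
+import os
+import tempfile
+from pathlib import Path
+
+
+def atomic_write_json(path: Path, data: dict) -> None:
+    """Write JSON so a crash mid-write never corrupts the target file."""
+    fd, tmp = tempfile.mkstemp(dir=path.parent, suffix='.tmp')
+    try:
+        with os.fdopen(fd, 'w', encoding='utf-8') as f:
+            json.dump(data, f, indent=2)
+            f.flush()
+            os.fsync(f.fileno())
+        # Atomic rename: readers see either the old or the new file,
+        # never a partially written one.
+        os.replace(tmp, path)
+    except BaseException:
+        os.unlink(tmp)
+        raise
+```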
+ +### Architecture Overview + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ ReleaseKit Architecture │ +├─────────────────────────────────────────────────────────────────────────┤ +│ CLI (cli.py) │ +│ ├── prepare / release / publish / doctor / init │ +│ └── Parses args → dispatches to orchestrators │ +├─────────────────────────────────────────────────────────────────────────┤ +│ Orchestrators │ +│ ├── prepare.py - Preflight → bump → changelog → commit → PR │ +│ ├── release.py - Find PR → extract manifest → tag → release │ +│ ├── publisher.py - Build → publish → verify (with state tracking) │ +│ └── scheduler.py - Parallel publish with concurrency control │ +├─────────────────────────────────────────────────────────────────────────┤ +│ Core Modules │ +│ ├── config.py - Load/validate releasekit.toml │ +│ ├── workspace.py - Discover packages from pyproject.toml │ +│ ├── graph.py - Build dependency graph, detect cycles │ +│ ├── plan.py - Build execution plan from versions + topo levels │ +│ ├── versioning.py - Parse commits → compute semver bumps │ +│ ├── bump.py - Rewrite version strings in pyproject.toml │ +│ ├── pin.py - Pin/restore internal dep versions for builds │ +│ ├── state.py - Track publish progress (atomic save/load) │ +│ ├── lock.py - Advisory lock (O_EXCL atomic creation) │ +│ ├── tags.py - Format and create git tags │ +│ ├── sbom.py - Generate CycloneDX/SPDX SBOMs │ +│ ├── changelog.py - Generate changelogs from commits │ +│ └── versions.py - ReleaseManifest and PackageVersion dataclasses │ +├─────────────────────────────────────────────────────────────────────────┤ +│ Backends (pluggable via protocols) │ +│ ├── forge/ - GitHub, GitLab, Bitbucket (PR/release/tag APIs) │ +│ ├── vcs/ - Git CLI backend │ +│ ├── pm/ - uv, pnpm (build/publish/lock) │ +│ └── _run.py - Central subprocess abstraction (no shell=True) │ +├─────────────────────────────────────────────────────────────────────────┤ +│ Checks & Preflight │ +│ ├── preflight.py - Run all checks before release │ +│ ├── checks/_universal.py - Cross-ecosystem checks │ +│ ├── checks/_python.py - Python-specific checks │ +│ └── checks/_python_fixers.py - Auto-fix common issues │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +### Key Design Decisions + +* **Workspace labels** in `releasekit.toml` are user-defined (not tool/ecosystem names). +* **`[tool.uv.sources]`** with `workspace = true` is required for internal + dependencies to be recognized by `discover_packages`. +* **All artifact scoping** (branches, state files, manifests) uses the workspace label. +* **Forge-agnostic design**: All forge operations go through a `Forge` protocol. + Backends for GitHub CLI, GitHub API, GitLab CLI, and Bitbucket API exist. +* **Async-first**: All backend methods are async. Blocking subprocess calls are + dispatched to `asyncio.to_thread()`. + +### Testing Strategy + +#### Test Categories + +| Category | File Pattern | Purpose | +|----------|-------------|---------| +| Unit tests | `tests/rk_*_test.py` | Test individual modules in isolation | +| Integration tests | `tests/rk_integration_test.py` | Test multi-module pipelines end-to-end | +| Security tests | `tests/rk_security_test.py` | Scan source for vulnerability patterns | + +#### Integration Test Pipelines + +The integration tests cover these end-to-end workflows: + +1. **Config → Discover → Graph → Plan**: Load config, discover packages, + build dependency graph, topological sort, create execution plan. +2. 
**Bump → Pin → Restore**: Version bump, pin internal deps, restore originals. +3. **State Save → Load → Resume**: Track publish progress across restarts. +4. **Manifest → SBOM**: Save/load release manifest, generate CycloneDX/SPDX. +5. **Init Scaffold → Config Load**: Generate config, load it back. +6. **Lock Acquire → Release**: Atomic lock creation, exclusion, cleanup. +7. **Tag Formatting**: Format tags from manifest data. +8. **Cycle Detection**: Detect circular deps, verify diamond pattern is clean. +9. **Config Validation**: Unknown keys, invalid ecosystems caught early. +10. **Ephemeral Pin Crash Safety**: Restore on exception, nested contexts. + +#### Writing Test Workspaces + +When creating test workspaces with internal dependencies, always include +`[tool.uv.sources]` in the root `pyproject.toml`: + +```toml +[project] +name = "workspace" + +[tool.uv.workspace] +members = ["packages/*"] + +[tool.uv.sources] +core = {workspace = true} +plugin-a = {workspace = true} +``` + +Without this, `discover_packages` treats all dependencies as external. + +### ReleaseKit Commands + +```bash +# Run tests +cd py/tools/releasekit && uv run pytest + +# Run tests with coverage +cd py/tools/releasekit && uv run pytest --cov=releasekit --cov-report=term-missing + +# Run tests across Python 3.10–3.14 +cd py/tools/releasekit && bin/run_tests_with_tox +``` + +### Self-Release Process + +ReleaseKit is used to release itself. The workflow is: + +1. `releasekit prepare` — preflight checks, version bumps, changelog, PR +2. Merge the PR +3. `releasekit release` — tag creation, GitHub release +4. `releasekit publish` — build and publish to PyPI diff --git a/py/tools/releasekit/README.md b/py/tools/releasekit/README.md index 33fe6a8161..1c79a1dd51 100644 --- a/py/tools/releasekit/README.md +++ b/py/tools/releasekit/README.md @@ -3,9 +3,10 @@ Release orchestration for polyglot monorepos — publish packages in topological order with dependency-triggered scheduling, ephemeral version pinning, retry with jitter, crash-safe file restoration, and post-publish -checksum verification. Supports Python (uv), JavaScript (pnpm), and Go -workspaces today, with Bazel, Rust (Cargo), Java (Maven/Gradle), and -Dart (Pub) on the roadmap — all through protocol-based backends. +checksum verification. Supports Python (uv), JavaScript (pnpm), Go, +Dart (Pub), Java (Maven/Gradle), Kotlin (Gradle), Clojure (Leiningen/deps.edn), +and Rust (Cargo) workspaces today, with Bazel on the roadmap — all +through protocol-based backends. ## Why This Tool Exists @@ -43,32 +44,37 @@ implementation plan. ## How Does releasekit Compare? 
-| Feature | releasekit | release-please | semantic-release | changesets | nx release | knope | goreleaser | -|---------|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -| 🏗️ Monorepo | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | ❌ | -| 🌐 Polyglot (Py/JS/Go/Bazel/Rust/Java/Dart) | ✅ | ✅ | ❌ | ❌ | ⚠️ | ⚠️ | ❌ | -| 📝 Conventional Commits | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | -| 📦 Changeset files | 🔜 | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | -| 🔀 Dependency graph | ✅ | ⚠️ | ❌ | ✅ | ✅ | ❌ | ❌ | -| 📊 Topo-sorted publish | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| 🩺 Health checks (33) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔧 Auto-fix (`--fix`) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🏭 Multi-forge | ✅ GH/GL/BB | ❌ GH | ✅ GH/GL/BB | ❌ GH | ❌ | ⚠️ GH/Gitea | ❌ GH | -| 🏷️ Pre-release | 🔜 | ⚠️ | ✅ | ✅ | ✅ | ✅ | ✅ | -| 🧪 Dry-run | ✅ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | -| ⏪ Rollback | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔮 Version preview | ✅ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| 📈 Graph visualization | ✅ 8 formats | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| 🐚 Shell completions | ✅ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | -| 🔍 Error explainer | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔄 Retry with backoff | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔒 Release lock | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| ✍️ Signing / provenance | 🔜 | ❌ | ⚠️ npm | ❌ | ❌ | ❌ | ✅ GPG/Cosign | -| 📋 SBOM | ✅ CycloneDX+SPDX | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | -| 📢 Announcements | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | -| 📊 Plan profiling | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔭 OpenTelemetry tracing | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| 🔄 Migrate from alternatives | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| Feature | releasekit | release-please | semantic-release | release-it | changesets | nx release | knope | goreleaser | +|---------|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +| 🏗️ Monorepo | ✅ | ✅ | ❌ | ⚠️ | ✅ | ✅ | ✅ | ❌ | +| 🌐 Polyglot (Py/JS/Go/Bazel/Rust/Java/Dart) | ✅ | ✅ | ❌ | ❌ | ❌ | ⚠️ | ⚠️ | ❌ | +| 📝 Conventional Commits | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | +| 📦 Changeset files | 🔜 | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | +| 🔀 Dependency graph | ✅ | ⚠️ | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | +| 📊 Topo-sorted publish | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | +| 🩺 Health checks (33) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔧 Auto-fix (`--fix`) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🏭 Multi-forge | ✅ GH/GL/BB | ❌ GH | ✅ GH/GL/BB | ✅ GH/GL | ❌ GH | ❌ | ⚠️ GH/Gitea | ❌ GH | +| 🏷️ Pre-release | 🔜 | ⚠️ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| 🧪 Dry-run | ✅ | ❌ | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | +| ⏪ Rollback | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔮 Version preview | ✅ | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | ❌ | +| 📈 Graph visualization | ✅ 8 formats | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | +| 🐚 Shell completions | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | +| 🔍 Error explainer | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔄 Retry with backoff | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔒 Release lock | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| ✍️ Signing / provenance | ✅ Sigstore | ❌ | ⚠️ npm | ❌ | ❌ | ❌ | ❌ | ✅ GPG/Cosign | +| 📋 SBOM | ✅ CycloneDX+SPDX | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | +| 📢 Announcements | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | +| 📊 Plan profiling | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔭 OpenTelemetry tracing | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔄 Migrate from alternatives | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🔁 Continuous deploy mode | 🔜 | ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | +| ⏰ Cadence / scheduled releases | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 🪝 Lifecycle hooks | 🔜 | ❌ | ✅ plugins | ✅ | ❌ | ❌ | ❌ | ✅ | +| 🌿 Branch → channel mapping | 🔜 | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | +| 📅 CalVer support | 🔜 | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | **Legend:** ✅ = supported, ⚠️ = partial, ❌ = not supported, 🔜 = planned @@ -120,7 +126,7 @@ uvx releasekit check | `rollback` | 
Delete a git tag (local + remote) and its GitHub release | | `explain` | Look up any error code (e.g. `releasekit explain RK-GRAPH-CYCLE-DETECTED`) | | `version` | Show the releasekit version | -| `migrate` | Migrate from another release tool (release-please, semantic-release, changesets, etc.) | +| `migrate` | Auto-detect existing tags and set `bootstrap_sha` for mid-stream adoption | | `doctor` | Diagnose inconsistent state between workspace, git tags, and platform releases | | `completion` | Generate shell completion scripts (bash/zsh/fish) | @@ -193,36 +199,26 @@ Scaffolds `releasekit.toml` in the workspace root with auto-detected package groups (plugins, samples, core). Also adds `.releasekit-state/` to `.gitignore`. -### Migrate from Other Tools +### Migrate (Mid-Stream Adoption) -```bash -# Auto-detect and migrate from release-please -releasekit migrate --from release-please - -# Migrate from semantic-release -releasekit migrate --from semantic-release --dry-run +When adopting releasekit on a repo that already has releases, the `migrate` +command automates setting `bootstrap_sha` by scanning existing git tags: -# Migrate from changesets -releasekit migrate --from changesets +```bash +# Preview what would be written +releasekit migrate --dry-run -# Migrate from a custom shell-script release process -releasekit migrate --from scripts --scan-dir scripts/ +# Write bootstrap_sha to releasekit.toml +releasekit migrate ``` -The `migrate` command uses a `MigrationSource` protocol to read configuration -and state from each alternative tool: - -| Source | What it reads | What it generates | -|--------|---------------|-------------------| -| `release-please` | `.release-please-manifest.json`, `release-please-config.json` | `releasekit.toml` with groups, tag format, changelog settings | -| `semantic-release` | `.releaserc`, `package.json[release]` | `releasekit.toml` with branch config, plugin equivalents | -| `python-semantic-release` | `pyproject.toml[tool.semantic_release]` | `releasekit.toml` with version variables, commit parsing | -| `changesets` | `.changeset/config.json` | `releasekit.toml` with linked/fixed packages, changelog | -| `scripts` | Shell scripts with `npm version`, `pnpm publish` | `releasekit.toml` with discovered package list, publish order | - -Each `MigrationSource` implementation converts the alternative tool's config into -releasekit's native format, preserving tag history and version state so -there's no gap in the release timeline. +The command: +1. Scans all git tags in the repo. +2. Classifies each tag against workspace `tag_format`, `umbrella_tag`, + and `secondary_tag_format` patterns. +3. Picks the latest semver tag per workspace. +4. Resolves the commit SHA the tag points to. +5. Writes `bootstrap_sha` into `releasekit.toml` (comment-preserving). ### Rollback @@ -300,9 +296,36 @@ releasekit completion fish > ~/.config/fish/completions/releasekit.fish - `license_classifier_mismatch` — license classifiers match LICENSE file - `unreachable_extras` — optional-dependencies reference valid packages - `self_dependencies` — no package lists itself in dependencies +- `distro_deps` — distro packaging dep sync The `CheckBackend` protocol allows adding language-specific checks -for other runtimes (Go, JS) without modifying the core check runner. +for other runtimes (Go, JS, Rust, Java, Dart) without modifying the +core check runner. 
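+
+A hypothetical sketch of the shape such a backend could take; the real
+`CheckBackend` protocol's method names, signatures, and result types may
+differ:
+
+```python
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Protocol
+
+
+@dataclass(frozen=True)
+class CheckFinding:
+    """Illustrative finding type (placeholder, not the real result class)."""
+
+    check: str      # e.g. 'build_system'
+    severity: str   # e.g. 'warning' or 'error'
+    message: str
+
+
+class CheckBackend(Protocol):
+    """Hypothetical protocol for ecosystem-specific health checks."""
+
+    ecosystem: str  # e.g. 'python', 'js', 'rust'
+
+    def run(self, workspace_root: Path) -> list[CheckFinding]:
+        """Run this ecosystem's checks and return findings."""
+        ...
+```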
+ +#### Source-Level Diagnostics + +Health checks produce **source-level context** via `SourceContext` +objects that point to the exact file and line causing a warning or +failure. The CLI renders these as Rust-compiler-style diagnostics +with source excerpts: + +```text + ⚠️ warning[build_system]: Missing [build-system] section + --> py/plugins/foo/pyproject.toml:1 + | + 1 | [project] + 2 | name = "foo" + | ^^^ build-backend missing + 3 | version = "1.0" + | + = hint: Add [build-system] with build-backend = "hatchling.build". +``` + +Helpers for check authors: + +- `SourceContext(path, line, key, label)` — frozen dataclass for file locations +- `find_key_line(content, key, section=)` — find 1-based line of a TOML key +- `read_source_snippet(path, line, context_lines=)` — read lines around a location ### Auto-Fixers @@ -352,6 +375,24 @@ The `ecosystem` parameter enables forward-compatible extensibility: future ecosystems (Node/npm, Rust/cargo, Go) can add their own checks (e.g. `npm audit`, `cargo audit`, `govulncheck`) without modifying universal logic. +### Design Invariants + +Every command, backend, and orchestrator must uphold these invariants. +Violations are treated as P0 bugs. Each invariant has a named key used +in tests (`tests/rk_invariants_test.py`) and documentation (`GEMINI.md`). + +| Key | Invariant | One-liner | +|-----|-----------|----------| +| `INV-IDEMPOTENCY` | Idempotency | Re-running a command is always safe | +| `INV-CRASH-SAFETY` | Crash Safety / Resume | Interrupted releases resume without re-publishing | +| `INV-ATOMICITY` | Atomicity | Each publish fully succeeds or fully fails | +| `INV-DETERMINISM` | Determinism | Same inputs always produce same outputs | +| `INV-OBSERVABILITY` | Observability | Every action emits structured logs | +| `INV-DRY-RUN` | Dry-Run Fidelity | `--dry-run` exercises real code paths | +| `INV-GRACEFUL-DEGRADATION` | Graceful Degradation | Missing optional components degrade to no-ops | +| `INV-TOPO-ORDER` | Topological Correctness | Packages publish in dependency order | +| `INV-SUPPLY-CHAIN` | Supply Chain Integrity | Published artifacts are verified against checksums | + ### Resume / State Every publish run persists state to `.releasekit-state.json` after each @@ -488,8 +529,12 @@ releasekit │ │ ├── uv.py UvBackend (default) │ │ └── pnpm.py PnpmBackend │ ├── Workspace package discovery -│ │ ├── uv.py UvWorkspaceBackend (default) -│ │ └── pnpm.py PnpmWorkspaceBackend +│ │ ├── uv.py UvWorkspace (Python, default) +│ │ ├── pnpm.py PnpmWorkspace (JS) +│ │ ├── go.py GoWorkspace (Go) +│ │ ├── dart.py DartWorkspace (Dart) +│ │ ├── maven.py MavenWorkspace (Java/Kotlin) +│ │ └── cargo.py CargoWorkspace (Rust) │ ├── Registry package registry queries │ │ ├── pypi.py PyPIBackend (default) │ │ └── npm.py NpmRegistry @@ -635,7 +680,7 @@ enables multi-ecosystem support: ## Testing -The test suite has **1,274 tests** across 19k+ lines: +The test suite has **1,739 tests** across 28k+ lines with 91%+ coverage: ```bash # Run all tests diff --git a/py/tools/releasekit/docs/competitive-gap-analysis.md b/py/tools/releasekit/docs/competitive-gap-analysis.md index 01c834974b..a8d447d6dd 100644 --- a/py/tools/releasekit/docs/competitive-gap-analysis.md +++ b/py/tools/releasekit/docs/competitive-gap-analysis.md @@ -1,10 +1,15 @@ # Releasekit Competitive Gap Analysis -**Date:** 2026-02-13 +**Date:** 2026-02-15 **Sources:** Issue trackers and documentation of: - [release-please](https://github.com/googleapis/release-please) (Google) - 
[semantic-release](https://github.com/semantic-release/semantic-release) (JS ecosystem) - [python-semantic-release](https://github.com/python-semantic-release/python-semantic-release) (Python ecosystem) +- [release-it](https://github.com/release-it/release-it) (JS ecosystem, plugin-based) +- [changesets](https://github.com/changesets/changesets) (JS monorepos) +- [knope](https://github.com/knope-dev/knope) (Rust-based, polyglot) +- [goreleaser](https://github.com/goreleaser/goreleaser) (Go ecosystem) +- [jreleaser](https://github.com/jreleaser/jreleaser) (Java ecosystem) --- @@ -166,15 +171,12 @@ bounds changelog generation for large repos. `compute_bumps` Phase 1 uses | release-please [#1946](https://github.com/googleapis/release-please/issues/1946) | "Untagged merged release PRs — aborting" | | release-please [#2172](https://github.com/googleapis/release-please/issues/2172) | Manifest not updating | -**Current releasekit state:** ⚠️ **Mostly done.** The `rollback` subcommand -can delete tags and releases. `run_doctor()` in `doctor.py` implements 7 -diagnostic checks (config, VCS, forge, registry, orphaned tags, branch, -packages). `list_tags` and `current_branch` added to VCS protocol -(2026-02-13). CLI wiring for `releasekit doctor` still pending. - -**Remaining:** -- Wire `releasekit doctor` into CLI. -- Add `--bootstrap-sha` to `init` for repos adopting releasekit mid-stream. +**Current releasekit state:** ✅ **Done.** The `rollback` subcommand +can delete tags and releases. `run_doctor()` in `doctor.py` implements 6 +diagnostic checks (config, tag alignment, orphaned tags, VCS state, forge +connectivity, default branch). `list_tags` and `current_branch` added to +VCS protocol (2026-02-13). `releasekit doctor` is fully wired in CLI with +`_cmd_doctor` handler and `doctor` subparser. --- @@ -186,13 +188,17 @@ packages). `list_tags` and `current_branch` added to VCS protocol | release-please [#1314](https://github.com/googleapis/release-please/issues/1314) | GPG signing | | semantic-release | npm provenance support | -**Current releasekit state:** The `pin.py` module has signing-related code, -and the scheduler has provenance references. No end-to-end GPG or Sigstore -signing workflow is exposed via CLI. +**Current releasekit state:** ✅ **Done.** `signing.py` implements +keyless Sigstore signing via `sigstore-python`. `sign_artifact()` handles +ambient OIDC credential detection (GitHub Actions, Google Cloud) with +fallback to explicit `--identity-token`. `verify_artifact()` verifies +bundles against expected identity and OIDC issuer. CLI exposes +`releasekit sign` and `releasekit verify` subcommands, plus `--sign` +flag on `publish` for automatic post-publish signing. -**Recommendation:** -- Add `--sign` flag to `publish` that invokes `gpg` or Sigstore for tag signing. -- Support PyPI Trusted Publishers / attestation workflows. +**Remaining:** +- GPG signing (Sigstore only for now). +- PyPI Trusted Publishers / attestation workflows. 
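+
+Illustrative invocations of the signing workflow described above; only the
+subcommand names, `--identity-token`, and the `--sign` flag come from the
+text here, while the artifact paths and argument order are assumptions:
+
+```bash
+# Keyless signing in CI (ambient OIDC from GitHub Actions or Google Cloud)
+releasekit sign dist/example_pkg-1.2.3-py3-none-any.whl
+
+# Outside CI, pass an OIDC identity token explicitly
+releasekit sign dist/example_pkg-1.2.3-py3-none-any.whl --identity-token "$OIDC_TOKEN"
+
+# Verify the resulting Sigstore bundle against the expected identity and issuer
+releasekit verify dist/example_pkg-1.2.3-py3-none-any.whl
+
+# Sign automatically after publishing
+releasekit publish --sign
+```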
### 3.2 Auto-Merge Release PRs | Alternative tool issue | Votes/Comments | @@ -280,6 +286,9 @@ These are pain points in alternatives that releasekit **already solves**: | **Dependency graph visualization** | No equivalent | ✅ `releasekit graph` with dot, mermaid, d2, levels formats | | **Distro packaging sync** | No equivalent in any alternative | ✅ Auto-syncs Debian, Fedora, Homebrew deps from `pyproject.toml` via `releasekit check --fix` | | **Revert cancellation** | release-please [#296](https://github.com/googleapis/release-please/issues/296) (open since 2019) | ✅ Per-level bump counters with revert decrement | +| **Sigstore signing** | release-please [#1314](https://github.com/googleapis/release-please/issues/1314) (GPG only) | ✅ Keyless Sigstore signing + verification via `releasekit sign`/`verify` | +| **SBOM generation** | No equivalent in any alternative (except goreleaser) | ✅ CycloneDX + SPDX via `sbom.py` | +| **Release state diagnostics** | No equivalent | ✅ `releasekit doctor` with 6 checks (config, tags, VCS, forge, branch) | --- @@ -287,20 +296,21 @@ These are pain points in alternatives that releasekit **already solves**: ### Phase 1 (Next release) 1. **Pre-release workflow** (`--prerelease` flag + PEP 440 suffixes) -2. **Revert commit handling** (cancel out reverted bumps) -3. **`releasekit doctor`** (state consistency checker) +2. ✅ ~~**Revert commit handling**~~ — Done. +3. ✅ ~~**`releasekit doctor`**~~ — Done. ### Phase 2 (Following release) -4. **Internal dependency version propagation** (`fix_internal_dep_versions`) -5. **Contributor attribution in changelogs** -6. **Incremental changelog generation** (performance) -7. **Hotfix branch support** (`--base-branch`) +1. ✅ ~~**Internal dependency version propagation**~~ — Done. +2. ✅ ~~**Contributor attribution in changelogs**~~ — Done. +3. **Incremental changelog generation** (performance) +4. **Hotfix branch support** (`--base-branch`) ### Phase 3 (Future) -8. **Sigstore / GPG signing** -9. **Auto-merge release PRs** -10. **Custom changelog templates** -11. **Plugin system for custom steps** +1. ✅ ~~**Sigstore signing + verification**~~ — Done. +2. ✅ ~~**Auto-merge release PRs**~~ — Done. +3. ✅ ~~**SBOM generation**~~ — Done (CycloneDX + SPDX). +4. **Custom changelog templates** +5. **Plugin system for custom steps** --- @@ -511,39 +521,306 @@ signing, and publishing. - **Packager integrations** — If releasekit ever needs to publish to Homebrew, Snap, etc., JReleaser's approach is a good reference. +### 7.6 release-it + +**What it is:** Generic CLI tool to automate versioning and package +publishing. Plugin-based architecture where core is minimal and features +are added via plugins. + +**Stars:** ~9K | **Ecosystem:** JS/TS primarily, but extensible via plugins + +**Key features:** +- **Interactive + CI mode** — Interactive prompts by default, `--ci` for + fully automated. `--only-version` for prompt-only version selection. +- **Hooks system** — `before:init`, `after:bump`, `after:release` etc. + Shell commands at any lifecycle point. Template variables available. +- **Pre-release management** — `--preRelease=beta`, `--preRelease=rc`, + consecutive pre-releases, `--preReleaseBase=1` for starting count. +- **Re-run releases** — `--no-increment` to update/republish an existing + tag without bumping version. +- **Programmatic API** — Can be used as a Node.js dependency, not just CLI. +- **npm Trusted Publishing** — OIDC integration for token-free CI publishing + (as of July 2025). 
+- **Multi-forge** — GitHub and GitLab releases (not Bitbucket). +- **Dry-run** — `--dry-run` shows what would happen. +- **CalVer support** — Via `release-it-calver-plugin`. + +**Plugin ecosystem (things that require plugins in release-it):** + +| Capability | release-it plugin required | releasekit built-in? | +|---|---|---| +| Conventional commits | `@release-it/conventional-changelog` | ✅ Built-in | +| Changelog generation | `@release-it/conventional-changelog` or `@release-it/keep-a-changelog` | ✅ Built-in | +| Version bumping in non-package.json files | `@release-it/bumper` | ✅ Built-in (any manifest) | +| Monorepo workspaces | `@release-it-plugins/workspaces` | ✅ Built-in (first-class) | +| pnpm support | `release-it-pnpm` | ✅ Built-in (`PnpmBackend`) | +| CalVer versioning | `release-it-calver-plugin` | ❌ Not yet | +| Changesets integration | `changesets-release-it-plugin` | ❌ Not yet | +| .NET publishing | `@jcamp-code/release-it-dotnet` | ❌ Not yet | +| Gitea support | `release-it-gitea` | ❌ Not yet (GH/GL/BB only) | +| Regex-based version bumping | `@j-ulrich/release-it-regex-bumper` | ✅ Built-in (configurable `tag_format`) | + +**Top pain points (from their issues):** +- [#1110](https://github.com/release-it/release-it/issues/1110) — Want Cargo, Maven, PIP publishing (JS-only out of the box). +- [#1075](https://github.com/release-it/release-it/issues/1075) — Want PR labels instead of commit messages for version detection. +- [#1126](https://github.com/release-it/release-it/issues/1126) — Want GitHub PR-oriented flow (like release-please). +- [#1127](https://github.com/release-it/release-it/issues/1127) — Release notes from RCs not carried to stable release. +- [#1246](https://github.com/release-it/release-it/issues/1246) — `whatBump` broken with consecutive pre-releases. +- [#1112](https://github.com/release-it/release-it/issues/1112) — Pre-release ignores undefined recommended bump. +- [#1234](https://github.com/release-it/release-it/issues/1234) — No npm 2FA with security keys. +- [#1131](https://github.com/release-it/release-it/issues/1131) — GitLab integration doesn't support proxy settings. +- [#1216](https://github.com/release-it/release-it/issues/1216) — Tags latest commit instead of current on GitLab. + +**Releasekit advantages over release-it:** +- ✅ Polyglot out-of-the-box (Python, JS, Go, Rust, Java, Dart) — no plugins needed. +- ✅ Monorepo-native with dependency graph — release-it needs `@release-it-plugins/workspaces` and manual `@release-it/bumper` config per package. +- ✅ Topological publish ordering — release-it publishes in hardcoded order. +- ✅ 34 workspace health checks + auto-fix — no equivalent. +- ✅ Rollback command — no equivalent. +- ✅ Conventional commits built-in — release-it needs a plugin. +- ✅ Changelog built-in — release-it's default is raw `git log`. +- ✅ Retry with backoff — no equivalent. +- ✅ Bitbucket support — release-it only has GitHub + GitLab. + +**release-it advantages over releasekit:** +- ✅ Interactive mode with prompts — releasekit is CLI-only. +- ✅ Hooks system for arbitrary shell commands at lifecycle points. +- ✅ `--no-increment` to re-run/update existing releases. +- ✅ Programmatic Node.js API. +- ✅ npm Trusted Publishing (OIDC). +- ✅ CalVer support (via plugin). +- ✅ Mature plugin ecosystem with 15+ community plugins. + +**Monorepo support comparison:** + +release-it's monorepo recipe is **manual and fragile**: +1. Each workspace needs its own `.release-it.json` with `git: false`. +2. 
Internal dependencies require explicit `@release-it/bumper` config
+   listing every dependency path (e.g. `"dependencies.package-a"`).
+3. Root `package.json` runs `npm run release --workspaces && release-it`.
+4. No dependency graph — publish order is workspace declaration order.
+5. No health checks — misconfigured workspaces silently break.
+
+Releasekit's monorepo support is **automatic**:
+1. Auto-discovers all packages via workspace backend.
+2. Builds dependency graph, publishes in topological order.
+3. Internal dependency versions propagated automatically via BFS.
+4. 34 health checks catch misconfigurations before publish.
+
+---
+
+## 8. NEW GAPS IDENTIFIED (2026-02-15)
+
+### 8.1 Scheduled / Cadence-Based Releases
+
+**The problem:** None of the major release tools have built-in support for
+scheduled releases. Teams that want daily, weekly, or per-sprint releases
+must cobble together CI cron triggers + release tool invocation. This is
+a common request (see [semantic-release SO question](https://stackoverflow.com/questions/75179976/daily-release-using-semantic-release)).
+
+**How teams work around it today:**
+- **semantic-release:** CI cron job triggers `npx semantic-release` on a
+  schedule. If no releasable commits exist, it's a no-op. Works but has
+  no batching — every cron run is independent.
+- **release-it:** Same approach — CI cron + `npx release-it --ci`. No
+  built-in scheduling.
+- **release-please:** GitHub Action runs on every push to main, creates
+  a release PR. Merging the PR triggers the release. No scheduling.
+
+**What's missing across all tools:**
+1. **Batched releases** — Accumulate commits over a time window, release
+   once. Current tools release per-commit or require manual trigger.
+2. **Release cadence config** — `release_cadence = "daily"` or
+   `release_cadence = "weekly:monday"` in config.
+3. **Minimum change threshold** — Don't release if only `chore:` commits
+   accumulated (no version bump needed).
+4. **Release windows** — Only release during business hours or specific
+   days (avoid Friday deploys).
+5. **Cooldown period** — Minimum time between releases to prevent
+   rapid-fire publishing.
+
+**Current releasekit state:** No scheduling support. Releasekit is
+invoked manually or via CI triggers.
+
+**Recommendation:**
+- Add `[schedule]` section to `releasekit.toml`:
+  ```toml
+  [schedule]
+  cadence = "daily"               # or "weekly:monday", "biweekly", "on-push"
+  release_window = "09:00-17:00"  # UTC, optional
+  cooldown_minutes = 60           # minimum time between releases
+  min_bump = "patch"              # skip release if only chore/docs commits
+  ```
+- Add `releasekit should-release` command that returns exit code 0 if a
+  release should happen (for CI cron integration):
+  ```yaml
+  # GitHub Actions example
+  on:
+    schedule:
+      - cron: '0 9 * * 1-5' # weekdays at 9am UTC
+  jobs:
+    release:
+      steps:
+        - id: check
+          run: releasekit should-release || echo "skip=true" >> "$GITHUB_OUTPUT"
+        - if: steps.check.outputs.skip != 'true'
+          run: releasekit publish --ci
+  ```
+- The `should-release` command checks: (a) releasable commits exist,
+  (b) within release window, (c) cooldown elapsed, (d) minimum bump met.
+
+### 8.2 Release-Per-Commit / Continuous Deployment
+
+**The problem:** Some teams want every merge to main to produce a release
+(trunk-based development). semantic-release was designed for this but
+struggles with monorepos ([#1529](https://github.com/semantic-release/semantic-release/issues/1529)).
+release-it requires manual `--ci` invocation.
+ +**Current releasekit state:** Releasekit supports `releasekit publish` +which can be triggered on every push. However, there's no explicit +"continuous release" mode that: +1. Skips release PR creation (direct publish on merge). +2. Handles concurrent CI runs safely (two merges in quick succession). +3. Provides idempotency (re-running on the same commit is a no-op). + +**Recommendation:** +- Add `release_mode = "continuous"` config option (vs default `"pr"`): + ```toml + [workspace] + release_mode = "continuous" # publish on every merge, no release PR + ``` +- In continuous mode, `releasekit publish` should: + - Check if current HEAD already has a release tag → no-op. + - Use the release lock to prevent concurrent publishes. + - Skip PR creation, go directly to tag + publish. +- Add `--if-needed` flag: `releasekit publish --if-needed` exits 0 + without error if no releasable changes exist. + +### 8.3 Trunk-Based Development Support + +**The problem:** semantic-release [#1529](https://github.com/semantic-release/semantic-release/issues/1529) +highlights confusion about how release tools integrate with trunk-based +development. Key questions from users: +- Should releases happen from trunk or from release branches? +- How do feature branches interact with release automation? +- How do pre-releases map to trunk-based development? + +**Current releasekit state:** Releasekit is branch-agnostic — it works +from whatever branch you run it on. But there's no documentation or +configuration for trunk-based workflows specifically. + +**Recommendation:** +- Document a "Trunk-Based Development" recipe in docs: + - Continuous mode: every merge to main → release. + - Release branches: `release/v1.x` for maintenance, main for latest. + - Feature flags over feature branches for unreleased work. +- Add `branch_channels` config for mapping branches to release channels: + ```toml + [branches] + main = "latest" + "release/v1.*" = "v1-maintenance" + "next" = "next" + ``` + +### 8.4 Plugin-vs-Built-in Analysis + +A key architectural difference between releasekit and alternatives: + +**release-it's plugin model:** +- Core does: git tag, git push, npm publish, GitHub/GitLab release. +- Everything else requires plugins: conventional commits, changelog, + monorepo, pnpm, CalVer, .NET, Gitea, version bumping in non-JS files. +- **Pro:** Minimal core, community can extend. +- **Con:** Fragmented ecosystem, version compatibility issues between + plugins, no guarantee of quality, monorepo support is bolted on. + +**semantic-release's plugin model:** +- Core does: version determination, git tag. +- Plugins for: npm publish, GitHub release, changelog, commit analysis. +- Even the default behavior requires `@semantic-release/npm`, + `@semantic-release/github`, `@semantic-release/commit-analyzer`. +- **Pro:** Extremely flexible. +- **Con:** Confusing for beginners (need 4+ plugins for basic use), + monorepo support is a third-party plugin (`semantic-release-monorepo`) + that's frequently broken. + +**releasekit's built-in model:** +- Core does: everything needed for a complete release workflow. +- No plugins needed for: conventional commits, changelog, monorepo, + dependency graph, health checks, auto-fix, multi-forge, multi-ecosystem, + rollback, retry, dry-run, version preview. +- **Pro:** Works out of the box, consistent behavior, no version + compatibility matrix, monorepo is first-class. +- **Con:** Less extensible for niche use cases, larger core surface area. 
+ +**What alternatives need plugins for that releasekit does out-of-the-box:** + +| Capability | semantic-release plugin | release-it plugin | releasekit | +|---|---|---|---| +| Conventional commits | `@semantic-release/commit-analyzer` | `@release-it/conventional-changelog` | ✅ Built-in | +| Changelog | `@semantic-release/changelog` | `@release-it/conventional-changelog` | ✅ Built-in | +| npm publish | `@semantic-release/npm` | Built-in | ✅ Built-in | +| GitHub release | `@semantic-release/github` | Built-in | ✅ Built-in | +| GitLab release | `@semantic-release/gitlab` | Built-in | ✅ Built-in | +| Monorepo | `semantic-release-monorepo` (3rd party) | `@release-it-plugins/workspaces` | ✅ Built-in | +| Dep graph ordering | ❌ Not available | ❌ Not available | ✅ Built-in | +| Health checks | ❌ Not available | ❌ Not available | ✅ Built-in | +| Auto-fix | ❌ Not available | ❌ Not available | ✅ Built-in | +| Rollback | ❌ Not available | ❌ Not available | ✅ Built-in | +| Version preview | ❌ Not available | `--release-version` flag | ✅ Built-in (`plan`, `version`) | +| Retry/backoff | ❌ Not available | ❌ Not available | ✅ Built-in | +| Multi-ecosystem | ❌ JS only | ❌ JS only (plugins for others) | ✅ Py/JS/Go/Rust/Java/Dart | +| Revert handling | ❌ Not available | ❌ Not available | ✅ Built-in | + +**Recommendation:** Releasekit's built-in approach is the right default. +However, consider adding a lightweight hooks system (like release-it's +`before:init` / `after:release`) for teams that need custom steps without +writing a full plugin. This could be as simple as: +```toml +[hooks] +before_publish = ["npm run build", "npm test"] +after_publish = ["./scripts/notify-slack.sh"] +after_tag = ["echo 'Tagged ${version}'"] +``` + --- -## 8. UPDATED FEATURE COMPARISON MATRIX - -| Feature | releasekit | release-please | semantic-release | python-semantic-release | changesets | nx release | knope | goreleaser | -|---------|-----------|----------------|-----------------|------------------------|------------|------------|-------|------------| -| **Monorepo** | ✅ | ✅ | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | -| **Polyglot** | ✅ Py/JS/Go | Multi-lang | JS-centric | Python-only | JS-only | JS/Rust/Docker | Multi | Go-only | -| **Conv. commits** | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | -| **Changesets** | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ (version plans) | ✅ | ❌ | -| **Dep graph** | ✅ | Partial | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | -| **Topo publish** | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| **Health checks** | ✅ (34) | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Auto-fix** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Multi-forge** | ✅ GH/GL/BB | GitHub only | GH/GL/BB | GH/GL/BB | GitHub only | ❌ | GH/Gitea | GitHub only | -| **Pre-release** | Partial | Partial | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| **Dry-run** | ✅ | ❌ | ❌ | ✅ | ❌ | ✅ | ✅ | ✅ | -| **Rollback** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Version preview** | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| **Graph viz** | ✅ dot/mermaid/d2 | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | -| **Shell completions** | ✅ | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | -| **Error explainer** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Retry/backoff** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Release lock** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Distro pkg sync** | ✅ Deb/RPM/Brew | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Cherry-pick** | Planned (R38) | ❌ | Partial | ❌ | ❌ | ❌ | ❌ | ❌ | -| **Signing** | Partial | ❌ | npm provenance | ❌ | ❌ | ❌ | ❌ | ✅ GPG/Cosign | -| **SBOM** | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | -| **Announcements** | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | +## 9. 
UPDATED FEATURE COMPARISON MATRIX + +| Feature | releasekit | release-please | semantic-release | python-semantic-release | release-it | changesets | nx release | knope | goreleaser | +|---------|-----------|----------------|-----------------|------------------------|------------|------------|------------|-------|------------| +| **Monorepo** | ✅ | ✅ | ❌ (plugin) | ❌ | ❌ (plugin) | ✅ | ✅ | ✅ | ❌ | +| **Polyglot** | ✅ Py/JS/Go/Rust/Java/Dart | Multi-lang | JS-centric | Python-only | JS (plugins for others) | JS-only | JS/Rust/Docker | Multi | Go-only | +| **Conv. commits** | ✅ | ✅ | ✅ (plugin) | ✅ | ❌ (plugin) | ❌ | ✅ | ✅ | ✅ | +| **Changesets** | ❌ | ❌ | ❌ | ❌ | ❌ (plugin) | ✅ | ✅ (version plans) | ✅ | ❌ | +| **Dep graph** | ✅ | Partial | ❌ | ❌ | ❌ | ✅ | ✅ | ❌ | ❌ | +| **Topo publish** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | +| **Health checks** | ✅ (34) | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Auto-fix** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Multi-forge** | ✅ GH/GL/BB | GitHub only | GH/GL/BB | GH/GL/BB | GH/GL | GitHub only | ❌ | GH/Gitea | GitHub only | +| **Pre-release** | Partial | Partial | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| **Dry-run** | ✅ | ❌ | ❌ | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | +| **Rollback** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Version preview** | ✅ | ❌ | ❌ | ❌ | ✅ (`--release-version`) | ❌ | ✅ | ❌ | ❌ | +| **Graph viz** | ✅ dot/mermaid/d2 | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | +| **Shell completions** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | +| **Error explainer** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Retry/backoff** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Release lock** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Distro pkg sync** | ✅ Deb/RPM/Brew | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Hooks** | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | ✅ | ❌ | +| **Interactive mode** | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | +| **Scheduled releases** | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Continuous deploy** | ❌ | ❌ | ✅ | ✅ | ✅ (`--ci`) | ❌ | ❌ | ❌ | ❌ | +| **Re-run release** | ❌ | ❌ | ❌ | ❌ | ✅ (`--no-increment`) | ❌ | ❌ | ❌ | ❌ | +| **Programmatic API** | ❌ | ❌ | ✅ | ✅ | ✅ | ✅ | ✅ | ❌ | ❌ | +| **CalVer** | ❌ | ❌ | ❌ | ❌ | ❌ (plugin) | ❌ | ❌ | ❌ | ❌ | +| **Cherry-pick** | Planned (R38) | ❌ | Partial | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | +| **Signing** | ✅ Sigstore | ❌ | npm provenance | ❌ | npm OIDC | ❌ | ❌ | ❌ | ✅ GPG/Cosign | +| **SBOM** | ✅ CycloneDX/SPDX | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | +| **Announcements** | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | --- -## 9. REVISED PRIORITIZED ROADMAP +## 10. REVISED PRIORITIZED ROADMAP ### Phase 1 — Immediate (finalize JS migration readiness) @@ -551,8 +828,8 @@ signing, and publishing. and `NpmRegistry` fully implemented. Ecosystem-aware `_create_backends()`. 2. ✅ ~~**Revert commit handling**~~ — Done. Per-level bump counters with revert cancellation. -3. ⚠️ **`releasekit doctor`** — `run_doctor()` implemented with 7 checks. - CLI wiring pending. +3. ✅ ~~**`releasekit doctor`**~~ — Done. `run_doctor()` with 6 checks, + fully wired in CLI. 4. **Pre-release workflow** (`--prerelease rc` flag + PEP 440 / SemVer pre-release suffixes). Basic `prerelease` param exists in `compute_bumps`. 5. ✅ ~~**npm dist-tag support**~~ — Done. `--dist-tag` CLI flag wired through @@ -566,34 +843,48 @@ signing, and publishing. `graph.reverse_edges`. 8. ✅ ~~**Contributor attribution in changelogs**~~ — Done. `ChangelogEntry.author` field, git log format `%H\x00%an\x00%s`, rendered as `— @author` (2026-02-13). -9. **Incremental changelog generation** (performance for large repos). -10. 
**Hotfix / maintenance branch support** (`--base-branch`). -11. **Cherry-pick for release branches** (`releasekit cherry-pick`). -12. **Snapshot releases** (`--snapshot` for CI testing). -13. ✅ ~~**`bootstrap-sha` config**~~ (R26) — Done. `bootstrap_sha` on +9. **Continuous deploy mode** (`release_mode = "continuous"` + `--if-needed`). + Enables release-per-commit for trunk-based development. See §8.2. +10. **`releasekit should-release`** command for CI cron integration. + Returns exit 0 if a release should happen based on cadence config. See §8.1. +11. **Lifecycle hooks** (`[hooks]` in `releasekit.toml`). `before_publish`, + `after_publish`, `after_tag` for arbitrary shell commands. See §8.4. +12. **Incremental changelog generation** (performance for large repos). +13. **Hotfix / maintenance branch support** (`--base-branch`). +14. **Cherry-pick for release branches** (`releasekit cherry-pick`). +15. **Snapshot releases** (`--snapshot` for CI testing). +16. ✅ ~~**`bootstrap-sha` config**~~ (R26) — Done. `bootstrap_sha` on `WorkspaceConfig`, threaded through `compute_bumps` and all CLI call sites (2026-02-13). ### Phase 3 — Differentiation -14. **Sigstore / GPG signing + provenance**. -15. **SBOM generation** (CycloneDX / SPDX). -16. ✅ ~~**Auto-merge release PRs**~~ — Done. `auto_merge` config on +17. **Scheduled / cadence-based releases** (`[schedule]` config section). + Daily, weekly, biweekly cadences with release windows and cooldown. See §8.1. +18. **Branch-to-channel mapping** (`[branches]` config). Maps branches to + release channels (latest, next, maintenance). See §8.3. +19. ✅ ~~**Sigstore signing + verification**~~ — Done. `signing.py` with + `sign_artifact()`, `verify_artifact()`, CLI `sign`/`verify` subcommands, + `--sign` flag on `publish`. +20. ✅ ~~**SBOM generation**~~ — Done. `sbom.py` with CycloneDX and SPDX + formats, `generate_sbom()`, `write_sbom()`. +21. ✅ ~~**Auto-merge release PRs**~~ — Done. `auto_merge` config on `WorkspaceConfig`, `prepare.py` calls `forge.merge_pr()` after labeling (2026-02-13). -17. **Custom changelog templates** (Jinja2). -18. **Announcement integrations** (Slack, Discord). -19. **Optional changeset file support** (hybrid with conventional commits). +22. **Custom changelog templates** (Jinja2). +23. **Announcement integrations** (Slack, Discord). +24. **Optional changeset file support** (hybrid with conventional commits). ### Phase 4 — Future -20. **Plugin system for custom steps**. -21. **Programmatic Python API** (like Nx Release's Node.js API). -22. **Cross-compilation orchestration** (for CLI binaries). -23. **`releasekit migrate`** — Protocol-based migration from alternatives. -24. **Bazel workspace backend** (BUILD files, `bazel run //pkg:publish`). -25. **Rust/Cargo workspace backend** (`Cargo.toml`, `cargo publish`). -26. **Java backend** (Maven `pom.xml` / Gradle `build.gradle`, `mvn deploy`). -27. **Dart/Pub workspace backend** (`pubspec.yaml`, `dart pub publish`). -28. **Rustification** — Rewrite core in Rust with PyO3/maturin (see roadmap §12). +25. **Plugin system for custom steps**. +26. **Programmatic Python API** (like Nx Release's Node.js API). +27. **Cross-compilation orchestration** (for CLI binaries). +28. **`releasekit migrate`** — Protocol-based migration from alternatives. +29. **Bazel workspace backend** (BUILD files, `bazel run //pkg:publish`). +30. **Rust/Cargo workspace backend** (`Cargo.toml`, `cargo publish`). +31. **Java backend** (Maven `pom.xml` / Gradle `build.gradle`, `mvn deploy`). +32. 
**Dart/Pub workspace backend** (`pubspec.yaml`, `dart pub publish`). +33. **CalVer support** (calendar-based versioning). +34. **Rustification** — Rewrite core in Rust with PyO3/maturin (see roadmap §12). > **See [../roadmap.md](../roadmap.md)** for the detailed roadmap with > dependency graphs and execution phases. @@ -611,8 +902,10 @@ signing, and publishing. - Cross-referenced against releasekit's codebase (`cli.py`, `versioning.py`, `changelog.py`, `checks/`, `backends/`, `config.py`, `net.py`, `scheduler.py`). -- Compared against 8 tools: release-please, semantic-release, - python-semantic-release, changesets, nx release, knope, goreleaser, - jreleaser. +- Compared against 9 tools: release-please, semantic-release, + python-semantic-release, release-it, changesets, nx release, knope, + goreleaser, jreleaser. - Focused on issues with high community engagement (comments, reactions) as indicators of real pain points rather than edge cases. +- Analyzed plugin-vs-built-in architectural tradeoffs across release-it, + semantic-release, and releasekit (see §8.4). diff --git a/py/tools/releasekit/docs/docs/guides/ci-cd.md b/py/tools/releasekit/docs/docs/guides/ci-cd.md index 7986388098..230cf052e7 100644 --- a/py/tools/releasekit/docs/docs/guides/ci-cd.md +++ b/py/tools/releasekit/docs/docs/guides/ci-cd.md @@ -208,3 +208,120 @@ releasekit plan --format json # Dry-run publish (no uploads) releasekit publish --dry-run ``` + +## Scheduled / Cadence Releases *(planned)* + +ReleaseKit will support scheduled releases via the `should-release` +command and `[schedule]` config. This enables daily, weekly, or +custom-cadence releases driven by CI cron triggers. + +### Daily Release (GitHub Actions) + +```yaml +name: Daily Release +on: + schedule: + - cron: '0 14 * * *' # 2 PM UTC daily + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + id-token: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check if release needed + id: check + run: | + uv run releasekit should-release || echo "skip=true" >> "$GITHUB_OUTPUT" + + - uses: ./py/tools/releasekit + if: steps.check.outputs.skip != 'true' + with: + command: prepare + working-directory: py +``` + +### Configuration + +```toml +# releasekit.toml — scheduled release settings (planned) +[schedule] +cadence = "daily" # daily | weekly:monday | biweekly | on-push +release_window = "14:00-16:00" # UTC time range +cooldown_minutes = 60 # min time between releases +min_bump = "patch" # skip if only chore/docs commits +``` + +## Continuous Deploy Mode *(planned)* + +For projects that want a release on every push to `main` (like +semantic-release's default behavior), ReleaseKit will support a +`release_mode = "continuous"` config that skips PR creation and +goes directly to tag + publish. + +### Per-Commit Release (GitHub Actions) + +```yaml +name: Continuous Deploy +on: + push: + branches: [main] + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + id-token: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: ./py/tools/releasekit + with: + command: publish + extra-args: "--if-needed" + working-directory: py +``` + +### Configuration + +```toml +# releasekit.toml — continuous deploy settings (planned) +release_mode = "continuous" # skip PR, tag + publish directly +``` + +## Lifecycle Hooks *(planned)* + +ReleaseKit will support lifecycle hooks that run custom scripts at +key points in the release pipeline. 
Unlike semantic-release's plugin +system, hooks are simple shell commands configured in TOML — no +JavaScript plugins required. + +### Configuration + +```toml +# releasekit.toml — lifecycle hooks (planned) +[hooks] +before_prepare = ["./scripts/pre-release-checks.sh"] +after_tag = ["./scripts/notify-slack.sh ${version}"] +before_publish = ["./scripts/build-docs.sh"] +after_publish = [ + "./scripts/update-homebrew-formula.sh ${version}", + "./scripts/announce-release.sh ${name} ${version}", +] +``` + +Template variables available in hooks: + +| Variable | Description | +|----------|-------------| +| `${version}` | The new version being released | +| `${name}` | Package name | +| `${tag}` | Git tag (e.g. `genkit-v0.6.0`) | diff --git a/py/tools/releasekit/docs/docs/guides/configuration.md b/py/tools/releasekit/docs/docs/guides/configuration.md index 9c71872674..9afcb2f786 100644 --- a/py/tools/releasekit/docs/docs/guides/configuration.md +++ b/py/tools/releasekit/docs/docs/guides/configuration.md @@ -174,6 +174,155 @@ If `releasekit.toml` doesn't exist, sensible defaults are used: | `pr_title_template` | `"chore(release): v{version}"` | | `extra_files` | `[]` | +## Planned Configuration *(Phase 8)* + +The following configuration sections are planned for Phase 8 (Release +Automation). See [roadmap.md](../../../roadmap.md) and +[competitive-gap-analysis.md](../../../docs/competitive-gap-analysis.md) +for full rationale. + +### Override Hierarchy + +ReleaseKit uses a 3-tier config model. More specific tiers override +less specific ones: + +``` +package > workspace > root > built-in default +``` + +Each `[workspace.*]` section can override root-level defaults. Package-level +`releasekit.toml` files can override workspace settings where applicable. + +#### Phase 8 settings — override scope + +| Setting | Root | Workspace | Package | Notes | +|---------|:----:|:---------:|:-------:|-------| +| `release_mode` | ✅ | ✅ | ❌ | JS continuous, Python PR-based | +| `[schedule]` (all keys) | ✅ | ✅ | ❌ | Different cadence per ecosystem | +| `[hooks]` (all keys) | ✅ | ✅ | ✅ | Concatenated by default (see below) | +| `[branches]` | ✅ | ✅ | ❌ | JS ships `next` channel, Python doesn't | +| `versioning_scheme` | ✅ | ✅ | ❌ | One workspace CalVer, another semver | +| `calver_format` | ✅ | ✅ | ❌ | Follows `versioning_scheme` | + +#### Existing settings gaining workspace override + +| Setting | New Scope | Rationale | +|---------|:---------:|-----------| +| `publish_from` | Root + Workspace | Python from CI, Go via git tags locally | +| `concurrency` | Root + Workspace | PyPI slower than npm — different limits | +| `max_retries` | Root + Workspace | npm rarely retries, PyPI often does | +| `poll_timeout` | Root + Workspace | Maven Central ~10min sync vs PyPI ~30s | +| `verify_checksums` | Root + Workspace | Not all registries support it | +| `major_on_zero` | Root + Workspace | JS ships 0.x breaking changes, Python doesn't | +| `prerelease_mode` | Root + Workspace | Different rollup strategies per ecosystem | + +Settings that remain **root-only**: `pr_title_template`, `http_pool_size`, +`forge`, `repo_owner`, `repo_name`, `default_branch`. 
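+
+A minimal sketch of how this tiered lookup could behave for scalar settings
+(the `resolve_setting` helper below is hypothetical; it is not part of
+releasekit's current `config.py` API). `[hooks]` lists are the exception:
+they concatenate across tiers instead of overriding (see the hooks section
+below).
+
+```python
+from collections.abc import Mapping
+from typing import Any
+
+_UNSET = object()  # sentinel: distinguishes "key absent" from an explicit None
+
+
+def resolve_setting(
+    key: str,
+    *,
+    package: Mapping[str, Any],
+    workspace: Mapping[str, Any],
+    root: Mapping[str, Any],
+    default: Any = None,
+) -> Any:
+    """Return the most specific value: package > workspace > root > default."""
+    for tier in (package, workspace, root):
+        value = tier.get(key, _UNSET)
+        if value is not _UNSET:
+            return value
+    return default
+
+
+# Example: a JS workspace overrides the root-level release mode.
+mode = resolve_setting(
+    'release_mode',
+    package={},
+    workspace={'release_mode': 'continuous'},
+    root={'release_mode': 'pr'},
+    default='pr',
+)
+assert mode == 'continuous'
+```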
+ +### Release Mode + +```toml +# Release mode: "pr" (default) or "continuous" +# pr — create a Release PR, publish after merge (default) +# continuous — skip PR, tag + publish directly on push +release_mode = "pr" + +# Per-workspace override: +[workspace.js] +release_mode = "continuous" +``` + +### Schedule + +```toml +# Cadence release settings (used by `releasekit should-release`) +[schedule] +cadence = "daily" # daily | weekly:monday | biweekly | on-push +release_window = "14:00-16:00" # UTC time range for releases +cooldown_minutes = 60 # minimum time between releases +min_bump = "patch" # skip release if only chore/docs commits + +# Per-workspace override: +[workspace.py.schedule] +cadence = "weekly:monday" +release_window = "10:00-12:00" +``` + +### Lifecycle Hooks + +```toml +# Shell commands executed at lifecycle points +# Template variables: ${version}, ${name}, ${tag} +[hooks] +before_prepare = ["./scripts/pre-release-checks.sh"] +after_tag = ["./scripts/notify-slack.sh ${version}"] +before_publish = ["./scripts/build-docs.sh"] +after_publish = [ + "./scripts/update-homebrew-formula.sh ${version}", + "./scripts/announce-release.sh ${name} ${version}", +] +``` + +#### Hook merge semantics + +Hooks **concatenate** across tiers (root → workspace → package) by +default. This ensures global hooks always run while workspace/package +hooks add specifics. + +```toml +# Root +[hooks] +before_publish = ["./scripts/lint.sh"] + +# Workspace +[workspace.py.hooks] +before_publish = ["./scripts/build-wheels.sh"] + +# Package (py/packages/genkit/releasekit.toml) +[hooks] +before_publish = ["./scripts/validate-schema.sh"] +``` + +Effective order for `genkit`: + +1. `./scripts/lint.sh` ← root +2. `./scripts/build-wheels.sh` ← workspace +3. `./scripts/validate-schema.sh` ← package + +To **replace** instead of concatenate, set `hooks_replace = true`: + +```toml +# py/packages/special/releasekit.toml +hooks_replace = true + +[hooks] +before_publish = ["./scripts/special-only.sh"] +``` + +### Branch-to-Channel Mapping + +```toml +# Map branches to release channels (dist-tags / pre-release suffixes) +[branches] +main = "latest" +"release/v1.*" = "v1-maintenance" +next = "next" +beta = "beta" + +# Per-workspace override: +[workspace.js.branches] +main = "latest" +next = "next" +``` + +### CalVer + +```toml +# Calendar-based versioning (alternative to semver) +versioning_scheme = "calver" # "semver" (default) or "calver" +calver_format = "YYYY.MM.DD" # YYYY.MM.DD | YYYY.MM.MICRO +``` + ## Example: Full Config ```toml diff --git a/py/tools/releasekit/docs/docs/index.md b/py/tools/releasekit/docs/docs/index.md index 0dccbe39b2..d0e775a952 100644 --- a/py/tools/releasekit/docs/docs/index.md +++ b/py/tools/releasekit/docs/docs/index.md @@ -84,6 +84,20 @@ graph LR Post-publish SHA-256 verification ensures uploaded artifacts match local builds. +- :material-clock-outline:{ .lg .middle } **Cadence Releases** *(planned)* + + --- + + Scheduled daily, weekly, or per-commit releases with built-in + cooldown, release windows, and minimum-bump thresholds. + +- :material-hook:{ .lg .middle } **Lifecycle Hooks** *(planned)* + + --- + + Run custom scripts at key points: before/after publish, after + tag, before prepare. Template variables for version and name. 
+ ## Quick Start diff --git a/py/tools/releasekit/docs/docs/internals/preflight.md b/py/tools/releasekit/docs/docs/internals/preflight.md index 040c7d4811..15a32f095b 100644 --- a/py/tools/releasekit/docs/docs/internals/preflight.md +++ b/py/tools/releasekit/docs/docs/internals/preflight.md @@ -135,18 +135,72 @@ validation: | Namespace `__init__.py` | ✅ | — | — | | OSS files (README, LICENSE) | ✅ | ✅ | ✅ | +## Source-Level Diagnostics + +Health checks can attach **source-level context** to warnings and +failures via `SourceContext` objects. The CLI renders these as +Rust-compiler-style diagnostics with file paths, line numbers, and +source excerpts. + +### SourceContext + +```python +@dataclass(frozen=True) +class SourceContext: + path: str # File path (absolute or relative) + line: int = 0 # 1-based line number (0 = unknown) + key: str = '' # TOML key or search term matched + label: str = '' # Short annotation (e.g. "missing here") +``` + +### Helpers + +| Helper | Purpose | +|--------|---------| +| `find_key_line(content, key, section=)` | Find 1-based line of a TOML key or `[section]` header | +| `read_source_snippet(path, line, context_lines=2)` | Read lines around a location for display | + +### Example + +```python +from releasekit.preflight import SourceContext, find_key_line + +content = pkg.manifest_path.read_text(encoding='utf-8') +line = find_key_line(content, 'build-backend', section='build-system') +result.add_failure( + 'build_system', + 'Missing build-backend', + hint='Add build-backend = "hatchling.build" to [build-system].', + context=[SourceContext( + path=str(pkg.manifest_path), + line=line, + key='build-backend', + label='build-backend missing', + )], +) +``` + ## Result Types ```python class PreflightResult: - passed: list[str] # Checks that passed - warnings: list[str] # Non-blocking warnings - failed: list[str] # Blocking failures + passed: list[str] # Checks that passed + warnings: list[str] # Non-blocking warnings + failed: list[str] # Blocking failures + errors: dict[str, str] # Failed check → message + warning_messages: dict[str, str] # Warning check → message + hints: dict[str, str] # Check → actionable hint + context: dict[str, Sequence[str | SourceContext]] # Check → file locations def ok(self) -> bool: return len(self.failed) == 0 ``` +The `context` parameter on `add_warning()` and `add_failure()` accepts +`Sequence[str | SourceContext]` — callers can pass plain `list[str]` +(file paths only) or `list[SourceContext]` (with line numbers) without +type errors. + **Warnings** are displayed but don't block publishing. **Failures** abort the release with a non-zero exit code. diff --git a/py/tools/releasekit/github/workflows/releasekit-cargo.yml b/py/tools/releasekit/github/workflows/releasekit-cargo.yml new file mode 100644 index 0000000000..6cd5a291ce --- /dev/null +++ b/py/tools/releasekit/github/workflows/releasekit-cargo.yml @@ -0,0 +1,449 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +# ══════════════════════════════════════════════════════════════════════ +# ReleaseKit: Rust Release Pipeline (Cargo) +# ══════════════════════════════════════════════════════════════════════ +# +# SAMPLE WORKFLOW — Copy to .github/workflows/releasekit-cargo.yml to use. +# +# This workflow implements a release pipeline for Rust crates managed +# by a Cargo workspace (Cargo.toml with [workspace]). It uses +# releasekit to automate: +# +# 1. PREPARE — compute version bumps, generate changelogs, open +# or update a Release PR. +# 2. RELEASE — tag the merge commit, create a GitHub Release. +# 3. PUBLISH — publish crates to crates.io in topological order +# with retry and version verification. +# +# ── Automatic Flow ────────────────────────────────────────────────── +# +# push to main ──► releasekit prepare ──► Release PR +# (rust/** or rs/**) (autorelease: pending) +# │ +# merge PR +# │ +# ▼ +# releasekit release ──► tags + GitHub Release +# │ +# ▼ +# releasekit publish ──► crates.io +# │ +# ▼ +# repository_dispatch ──► downstream repos +# +# ── Manual Dispatch Flow ──────────────────────────────────────────── +# +# ┌─────────────────────────────────────────────────────────────┐ +# │ workflow_dispatch UI │ +# │ │ +# │ action: [prepare ▼] ──► runs PREPARE job only │ +# │ [release ▼] ──► runs RELEASE + PUBLISH + NOTIFY │ +# │ │ +# │ dry_run: [✓] simulate, no side effects │ +# │ force_prepare: [✓] skip preflight, force PR creation │ +# │ group: [________] target a release group │ +# │ bump_type: [auto / patch / minor / major] │ +# │ prerelease: [________] e.g. rc.1, beta.1 │ +# │ skip_publish: [✓] tag + release but don't publish │ +# │ concurrency: [0] max parallel publish (0 = auto) │ +# │ max_retries: [2] retry failed publishes │ +# └─────────────────────────────────────────────────────────────┘ +# +# ── Trigger Matrix ────────────────────────────────────────────────── +# +# Event │ Jobs that run +# ───────────────────┼────────────────────────────────── +# push to main │ prepare +# PR merged │ release → publish → notify +# dispatch: prepare │ prepare +# dispatch: release │ release → publish → notify +# +# ── Inputs Reference ──────────────────────────────────────────────── +# +# Input │ Type │ Default │ Description +# ───────────────┼─────────┼─────────┼────────────────────────────── +# action │ choice │ release │ Pipeline stage: prepare or release +# dry_run │ boolean │ true │ Simulate without side effects +# force_prepare │ boolean │ false │ Force PR creation (--force) +# group │ string │ (all) │ Target a specific release group +# bump_type │ choice │ auto │ Override semver bump detection +# prerelease │ string │ (none) │ Prerelease suffix (e.g. rc.1) +# skip_publish │ boolean │ false │ Tag + release, skip registry +# concurrency │ string │ 0 │ Max parallel publish jobs +# max_retries │ string │ 2 │ Retry failed publishes +# +# ── Authentication ────────────────────────────────────────────────── +# +# Publishing to crates.io requires an API token. Set the following +# secret in your repository: +# +# CARGO_REGISTRY_TOKEN — crates.io API token +# +# Generate one at https://crates.io/settings/tokens with the +# "publish-new" and "publish-update" scopes. +# +# The workflow is idempotent: re-running any step is safe because +# releasekit skips already-created tags and already-published versions. 
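+#
+# Note: crates.io itself rejects uploading a version that already exists,
+# so a re-run cannot overwrite a previously published crate.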
+# ══════════════════════════════════════════════════════════════════════ + +name: "ReleaseKit: Rust (Cargo)" + +on: + workflow_dispatch: + inputs: + action: + description: 'Which pipeline stage to run' + required: true + default: release + type: choice + options: + - prepare + - release + dry_run: + description: 'Dry run — log what would happen without creating tags or publishing' + required: true + default: true + type: boolean + force_prepare: + description: 'Force create/update the Release PR even if no new bumps are detected' + required: false + default: false + type: boolean + group: + description: 'Release group to target (leave empty for all)' + required: false + type: string + bump_type: + description: 'Override auto-detected bump type' + required: false + default: auto + type: choice + options: + - auto + - patch + - minor + - major + prerelease: + description: 'Publish as prerelease (e.g. rc.1, beta.1)' + required: false + type: string + skip_publish: + description: 'Tag and create GitHub Release but skip publishing to crates.io' + required: false + default: false + type: boolean + concurrency: + description: 'Max parallel publish jobs (0 = auto)' + required: false + default: '0' + type: string + max_retries: + description: 'Max retries for failed publish attempts (0 = no retries)' + required: false + default: '2' + type: string + push: + branches: [main] + paths: + - "rust/**" + - "rs/**" + pull_request: + types: [closed] + branches: [main] + +# Only one release pipeline runs at a time. +concurrency: + group: releasekit-rust-${{ github.ref }} + cancel-in-progress: false + +permissions: + contents: write + pull-requests: write + +env: + RELEASEKIT_DIR: py/tools/releasekit + WORKSPACE_DIR: rust + RUST_TOOLCHAIN: stable + DRY_RUN: ${{ github.event_name == 'pull_request' && 'false' || (inputs.dry_run == 'false' && 'false' || 'true') }} + +jobs: + # ═══════════════════════════════════════════════════════════════════════ + # PREPARE: Compute bumps and open/update Release PR + # ═══════════════════════════════════════════════════════════════════════ + prepare: + name: Prepare Release PR + if: | + (github.event_name == 'push' && + !startsWith(github.event.head_commit.message, 'chore(release):') && + !contains(github.event.head_commit.message, 'releasekit--release')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'prepare') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + has_bumps: ${{ steps.prepare.outputs.has_bumps }} + pr_url: ${{ steps.prepare.outputs.pr_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_TOOLCHAIN }} + + - uses: Swatinem/rust-cache@v2 + with: + workspaces: ${{ env.WORKSPACE_DIR }} + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Run releasekit prepare + id: prepare + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace rust prepare) + if [ "${{ inputs.force_prepare }}" = "true" ]; then + cmd+=(--force) + fi + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ 
inputs.group }}") + fi + if [ "${{ inputs.bump_type }}" != "auto" ] && [ -n "${{ inputs.bump_type }}" ]; then + cmd+=(--bump "${{ inputs.bump_type }}") + fi + if [ -n "${{ inputs.prerelease }}" ]; then + cmd+=(--prerelease "${{ inputs.prerelease }}") + fi + + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit prepare failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + PR_URL=$(echo "$OUTPUT" | sed -n 's/.*Release PR: //p' | tail -1) + if [ -n "$PR_URL" ]; then + echo "has_bumps=true" >> "$GITHUB_OUTPUT" + echo "pr_url=$PR_URL" >> "$GITHUB_OUTPUT" + else + echo "has_bumps=false" >> "$GITHUB_OUTPUT" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # RELEASE: Tag merge commit and create GitHub Release + # ═══════════════════════════════════════════════════════════════════════ + release: + name: Tag and Release + if: | + (github.event_name == 'pull_request' && + github.event.pull_request.merged == true && + contains(github.event.pull_request.labels.*.name, 'autorelease: pending')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'release') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + release_url: ${{ steps.release.outputs.release_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Rust)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace rust plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no tags or releases will be created" + else + echo "::notice::LIVE RUN — tags and GitHub Release will be created" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Run releasekit release + id: release + run: | + set -euo pipefail + + DRY_RUN_FLAG="" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + DRY_RUN_FLAG="--dry-run" + fi + + OUTPUT=$(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace rust release $DRY_RUN_FLAG 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit release failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + RELEASE_URL=$(echo "$OUTPUT" | sed -n 's/.*release_url=//p' | tail -1) + echo "release_url=$RELEASE_URL" >> "$GITHUB_OUTPUT" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # PUBLISH: Build and publish crates to crates.io + # + # Publishes crates in topological order using `cargo publish`. + # Cargo handles workspace dependency resolution automatically. + # The CARGO_REGISTRY_TOKEN env var is used for authentication. 
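+  # Cargo reads CARGO_REGISTRY_TOKEN directly from the environment, so no
+  # separate `cargo login` step is needed before publishing.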
+ # ═══════════════════════════════════════════════════════════════════════ + publish: + name: Publish to crates.io + needs: release + if: inputs.skip_publish != 'true' + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_TOOLCHAIN }} + + - uses: Swatinem/rust-cache@v2 + with: + workspaces: ${{ env.WORKSPACE_DIR }} + + - name: Build Rust crates + working-directory: ${{ env.WORKSPACE_DIR }} + run: cargo build --release + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Rust)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace rust plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no crates will be published" + else + echo "::notice::LIVE RUN — crates will be published to crates.io" + fi + + - name: Run releasekit publish + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace rust publish --force) + + if [ "${{ env.DRY_RUN }}" = "true" ]; then + cmd+=(--dry-run) + fi + + CONCURRENCY="${{ inputs.concurrency }}" + if [ -n "$CONCURRENCY" ] && [ "$CONCURRENCY" != "0" ]; then + cmd+=(--concurrency "$CONCURRENCY") + fi + + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + + MAX_RETRIES="${{ inputs.max_retries }}" + if [ -n "$MAX_RETRIES" ] && [ "$MAX_RETRIES" != "0" ]; then + cmd+=(--max-retries "$MAX_RETRIES") + fi + + echo "::group::Running: ${cmd[*]}" + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + echo "::endgroup::" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit publish failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + + - name: Upload manifest artifact + if: success() && env.DRY_RUN != 'true' + uses: actions/upload-artifact@v4 + with: + name: release-manifest-rust + path: ${{ env.WORKSPACE_DIR }}/.releasekit-state.json + retention-days: 90 + + # ═══════════════════════════════════════════════════════════════════════ + # NOTIFY: Post-release notifications + # ═══════════════════════════════════════════════════════════════════════ + notify: + name: Notify Downstream + needs: publish + if: success() + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Dispatch release event + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: genkit-rust-release + client-payload: '{"release_url": "${{ needs.release.outputs.release_url }}"}' diff --git a/py/tools/releasekit/github/workflows/releasekit-dart.yml b/py/tools/releasekit/github/workflows/releasekit-dart.yml new file mode 100644 index 0000000000..b27c8270bb --- /dev/null +++ b/py/tools/releasekit/github/workflows/releasekit-dart.yml @@ -0,0 +1,448 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +# ══════════════════════════════════════════════════════════════════════ +# ReleaseKit: Dart/Flutter Release Pipeline (pub) +# ══════════════════════════════════════════════════════════════════════ +# +# SAMPLE WORKFLOW — Copy to .github/workflows/releasekit-dart.yml to use. +# +# This workflow implements a release pipeline for Dart/Flutter packages +# managed by a melos workspace or pubspec.yaml workspace. It uses +# releasekit to automate: +# +# 1. PREPARE — compute version bumps, generate changelogs, open +# or update a Release PR. +# 2. RELEASE — tag the merge commit, create a GitHub Release. +# 3. PUBLISH — publish packages to pub.dev in topological order +# with retry and version verification. +# +# ── Automatic Flow ────────────────────────────────────────────────── +# +# push to main ──► releasekit prepare ──► Release PR +# (dart/** or flutter/**) (autorelease: pending) +# │ +# merge PR +# │ +# ▼ +# releasekit release ──► tags + GitHub Release +# │ +# ▼ +# releasekit publish ──► pub.dev +# │ +# ▼ +# repository_dispatch ──► downstream repos +# +# ── Manual Dispatch Flow ──────────────────────────────────────────── +# +# ┌─────────────────────────────────────────────────────────────┐ +# │ workflow_dispatch UI │ +# │ │ +# │ action: [prepare ▼] ──► runs PREPARE job only │ +# │ [release ▼] ──► runs RELEASE + PUBLISH + NOTIFY │ +# │ │ +# │ dry_run: [✓] simulate, no side effects │ +# │ force_prepare: [✓] skip preflight, force PR creation │ +# │ group: [________] target a release group │ +# │ bump_type: [auto / patch / minor / major] │ +# │ prerelease: [________] e.g. rc.1, beta.1 │ +# │ skip_publish: [✓] tag + release but don't publish │ +# │ concurrency: [0] max parallel publish (0 = auto) │ +# │ max_retries: [2] retry failed publishes │ +# └─────────────────────────────────────────────────────────────┘ +# +# ── Trigger Matrix ────────────────────────────────────────────────── +# +# Event │ Jobs that run +# ───────────────────┼────────────────────────────────── +# push to main │ prepare +# PR merged │ release → publish → notify +# dispatch: prepare │ prepare +# dispatch: release │ release → publish → notify +# +# ── Inputs Reference ──────────────────────────────────────────────── +# +# Input │ Type │ Default │ Description +# ───────────────┼─────────┼─────────┼────────────────────────────── +# action │ choice │ release │ Pipeline stage: prepare or release +# dry_run │ boolean │ true │ Simulate without side effects +# force_prepare │ boolean │ false │ Force PR creation (--force) +# group │ string │ (all) │ Target a specific release group +# bump_type │ choice │ auto │ Override semver bump detection +# prerelease │ string │ (none) │ Prerelease suffix (e.g. rc.1) +# skip_publish │ boolean │ false │ Tag + release, skip registry +# concurrency │ string │ 0 │ Max parallel publish jobs +# max_retries │ string │ 2 │ Retry failed publishes +# +# ── Authentication ────────────────────────────────────────────────── +# +# Publishing to pub.dev requires a Google Cloud service account with +# the "Service Account Token Creator" role. 
Set the following secrets: +# +# PUB_CREDENTIALS_JSON — JSON key file for the service account +# +# Alternatively, use Workload Identity Federation for keyless auth +# in Google Cloud environments. +# +# The workflow is idempotent: re-running any step is safe because +# releasekit skips already-created tags and already-published versions. +# ══════════════════════════════════════════════════════════════════════ + +name: "ReleaseKit: Dart (pub)" + +on: + workflow_dispatch: + inputs: + action: + description: 'Which pipeline stage to run' + required: true + default: release + type: choice + options: + - prepare + - release + dry_run: + description: 'Dry run — log what would happen without creating tags or publishing' + required: true + default: true + type: boolean + force_prepare: + description: 'Force create/update the Release PR even if no new bumps are detected' + required: false + default: false + type: boolean + group: + description: 'Release group to target (leave empty for all)' + required: false + type: string + bump_type: + description: 'Override auto-detected bump type' + required: false + default: auto + type: choice + options: + - auto + - patch + - minor + - major + prerelease: + description: 'Publish as prerelease (e.g. rc.1, beta.1)' + required: false + type: string + skip_publish: + description: 'Tag and create GitHub Release but skip publishing to pub.dev' + required: false + default: false + type: boolean + concurrency: + description: 'Max parallel publish jobs (0 = auto)' + required: false + default: '0' + type: string + max_retries: + description: 'Max retries for failed publish attempts (0 = no retries)' + required: false + default: '2' + type: string + push: + branches: [main] + paths: + - "dart/**" + - "flutter/**" + pull_request: + types: [closed] + branches: [main] + +# Only one release pipeline runs at a time. 
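+# With cancel-in-progress set to false, a newly triggered run waits for the
+# active one to finish instead of cancelling it.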
+concurrency: + group: releasekit-dart-${{ github.ref }} + cancel-in-progress: false + +permissions: + contents: write + pull-requests: write + +env: + RELEASEKIT_DIR: py/tools/releasekit + WORKSPACE_DIR: dart + FLUTTER_VERSION: "3.24" + DRY_RUN: ${{ github.event_name == 'pull_request' && 'false' || (inputs.dry_run == 'false' && 'false' || 'true') }} + +jobs: + # ═══════════════════════════════════════════════════════════════════════ + # PREPARE: Compute bumps and open/update Release PR + # ═══════════════════════════════════════════════════════════════════════ + prepare: + name: Prepare Release PR + if: | + (github.event_name == 'push' && + !startsWith(github.event.head_commit.message, 'chore(release):') && + !contains(github.event.head_commit.message, 'releasekit--release')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'prepare') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + has_bumps: ${{ steps.prepare.outputs.has_bumps }} + pr_url: ${{ steps.prepare.outputs.pr_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: subosito/flutter-action@v2 + with: + flutter-version: ${{ env.FLUTTER_VERSION }} + channel: stable + cache: true + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Run releasekit prepare + id: prepare + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace dart prepare) + if [ "${{ inputs.force_prepare }}" = "true" ]; then + cmd+=(--force) + fi + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + if [ "${{ inputs.bump_type }}" != "auto" ] && [ -n "${{ inputs.bump_type }}" ]; then + cmd+=(--bump "${{ inputs.bump_type }}") + fi + if [ -n "${{ inputs.prerelease }}" ]; then + cmd+=(--prerelease "${{ inputs.prerelease }}") + fi + + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? 
+ echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit prepare failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + PR_URL=$(echo "$OUTPUT" | sed -n 's/.*Release PR: //p' | tail -1) + if [ -n "$PR_URL" ]; then + echo "has_bumps=true" >> "$GITHUB_OUTPUT" + echo "pr_url=$PR_URL" >> "$GITHUB_OUTPUT" + else + echo "has_bumps=false" >> "$GITHUB_OUTPUT" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # RELEASE: Tag merge commit and create GitHub Release + # ═══════════════════════════════════════════════════════════════════════ + release: + name: Tag and Release + if: | + (github.event_name == 'pull_request' && + github.event.pull_request.merged == true && + contains(github.event.pull_request.labels.*.name, 'autorelease: pending')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'release') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + release_url: ${{ steps.release.outputs.release_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Dart)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace dart plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no tags or releases will be created" + else + echo "::notice::LIVE RUN — tags and GitHub Release will be created" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Run releasekit release + id: release + run: | + set -euo pipefail + + DRY_RUN_FLAG="" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + DRY_RUN_FLAG="--dry-run" + fi + + OUTPUT=$(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace dart release $DRY_RUN_FLAG 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit release failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + RELEASE_URL=$(echo "$OUTPUT" | sed -n 's/.*release_url=//p' | tail -1) + echo "release_url=$RELEASE_URL" >> "$GITHUB_OUTPUT" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # PUBLISH: Publish packages to pub.dev + # + # Publishes Dart/Flutter packages in topological order using + # `dart pub publish`. Requires a Google Cloud service account + # credential for authentication. 
+ # ═══════════════════════════════════════════════════════════════════════ + publish: + name: Publish to pub.dev + needs: release + if: inputs.skip_publish != 'true' + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: subosito/flutter-action@v2 + with: + flutter-version: ${{ env.FLUTTER_VERSION }} + channel: stable + cache: true + + - name: Install Dart dependencies + working-directory: ${{ env.WORKSPACE_DIR }} + run: dart pub get + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure pub.dev credentials + run: | + # Write the service account credentials for dart pub publish. + # For Workload Identity Federation, replace this with OIDC setup. + mkdir -p "$HOME/.config/dart" + echo '${{ secrets.PUB_CREDENTIALS_JSON }}' > "$HOME/.config/dart/pub-credentials.json" + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Dart)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace dart plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no packages will be published" + else + echo "::notice::LIVE RUN — packages will be published to pub.dev" + fi + + - name: Run releasekit publish + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace dart publish --force) + + if [ "${{ env.DRY_RUN }}" = "true" ]; then + cmd+=(--dry-run) + fi + + CONCURRENCY="${{ inputs.concurrency }}" + if [ -n "$CONCURRENCY" ] && [ "$CONCURRENCY" != "0" ]; then + cmd+=(--concurrency "$CONCURRENCY") + fi + + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + + MAX_RETRIES="${{ inputs.max_retries }}" + if [ -n "$MAX_RETRIES" ] && [ "$MAX_RETRIES" != "0" ]; then + cmd+=(--max-retries "$MAX_RETRIES") + fi + + echo "::group::Running: ${cmd[*]}" + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + echo "::endgroup::" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit publish failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + - name: Upload manifest artifact + if: success() && env.DRY_RUN != 'true' + uses: actions/upload-artifact@v4 + with: + name: release-manifest-dart + path: ${{ env.WORKSPACE_DIR }}/.releasekit-state.json + retention-days: 90 + + # ═══════════════════════════════════════════════════════════════════════ + # NOTIFY: Post-release notifications + # ═══════════════════════════════════════════════════════════════════════ + notify: + name: Notify Downstream + needs: publish + if: success() + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Dispatch release event + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: genkit-dart-release + client-payload: '{"release_url": "${{ needs.release.outputs.release_url }}"}' diff --git a/py/tools/releasekit/github/workflows/releasekit-go.yml b/py/tools/releasekit/github/workflows/releasekit-go.yml new file mode 100644 index 0000000000..7b01c80675 --- /dev/null +++ b/py/tools/releasekit/github/workflows/releasekit-go.yml @@ -0,0 +1,429 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +# ══════════════════════════════════════════════════════════════════════ +# ReleaseKit: Go Release Pipeline +# ══════════════════════════════════════════════════════════════════════ +# +# SAMPLE WORKFLOW — Copy to .github/workflows/releasekit-go.yml to use. +# +# This workflow implements a release pipeline for Go modules managed +# by a go.work workspace. It uses releasekit to automate: +# +# 1. PREPARE — compute version bumps, generate changelogs, open +# or update a Release PR. +# 2. RELEASE — tag the merge commit, create a GitHub Release. +# 3. PUBLISH — verify modules are available on the Go module proxy +# (Go modules are published by pushing tags; the proxy +# fetches them from VCS on first request). +# +# Go modules don't need an explicit "publish" step like PyPI or npm. +# After tagging, the Go module proxy (proxy.golang.org) picks up the +# new version when any user requests it. The publish job here simply +# polls the proxy to confirm availability. +# +# ── Automatic Flow ────────────────────────────────────────────────── +# +# push to main ──► releasekit prepare ──► Release PR +# (go/** or golang/**) (autorelease: pending) +# │ +# merge PR +# │ +# ▼ +# releasekit release ──► tags + GitHub Release +# │ +# ▼ +# releasekit publish ──► verify on proxy.golang.org +# │ +# ▼ +# repository_dispatch ──► downstream repos +# +# ── Manual Dispatch Flow ──────────────────────────────────────────── +# +# ┌─────────────────────────────────────────────────────────────┐ +# │ workflow_dispatch UI │ +# │ │ +# │ action: [prepare ▼] ──► runs PREPARE job only │ +# │ [release ▼] ──► runs RELEASE + PUBLISH + NOTIFY │ +# │ │ +# │ dry_run: [✓] simulate, no side effects │ +# │ force_prepare: [✓] skip preflight, force PR creation │ +# │ group: [________] target a release group │ +# │ bump_type: [auto / patch / minor / major] │ +# │ prerelease: [________] e.g. rc.1, beta.1 │ +# │ skip_publish: [✓] tag + release but don't verify proxy │ +# │ concurrency: [0] max parallel verify (0 = auto) │ +# │ max_retries: [2] retry failed verifications │ +# └─────────────────────────────────────────────────────────────┘ +# +# ── Trigger Matrix ────────────────────────────────────────────────── +# +# Event │ Jobs that run +# ───────────────────┼────────────────────────────────── +# push to main │ prepare +# PR merged │ release → publish → notify +# dispatch: prepare │ prepare +# dispatch: release │ release → publish → notify +# +# ── Inputs Reference ──────────────────────────────────────────────── +# +# Input │ Type │ Default │ Description +# ───────────────┼─────────┼─────────┼────────────────────────────── +# action │ choice │ release │ Pipeline stage: prepare or release +# dry_run │ boolean │ true │ Simulate without side effects +# force_prepare │ boolean │ false │ Force PR creation (--force) +# group │ string │ (all) │ Target a specific release group +# bump_type │ choice │ auto │ Override semver bump detection +# prerelease │ string │ (none) │ Prerelease suffix (e.g. 
rc.1) +# skip_publish │ boolean │ false │ Tag + release, skip proxy verify +# concurrency │ string │ 0 │ Max parallel verify jobs +# max_retries │ string │ 2 │ Retry failed verifications +# +# The workflow is idempotent: re-running any step is safe because +# releasekit skips already-created tags and already-verified versions. +# ══════════════════════════════════════════════════════════════════════ + +name: "ReleaseKit: Go" + +on: + workflow_dispatch: + inputs: + action: + description: 'Which pipeline stage to run' + required: true + default: release + type: choice + options: + - prepare + - release + dry_run: + description: 'Dry run — log what would happen without creating tags or publishing' + required: true + default: true + type: boolean + force_prepare: + description: 'Force create/update the Release PR even if no new bumps are detected' + required: false + default: false + type: boolean + group: + description: 'Release group to target (leave empty for all)' + required: false + type: string + bump_type: + description: 'Override auto-detected bump type' + required: false + default: auto + type: choice + options: + - auto + - patch + - minor + - major + prerelease: + description: 'Publish as prerelease (e.g. rc.1, beta.1)' + required: false + type: string + skip_publish: + description: 'Tag and create GitHub Release but skip proxy verification' + required: false + default: false + type: boolean + concurrency: + description: 'Max parallel verify jobs (0 = auto)' + required: false + default: '0' + type: string + max_retries: + description: 'Max retries for failed verify attempts (0 = no retries)' + required: false + default: '2' + type: string + push: + branches: [main] + paths: + - "go/**" + - "golang/**" + pull_request: + types: [closed] + branches: [main] + +# Only one release pipeline runs at a time. 
+concurrency: + group: releasekit-go-${{ github.ref }} + cancel-in-progress: false + +permissions: + contents: write + pull-requests: write + +env: + RELEASEKIT_DIR: py/tools/releasekit + GO_VERSION: "1.22" + DRY_RUN: ${{ github.event_name == 'pull_request' && 'false' || (inputs.dry_run == 'false' && 'false' || 'true') }} + +jobs: + # ═══════════════════════════════════════════════════════════════════════ + # PREPARE: Compute bumps and open/update Release PR + # ═══════════════════════════════════════════════════════════════════════ + prepare: + name: Prepare Release PR + if: | + (github.event_name == 'push' && + !startsWith(github.event.head_commit.message, 'chore(release):') && + !contains(github.event.head_commit.message, 'releasekit--release')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'prepare') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + has_bumps: ${{ steps.prepare.outputs.has_bumps }} + pr_url: ${{ steps.prepare.outputs.pr_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + cache-dependency-path: go/go.sum + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Run releasekit prepare + id: prepare + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace go prepare) + if [ "${{ inputs.force_prepare }}" = "true" ]; then + cmd+=(--force) + fi + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + if [ "${{ inputs.bump_type }}" != "auto" ] && [ -n "${{ inputs.bump_type }}" ]; then + cmd+=(--bump "${{ inputs.bump_type }}") + fi + if [ -n "${{ inputs.prerelease }}" ]; then + cmd+=(--prerelease "${{ inputs.prerelease }}") + fi + + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? 
+ echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit prepare failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + PR_URL=$(echo "$OUTPUT" | sed -n 's/.*Release PR: //p' | tail -1) + if [ -n "$PR_URL" ]; then + echo "has_bumps=true" >> "$GITHUB_OUTPUT" + echo "pr_url=$PR_URL" >> "$GITHUB_OUTPUT" + else + echo "has_bumps=false" >> "$GITHUB_OUTPUT" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # RELEASE: Tag merge commit and create GitHub Release + # ═══════════════════════════════════════════════════════════════════════ + release: + name: Tag and Release + if: | + (github.event_name == 'pull_request' && + github.event.pull_request.merged == true && + contains(github.event.pull_request.labels.*.name, 'autorelease: pending')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'release') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + release_url: ${{ steps.release.outputs.release_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Go)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace go plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no tags or releases will be created" + else + echo "::notice::LIVE RUN — tags and GitHub Release will be created" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Run releasekit release + id: release + run: | + set -euo pipefail + + DRY_RUN_FLAG="" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + DRY_RUN_FLAG="--dry-run" + fi + + OUTPUT=$(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace go release $DRY_RUN_FLAG 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit release failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + RELEASE_URL=$(echo "$OUTPUT" | sed -n 's/.*release_url=//p' | tail -1) + echo "release_url=$RELEASE_URL" >> "$GITHUB_OUTPUT" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # PUBLISH: Verify modules are available on the Go module proxy + # + # Go modules are published by pushing tags — the proxy fetches them + # from VCS on first request. This job polls proxy.golang.org to + # confirm each tagged module version is available. 
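+  # The proxy exposes version metadata at, for example,
+  #   https://proxy.golang.org/<module-path>/@v/<version>.info
+  # (GOPROXY protocol), which returns JSON once the version is resolvable.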
+ # ═══════════════════════════════════════════════════════════════════════ + publish: + name: Verify on Go Proxy + needs: release + if: inputs.skip_publish != 'true' + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: actions/setup-go@v5 + with: + go-version: ${{ env.GO_VERSION }} + cache-dependency-path: go/go.sum + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Go)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace go plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no proxy verification will be performed" + else + echo "::notice::LIVE RUN — verifying modules on proxy.golang.org" + fi + + - name: Run releasekit publish (Go proxy verify) + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace go publish --force) + + if [ "${{ env.DRY_RUN }}" = "true" ]; then + cmd+=(--dry-run) + fi + + CONCURRENCY="${{ inputs.concurrency }}" + if [ -n "$CONCURRENCY" ] && [ "$CONCURRENCY" != "0" ]; then + cmd+=(--concurrency "$CONCURRENCY") + fi + + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + + MAX_RETRIES="${{ inputs.max_retries }}" + if [ -n "$MAX_RETRIES" ] && [ "$MAX_RETRIES" != "0" ]; then + cmd+=(--max-retries "$MAX_RETRIES") + fi + + echo "::group::Running: ${cmd[*]}" + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + echo "::endgroup::" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit publish failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + - name: Upload manifest artifact + if: success() && env.DRY_RUN != 'true' + uses: actions/upload-artifact@v4 + with: + name: release-manifest-go + path: .releasekit-state.json + retention-days: 90 + + # ═══════════════════════════════════════════════════════════════════════ + # NOTIFY: Post-release notifications + # ═══════════════════════════════════════════════════════════════════════ + notify: + name: Notify Downstream + needs: publish + if: success() + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Dispatch release event + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: genkit-go-release + client-payload: '{"release_url": "${{ needs.release.outputs.release_url }}"}' diff --git a/py/tools/releasekit/github/workflows/releasekit-gradle.yml b/py/tools/releasekit/github/workflows/releasekit-gradle.yml new file mode 100644 index 0000000000..0cc8fedf38 --- /dev/null +++ b/py/tools/releasekit/github/workflows/releasekit-gradle.yml @@ -0,0 +1,470 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +# ══════════════════════════════════════════════════════════════════════ +# ReleaseKit: Java/Kotlin Release Pipeline (Gradle) +# ══════════════════════════════════════════════════════════════════════ +# +# SAMPLE WORKFLOW — Copy to .github/workflows/releasekit-gradle.yml to use. +# +# This workflow implements a release pipeline for Java/Kotlin projects +# managed by Gradle (settings.gradle / settings.gradle.kts). It uses +# releasekit to automate: +# +# 1. PREPARE — compute version bumps, generate changelogs, open +# or update a Release PR. +# 2. RELEASE — tag the merge commit, create a GitHub Release. +# 3. PUBLISH — publish artifacts to Maven Central (via Sonatype +# OSSRH) in topological order with retry and +# verification. +# +# ── Automatic Flow ────────────────────────────────────────────────── +# +# push to main ──► releasekit prepare ──► Release PR +# (java/** or jvm/**) (autorelease: pending) +# │ +# merge PR +# │ +# ▼ +# releasekit release ──► tags + GitHub Release +# │ +# ▼ +# releasekit publish ──► Maven Central +# │ +# ▼ +# repository_dispatch ──► downstream repos +# +# ── Manual Dispatch Flow ──────────────────────────────────────────── +# +# ┌─────────────────────────────────────────────────────────────┐ +# │ workflow_dispatch UI │ +# │ │ +# │ action: [prepare ▼] ──► runs PREPARE job only │ +# │ [release ▼] ──► runs RELEASE + PUBLISH + NOTIFY │ +# │ │ +# │ target: [maven-central / staging] │ +# │ dry_run: [✓] simulate, no side effects │ +# │ force_prepare: [✓] skip preflight, force PR creation │ +# │ group: [________] target a release group │ +# │ bump_type: [auto / patch / minor / major] │ +# │ prerelease: [________] e.g. rc.1, beta.1 │ +# │ skip_publish: [✓] tag + release but don't publish │ +# │ concurrency: [0] max parallel publish (0 = auto) │ +# │ max_retries: [2] retry failed publishes │ +# └─────────────────────────────────────────────────────────────┘ +# +# ── Trigger Matrix ────────────────────────────────────────────────── +# +# Event │ Jobs that run +# ───────────────────┼────────────────────────────────── +# push to main │ prepare +# PR merged │ release → publish → notify +# dispatch: prepare │ prepare +# dispatch: release │ release → publish → notify +# +# ── Inputs Reference ──────────────────────────────────────────────── +# +# Input │ Type │ Default │ Description +# ───────────────┼─────────┼────────────────┼────────────────────── +# action │ choice │ release │ Pipeline stage +# target │ choice │ maven-central │ Registry target +# dry_run │ boolean │ true │ Simulate +# force_prepare │ boolean │ false │ Force PR creation +# group │ string │ (all) │ Release group +# bump_type │ choice │ auto │ Override bump +# prerelease │ string │ (none) │ Prerelease suffix +# skip_publish │ boolean │ false │ Skip registry +# concurrency │ string │ 0 │ Max parallel +# max_retries │ string │ 2 │ Retry count +# +# ── Authentication ────────────────────────────────────────────────── +# +# Publishing to Maven Central via Sonatype OSSRH requires: +# +# OSSRH_USERNAME — Sonatype OSSRH username +# OSSRH_PASSWORD — Sonatype OSSRH password/token +# GPG_SIGNING_KEY — GPG private key (base64-encoded) +# GPG_PASSPHRASE — GPG key passphrase +# +# These should be stored as GitHub repository secrets. +# +# The workflow is idempotent: re-running any step is safe because +# releasekit skips already-created tags and already-published versions. 
+# ══════════════════════════════════════════════════════════════════════ + +name: "ReleaseKit: Java (Gradle)" + +on: + workflow_dispatch: + inputs: + action: + description: 'Which pipeline stage to run' + required: true + default: release + type: choice + options: + - prepare + - release + target: + description: 'Publish target registry (release only)' + required: true + default: maven-central + type: choice + options: + - maven-central + - staging + dry_run: + description: 'Dry run — log what would happen without creating tags or publishing' + required: true + default: true + type: boolean + force_prepare: + description: 'Force create/update the Release PR even if no new bumps are detected' + required: false + default: false + type: boolean + group: + description: 'Release group to target (leave empty for all)' + required: false + type: string + bump_type: + description: 'Override auto-detected bump type' + required: false + default: auto + type: choice + options: + - auto + - patch + - minor + - major + prerelease: + description: 'Publish as prerelease (e.g. rc.1, beta.1)' + required: false + type: string + skip_publish: + description: 'Tag and create GitHub Release but skip publishing to Maven Central' + required: false + default: false + type: boolean + concurrency: + description: 'Max parallel publish jobs (0 = auto)' + required: false + default: '0' + type: string + max_retries: + description: 'Max retries for failed publish attempts (0 = no retries)' + required: false + default: '2' + type: string + push: + branches: [main] + paths: + - "java/**" + - "jvm/**" + pull_request: + types: [closed] + branches: [main] + +# Only one release pipeline runs at a time. +concurrency: + group: releasekit-java-${{ github.ref }} + cancel-in-progress: false + +permissions: + contents: write + pull-requests: write + +env: + RELEASEKIT_DIR: py/tools/releasekit + WORKSPACE_DIR: java + JAVA_VERSION: "17" + GRADLE_VERSION: "8.7" + DRY_RUN: ${{ github.event_name == 'pull_request' && 'false' || (inputs.dry_run == 'false' && 'false' || 'true') }} + +jobs: + # ═══════════════════════════════════════════════════════════════════════ + # PREPARE: Compute bumps and open/update Release PR + # ═══════════════════════════════════════════════════════════════════════ + prepare: + name: Prepare Release PR + if: | + (github.event_name == 'push' && + !startsWith(github.event.head_commit.message, 'chore(release):') && + !contains(github.event.head_commit.message, 'releasekit--release')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'prepare') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + has_bumps: ${{ steps.prepare.outputs.has_bumps }} + pr_url: ${{ steps.prepare.outputs.pr_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ env.JAVA_VERSION }} + + - uses: gradle/actions/setup-gradle@v4 + with: + gradle-version: ${{ env.GRADLE_VERSION }} + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Run releasekit prepare + id: prepare + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ 
env.RELEASEKIT_DIR }} releasekit --workspace java prepare) + if [ "${{ inputs.force_prepare }}" = "true" ]; then + cmd+=(--force) + fi + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + if [ "${{ inputs.bump_type }}" != "auto" ] && [ -n "${{ inputs.bump_type }}" ]; then + cmd+=(--bump "${{ inputs.bump_type }}") + fi + if [ -n "${{ inputs.prerelease }}" ]; then + cmd+=(--prerelease "${{ inputs.prerelease }}") + fi + + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit prepare failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + PR_URL=$(echo "$OUTPUT" | sed -n 's/.*Release PR: //p' | tail -1) + if [ -n "$PR_URL" ]; then + echo "has_bumps=true" >> "$GITHUB_OUTPUT" + echo "pr_url=$PR_URL" >> "$GITHUB_OUTPUT" + else + echo "has_bumps=false" >> "$GITHUB_OUTPUT" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # RELEASE: Tag merge commit and create GitHub Release + # ═══════════════════════════════════════════════════════════════════════ + release: + name: Tag and Release + if: | + (github.event_name == 'pull_request' && + github.event.pull_request.merged == true && + contains(github.event.pull_request.labels.*.name, 'autorelease: pending')) || + (github.event_name == 'workflow_dispatch' && inputs.action == 'release') + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + release_url: ${{ steps.release.outputs.release_url }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Configure git identity + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Java)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace java plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no tags or releases will be created" + else + echo "::notice::LIVE RUN — tags and GitHub Release will be created" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Run releasekit release + id: release + run: | + set -euo pipefail + + DRY_RUN_FLAG="" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + DRY_RUN_FLAG="--dry-run" + fi + + OUTPUT=$(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace java release $DRY_RUN_FLAG 2>&1) || EXIT_CODE=$? + echo "$OUTPUT" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit release failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + RELEASE_URL=$(echo "$OUTPUT" | sed -n 's/.*release_url=//p' | tail -1) + echo "release_url=$RELEASE_URL" >> "$GITHUB_OUTPUT" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # ═══════════════════════════════════════════════════════════════════════ + # PUBLISH: Build and publish artifacts to Maven Central + # + # Uses Gradle's publishToMavenCentral task (or publishToSonatype) + # with GPG signing. The staging target publishes to Sonatype OSSRH + # staging for manual review before promotion. 
+ # ═══════════════════════════════════════════════════════════════════════ + publish: + name: Publish to ${{ inputs.target || 'maven-central' }} + needs: release + if: inputs.skip_publish != 'true' + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Install uv and setup Python + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + python-version: "3.12" + + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ env.JAVA_VERSION }} + + - uses: gradle/actions/setup-gradle@v4 + with: + gradle-version: ${{ env.GRADLE_VERSION }} + + - name: Import GPG signing key + run: | + echo "${{ secrets.GPG_SIGNING_KEY }}" | base64 -d | gpg --batch --import + env: + GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} + + - name: Build Java artifacts + working-directory: ${{ env.WORKSPACE_DIR }} + run: gradle build --no-daemon + + - name: Install releasekit + working-directory: ${{ env.RELEASEKIT_DIR }} + run: uv sync + + - name: Preview execution plan + run: | + echo "::group::Execution Plan (Java)" + uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace java plan --format full 2>&1 || true + echo "::endgroup::" + if [ "${{ env.DRY_RUN }}" = "true" ]; then + echo "::notice::DRY RUN — no artifacts will be published" + else + echo "::notice::LIVE RUN — artifacts will be published to ${{ inputs.target || 'maven-central' }}" + fi + + - name: Run releasekit publish + run: | + set -euo pipefail + + cmd=(uv run --directory ${{ env.RELEASEKIT_DIR }} releasekit --workspace java publish --force) + + if [ "${{ env.DRY_RUN }}" = "true" ]; then + cmd+=(--dry-run) + fi + + CONCURRENCY="${{ inputs.concurrency }}" + if [ -n "$CONCURRENCY" ] && [ "$CONCURRENCY" != "0" ]; then + cmd+=(--concurrency "$CONCURRENCY") + fi + + if [ -n "${{ inputs.group }}" ]; then + cmd+=(--group "${{ inputs.group }}") + fi + + MAX_RETRIES="${{ inputs.max_retries }}" + if [ -n "$MAX_RETRIES" ] && [ "$MAX_RETRIES" != "0" ]; then + cmd+=(--max-retries "$MAX_RETRIES") + fi + + echo "::group::Running: ${cmd[*]}" + OUTPUT=$("${cmd[@]}" 2>&1) || EXIT_CODE=$? 
+ echo "$OUTPUT" + echo "::endgroup::" + if [ "${EXIT_CODE:-0}" -ne 0 ]; then + echo "::error::releasekit publish failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + env: + OSSRH_USERNAME: ${{ secrets.OSSRH_USERNAME }} + OSSRH_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} + GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} + + - name: Upload manifest artifact + if: success() && env.DRY_RUN != 'true' + uses: actions/upload-artifact@v4 + with: + name: release-manifest-java + path: ${{ env.WORKSPACE_DIR }}/.releasekit-state.json + retention-days: 90 + + # ═══════════════════════════════════════════════════════════════════════ + # NOTIFY: Post-release notifications + # ═══════════════════════════════════════════════════════════════════════ + notify: + name: Notify Downstream + needs: publish + if: success() + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Dispatch release event + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + event-type: genkit-java-release + client-payload: '{"release_url": "${{ needs.release.outputs.release_url }}"}' diff --git a/py/tools/releasekit/github/workflows/releasekit-pnpm.yml b/py/tools/releasekit/github/workflows/releasekit-pnpm.yml index 8a97ce732c..d4624aa64b 100644 --- a/py/tools/releasekit/github/workflows/releasekit-pnpm.yml +++ b/py/tools/releasekit/github/workflows/releasekit-pnpm.yml @@ -35,8 +35,8 @@ # ── Automatic Flow ────────────────────────────────────────────────── # # push to main ──► releasekit prepare --workspace js ──► Release PR (js) -# (js/**, releasekit prepare --workspace js-cli ──► Release PR (js-cli) -# genkit-tools/**) │ +# (js/** or releasekit prepare --workspace js-cli ──► Release PR (js-cli) +# typescript/**) │ # merge PR # │ # ▼ @@ -161,6 +161,7 @@ on: branches: [main] paths: - "js/**" + - "typescript/**" - "genkit-tools/**" pull_request: types: [closed] diff --git a/py/tools/releasekit/github/workflows/releasekit-uv.yml b/py/tools/releasekit/github/workflows/releasekit-uv.yml index bf3a258678..b3ba4d8703 100644 --- a/py/tools/releasekit/github/workflows/releasekit-uv.yml +++ b/py/tools/releasekit/github/workflows/releasekit-uv.yml @@ -32,8 +32,8 @@ # ── Automatic Flow ────────────────────────────────────────────────── # # push to main ──► releasekit prepare ──► Release PR -# (py/packages/** (autorelease: pending) -# py/plugins/**) │ +# (py/** or python/**) (autorelease: pending) +# │ # merge PR # │ # ▼ @@ -160,6 +160,8 @@ on: paths: - "py/packages/**" - "py/plugins/**" + - "python/packages/**" + - "python/plugins/**" pull_request: types: [closed] branches: [main] diff --git a/py/tools/releasekit/pyproject.toml b/py/tools/releasekit/pyproject.toml index ff671222c2..f5da2863c0 100644 --- a/py/tools/releasekit/pyproject.toml +++ b/py/tools/releasekit/pyproject.toml @@ -44,6 +44,9 @@ dependencies = [ "jinja2>=3.1.0", # Release notes templates "diagnostic>=3.0.0", # Rust-style error rendering "httpx>=0.27.0", # Async HTTP with connection pooling (PyPIBackend) + "opentelemetry-api>=1.20.0", # Distributed tracing API + "opentelemetry-sdk>=1.20.0", # Distributed tracing SDK + "sigstore>=3.0.0", # Keyless artifact signing via Sigstore OIDC "strenum>=0.4.15; python_version < '3.11'", # StrEnum backport for 3.10 ] description = "Release orchestration for polyglot monorepos" @@ -53,9 +56,6 @@ readme = "README.md" requires-python = ">=3.10" version = "0.1.0" -[project.optional-dependencies] -tracing = ["opentelemetry-api>=1.20.0", "opentelemetry-sdk>=1.20.0"] - [project.scripts] releasekit = 
"releasekit.cli:_main" @@ -70,7 +70,12 @@ packages = ["src/releasekit"] "LICENSE" = "releasekit/LICENSE" [dependency-groups] -dev = ["pytest>=8.0.0", "pytest-asyncio>=0.25.0", "pytest-cov>=7.0.0"] +dev = [ + "jsonschema>=4.26.0", + "pytest>=8.0.0", + "pytest-asyncio>=0.25.0", + "pytest-cov>=7.0.0", +] [tool.pytest.ini_options] addopts = "--cov=releasekit --cov-config=pyproject.toml --cov-report=term-missing" @@ -90,6 +95,6 @@ exclude_also = [ "@overload", "raise NotImplementedError", ] -fail_under = 80 +fail_under = 90 show_missing = true skip_covered = false diff --git a/py/tools/releasekit/roadmap.md b/py/tools/releasekit/roadmap.md index d299cfc1c8..517b91b235 100644 --- a/py/tools/releasekit/roadmap.md +++ b/py/tools/releasekit/roadmap.md @@ -494,7 +494,8 @@ Remaining migration steps: | 4c: UI States | ✅ Complete | observer.py, sliding window, keyboard shortcuts, signal handlers | | 5: Release-Please | ✅ Complete | Orchestrators, CI workflow, workspace-sourced deps | | 6: UX Polish | ✅ Complete | init, formatters (9), rollback, completion, diagnostics, granular flags, TOML config migration | -| 7: Quality + Ship | 🔶 In progress | 1,293 tests pass, 76 source modules, 51 test files (~19.3K test LOC) | +| 7: Quality + Ship | 🔶 In progress | 1,739 tests pass, 78 source modules, 64 test files (~28K test LOC), 91.07% coverage | +| 8: Release Automation | ⬜ Planned | Continuous deploy, cadence releases, hooks, branch channels (from competitive analysis) | ### Phase 5 completion status @@ -879,7 +880,7 @@ Phase 6: UX Polish ▼ ✅ COMPLETE │ Phase 7: Quality + Ship ▼ 🔶 IN PROGRESS ┌─────────────────────────────────────────────────────────┐ -│ tests (1,293 tests, 51 files, ~19.3K lines) │ +│ tests (1,470 tests, 56 files, ~24K lines) │ │ type checking (ty, pyright, pyrefly -- zero errors) │ │ README.md (21 sections, mermaid diagrams) │ │ workspace config (releasekit init on genkit repo) │ @@ -1395,13 +1396,184 @@ deletion. Shell completion works in bash/zsh/fish. | Type checking | Zero errors from `ty`, `pyright`, and `pyrefly` in strict mode. | config | | `README.md` | 21 sections with Mermaid workflow diagrams, CLI reference, config reference, testing workflow, vulnerability scanning, migration guide. | ~800 | | Workspace config | Run `releasekit init` on the genkit repo. Review auto-detected groups. Commit generated config. | config | -| `migrate.py` | `releasekit migrate` subcommand for mid-stream adoption. See details below. | ~200 | +| `migrate.py` | ✅ `releasekit migrate` subcommand for mid-stream adoption. See details below. | 119 src + 34 tests | **Done when**: `pytest --cov-fail-under=90` passes, all three type checkers report zero errors, README is complete. **Milestone**: Ship `releasekit` v0.1.0 to PyPI. +### Phase 8: Release Automation ⬜ Planned + +Features identified through competitive analysis against release-it, +semantic-release, and other alternatives. See +[competitive-gap-analysis.md](docs/competitive-gap-analysis.md) §8 for +full rationale. + +| Item | Module | Description | Est. Lines | Priority | +|------|--------|-------------|-----------|----------| +| **R1** | `config.py`, `cli.py` | **Continuous deploy mode** — `release_mode = "continuous"` config. In this mode, `releasekit publish` skips PR creation and goes directly to tag + publish. `--if-needed` flag exits 0 if no releasable changes. Idempotent: checks if HEAD already has a release tag → no-op. Uses release lock for concurrent CI safety. 
| ~120 | High | +| **R2** | `should_release.py`, `cli.py` | **`releasekit should-release`** command for CI cron integration. Returns exit 0 if a release should happen based on: (a) releasable commits exist, (b) within release window, (c) cooldown elapsed, (d) minimum bump met. Designed for `cron` + `should-release || exit 0` pattern. | ~100 | High | +| **R3** | `hooks.py`, `publisher.py` | **Lifecycle hooks** — `[hooks]` section in `releasekit.toml`. `before_publish`, `after_publish`, `after_tag`, `before_prepare` keys, each a list of shell commands. Template variables: `${version}`, `${name}`, `${tag}`. Executed via `_run.py` subprocess abstraction. Dry-run aware. | ~150 | High | +| **R4** | `config.py` | **Scheduled release config** — `[schedule]` section: `cadence` (`daily`, `weekly:monday`, `biweekly`, `on-push`), `release_window` (UTC time range), `cooldown_minutes`, `min_bump` (skip if only chore/docs). Read by `should-release` command. | ~60 | Medium | +| **R5** | `config.py`, `versioning.py` | **Branch-to-channel mapping** — `[branches]` config section: `main = "latest"`, `"release/v1.*" = "v1-maintenance"`, `"next" = "next"`. Maps current branch to a release channel for dist-tag / pre-release suffix. | ~80 | Medium | +| **R6** | `config.py`, `versioning.py` | **CalVer support** — Calendar-based versioning (`YYYY.MM.DD`, `YYYY.MM.MICRO`). New `versioning_scheme = "calver"` config. Compute version from date instead of semver bump. | ~100 | Low | + +#### Configuration Override Hierarchy + +All Phase 8 settings follow the existing 3-tier config model. Workspace +sections can override root-level defaults; package-level `releasekit.toml` +can override workspace settings where it makes sense. + +**Resolution order**: package > workspace > root > built-in default. 
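+
+As a concrete illustration of that resolution order, here is a minimal
+Python sketch (hypothetical helper and dict-shaped layers, not the actual
+releasekit config code):
+
+```python
+# Illustrative only: the "package > workspace > root > built-in default"
+# lookup described above, modeled with plain dicts.
+from typing import Any
+
+_UNSET = object()
+
+
+def resolve_setting(
+    key: str,
+    *,
+    package: dict[str, Any],
+    workspace: dict[str, Any],
+    root: dict[str, Any],
+    default: Any = None,
+) -> Any:
+    """Return the first value found, scanning package, then workspace, then root."""
+    for layer in (package, workspace, root):
+        value = layer.get(key, _UNSET)
+        if value is not _UNSET:
+            return value
+    return default
+
+
+root_cfg = {'concurrency': 5, 'max_retries': 2}
+ws_cfg = {'concurrency': 3}       # e.g. a [workspace.py] override for slower PyPI
+pkg_cfg: dict[str, Any] = {}      # package level sets nothing in this example
+
+assert resolve_setting('concurrency', package=pkg_cfg, workspace=ws_cfg, root=root_cfg) == 3
+assert resolve_setting('max_retries', package=pkg_cfg, workspace=ws_cfg, root=root_cfg) == 2
+assert resolve_setting('poll_timeout', package=pkg_cfg, workspace=ws_cfg, root=root_cfg, default=30.0) == 30.0
+```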
+ +##### Phase 8 settings — override scope + +| Setting | Root | Workspace | Package | Rationale | +|---------|:----:|:---------:|:-------:|-----------| +| `release_mode` | ✅ | ✅ | ❌ | JS might use continuous deploy while Python uses PR-based | +| `[schedule]` (all keys) | ✅ | ✅ | ❌ | Python daily, JS weekly — different ecosystem velocity | +| `[hooks]` (all keys) | ✅ | ✅ | ✅ | Root hooks run for all; workspace adds ecosystem-specific; package for special cases | +| `[branches]` | ✅ | ✅ | ❌ | JS might ship `next` channel while Python doesn't | +| `versioning_scheme` | ✅ | ✅ | ❌ | One workspace CalVer, another semver | +| `calver_format` | ✅ | ✅ | ❌ | Follows `versioning_scheme` | + +##### Existing settings gaining workspace-level override + +These settings currently only exist at root level but should be +overridable per-workspace: + +| Setting | Current Scope | New Scope | Rationale | +|---------|:------------:|:---------:|-----------| +| `publish_from` | Root | Root + Workspace | Python from CI, Go via git tags locally | +| `concurrency` | Root | Root + Workspace | PyPI is slower than npm — different limits | +| `max_retries` | Root | Root + Workspace | npm rarely needs retries, PyPI often does | +| `poll_timeout` | Root | Root + Workspace | Maven Central ~10min sync vs PyPI ~30s | +| `verify_checksums` | Root | Root + Workspace | Not all registries support checksum verification | +| `major_on_zero` | Root | Root + Workspace | JS commonly ships 0.x breaking changes, Python doesn't | +| `prerelease_mode` | Root | Root + Workspace | Different rollup strategies per ecosystem | + +Settings that remain **root-only** (not overridable): + +| Setting | Rationale | +|---------|-----------| +| `pr_title_template` | Single PR spans all workspaces — one title | +| `http_pool_size` | Shared connection pool across all registries | +| `forge` | One forge per repo | +| `repo_owner`, `repo_name` | One repo identity | +| `default_branch` | One default branch | + +##### Hook merge semantics + +Hooks **concatenate** across tiers (root → workspace → package), not +replace. This ensures global hooks (e.g. lint) always run while +workspace/package hooks add specifics. + +```toml +# Root releasekit.toml +[hooks] +before_publish = ["./scripts/lint.sh"] + +# [workspace.py] section (or workspace-level hooks) +[workspace.py.hooks] +before_publish = ["./scripts/build-docs.sh"] + +# py/packages/genkit/releasekit.toml (package-level) +[hooks] +before_publish = ["./scripts/validate-schema.sh"] +``` + +Effective execution order for `genkit` (Python workspace): + +``` +before_publish: + 1. ./scripts/lint.sh ← root + 2. ./scripts/build-docs.sh ← workspace.py + 3. 
./scripts/validate-schema.sh ← package +``` + +To **replace** instead of concatenate, set `hooks_replace = true` at +the workspace or package level: + +```toml +# py/packages/special/releasekit.toml +hooks_replace = true # discard root + workspace hooks for this package + +[hooks] +before_publish = ["./scripts/special-only.sh"] +``` + +##### Example: multi-workspace with per-workspace overrides + +```toml +# releasekit.toml (root) +forge = "github" +repo_owner = "firebase" +repo_name = "genkit" +default_branch = "main" +release_mode = "pr" # default for all workspaces +major_on_zero = false +concurrency = 5 +max_retries = 2 + +[schedule] +cadence = "on-push" # default: release on every push + +[hooks] +after_publish = ["./scripts/notify-slack.sh ${version}"] + +[branches] +main = "latest" + +[workspace.py] +ecosystem = "python" +tool = "uv" +root = "py" +tag_format = "{name}@{version}" +concurrency = 3 # override: PyPI is slower +max_retries = 3 # override: PyPI needs more retries +poll_timeout = 60.0 # override: PyPI indexing delay +major_on_zero = false + +[workspace.py.schedule] +cadence = "daily" # override: Python releases daily +release_window = "14:00-16:00" +cooldown_minutes = 120 +min_bump = "patch" + +[workspace.py.hooks] +before_publish = ["./scripts/build-wheels.sh"] + +[workspace.py.branches] +main = "latest" +"release/v1.*" = "v1-maintenance" + +[workspace.js] +ecosystem = "js" +tool = "pnpm" +root = "." +tag_format = "{name}@{version}" +release_mode = "continuous" # override: JS ships on every push +synchronize = true +concurrency = 10 # override: npm is fast +verify_checksums = false # override: npm doesn't support it + +[workspace.js.schedule] +cadence = "on-push" # override: continuous + +[workspace.js.branches] +main = "latest" +next = "next" +beta = "beta" +``` + +**Done when**: `releasekit publish --if-needed` works in continuous mode, +`releasekit should-release` integrates with CI cron, hooks execute at +lifecycle points, workspace-level overrides resolve correctly. + +**Milestone**: First release tool with built-in cadence release support +and per-workspace override hierarchy. + #### `releasekit migrate` — Automatic Tag Detection and Bootstrap When adopting releasekit on a repo that already has releases, the user @@ -1517,9 +1689,10 @@ shell completion) is enhancement. | 4b: Streaming Publisher | 2 (+tests) | ~250 | 541 src + ~640 tests | ✅ Complete | | 5: Post-Pipeline + CI | 5 (+CI workflow) | ~700 | prepare, release, tags, changelog, release_notes | ✅ Complete | | 6: UX Polish | 3 (+ 9 formatters) | ~570 | init + formatters + config migration | ✅ Complete | -| 7: Quality + Ship | tests + docs | ~2800 | 706 tests pass | 🔶 In progress | +| 7: Quality + Ship | tests + docs | ~2800 | 1,739 tests pass, 91.07% coverage | 🔶 In progress | +| 8: Release Automation | 6 modules | ~610 | — | ⬜ Planned | -**Current totals**: 16,783 lines source, 12,105 lines tests, 706 tests pass. +**Current totals**: ~17,400 lines source, ~28K lines tests, 1,739 tests pass, 91.07% coverage. All three type checkers (ty, pyrefly, pyright) report zero errors. --- @@ -1789,6 +1962,943 @@ The 3-stage process separates planning, tagging, and publishing. └───────────────────────────────────────────────────────────────┘ ``` +## Bazel Backend — Polyglot Build & Publish Support + +Bazel is a polyglot build system used by many of the largest open-source +projects. 
This section defines the implementation plan for adding Bazel +support to releasekit, informed by analysis of 20 major Bazel repositories: + +**Library publishing repos** (Phase 1–2): + +| Repo | Languages | Publish Pattern | Version Pattern | +|------|-----------|----------------|-----------------| +| google/dotprompt | 7 (Java, Python, Go, Rust, Dart, JS, Kotlin) | `java_export .publish` (Java), `dart_pub_publish` (Dart) | `maven_coordinates` inline (Java), `pubspec.yaml version:` (Dart) | +| protocolbuffers/protobuf | 8+ (C++, Java, Python, Ruby, Rust, etc.) | `java_export .publish` | `.bzl` constants | +| google/dagger | Java/Kotlin/Android | `bazel build` + `mvn deploy` | Placeholder (`${project.version}`) | +| angular/angular | JS/TS | Custom `ng-dev` + npm | Placeholder (`0.0.0-PLACEHOLDER`) | +| tensorflow/tensorflow | C++/Python | `bazel build :wheel` + twine | `.bzl` constant (`TF_VERSION`) | +| grpc/grpc | C++/Python/Ruby/PHP/etc. | `setup.py` + twine | `MODULE.bazel` + 15× `grpc_version.py` | + +**Container / binary release repos** (Phase 4): + +| Repo | Languages | Publish Pattern | Version Pattern | +|------|-----------|----------------|-----------------| +| GoogleContainerTools/distroless | Starlark/Go | `rules_oci` `oci_push` + cosign sign + SBOM attest | Commit SHA tags (no semver) | +| buchgr/bazel-remote | Go | `rules_oci` `oci_push` + multi-arch `go_binary` | `x_defs` git stamp (`{STABLE_GIT_COMMIT}`) | +| buildfarm/buildfarm | Java | `rules_oci` `oci_push` + `java_binary` | `MODULE.bazel` (no version field) | +| envoyproxy/envoy | C++ | GitHub Release + Docker | `VERSION.txt` plain text | +| google/gvisor | Go | Buildkite + GitHub Release | Git tags (injected at build) | +| ApolloAuto/apollo | C++ | Docker images | `version.json` | + +**Bazel rules / ecosystem repos** (Phase 4 — BCR): + +| Repo | Languages | Publish Pattern | Version Pattern | +|------|-----------|----------------|-----------------| +| bazel-contrib/rules_oci | Starlark/Go | GitHub Release + BCR PR | `MODULE.bazel` `module(version=)` | +| bazelbuild/rules_apple | Starlark | GitHub Release + BCR PR | `MODULE.bazel` `version = "0"` (placeholder) | +| bazelbuild/apple_support | Starlark | GitHub Release + BCR PR | `MODULE.bazel` (no version field) | +| bazel-ios/rules_ios | Starlark | GitHub Release + BCR PR | `MODULE.bazel` `version = "0"` (placeholder) | +| bazelbuild/rules_swift | Starlark | GitHub Release + BCR PR | `MODULE.bazel` `version = "0"` (placeholder) | +| aspect-build/rules_ts | Starlark | GitHub Release + BCR PR | `MODULE.bazel` (no version field) | +| bazelbuild/rules_python | Starlark | GitHub Release + BCR PR | `MODULE.bazel` `module(version=)` | + +### Key Findings + +**10 version patterns** discovered across 20 repos: + +``` +Pattern Format Used By +───────────────────────────────────────────────────────────────────────────── +module() module(version = "X.Y.Z") envoy, gvisor, rules_python, rules_oci +maven_coordinates maven_coordinates = "g:a:X.Y.Z" dotprompt +bzl_constant VAR = "X.Y.Z" in *.bzl tensorflow, protobuf +version_txt Plain text file envoy +version_json JSON with per-language versions protobuf, apollo +placeholder Sentinel replaced at build/release dagger, angular +build_define --define=pom_version=X build flag dagger +x_defs / embed_label Go x_defs stamp or --embed_label bazel-remote, gvisor, rules_oci +pubspec_yaml version: X.Y.Z in pubspec.yaml dotprompt (Dart) +package_json "version": "X.Y.Z" in package.json rules_js (npm) +``` + +**12 publish patterns** discovered: + 
+``` +Pattern Flow Used By +───────────────────────────────────────────────────────────────────────────── +java_export .publish bazel run //target:name.publish dotprompt, protobuf +mvn_deploy bazel build → mvn deploy:deploy-file dagger +bazel_wheel bazel build :wheel → twine upload tensorflow +native_tool bazel build → ecosystem-native publish angular (npm), grpc (twine) +github_release GitHub Release + BCR PR / Docker envoy, abseil, rules_python +oci_push rules_oci oci_push → crane push distroless, bazel-remote, buildfarm +oci_push + cosign oci_push + cosign sign + SBOM attest distroless +dart_pub_publish bazel run → dart pub publish (pub.dev) dotprompt (Dart) +npm_package .publish bazel run //pkg:name.publish (npm) rules_js +py_wheel .publish bazel run :wheel.publish (twine→PyPI) rules_python +kt_jvm_export .publish bazel run :name.publish (Maven, Kotlin) rules_jvm_external +dotnet publish_binary bazel run :publish (dotnet nuget push) rules_dotnet +``` + +**WORKSPACE vs bzlmod**: No special handling needed. The difference only +affects dependency resolution (`lock()` command). Build, test, publish, +and version rewriting work identically. WORKSPACE is deprecated in +Bazel 8+; all new projects use bzlmod. + +### New Findings from Round 2 Analysis (9 repos + proposals) + +**Container publishing via `rules_oci`** is the dominant pattern: + +- **distroless**: The canonical example. Uses `sign_and_push_all` macro that + wraps `oci_push` + `cosign sign` + `cosign attest` (SBOM via SPDX). + Images are pushed by digest first, then tagged. Cosign keyless signing + uses OIDC (Google accounts). Multi-arch via `oci_image_index`. No semver — + tags are `latest`, `nonroot`, `debug`, `debug-nonroot` × arch × distro. +- **bazel-remote**: Go binary cross-compiled to 5 platform variants + (`linux-amd64`, `linux-arm64`, `darwin-amd64`, `darwin-arm64`). Version + injected via `x_defs` (`main.gitCommit = "{STABLE_GIT_COMMIT}"`). OCI + images built with `oci_image` + `pkg_tar`, pushed via `oci_push` to + Docker Hub. Multi-arch images for amd64 + arm64. +- **buildfarm**: Java binaries (`java_binary`) packaged into OCI images + with Amazon Corretto base. Uses `rules_oci` `oci.pull` for base images + with digest pinning and multi-platform support (`linux/amd64`, `linux/arm64/v8`). + Maven deps managed via `rules_jvm_external` with `REPIN=1 bazel run @maven//:pin`. + +**Design implications for B16 (ContainerBackend)**: + +1. `oci_push` is the standard — `bazel run //:push` with `--repository` override +2. Cosign signing is table-stakes for production containers (keyless via OIDC) +3. SBOM attestation via `cosign attest --type spdx` is emerging standard +4. Multi-arch is handled by `oci_image_index` (not releasekit's concern) +5. Version tagging: `crane tag` after push-by-digest (atomic push pattern) +6. Container registries: GCR, Docker Hub, ECR, GHCR all supported by crane + +**Apple platform rules ecosystem** (informs Phase 3): + +- **rules_apple**: The authoritative iOS/macOS/tvOS/watchOS rules. Key rules: + `ios_application`, `ios_extension`, `ios_framework`, `ios_static_framework`, + `ios_app_clip`. Versioning via `apple_bundle_version` rule which sets + `CFBundleVersion` + `CFBundleShortVersionString`. Supports hard-coded + versions or `--embed_label` parsing with regex capture groups and fallback. + Provisioning profiles handled via `provisioning_profile_repository` extension. +- **apple_support**: Toolchain layer. Provides `apple_cc_configure` for + cross-compilation. 
No versioning or publishing — pure build infrastructure. +- **rules_ios**: Community rules wrapping `rules_apple`. Adds `apple_library` + convenience macro, `process_infoplists`, `framework_middleman`. Depends on + `rules_apple` + `rules_swift`. Uses `arm64-to-sim` for simulator builds. +- **rules_swift**: Swift compilation rules. `swift_library`, `swift_binary`, + `swift_test`. Depends on `apple_support`. Provides Swift toolchain + registration. No publishing — build-only. + +**Design implications for B13 (AppStoreConnectBackend)**: + +1. `apple_bundle_version` is the version source of truth for iOS apps — + releasekit must rewrite the `build_version` and `short_version_string` + attributes (or the `--embed_label` flag) +2. Provisioning profiles are local-only (`local_provisioning_profiles` repo) — + releasekit should validate they exist but not manage them +3. Code signing is handled by `rules_apple` at build time — releasekit + only needs to ensure the right signing identity is configured +4. The `xcarchive` rule produces `.xcarchive` bundles for App Store submission +5. `rules_ios` is a convenience layer — target `rules_apple` directly + +**TypeScript / JavaScript rules and npm publishing** (informs Phase 1 JS support): + +- **rules_ts** (`aspect-build/rules_ts`): `ts_project` macro wraps + TypeScript compilation. Version stamping via `expand_template` + + `stamp_substitutions` (same `BUILD_EMBED_LABEL` pattern as + `rules_oci`). No publishing — TS output is consumed by `rules_js`. +- **rules_js** (`aspect-build/rules_js`): `npm_package` macro packages + JS/TS output for npm. With `publishable = True`, produces a + `[name].publish` target that runs `npm publish` under the hood — + same `.publish` pattern as `java_export` in `rules_jvm_external`. + Supports npm workspaces for monorepo packages. Version source is + `package.json` `"version"` field (10th version pattern). + Auth via `NPM_TOKEN` env var or `.npmrc` file. + Lock via `pnpm install --frozen-lockfile` or `npm ci`. +- **Pipeline**: `rules_ts` (compile TS) → `rules_js` `npm_package` + (package) → `npm_package.publish` (publish to npm registry). + +**Dart rules and pub.dev publishing** (informs Phase 1 Dart support): + +- **rules_dart** (in `google/dotprompt` at `bazel/rules_dart/`): Full-featured + Dart Bazel rules with `dart_library`, `dart_binary`, `dart_test`, + `dart_native_binary`, `dart_js_binary`, `dart_wasm_binary`, + `dart_aot_snapshot`, `dart_pub_get`, and crucially **`dart_pub_publish`**. + The `dart_pub_publish` macro wraps `dart pub publish` as a Bazel run target. + Includes Gazelle extension for BUILD file generation, `dart_deps` module + extension for `pubspec.lock`-based dependency resolution, toolchain + abstraction, persistent workers, coverage, proto/gRPC codegen, and + `build_runner` integration. BCR-ready with `.bcr/` metadata directory. + Also has a companion `rules_flutter` for Flutter app builds. +- **dotprompt Dart packages**: Two packages — `dotprompt` (core library) and + `handlebarrz` (Handlebars template engine). Both use `dart_library` + + `dart_test` in BUILD.bazel. Version in `pubspec.yaml` (`version: 0.0.1`). + **Blockers for publishing**: + 1. `dart/dotprompt/pubspec.yaml` has `publish_to: none` — must be removed + 2. 
Neither BUILD.bazel has a `dart_pub_publish` target — must be added + + The `handlebarrz` `path:` dependency is **not** a blocker — releasekit's + ephemeral pinning handles it automatically (same pattern as Genkit's + Python monorepo: temporarily rewrite `path: ../handlebarrz` → + `handlebarrz: 0.0.2` during publish, revert after). +- **Version pattern**: `pubspec.yaml` `version:` field — new 9th pattern. + Regex: `version:\s*(\d+\.\d+\.\d+.*)` in YAML. Releasekit's + `BazelVersionRewriter` needs a `pubspec_yaml` handler. +- **Publish pattern**: `dart_pub_publish` — `bazel run //pkg:publish` invokes + `dart pub publish`. Requires `PUB_TOKEN` env var for authentication + (or `dart pub token add` pre-configured). New 8th publish pattern. +- **Lock pattern**: `dart_pub_get` — `bazel run //pkg:pub_get` invokes + `dart pub get`, updating `pubspec.lock`. Releasekit's `lock()` should + call this target. + +**Bazel proposals** — relevant design docs: + +- **Build Stamping API for Starlark rules** (implemented): Formalizes + `--stamp` / `--embed_label` / `ctx.version_file` for injecting version + info at build time. Releasekit should support `--embed_label` as a + version injection mechanism (already covered by `build_define` pattern). +- **Bzlmod lockfile** (implemented): `MODULE.bazel.lock` is the lockfile. + Releasekit's `lock()` command should run `bazel mod deps --lockfile_mode=update`. +- **Bazel Central Registry Policies** (implemented): BCR requires a + `.bcr/` directory with `metadata.json`, `presubmit.yml`, and `source.json`. + Releasekit's B15 (BcrBackend) must generate these files. +- **Android Native to Starlark Migration** (implemented): Android rules + are now Starlark-based. No impact on releasekit — `android_binary` API + is stable. +- **Simplifying lockfile maintenance** (implemented): `bazel mod tidy` + auto-fixes `use_repo` statements. Releasekit should call this after + version bumps that affect MODULE.bazel. + +### Dependency Graph (Bazel + Gradle Tasks) + +Each node is a discrete implementation task. Edges represent "must be +done before" relationships. 
+ +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ Bazel + Gradle Backend Dependency Graph │ +│ │ +│ B1: Config fields ──────────────────────────────────────────────────────┐ │ +│ (bazel_* keys, ecosystems, │ │ +│ DEFAULT_TOOLS) │ │ +│ │ │ │ +│ ├──────────────────────┐ │ │ +│ │ │ │ │ +│ ▼ ▼ │ │ +│ B2: BazelVersionRewriter B3: BazelWorkspace │ │ +│ (10 version formats) (discover, rewrite, │ │ +│ │ 5 ephemeral pin modes) │ │ +│ │ │ │ │ +│ ▼ ▼ │ │ +│ B4: BazelBackend (PM) ◄──── B3 │ │ +│ (build, test, 8 publish │ │ +│ modes, lock, smoke_test) │ │ +│ │ │ │ +│ ▼ │ │ +│ B5: Dispatch wiring │ │ +│ (workspace.py, cli.py, │ │ +│ pm/__init__.py) │ │ +│ │ │ │ +│ ▼ │ │ +│ B6: Tests + lint │ │ +│ (config, workspace, PM, │ │ +│ version, dispatch) │ │ +│ │ │ │ +│ ▼ │ │ +│ B7: Integration test │ │ +│ (dry-run vs dotprompt) │ │ +│ │ │ +│ ═══════════════════════════ Phase 1 above ═══════════════════════════ │ │ +│ │ │ +│ B8: MavenCentralRegistry ◄── B4 │ │ +│ │ │ │ +│ ▼ │ │ +│ B9: Signing integration │ │ +│ │ │ │ +│ ▼ │ │ +│ B10: End-to-end publish │ │ +│ │ │ +│ ═══════════════════════════ Phase 2 above ═══════════════════════════ │ │ +│ │ │ +│ B11: AppStore protocol ◄── B4 │ │ +│ │ │ │ +│ ├──────────────────────┐ │ │ +│ ▼ ▼ │ │ +│ B12: PlayStoreBackend B13: AppStoreConnectBackend │ │ +│ │ │ │ │ +│ ▼ ▼ │ │ +│ B14: Flutter dispatch │ │ +│ │ │ +│ ═══════════════════════════ Phase 3 above ═══════════════════════════ │ │ +│ │ │ +│ B15: BcrBackend ◄── B1 │ │ +│ B16: ContainerBackend ◄── B4 │ │ +│ B17: BinaryReleaseBackend ◄── B4 │ │ +│ │ │ +│ ═══════════════════════════ Phase 4 above ═══════════════════════════ │ │ +│ │ │ +│ G1: Config fields (gradle_*) ◄── (independent, no Bazel deps) │ │ +│ │ │ │ +│ ├──────────────────────┐ │ │ +│ │ │ │ │ +│ ▼ ▼ │ │ +│ G2: GradleVersionRewriter G3: GradleWorkspace │ │ +│ (3 version formats) (discover, rewrite) │ │ +│ │ │ │ │ +│ ▼ ▼ │ │ +│ G4: GradleBackend (PM) ◄──── G3 │ │ +│ (build, 3 publish modes, │ │ +│ lock, smoke_test) │ │ +│ │ │ │ +│ ▼ │ │ +│ G5: Dispatch wiring │ │ +│ │ │ │ +│ ▼ │ │ +│ G6: Tests + lint │ │ +│ │ │ +│ ═══════════════════════════ Phase 5 above ═══════════════════════════ │ │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +### Reverse Topological Sort (Execution Order) + +Tasks sorted so that all dependencies are completed before dependents. +Within each level, tasks are independent and can be parallelized. +Note: Gradle (G*) tasks are fully independent of Bazel (B*) tasks and +can be developed in parallel. 
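+
+The listing below can be reproduced mechanically from the edges in the
+graph above. A minimal Python sketch (purely illustrative, not releasekit
+code) that derives the levels is shown here; the extra "phase gate" edges
+make each Bazel phase finish before the next one starts, matching the
+phase boundaries marked in the listing:
+
+```python
+# Illustrative only: Kahn-style leveling of the B*/G* dependency graph.
+# Gate edges (B7, B10, B14) encode "Phase N complete" before the next phase.
+DEPS: dict[str, list[str]] = {
+    # Bazel Phase 1
+    'B1': [], 'B2': ['B1'], 'B3': ['B1'], 'B4': ['B2', 'B3'],
+    'B5': ['B4'], 'B6': ['B5'], 'B7': ['B6'],
+    # Bazel Phase 2 (gated on B7)
+    'B8': ['B4', 'B7'], 'B9': ['B8'], 'B10': ['B9'],
+    # Bazel Phase 3 (gated on B10)
+    'B11': ['B4', 'B10'], 'B12': ['B11'], 'B13': ['B11'], 'B14': ['B12', 'B13'],
+    # Bazel Phase 4 (gated on B14)
+    'B15': ['B1', 'B14'], 'B16': ['B4', 'B14'], 'B17': ['B4', 'B14'],
+    # Gradle Phase 5 (independent of all B* tasks)
+    'G1': [], 'G2': ['G1'], 'G3': ['G1'], 'G4': ['G2', 'G3'],
+    'G5': ['G4'], 'G6': ['G5'],
+}
+
+
+def levels(deps: dict[str, list[str]]) -> list[list[str]]:
+    """Group tasks so that every task's dependencies sit in earlier levels."""
+    remaining = dict(deps)
+    done: set[str] = set()
+    out: list[list[str]] = []
+    while remaining:
+        ready = sorted(t for t, d in remaining.items() if set(d) <= done)
+        if not ready:
+            raise ValueError(f'dependency cycle among {sorted(remaining)}')
+        out.append(ready)
+        done.update(ready)
+        for t in ready:
+            del remaining[t]
+    return out
+
+
+for i, group in enumerate(levels(DEPS)):
+    print(f'Level {i:>2}: {", ".join(group)}')
+```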
+ +``` +Level 0 (no deps): B1 Config fields (Bazel) + G1 Config fields (Gradle) ← parallel +Level 1 (deps: B1): B2 BazelVersionRewriter + B3 BazelWorkspace +Level 1 (deps: G1): G2 GradleVersionRewriter ← parallel + G3 GradleWorkspace ← parallel +Level 2 (deps: B2, B3): B4 BazelBackend (PM) +Level 2 (deps: G2, G3): G4 GradleBackend (PM) ← parallel +Level 3 (deps: B4): B5 Dispatch wiring (Bazel) +Level 3 (deps: G4): G5 Dispatch wiring (Gradle) ← parallel +Level 4 (deps: B5): B6 Tests + lint (Bazel) +Level 4 (deps: G5): G6 Tests + lint (Gradle) ← parallel +Level 5 (deps: B6): B7 Integration test (Bazel) +─── Phase 1 complete ─── ─── Phase 5 complete ─── +Level 6 (deps: B4): B8 MavenCentralRegistry +Level 7 (deps: B8): B9 Signing integration +Level 8 (deps: B9): B10 End-to-end publish +─── Phase 2 complete ─── +Level 9 (deps: B4): B11 AppStore protocol +Level 10 (deps: B11): B12 PlayStoreBackend + B13 AppStoreConnectBackend +Level 11 (deps: B12,B13): B14 Flutter dispatch +─── Phase 3 complete ─── +Level 12 (deps: B1): B15 BcrBackend +Level 12 (deps: B4): B16 ContainerBackend + B17 BinaryReleaseBackend +─── Phase 4 complete ─── +``` + +### Mermaid (renders on GitHub) + +```mermaid +flowchart TD + subgraph phase_b1 ["Bazel Phase 1: Library Build + Discover"] + B1["B1: Config fields
(bazel_* keys, ecosystems, tools)"] + B2["B2: BazelVersionRewriter
(10 version formats)"] + B3["B3: BazelWorkspace
(discover, 5 pin modes)"] + B4["B4: BazelBackend PM
(build, test, 8 publish modes)"] + B5["B5: Dispatch wiring
(workspace.py, cli.py)"] + B6["B6: Tests + lint"] + B7["B7: Integration test
(dry-run vs dotprompt)"] + end + + subgraph phase_b2 ["Bazel Phase 2: Maven Central E2E"] + B8["B8: MavenCentralRegistry
(check, poll, latest)"] + B9["B9: Signing integration
(GPG + Sigstore)"] + B10["B10: End-to-end publish
(prepare → tag → publish)"] + end + + subgraph phase_b3 ["Bazel Phase 3: Mobile App Releases"] + B11["B11: AppStore protocol
(upload, promote, status)"] + B12["B12: PlayStoreBackend
(Android .apk/.aab)"] + B13["B13: AppStoreConnectBackend
(iOS .ipa, TestFlight)"] + B14["B14: Flutter dispatch
(reuses B12 + B13)"] + end + + subgraph phase_b4 ["Bazel Phase 4: Binary + BCR + Container"] + B15["B15: BcrBackend
(Bazel Central Registry)"] + B16["B16: ContainerBackend
(rules_oci + cosign)"] + B17["B17: BinaryReleaseBackend
(GitHub Release binaries)"] + end + + subgraph phase_g ["Phase 5: Gradle Backend"] + G1["G1: Config fields
(gradle_* keys)"] + G2["G2: GradleVersionRewriter
(3 version formats)"] + G3["G3: GradleWorkspace
(discover, rewrite)"] + G4["G4: GradleBackend PM
(build, 3 publish modes)"] + G5["G5: Dispatch wiring"] + G6["G6: Tests + lint"] + end + + B1 --> B2 + B1 --> B3 + B2 --> B4 + B3 --> B4 + B4 --> B5 + B5 --> B6 + B6 --> B7 + + B4 --> B8 + B8 --> B9 + B9 --> B10 + + B4 --> B11 + B11 --> B12 + B11 --> B13 + B12 --> B14 + B13 --> B14 + + B1 --> B15 + B4 --> B16 + B4 --> B17 + + G1 --> G2 + G1 --> G3 + G2 --> G4 + G3 --> G4 + G4 --> G5 + G5 --> G6 + + phase_b1 --> phase_b2 + phase_b2 --> phase_b3 + phase_b3 --> phase_b4 +``` + +### Bazel Phase 1: Library Build + Discover + +Goal: Build and discover Java artifacts from Bazel workspaces. Dry-run +validated against dotprompt and protobuf repo structures. + +| Task | Module | Description | Est. Lines | Deps | +|------|--------|-------------|-----------|------| +| **B1** | `config.py` | Add `bazel_*` fields to `VALID_WORKSPACE_KEYS` and `WorkspaceConfig`. Add `'android'`, `'ios'`, `'macos'`, `'cpp'`, `'bazel'` to `ALLOWED_ECOSYSTEMS`. Add `'bazel': 'bazel'` to `DEFAULT_TOOLS`. New fields: `bazel_build_target`, `bazel_test_target`, `bazel_publish_target`, `bazel_publish_mode` (`java_export` / `kt_jvm_export` / `mvn_deploy` / `dart_pub_publish` / `npm_package_publish` / `py_wheel_publish` / `dotnet_publish` / `custom`), `bazel_publish_script`, `bazel_version_file`, `bazel_version_format` (`module` / `maven_coordinates` / `bzl_constant` / `version_txt` / `version_json` / `placeholder` / `pubspec_yaml` / `package_json`), `bazel_version_variable`, `bazel_version_jsonpath`, `bazel_version_placeholder`, `bazel_maven_coordinates`, `bazel_signing` (`gpg` / `sigstore` / `both`), `bazel_build_defines`, `bazel_artifact_type` (`jar` / `aar` / `wheel` / `binary`), `bazel_lock_command`. | ~80 | — | +| **B2** | `backends/workspace/bazel_version.py` | `BazelVersionRewriter` with 10 format handlers: (1) `module` — regex `module(version = "...")` in MODULE.bazel; (2) `maven_coordinates` — regex `group:artifact:VERSION` in BUILD.bazel; (3) `bzl_constant` — regex `VARIABLE = "VERSION"` in .bzl; (4) `version_txt` — plain text file; (5) `version_json` — JSON path rewrite; (6) `placeholder` — string replacement of sentinel; (7) `build_define` — no file rewrite, returns `--define` flag; (8) `x_defs` — Go `x_defs` stamp vars or `--embed_label` (per bazel-remote/rules_oci pattern, returns build flags, no file rewrite); (9) `pubspec_yaml` — regex `version: X.Y.Z` in pubspec.yaml (per dotprompt Dart pattern); (10) `package_json` — regex `"version": "X.Y.Z"` in package.json (per rules_js npm pattern). Each handler: `read_version(path) -> str` and `write_version(path, new_version)`. | ~260 | B1 | +| **B3** | `backends/workspace/bazel.py` | `BazelWorkspace` implementing the `Workspace` protocol. `discover()` reads `bazel_maven_coordinates` from config, calls `BazelVersionRewriter.read_version()` to get current version, returns `list[Package]`. `rewrite_version()` delegates to `BazelVersionRewriter.write_version()`. `rewrite_dependency_version()` supports 5 ephemeral pinning modes: (A) Maven — rewrites `maven.install()` artifact versions in MODULE.bazel or WORKSPACE; (B) Dart `pubspec.yaml` — rewrites `path:` deps to hosted version (e.g. 
`path: ../handlebarrz` → `handlebarrz: 0.0.2`); (C) Bzlmod dev overrides — removes any of `local_path_override()`, `git_override()`, or `archive_override()` stanzas and updates `bazel_dep(version=)` in MODULE.bazel (for BCR publishing of monorepo rules like rules_flutter → rules_dart; all three override types follow the same strip-override + set-version pattern); (D) npm `package.json` — rewrites `workspace:*` deps to hosted version (e.g. `"workspace:*"` → `"^1.2.0"`) for npm monorepo packages; (E) `single_version_override` — updates the `version` arg in an existing `single_version_override()` stanza (for forcing a specific registry version without removing the override). | ~240 | B1, B2 | +| **B4** | `backends/pm/bazel.py` | `BazelBackend` implementing `PackageManager` protocol. `build()` → `bazelisk build ` with optional `--define` flags. `publish()` → 8 modes: (A) `java_export` — `bazel run .publish` with `MAVEN_REPO`/`MAVEN_USER`/`MAVEN_PASSWORD`/`GPG_SIGN` env vars; (B) `kt_jvm_export` — same as (A) but for Kotlin via `rules_jvm_external`; (C) `mvn_deploy` — `bazel build` + `mvn gpg:sign-and-deploy-file`; (D) `dart_pub_publish` — `bazel run ` invoking `dart pub publish` with `PUB_TOKEN`; (E) `npm_package_publish` — `bazel run .publish` invoking `npm publish` with `NPM_TOKEN` (rules_js); (F) `py_wheel_publish` — `bazel run .publish` invoking twine with `TWINE_USERNAME`/`TWINE_PASSWORD` (rules_python); (G) `dotnet_publish` — `bazel run :publish` invoking `dotnet nuget push` (rules_dotnet); (H) `custom` script. `lock()` → `bazelisk run ` (default `@maven//:pin`; Dart: `dart_pub_get`; npm: `pnpm install --frozen-lockfile`; Python: n/a; .NET: `dotnet restore`). `version_bump()` → delegates to `BazelWorkspace.rewrite_version()`. `smoke_test()` → `bazelisk test `. `resolve_check()` → per-ecosystem: `mvn dependency:get`, `dart pub deps`, `npm view`, `pip index versions`, `dotnet nuget list`. | ~300 | B1, B2, B3 | +| **B5** | `workspace.py`, `cli.py`, `pm/__init__.py` | Thread `tool` parameter through dispatch. In `workspace.py:_discover_via_backend()`: if `tool == 'bazel'` → `BazelWorkspace`. In `cli.py:_create_backends()`: if `tool == 'bazel'` → `BazelBackend`. Import and register in `pm/__init__.py`. | ~40 | B4 | +| **B6** | `tests/` | Unit tests: `rk_config_bazel_test.py` (config validation, 15+ tests), `rk_bazel_version_test.py` (10 format handlers × read/write, 26+ tests), `rk_bazel_workspace_test.py` (discover, rewrite, 12+ tests), `rk_pm_bazel_test.py` (protocol conformance, dry-run commands, 8 publish modes, 30+ tests), `rk_bazel_dispatch_test.py` (tool routing, 5+ tests). Lint pass. | ~1000 | B5 | +| **B7** | `tests/` | Integration test: clone dotprompt repo structure (fixtures), run `releasekit discover --dry-run` and `releasekit plan --dry-run` against it. Verify correct package discovery, version reading, and build command construction. | ~150 | B6 | + +**Done when**: `releasekit discover` finds Java packages in a Bazel +workspace, `releasekit plan` shows correct versions, `releasekit publish +--dry-run` constructs correct `bazel build` and `bazel run .publish` +commands. + +**Milestone**: Can dry-run a Bazel Java publish pipeline. + +### Bazel Phase 2: Maven Central End-to-End + +Goal: Actually publish Java JARs to Maven Central from a Bazel workspace. + +| Task | Module | Description | Est. Lines | Deps | +|------|--------|-------------|-----------|------| +| **B8** | `backends/registry/maven_central.py` | `MavenCentralRegistry` implementing `Registry` protocol. 
`check_published()` → query Maven Central Search API (`search.maven.org/solrsearch/select?q=g:GROUP+AND+a:ARTIFACT+AND+v:VERSION`). `poll_available()` → poll until artifact appears (Maven Central has ~10min sync delay). `latest_version()` → query latest from search API. `project_exists()` → check if group:artifact exists. `verify_checksum()` → download SHA-1 from Maven Central and compare. | ~200 | B4 | +| **B9** | `backends/pm/bazel.py` | Signing integration in `BazelBackend.publish()`. For `java_export` mode: set `GPG_SIGN=true`, `PGP_SIGNING_KEY`, `PGP_SIGNING_PWD` env vars (handled by rules_jvm_external uploader). For `mvn_deploy` mode: pass `-Dgpg.keyname=KEY` to mvn. For Sigstore: call `signing.sign_artifact()` on the built JAR in `bazel-bin/`. | ~60 | B8 | +| **B10** | Integration | End-to-end test: `releasekit prepare` → `releasekit tag` → `releasekit publish` against a real Bazel workspace publishing to Maven Central staging (OSSRH). Verify artifact appears, POM is correct, GPG signature validates. | ~100 | B9 | + +**Done when**: `releasekit publish` successfully uploads a signed JAR +to Maven Central from a Bazel workspace. + +**Milestone**: Production-ready Bazel → Maven Central pipeline. + +### Bazel Phase 3: Mobile App Releases (Future) + +Goal: Release Android and iOS apps built with Bazel or Flutter. + +App releases differ fundamentally from library publishing: + +``` +Libraries: build → sign → upload to registry → done +Apps: build → sign → upload to store → review → staged rollout +``` + +| Task | Module | Description | Est. Lines | Deps | +|------|--------|-------------|-----------|------| +| **B11** | `backends/app_store/__init__.py` | `AppStore` protocol: `upload(artifact, track, dry_run)`, `promote(version, from_track, to_track, rollout_fraction)`, `check_status(version)`. Tracks: `internal`, `alpha`, `beta`, `production`. | ~60 | B4 | +| **B12** | `backends/app_store/play_store.py` | `PlayStoreBackend`: Upload `.apk`/`.aab` via Google Play Developer API v3. Service account auth. Track management. Staged rollout support. Version code auto-increment. | ~300 | B11 | +| **B13** | `backends/app_store/app_store_connect.py` | `AppStoreConnectBackend`: Upload `.ipa` via App Store Connect API. API key auth (`.p8` file). TestFlight distribution. App Store submission. Version rewriting targets `apple_bundle_version` rule in BUILD.bazel — rewrites `build_version` and `short_version_string` attrs (or sets `--embed_label` flag with `build_label_pattern` + `capture_groups` regex parsing, per rules_apple's versioning.bzl). Validates provisioning profile exists via `local_provisioning_profiles` repo. Build via `bazel build` producing `.xcarchive` (rules_apple `xcarchive` rule) or `.ipa`. | ~350 | B11 | +| **B14** | `cli.py`, `config.py` | Flutter dispatch: `flutter_android_bundle` → `PlayStoreBackend`, `flutter_ios_app` → `AppStoreConnectBackend`. New config fields: `android_keystore`, `android_track`, `android_service_account`, `ios_team_id`, `ios_api_key`, `app_store_track`. | ~80 | B12, B13 | + +**Done when**: `releasekit publish` can upload an Android `.aab` to +Play Store internal track and an iOS `.ipa` to TestFlight. + +### Bazel Phase 4: Binary + BCR + Container (Future) + +Goal: Support non-registry release targets. + +| Task | Module | Description | Est. Lines | Deps | +|------|--------|-------------|-----------|------| +| **B15** | `backends/registry/bcr.py` | `BcrBackend`: Publish Bazel rules to Bazel Central Registry. 
Generate `.bcr/` metadata directory (`metadata.json` with versions/yanked_versions/maintainers, `presubmit.yml` with test matrix, `source.json` with GitHub archive URL + integrity hash). Create PR to `bazelbuild/bazel-central-registry` via Forge protocol. Rewrite `MODULE.bazel` `module(version=)`. Handle `version = "0"` placeholder pattern (rules_apple, rules_ios, rules_swift all use this — real version set only in BCR). | ~250 | B1 | +| **B16** | `backends/pm/container.py` | `ContainerBackend`: Wraps `rules_oci` `oci_push` pattern discovered in distroless/bazel-remote/buildfarm. Three-step atomic push: (1) `bazel run //:push -- --repository=REGISTRY/IMAGE` (push by digest via crane), (2) `cosign sign REGISTRY/IMAGE@DIGEST --yes` (keyless OIDC signing), (3) `cosign attest REGISTRY/IMAGE@DIGEST --predicate=SBOM --type=spdx --yes` (SBOM attestation). Config fields: `oci_push_target`, `oci_repository`, `oci_remote_tags` (list or stamped file), `oci_cosign_sign` (bool), `oci_sbom_attest` (bool), `oci_cosign_oidc_issuer`. Multi-arch handled by `oci_image_index` at build time (not releasekit's concern). Supports GCR, Docker Hub, ECR, GHCR. | ~300 | B4 | +| **B17** | `backends/pm/binary_release.py` | `BinaryReleaseBackend`: Build binaries via `bazel build` with cross-compilation (per bazel-remote pattern: `go_binary` with `goarch`/`goos` attrs, or `cc_binary` with `--platforms`). Version injection via `x_defs` stamp (`{STABLE_GIT_COMMIT}`, `{GIT_TAGS}`) or `--embed_label`. Upload to GitHub Release as assets via Forge protocol. Checksum file generation (SHA-256). Config fields: `binary_targets` (list of target+platform pairs), `binary_stamp_vars` (dict of x_defs). | ~200 | B4 | + +**Done when**: `releasekit publish` can publish a Bazel rule to BCR, +push a Docker image with cosign signing, or attach binaries to a GitHub Release. + +### Bazel Phase 5: Gradle Backend (Future) + +Goal: Support Gradle-based Java/Kotlin/Android projects that don't use Bazel, +or hybrid repos (like google/dagger) where some artifacts are published via +Gradle. This completes the JVM ecosystem coverage alongside the Bazel backend. + +**Gradle publishing landscape:** + +- **`maven-publish` plugin** — standard Gradle plugin for publishing JARs/AARs + to Maven Central (OSSRH) or any Maven repository. Generates POM, signs with + GPG via the `signing` plugin. Command: `./gradlew publish` or + `./gradlew publishToMavenCentral`. +- **`com.gradle.plugin-publish` plugin** — publishes Gradle plugins to the + Gradle Plugin Portal. Command: `./gradlew publishPlugins`. Auto-signs from + v1.0.0+. Auth via `gradle.publish.key` / `gradle.publish.secret` in + `gradle.properties` or env vars. +- **`vanniktech/gradle-maven-publish-plugin`** — popular third-party plugin + that simplifies Maven Central publishing. Used by many Android libraries. + Command: `./gradlew publishAndReleaseToMavenCentral`. 
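+
+To make the command shapes concrete before the pattern summaries and task
+table below, here is a minimal Python sketch (hypothetical helper, not the
+planned `GradleBackend`) that maps each publish mode from the plugin
+landscape above to its Gradle invocation:
+
+```python
+# Illustrative only: publish-mode -> Gradle task mapping, mirroring the
+# commands listed above. Field names echo the G1/G4 rows below.
+from dataclasses import dataclass
+
+PUBLISH_TASKS = {
+    'gradle_maven_publish': 'publish',
+    'gradle_plugin_publish': 'publishPlugins',
+    'gradle_vanniktech': 'publishAndReleaseToMavenCentral',
+}
+
+
+@dataclass
+class GradleConfig:
+    publish_mode: str = 'gradle_maven_publish'
+    use_wrapper: bool = True   # ./gradlew (wrapper) vs a system-wide gradle
+    subproject: str = ''       # e.g. ':dagger-compiler' in a multi-project build
+
+
+def gradle_publish_command(cfg: GradleConfig) -> list[str]:
+    """Build the argv for the publish step; nothing is executed here."""
+    gradle = './gradlew' if cfg.use_wrapper else 'gradle'
+    task = PUBLISH_TASKS[cfg.publish_mode]
+    if cfg.subproject:
+        task = f'{cfg.subproject}:{task}'
+    return [gradle, task]
+
+
+print(gradle_publish_command(GradleConfig()))
+# -> ['./gradlew', 'publish']
+print(gradle_publish_command(GradleConfig(publish_mode='gradle_vanniktech')))
+# -> ['./gradlew', 'publishAndReleaseToMavenCentral']
+print(gradle_publish_command(GradleConfig(subproject=':dagger-compiler')))
+# -> ['./gradlew', ':dagger-compiler:publish']
+```
+
+Credentials stay out of the argv in all three modes; they are supplied via
+Gradle properties or environment variables (for example
+`gradle.publish.key`/`gradle.publish.secret` for the Plugin Portal, and the
+`ORG_GRADLE_PROJECT_*` / `mavenCentral*` properties noted in the G4 row
+below).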
+ +**Gradle version patterns (3 new):** + +``` +Pattern Format Used By +───────────────────────────────────────────────────────────────────────────── +gradle_properties VERSION_NAME=X.Y.Z in gradle.properties dagger, most Android libs +build_gradle version = "X.Y.Z" in build.gradle(.kts) simple projects +version_catalog version in libs.versions.toml modern Gradle monorepos +``` + +**Gradle publish patterns (3 new):** + +``` +Pattern Flow Used By +───────────────────────────────────────────────────────────────────────────── +gradle_maven_publish ./gradlew publish (maven-publish plugin) most Java/Kotlin libs +gradle_plugin_publish ./gradlew publishPlugins (Plugin Portal) dagger Gradle plugin +gradle_vanniktech ./gradlew publishAndReleaseToMavenCentral Android libs (vanniktech) +``` + +| Task | Module | Description | Est. Lines | Deps | +|------|--------|-------------|-----------|------| +| **G1** | `config.py` | Add `'gradle'` to `ALLOWED_WORKSPACE_TOOLS`. New fields: `gradle_publish_task` (default `publish`), `gradle_build_task` (default `build`), `gradle_test_task` (default `test`), `gradle_version_file` (default `gradle.properties`), `gradle_version_format` (`gradle_properties` / `build_gradle` / `version_catalog`), `gradle_version_key` (default `VERSION_NAME`), `gradle_signing` (`gpg` / `none`), `gradle_wrapper` (bool, default `true` — use `./gradlew` vs `gradle`), `gradle_subproject` (for multi-project builds, e.g. `:dagger-compiler`). | ~50 | — | +| **G2** | `backends/workspace/gradle_version.py` | `GradleVersionRewriter` with 3 format handlers: (1) `gradle_properties` — regex `KEY=X.Y.Z` in `gradle.properties`; (2) `build_gradle` — regex `version = "X.Y.Z"` in `build.gradle` or `build.gradle.kts`; (3) `version_catalog` — TOML rewrite of version entry in `gradle/libs.versions.toml`. Each handler: `read_version(path) -> str` and `write_version(path, new_version)`. | ~120 | G1 | +| **G3** | `backends/workspace/gradle.py` | `GradleWorkspace` implementing `Workspace` protocol. `discover()` reads `gradle_version_file` + `gradle_version_format` from config, returns `list[Package]`. `rewrite_version()` delegates to `GradleVersionRewriter`. `rewrite_dependency_version()` supports: (A) `gradle.properties` — rewrite dependency version properties; (B) `version_catalog` — rewrite dependency version in `libs.versions.toml`; (C) `build.gradle` — rewrite `implementation "group:artifact:VERSION"` strings. | ~150 | G1, G2 | +| **G4** | `backends/pm/gradle.py` | `GradleBackend` implementing `PackageManager` protocol. `build()` → `./gradlew ` (or `gradle` if `gradle_wrapper = false`). `publish()` → 3 modes: (A) `gradle_maven_publish` — `./gradlew ` with `MAVEN_REPO`/`MAVEN_USER`/`MAVEN_PASSWORD` env vars + GPG signing via `ORG_GRADLE_PROJECT_signingKey`/`ORG_GRADLE_PROJECT_signingPassword`; (B) `gradle_plugin_publish` — `./gradlew publishPlugins` with `gradle.publish.key`/`gradle.publish.secret`; (C) `gradle_vanniktech` — `./gradlew publishAndReleaseToMavenCentral` with `mavenCentralUsername`/`mavenCentralPassword`. `lock()` → `./gradlew dependencies --write-locks` (Gradle dependency locking). `version_bump()` → delegates to `GradleWorkspace.rewrite_version()`. `smoke_test()` → `./gradlew `. `resolve_check()` → `./gradlew dependencyInsight --dependency=GROUP:ARTIFACT`. For multi-project: prepend `::` to all task names. | ~200 | G1, G2, G3 | +| **G5** | `workspace.py`, `cli.py`, `pm/__init__.py` | Thread `tool = "gradle"` through dispatch. 
| ~30 | G4 | +| **G6** | `tests/` | Unit tests: `rk_config_gradle_test.py` (10+ tests), `rk_gradle_version_test.py` (3 formats × read/write, 10+ tests), `rk_gradle_workspace_test.py` (8+ tests), `rk_pm_gradle_test.py` (3 publish modes, 15+ tests), `rk_gradle_dispatch_test.py` (5+ tests). | ~500 | G5 | + +**Done when**: `releasekit publish --dry-run` constructs correct `./gradlew` +commands for Maven Central, Gradle Plugin Portal, and vanniktech publishing. + +**Milestone**: Can dry-run a Gradle publish pipeline. Enables hybrid repos +like google/dagger where Bazel handles 30+ JARs and Gradle handles the +Gradle plugin. + +**Example configs:** + +```toml +# google/dagger — hybrid Bazel + Gradle +# 30+ Maven artifacts via Bazel (see earlier example) +[workspace.dagger-java] +ecosystem = "java" +tool = "bazel" +root = "." +synchronize = true +bazel_publish_mode = "mvn_deploy" +bazel_version_file = "build_defs.bzl" +bazel_version_format = "placeholder" +bazel_version_placeholder = "${project.version}" +bazel_signing = "gpg" + +# Gradle plugin via Gradle +[workspace.dagger-gradle-plugin] +ecosystem = "java" +tool = "gradle" +root = "java/dagger/hilt/android/plugin" +synchronize = true +gradle_publish_task = "publishPlugins" +gradle_version_file = "gradle.properties" +gradle_version_format = "gradle_properties" +gradle_version_key = "VERSION_NAME" + +# Typical Android library (vanniktech plugin) +# [workspace.my-android-lib] +# ecosystem = "java" +# tool = "gradle" +# root = "." +# gradle_publish_task = "publishAndReleaseToMavenCentral" +# gradle_version_file = "gradle.properties" +# gradle_version_format = "gradle_properties" +# gradle_version_key = "VERSION_NAME" +# gradle_signing = "gpg" +``` + +### Config Reference (All Bazel Fields) + +```toml +# ── Phase 1 fields ────────────────────────────────────────────────────── + +[workspace.java] +ecosystem = "java" +tool = "bazel" +root = "." + +# Build targets +bazel_build_target = "//java/com/google/dotprompt:dotprompt_pkg" +bazel_test_target = "//java/..." +bazel_publish_target = "//java/com/google/dotprompt:dotprompt_pkg.publish" + +# Publish mode: "java_export" | "mvn_deploy" | "custom" +bazel_publish_mode = "java_export" +bazel_publish_script = "" # For custom mode only + +# Version management (pick one format) +bazel_version_file = "java/com/google/dotprompt/BUILD.bazel" +bazel_version_format = "maven_coordinates" +# "module" → module(version = "X.Y.Z") in MODULE.bazel +# "maven_coordinates" → maven_coordinates = "g:a:X.Y.Z" in BUILD.bazel +# "bzl_constant" → VAR = "X.Y.Z" in *.bzl (needs bazel_version_variable) +# "version_txt" → plain text file +# "version_json" → JSON file (needs bazel_version_jsonpath) +# "placeholder" → sentinel replacement (needs bazel_version_placeholder) +bazel_version_variable = "" # For bzl_constant: "PROTOBUF_JAVA_VERSION" +bazel_version_jsonpath = "" # For version_json: "$.languages.java" +bazel_version_placeholder = "" # For placeholder: "${project.version}" + +# Maven coordinates (group:artifact, without version) +bazel_maven_coordinates = "com.google.dotprompt:dotprompt" + +# Signing: "gpg" | "sigstore" | "both" +bazel_signing = "both" + +# Build-time --define flags ({version} is replaced) +bazel_build_defines = [] # e.g. 
["pom_version={version}"] + +# Artifact type: "jar" | "aar" | "wheel" | "binary" +bazel_artifact_type = "jar" + +# Lock command (empty = no lock step) +bazel_lock_command = "@maven//:pin" + +# ── Phase 3 fields (future) ──────────────────────────────────────────── + +# Android +android_keystore = "" +android_keystore_password = "" # env var name, not value +android_track = "internal" # internal | alpha | beta | production +android_service_account = "" # path to service account JSON + +# iOS (informed by rules_apple analysis) +ios_provisioning_profile = "" +ios_team_id = "" +ios_api_key = "" # path to .p8 file +ios_api_issuer = "" +app_store_track = "testflight" # testflight | app_store +ios_version_rule = "" # label of apple_bundle_version target +ios_embed_label_pattern = "" # e.g. "MyApp_{version}_build_{build}" + +# ── Phase 4 fields (future) ──────────────────────────────────────────── + +# Container (informed by distroless/bazel-remote/buildfarm analysis) +oci_push_target = "" # e.g. "//:push" or "//container:push" +oci_repository = "" # e.g. "gcr.io/my-project/my-image" +oci_remote_tags = [] # e.g. ["latest", "{version}"] +oci_cosign_sign = false # enable cosign keyless signing +oci_sbom_attest = false # enable SBOM attestation via cosign +oci_cosign_oidc_issuer = "" # e.g. "https://accounts.google.com" + +# Binary release (informed by bazel-remote analysis) +binary_targets = [] # e.g. ["//:app-linux-amd64", "//:app-linux-arm64"] +binary_stamp_vars = {} # e.g. {"main.gitCommit": "{STABLE_GIT_COMMIT}"} +binary_embed_label = "" # value for --embed_label flag + +# BCR (informed by rules_apple/rules_oci/rules_python analysis) +bcr_module_name = "" # module name in MODULE.bazel +bcr_presubmit_targets = [] # test targets for presubmit.yml +bcr_compatibility_level = 1 # compatibility_level for metadata.json +bcr_maintainers = [] # GitHub usernames for metadata.json +``` + +### Example Configs for Real Repos + +**dotprompt** (simplest — single `java_export` artifact): + +```toml +[workspace.java] +ecosystem = "java" +tool = "bazel" +root = "." +bazel_build_target = "//java/com/google/dotprompt:dotprompt_pkg" +bazel_test_target = "//java/..." +bazel_publish_target = "//java/com/google/dotprompt:dotprompt_pkg.publish" +bazel_publish_mode = "java_export" +bazel_version_file = "java/com/google/dotprompt/BUILD.bazel" +bazel_version_format = "maven_coordinates" +bazel_maven_coordinates = "com.google.dotprompt:dotprompt" +``` + +**protobuf** (5 Java artifacts, shared `.bzl` version constant): + +```toml +[workspace.java-core] +ecosystem = "java" +tool = "bazel" +root = "." +synchronize = true +bazel_build_target = "//java/core:core_mvn" +bazel_publish_target = "//java/core:core_mvn.publish" +bazel_publish_mode = "java_export" +bazel_version_file = "protobuf_version.bzl" +bazel_version_format = "bzl_constant" +bazel_version_variable = "PROTOBUF_JAVA_VERSION" +bazel_maven_coordinates = "com.google.protobuf:protobuf-java" + +[workspace.java-lite] +ecosystem = "java" +tool = "bazel" +root = "." 
+synchronize = true +bazel_build_target = "//java/core:lite_mvn" +bazel_publish_target = "//java/core:lite_mvn.publish" +bazel_publish_mode = "java_export" +bazel_version_file = "protobuf_version.bzl" +bazel_version_format = "bzl_constant" +bazel_version_variable = "PROTOBUF_JAVA_VERSION" +bazel_maven_coordinates = "com.google.protobuf:protobuf-javalite" +``` + +**dagger** (30+ artifacts, custom maven deploy, placeholder version): + +```toml +[workspace.dagger-core] +ecosystem = "java" +tool = "bazel" +root = "." +bazel_build_target = "//dagger-runtime:artifact" +bazel_publish_mode = "mvn_deploy" +bazel_build_defines = ["pom_version={version}"] +bazel_version_file = "build_defs.bzl" +bazel_version_format = "placeholder" +bazel_version_placeholder = "${project.version}" +bazel_maven_coordinates = "com.google.dagger:dagger" +``` + +**envoy** (binary release, future Phase 4): + +```toml +[workspace.envoy] +ecosystem = "cpp" +tool = "bazel" +root = "." +bazel_build_target = "//source/exe:envoy" +bazel_version_file = "VERSION.txt" +bazel_version_format = "version_txt" +bazel_artifact_type = "binary" +``` + +**tensorflow** (Python wheel from Bazel, future Phase 4): + +```toml +[workspace.tf-python] +ecosystem = "python" +tool = "bazel" +root = "." +bazel_build_target = "//tensorflow/tools/pip_package:wheel" +bazel_version_file = "tensorflow/tf_version.bzl" +bazel_version_format = "bzl_constant" +bazel_version_variable = "TF_VERSION" +bazel_artifact_type = "wheel" +``` + +**distroless** (container images with cosign signing, future Phase 4): + +```toml +[workspace.distroless] +ecosystem = "bazel" +tool = "bazel" +root = "." +oci_push_target = "//:sign_and_push" +oci_repository = "gcr.io/distroless/static" +oci_remote_tags = ["latest", "nonroot", "debug"] +oci_cosign_sign = true +oci_sbom_attest = true +oci_cosign_oidc_issuer = "https://accounts.google.com" +``` + +**bazel-remote** (Go binary + container, future Phase 4): + +```toml +[workspace.bazel-remote-binary] +ecosystem = "go" +tool = "bazel" +root = "." +bazel_artifact_type = "binary" +binary_targets = ["//:bazel-remote-linux-amd64", "//:bazel-remote-linux-arm64", "//:bazel-remote-darwin-amd64", "//:bazel-remote-darwin-arm64"] +binary_stamp_vars = {"main.gitCommit" = "{STABLE_GIT_COMMIT}", "main.gitTags" = "{GIT_TAGS}"} + +[workspace.bazel-remote-container] +ecosystem = "bazel" +tool = "bazel" +root = "." +oci_push_target = "//:push_to_dockerhub_amd64" +oci_repository = "buchgr/bazel-remote-cache" +oci_remote_tags = ["{version}"] +``` + +**rules_oci** (Bazel rules published to BCR, future Phase 4): + +```toml +[workspace.rules-oci] +ecosystem = "bazel" +tool = "bazel" +root = "." +bazel_version_file = "MODULE.bazel" +bazel_version_format = "module" +bcr_module_name = "rules_oci" +bcr_presubmit_targets = ["//oci/..."] +bcr_compatibility_level = 1 +bcr_maintainers = ["thesayyn", "alexeagle"] +``` + +**dotprompt rules_dart + rules_flutter** (Bazel rules to BCR, future Phase 4): + +```toml +# rules_dart must publish first (rules_flutter depends on it) +[workspace.rules-dart] +ecosystem = "bazel" +tool = "bazel" +root = "bazel/rules_dart" +synchronize = true +bazel_version_file = "bazel/rules_dart/MODULE.bazel" +bazel_version_format = "module" +bazel_test_target = "//bazel/rules_dart/..." 
+bcr_module_name = "rules_dart" +bcr_presubmit_targets = ["//:all"] +bcr_compatibility_level = 1 + +[workspace.rules-flutter] +ecosystem = "bazel" +tool = "bazel" +root = "bazel/rules_flutter" +synchronize = true +bazel_version_file = "bazel/rules_flutter/MODULE.bazel" +bazel_version_format = "module" +bazel_test_target = "//bazel/rules_flutter/..." +bcr_module_name = "rules_flutter" +bcr_presubmit_targets = ["//:all"] +bcr_compatibility_level = 1 +``` + +Ephemeral pinning handles the `rules_flutter` → `rules_dart` +`local_path_override` dependency: temporarily remove the override +and set `bazel_dep(version = "0.2.0")` during BCR publish, then +restore `local_path_override` for local dev. + +**JS/TS via rules_js** (npm packages to registry, Phase 1): + +```toml +[workspace.js-my-lib] +ecosystem = "js" +tool = "bazel" +root = "packages/my-lib" +synchronize = true +bazel_build_target = "//packages/my-lib" +bazel_test_target = "//packages/my-lib:..." +bazel_publish_target = "//packages/my-lib:my-lib" +bazel_publish_mode = "npm_package_publish" +bazel_version_file = "packages/my-lib/package.json" +bazel_version_format = "package_json" +bazel_lock_command = "pnpm install --frozen-lockfile" +``` + +The `npm_package` macro in `rules_js` with `publishable = True` +generates a `.publish` target. `bazel run //pkg:name.publish` invokes +`npm publish`. Auth via `NPM_TOKEN` env var or `.npmrc`. +Ephemeral pinning rewrites `"workspace:*"` deps to hosted versions. + +**dotprompt Dart** (2 Dart packages to pub.dev, Phase 1): + +```toml +# handlebarrz must publish first (dotprompt depends on it) +[workspace.dart-handlebarrz] +ecosystem = "dart" +tool = "bazel" +root = "dart/handlebarrz" +synchronize = true +bazel_build_target = "//dart/handlebarrz" +bazel_test_target = "//dart/handlebarrz:..." +bazel_publish_target = "//dart/handlebarrz:publish" +bazel_publish_mode = "dart_pub_publish" +bazel_version_file = "dart/handlebarrz/pubspec.yaml" +bazel_version_format = "pubspec_yaml" +bazel_lock_command = "//dart/handlebarrz:pub_get" + +[workspace.dart-dotprompt] +ecosystem = "dart" +tool = "bazel" +root = "dart/dotprompt" +synchronize = true +bazel_build_target = "//dart/dotprompt" +bazel_test_target = "//dart/dotprompt:..." +bazel_publish_target = "//dart/dotprompt:publish" +bazel_publish_mode = "dart_pub_publish" +bazel_version_file = "dart/dotprompt/pubspec.yaml" +bazel_version_format = "pubspec_yaml" +bazel_lock_command = "//dart/dotprompt:pub_get" +``` + +**Prerequisites in dotprompt repo** (before releasekit can publish): + +1. Remove `publish_to: none` from `dart/dotprompt/pubspec.yaml` +2. Add `dart_pub_publish` and `dart_pub_get` targets to both BUILD.bazel files + +**Ephemeral pinning** handles the `path: ../handlebarrz` dependency +automatically — same pattern as Genkit's Python monorepo packages: + +``` +1. Bump both pubspec.yaml versions to 0.0.2 +2. Ephemeral pin: rewrite dotprompt's handlebarrz dep + path: ../handlebarrz → handlebarrz: 0.0.2 +3. Publish handlebarrz to pub.dev +4. Publish dotprompt to pub.dev (handlebarrz 0.0.2 now exists) +5. Unpin: revert to path: ../handlebarrz for local dev +``` + +The `path:` dependency stays in the repo permanently for local +development. Releasekit only swaps it to a hosted version during +the publish window. This requires the `BazelWorkspace` (B3) to +support Dart `pubspec.yaml` path-dep → hosted-dep rewriting in +its `rewrite_dependency_version()` method. 
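+
+The pubspec rewrite itself can be a small, targeted substitution. A sketch,
+assuming the dependency entry's only nested key is `path:` (the real B3
+`rewrite_dependency_version()` API may handle more cases):
+
+```python
+import re
+from pathlib import Path
+
+
+def pin_pubspec_path_dep(pubspec: Path, dep: str, version: str) -> None:
+    """Swap a `path:` dependency for a hosted version (illustrative only)."""
+    text = pubspec.read_text(encoding='utf-8')
+    # Matches e.g. "  handlebarrz:\n    path: ../handlebarrz".
+    pattern = re.compile(
+        rf'^(?P<indent>[ \t]+){re.escape(dep)}:[ \t]*\n(?P=indent)[ \t]+path:[^\n]*\n',
+        re.MULTILINE,
+    )
+    replacement = f'\\g<indent>{dep}: {version}\n'
+    new_text, count = pattern.subn(replacement, text)
+    if count != 1:
+        raise ValueError(f'expected one path dependency for {dep!r}, found {count}')
+    pubspec.write_text(new_text, encoding='utf-8')
+
+
+# Pin for publish, then revert afterwards so local dev keeps the path dep:
+# pin_pubspec_path_dep(Path('dart/dotprompt/pubspec.yaml'), 'handlebarrz', '0.0.2')
+```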
+ +### New Files (Phase 1) + +``` +src/releasekit/ + backends/ + pm/ + bazel.py ← BazelBackend (PackageManager protocol) + workspace/ + bazel.py ← BazelWorkspace (Workspace protocol) + bazel_version.py ← BazelVersionRewriter (9 formats) +tests/ + backends/ + rk_config_bazel_test.py ← config validation tests + rk_bazel_version_test.py ← version rewriter tests + rk_bazel_workspace_test.py ← workspace discovery tests + rk_pm_bazel_test.py ← PM protocol conformance tests + rk_bazel_dispatch_test.py ← dispatch routing tests + rk_bazel_integration_test.py ← dry-run integration test +``` + +--- + ## Upstream & External Tasks Tasks that depend on external projects or processes outside this repo. diff --git a/py/tools/releasekit/src/releasekit/backends/_run.py b/py/tools/releasekit/src/releasekit/backends/_run.py index 63343eb5f1..945f760dc7 100644 --- a/py/tools/releasekit/src/releasekit/backends/_run.py +++ b/py/tools/releasekit/src/releasekit/backends/_run.py @@ -63,25 +63,25 @@ class CommandResult: Attributes: command: The command that was executed (as a list of strings). - returncode: Process exit code (0 = success). + return_code: Process exit code (0 = success). stdout: Captured standard output. stderr: Captured standard error. - duration_ms: Wall-clock duration in milliseconds. + duration: Wall-clock duration in milliseconds. dry_run: Whether this was a dry-run (command was not actually executed). """ command: list[str] - returncode: int + return_code: int stdout: str = '' stderr: str = '' - duration_ms: float = 0.0 + duration: float = 0.0 dry_run: bool = False env_overrides: dict[str, str] = field(default_factory=dict) @property def ok(self) -> bool: - """Whether the command succeeded (returncode == 0 or dry-run).""" - return self.returncode == 0 + """Whether the command succeeded (return_code == 0 or dry-run).""" + return self.return_code == 0 @property def command_str(self) -> str: @@ -126,10 +126,10 @@ def run_command( log.info('dry_run', cmd=cmd_str) return CommandResult( command=cmd, - returncode=0, + return_code=0, stdout='', stderr='', - duration_ms=0.0, + duration=0.0, dry_run=True, env_overrides=env or {}, ) @@ -149,17 +149,17 @@ def run_command( timeout=timeout, ) except subprocess.TimeoutExpired: - duration_ms = (time.monotonic() - start) * 1000 - log.error('command_timeout', cmd=cmd_str, timeout=timeout, duration_ms=duration_ms) + duration = (time.monotonic() - start) * 1000 + log.error('command_timeout', cmd=cmd_str, timeout=timeout, duration=duration) raise - duration_ms = (time.monotonic() - start) * 1000 + duration = (time.monotonic() - start) * 1000 cmd_result = CommandResult( command=cmd, - returncode=result.returncode, + return_code=result.returncode, stdout=result.stdout if capture else '', stderr=result.stderr if capture else '', - duration_ms=duration_ms, + duration=duration, dry_run=False, env_overrides=env or {}, ) @@ -168,9 +168,9 @@ def run_command( log.warning( 'command_failed', cmd=cmd_str, - returncode=result.returncode, + return_code=result.returncode, stderr=result.stderr[:500] if capture else '', - duration_ms=duration_ms, + duration=duration, ) if check: raise subprocess.CalledProcessError( @@ -180,7 +180,7 @@ def run_command( stderr=result.stderr, ) else: - log.debug('command_ok', cmd=cmd_str, duration_ms=duration_ms) + log.debug('command_ok', cmd=cmd_str, duration=duration) return cmd_result diff --git a/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py b/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py index 
f946a3aef7..0be722105c 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/bitbucket.py @@ -150,11 +150,15 @@ def __init__( self._client: httpx.AsyncClient | None = None + def __repr__(self) -> str: + """Return a safe repr that never exposes credentials.""" + return f'BitbucketAPIBackend(workspace={self._workspace!r}, repo_slug={self._repo_slug!r})' + def _dry_run_result(self, method: str, url: str) -> CommandResult: """Create a synthetic CommandResult for dry-run mode.""" return CommandResult( command=[method, url], - returncode=0, + return_code=0, stdout='', stderr='', dry_run=True, @@ -197,7 +201,7 @@ async def _request( return CommandResult( command=[method, url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) @@ -208,7 +212,7 @@ async def is_available(self) -> bool: if not result.ok: log.warning( 'bitbucket_not_available', - status=result.returncode, + status=result.return_code, hint='Check BITBUCKET_TOKEN or BITBUCKET_USERNAME + BITBUCKET_APP_PASSWORD', ) return result.ok diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github.py b/py/tools/releasekit/src/releasekit/backends/forge/github.py index 7ca71495de..17c2115a33 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github.py @@ -96,10 +96,6 @@ async def create_release( cmd_parts = ['release', 'create', tag] if title: cmd_parts.extend(['--title', title]) - if body: - cmd_parts.extend(['--notes', body]) - else: - cmd_parts.append('--generate-notes') if draft: cmd_parts.append('--draft') if prerelease: @@ -109,7 +105,27 @@ async def create_release( cmd_parts.append(str(asset)) log.info('create_release', tag=tag, draft=draft) - return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) + if body: + # Use --notes-file to avoid shell argument size limits with large + # release notes (e.g. 60+ package changelogs). + notes_file = '' + try: + with tempfile.NamedTemporaryFile( + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', + ) as f: + notes_file = f.name + f.write(body) + cmd_parts.extend(['--notes-file', notes_file]) + return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) + finally: + if notes_file: + os.unlink(notes_file) # noqa: PTH108 + else: + cmd_parts.append('--generate-notes') + return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def delete_release( self, @@ -188,19 +204,21 @@ async def create_pr( if body: # Use --body-file to avoid shell argument size limits with large # PR bodies (e.g. 60+ package changelogs + embedded manifest). 
- with tempfile.NamedTemporaryFile( - mode='w', - suffix='.md', - delete=False, - encoding='utf-8', - ) as f: - f.write(body) - body_file = f.name + body_file = '' try: + with tempfile.NamedTemporaryFile( + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', + ) as f: + body_file = f.name + f.write(body) cmd_parts.extend(['--body-file', body_file]) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) finally: - os.unlink(body_file) # noqa: PTH108 + if body_file: + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: @@ -303,19 +321,21 @@ async def update_pr( if body: # Use --body-file to avoid shell argument size limits with large # PR bodies (e.g. 60+ package changelogs + embedded manifest). - with tempfile.NamedTemporaryFile( - mode='w', - suffix='.md', - delete=False, - encoding='utf-8', - ) as f: - f.write(body) - body_file = f.name + body_file = '' try: + with tempfile.NamedTemporaryFile( + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', + ) as f: + body_file = f.name + f.write(body) cmd_parts.extend(['--body-file', body_file]) return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) finally: - os.unlink(body_file) # noqa: PTH108 + if body_file: + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._gh, *cmd_parts, dry_run=dry_run) async def merge_pr( diff --git a/py/tools/releasekit/src/releasekit/backends/forge/github_api.py b/py/tools/releasekit/src/releasekit/backends/forge/github_api.py index 2b6caf2729..eeadec218a 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/github_api.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/github_api.py @@ -119,11 +119,15 @@ def __init__( 'X-GitHub-Api-Version': _API_VERSION, } + def __repr__(self) -> str: + """Return a safe repr that never exposes the API token.""" + return f'GitHubAPIBackend(owner={self._owner!r}, repo={self._repo!r})' + def _dry_run_result(self, method: str, url: str) -> CommandResult: """Create a synthetic CommandResult for dry-run mode.""" return CommandResult( command=[method, url], - returncode=0, + return_code=0, stdout='', stderr='', dry_run=True, @@ -193,7 +197,7 @@ async def create_release( log.info('create_release', tag=tag, status=response.status_code) return CommandResult( command=['POST', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) @@ -221,7 +225,7 @@ async def delete_release( log.warning('release_not_found', tag=tag, status=lookup.status_code) return CommandResult( command=['GET', release_url], - returncode=lookup.status_code, + return_code=lookup.status_code, stdout='', stderr=f'Release not found for tag {tag}', ) @@ -235,7 +239,7 @@ async def delete_release( log.info('delete_release', tag=tag, status=response.status_code) return CommandResult( command=['DELETE', delete_url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout='', stderr='' if response.is_success else response.text, ) @@ -261,7 +265,7 @@ async def promote_release( if lookup.status_code != 200: return CommandResult( command=['GET', release_url], - returncode=lookup.status_code, + return_code=lookup.status_code, stdout='', stderr=f'Release not found for tag {tag}', ) @@ -280,7 +284,7 @@ async 
def promote_release( log.info('promote_release', tag=tag, status=response.status_code) return CommandResult( command=['PATCH', edit_url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) @@ -359,7 +363,7 @@ async def create_pr( pass return CommandResult( command=['POST', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=stdout, stderr='' if response.is_success else response.text, ) @@ -494,7 +498,7 @@ async def add_labels( log.info('add_labels', pr=pr_number, labels=labels, status=response.status_code) return CommandResult( command=['POST', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) @@ -526,7 +530,7 @@ async def remove_labels( response = await request_with_retry(client, 'DELETE', url) last_result = CommandResult( command=['DELETE', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout='', stderr='' if response.is_success else response.text, ) @@ -571,7 +575,7 @@ async def update_pr( log.info('update_pr', pr=pr_number, status=response.status_code) return CommandResult( command=['PATCH', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) @@ -621,7 +625,7 @@ async def merge_pr( log.info('merge_pr', pr=pr_number, method=method, status=response.status_code) return CommandResult( command=['PUT', url], - returncode=0 if response.is_success else response.status_code, + return_code=0 if response.is_success else response.status_code, stdout=response.text, stderr='' if response.is_success else response.text, ) diff --git a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py index c6595cf5d1..09cf0c471f 100644 --- a/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py +++ b/py/tools/releasekit/src/releasekit/backends/forge/gitlab.py @@ -151,7 +151,7 @@ async def promote_release( log.warning('gitlab_promote_noop', tag=tag, hint='GitLab has no draft releases') return CommandResult( command=['glab', 'release', 'edit', tag, '(no-op)'], - returncode=0, + return_code=0, stdout='', stderr='', dry_run=dry_run, @@ -210,19 +210,21 @@ async def create_pr( if body: # Use a temp file to avoid shell argument size limits with large # MR descriptions (e.g. 60+ package changelogs + embedded manifest). 
- with tempfile.NamedTemporaryFile( - mode='w', - suffix='.md', - delete=False, - encoding='utf-8', - ) as f: - f.write(body) - body_file = f.name + body_file = '' try: + with tempfile.NamedTemporaryFile( + mode='w', + suffix='.md', + delete=False, + encoding='utf-8', + ) as f: + body_file = f.name + f.write(body) cmd_parts.extend(['--description', f'@{body_file}']) return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) finally: - os.unlink(body_file) # noqa: PTH108 + if body_file: + os.unlink(body_file) # noqa: PTH108 return await asyncio.to_thread(self._glab, *cmd_parts, dry_run=dry_run) async def pr_data(self, pr_number: int) -> dict[str, Any]: diff --git a/py/tools/releasekit/src/releasekit/backends/pm/__init__.py b/py/tools/releasekit/src/releasekit/backends/pm/__init__.py index e00ffb948d..d6c809a0c8 100644 --- a/py/tools/releasekit/src/releasekit/backends/pm/__init__.py +++ b/py/tools/releasekit/src/releasekit/backends/pm/__init__.py @@ -21,6 +21,12 @@ - :class:`~releasekit.backends.pm.uv.UvBackend` — ``uv`` CLI - :class:`~releasekit.backends.pm.pnpm.PnpmBackend` — ``pnpm`` CLI +- :class:`~releasekit.backends.pm.go.GoBackend` — ``go`` CLI +- :class:`~releasekit.backends.pm.dart.DartBackend` — ``dart pub`` CLI +- :class:`~releasekit.backends.pm.maven.MavenBackend` — ``mvn`` / ``gradle`` CLI +- :class:`~releasekit.backends.pm.cargo.CargoBackend` — ``cargo`` CLI +- :class:`~releasekit.backends.pm.maturin.MaturinBackend` — ``maturin`` + ``uv`` CLI +- :class:`~releasekit.backends.pm.bazel.BazelBackend` — ``bazel`` CLI Design notes: @@ -34,10 +40,22 @@ from typing import Protocol, runtime_checkable from releasekit.backends._run import CommandResult +from releasekit.backends.pm.bazel import BazelBackend as BazelBackend +from releasekit.backends.pm.cargo import CargoBackend as CargoBackend +from releasekit.backends.pm.dart import DartBackend as DartBackend +from releasekit.backends.pm.go import GoBackend as GoBackend +from releasekit.backends.pm.maturin import MaturinBackend as MaturinBackend +from releasekit.backends.pm.maven import MavenBackend as MavenBackend from releasekit.backends.pm.pnpm import PnpmBackend as PnpmBackend from releasekit.backends.pm.uv import UvBackend as UvBackend __all__ = [ + 'BazelBackend', + 'CargoBackend', + 'DartBackend', + 'GoBackend', + 'MaturinBackend', + 'MavenBackend', 'PackageManager', 'PnpmBackend', 'UvBackend', diff --git a/py/tools/releasekit/src/releasekit/backends/pm/bazel.py b/py/tools/releasekit/src/releasekit/backends/pm/bazel.py new file mode 100644 index 0000000000..e9705b137e --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/bazel.py @@ -0,0 +1,412 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Bazel package manager backend for releasekit. + +The :class:`BazelBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol via the +``bazel`` CLI (``bazel build``, ``bazel run``, ``bazel test``). 
+ +Bazel is a polyglot build system. Publishing is done via ``bazel run`` +on a publish target whose name varies by ecosystem: + +- **java_export** / **kt_jvm_export** (rules_jvm_external): + ``bazel run //path:target.publish`` +- **npm_package** (rules_js): + ``bazel run //path:target.publish`` +- **py_wheel** (rules_python): + ``bazel run //path:target.publish`` +- **dart_pub_publish** (rules_dart): + ``bazel run //path:dart_pub_publish`` +- **oci_push** (rules_oci): + ``bazel run //path:push`` +- **mvn_deploy** (custom): + ``bazel build //path:target`` then ``mvn deploy`` +- **native_tool** (ecosystem-native): + ``bazel build //path:target`` then ecosystem-native publish +- **custom**: + ``bazel run //path:custom_publish_target`` + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. + +Configuration in ``releasekit.toml``:: + + [workspace.java] + ecosystem = 'java' + tool = 'bazel' + root = 'java' + # publish_mode = "java_export" # optional override + # publish_target = "//pkg:deploy.publish" # optional explicit target +""" + +from __future__ import annotations + +import asyncio +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.bazel') + +# Publish modes supported by the Bazel backend. +PUBLISH_MODES: frozenset[str] = frozenset({ + 'custom', + 'dart_pub_publish', + 'java_export', + 'kt_jvm_export', + 'mvn_deploy', + 'native_tool', + 'npm_package', + 'oci_push', + 'py_wheel', +}) + +# Publish modes that use the ``.publish`` suffix convention. +_DOT_PUBLISH_MODES: frozenset[str] = frozenset({ + 'java_export', + 'kt_jvm_export', + 'npm_package', + 'py_wheel', +}) + +# Default publish target names per mode (when no explicit target is set). +_MODE_TARGET_SUFFIX: dict[str, str] = { + 'dart_pub_publish': 'dart_pub_publish', + 'oci_push': 'push', +} + + +class BazelBackend: + """Bazel :class:`~releasekit.backends.pm.PackageManager` implementation. + + Wraps ``bazel build``, ``bazel run``, and ``bazel test`` to provide + build, publish, lock, version-bump, resolve-check, and smoke-test + operations for any Bazel-managed project. + + Args: + workspace_root: Path to the Bazel workspace root (contains + ``MODULE.bazel`` or ``WORKSPACE``). + publish_mode: How to publish artifacts. One of :data:`PUBLISH_MODES`. + Defaults to ``"java_export"``. + publish_target: Explicit Bazel target label for publishing + (e.g. ``"//pkg:deploy.publish"``). Overrides the default + target derivation from ``publish_mode``. + """ + + def __init__( + self, + workspace_root: Path, + *, + publish_mode: str = 'java_export', + publish_target: str = '', + ) -> None: + """Initialize with the Bazel workspace root.""" + self._root = workspace_root + self._publish_mode = publish_mode if publish_mode in PUBLISH_MODES else 'java_export' + self._publish_target = publish_target + + @property + def publish_mode(self) -> str: + """Return the configured publish mode.""" + return self._publish_mode + + # ------------------------------------------------------------------ + # PackageManager protocol + # ------------------------------------------------------------------ + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Build a Bazel target using ``bazel build``. 
+ + The build target is derived from the package directory relative + to the workspace root: ``/repo/java/core`` → ``//java/core:all``. + + Args: + package_dir: Path to the package directory containing a + ``BUILD`` or ``BUILD.bazel`` file. + output_dir: Unused for Bazel (output goes to ``bazel-bin``). + no_sources: Unused for Bazel. + dry_run: Log the command without executing. + """ + target = _package_label(self._root, package_dir) + cmd = ['bazel', 'build', target] + + log.info('build', target=target) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """Publish a Bazel-built artifact via ``bazel run``. + + The publish command depends on ``publish_mode``: + + - ``java_export`` / ``kt_jvm_export``: ``bazel run //pkg:target.publish`` + - ``npm_package``: ``bazel run //pkg:target.publish`` + - ``py_wheel``: ``bazel run //pkg:target.publish`` + - ``dart_pub_publish``: ``bazel run //pkg:dart_pub_publish`` + - ``oci_push``: ``bazel run //pkg:push`` + - ``mvn_deploy``: ``bazel run //pkg:deploy`` + - ``native_tool``: ``bazel run //pkg:publish`` + - ``custom``: ``bazel run //pkg:publish`` + + Args: + dist_dir: Path to the package directory. + check_url: Unused for Bazel. + index_url: Custom registry URL. Passed as ``--define`` + for targets that support it. + dist_tag: npm dist-tag. Passed as ``--define=DIST_TAG=`` + for npm_package publish mode. + publish_branch: Unused for Bazel. + provenance: Unused for Bazel. + dry_run: Log the command without executing. + """ + if self._publish_target: + target = self._publish_target + else: + target = self._derive_publish_target(dist_dir) + + cmd = ['bazel', 'run', target] + + # Mode-specific defines. + if index_url: + cmd.extend(['--define', f'REGISTRY_URL={index_url}']) + if dist_tag and self._publish_mode == 'npm_package': + cmd.extend(['--define', f'DIST_TAG={dist_tag}']) + + log.info( + 'publish', + target=target, + mode=self._publish_mode, + dry_run=dry_run, + ) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Run Bazel dependency resolution. + + Uses ``bazel mod tidy`` (Bzlmod) to update the lockfile, or + ``bazel mod deps --lockfile_mode=error`` to verify it. + + For JVM projects using rules_jvm_external, runs + ``bazel run @maven//:pin`` to re-pin Maven dependencies. + + Args: + check_only: Verify the lockfile is up-to-date without + modifying it. + upgrade_package: Unused for Bazel (Bzlmod handles upgrades + via ``MODULE.bazel`` edits). + cwd: Working directory override. + dry_run: Log the command without executing. + """ + work_dir = cwd or self._root + + if self._publish_mode in ('java_export', 'kt_jvm_export', 'mvn_deploy'): + # JVM: re-pin Maven deps. 
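+            # In check-only mode, verify the Bzlmod lockfile instead, since
+            # `bazel run @maven//:pin` rewrites maven_install.json rather
+            # than checking it.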
+ if check_only: + cmd = ['bazel', 'mod', 'deps', '--lockfile_mode=error'] + else: + cmd = ['bazel', 'run', '@maven//:pin'] + elif check_only: + cmd = ['bazel', 'mod', 'deps', '--lockfile_mode=error'] + else: + cmd = ['bazel', 'mod', 'tidy'] + + log.info('lock', check_only=check_only, cwd=str(work_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=work_dir, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Bump the version for a Bazel-managed package. + + Bazel packages store versions in ``MODULE.bazel`` + (``version = "x.y.z"``), ``version.bzl``, or build rule + attributes. The actual file rewrite is handled by the workspace + backend's ``rewrite_version`` method. This returns a synthetic + result indicating the version bump intent. + + Args: + package_dir: Path to the package directory. + new_version: New version string. + dry_run: Log the command without executing. + """ + log.info( + 'version_bump', + package=package_dir.name, + version=new_version, + ) + return CommandResult( + command=['bazel', 'version-bump', str(package_dir), new_version], + return_code=0, + stdout=f'Version {new_version} will be set by workspace backend.', + stderr='', + duration=0.0, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a published artifact is resolvable. + + - Java/Kotlin: ``bazel run @maven//:pin`` with artifact coords. + - Other: ``bazel fetch //...`` + + Args: + package_name: Artifact coordinates (e.g. ``com.example:core``). + version: Expected version. + index_url: Custom registry URL. + dry_run: Log the command without executing. + """ + if self._publish_mode in ('java_export', 'kt_jvm_export', 'mvn_deploy'): + cmd = [ + 'bazel', + 'run', + '@maven//:pin', + '--', + f'--artifact={package_name}:{version}', + ] + else: + cmd = ['bazel', 'fetch', '//...'] + + log.info('resolve_check', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a Bazel-built package via ``bazel test``. + + Args: + package_name: Package name or Bazel target path. + version: Version (logged but not used in the command). + dry_run: Log the command without executing. + """ + if ':' in package_name or '//' in package_name: + target = package_name + else: + target = f'//{package_name}:all' + + cmd = ['bazel', 'test', target] + + log.info('smoke_test', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + # ------------------------------------------------------------------ + # Private helpers + # ------------------------------------------------------------------ + + def _derive_publish_target(self, package_dir: Path) -> str: + """Derive the publish target label based on publish mode. 
+ + Convention by mode: + + - ``.publish`` modes: ``//pkg:pkg.publish`` + - ``dart_pub_publish``: ``//pkg:dart_pub_publish`` + - ``oci_push``: ``//pkg:push`` + - ``mvn_deploy`` / ``native_tool`` / ``custom``: ``//pkg:publish`` + """ + rel = _relative_label(self._root, package_dir) + pkg_name = package_dir.name + + if self._publish_mode in _DOT_PUBLISH_MODES: + return f'{rel}:{pkg_name}.publish' + + suffix = _MODE_TARGET_SUFFIX.get(self._publish_mode, 'publish') + return f'{rel}:{suffix}' + + +def _package_label(workspace_root: Path, package_dir: Path) -> str: + """Derive ``//pkg:all`` from an absolute package directory.""" + rel = _relative_label(workspace_root, package_dir) + return f'{rel}:all' + + +def _relative_label(workspace_root: Path, package_dir: Path) -> str: + """Derive ``//pkg`` from an absolute package directory.""" + try: + rel = package_dir.resolve().relative_to(workspace_root.resolve()) + return f'//{rel}' + except ValueError: + return f'//{package_dir.name}' + + +__all__ = [ + 'BazelBackend', + 'PUBLISH_MODES', +] diff --git a/py/tools/releasekit/src/releasekit/backends/pm/cargo.py b/py/tools/releasekit/src/releasekit/backends/pm/cargo.py new file mode 100644 index 0000000000..52f6a02d40 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/cargo.py @@ -0,0 +1,262 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Rust/Cargo package manager backend for releasekit. + +The :class:`CargoBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol via the +``cargo`` CLI (``cargo build``, ``cargo publish``, ``cargo test``, etc.). + +Rust crates are published to `crates.io `_ using +``cargo publish``. Authentication is handled via a token stored in +``~/.cargo/credentials.toml`` or the ``CARGO_REGISTRY_TOKEN`` +environment variable. + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. +""" + +from __future__ import annotations + +import asyncio +import os +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.cargo') + + +class CargoBackend: + """Rust :class:`~releasekit.backends.pm.PackageManager` implementation. + + Args: + workspace_root: Path to the Cargo workspace root (contains + ``Cargo.toml`` with ``[workspace]``). + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the workspace root path.""" + self._root = workspace_root + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Build a Rust crate using ``cargo build --release``. + + Args: + package_dir: Path to the crate directory containing ``Cargo.toml``. + output_dir: Target directory for build artifacts. + no_sources: Unused for Cargo (kept for protocol compatibility). 
+ dry_run: Log the command without executing. + """ + cmd = ['cargo', 'build', '--release'] + + # If package_dir differs from workspace root, use -p flag. + crate_name = _read_crate_name(package_dir) + if crate_name and os.path.realpath(package_dir) != os.path.realpath(self._root): # noqa: ASYNC240 - metadata-only path resolution + cmd.extend(['-p', crate_name]) + + if output_dir: + cmd.extend(['--target-dir', str(output_dir)]) + + log.info('build', package=package_dir.name) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """Publish a crate to crates.io using ``cargo publish``. + + Args: + dist_dir: Path to the crate directory (not a dist folder — + Cargo publishes from source). + check_url: Unused for Cargo. + index_url: Alternative registry URL (``--index``). + dist_tag: Unused for Cargo (no dist-tag concept). + publish_branch: Unused for Cargo. + provenance: Unused for Cargo. + dry_run: Run ``cargo publish --dry-run`` instead of actually + publishing. + """ + cmd = ['cargo', 'publish', '--no-verify'] + + crate_name = _read_crate_name(dist_dir) + if crate_name: + cmd.extend(['-p', crate_name]) + + if index_url: + cmd.extend(['--index', index_url]) + + if dry_run: + cmd.append('--dry-run') + + log.info('publish', package=dist_dir.name, dry_run=dry_run) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=False, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Run ``cargo update`` or ``cargo generate-lockfile``. + + Args: + check_only: If True, verify the lockfile is up-to-date + using ``cargo check``. + upgrade_package: Specific package to upgrade. + cwd: Working directory override. + dry_run: Log the command without executing. + """ + work_dir = cwd or self._root + if check_only: + cmd = ['cargo', 'check', '--locked'] + elif upgrade_package: + cmd = ['cargo', 'update', '-p', upgrade_package] + else: + cmd = ['cargo', 'update'] + + log.info('lock', check_only=check_only, cwd=str(work_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=work_dir, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Bump the version in ``Cargo.toml``. + + Uses ``cargo set-version`` from ``cargo-edit`` if available, + otherwise returns a synthetic result indicating manual edit + is needed (the workspace backend's ``rewrite_version`` handles + the actual file rewrite). 
+ """ + crate_name = _read_crate_name(package_dir) or package_dir.name + cmd = ['cargo', 'set-version', '--package', crate_name, new_version] + + log.info('version_bump', package=crate_name, version=new_version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a crate version is fetchable via ``cargo search``.""" + cmd = ['cargo', 'search', package_name, '--limit', '1'] + + if index_url: + cmd.extend(['--index', index_url]) + + log.info('resolve_check', crate=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a crate by running ``cargo test`` in the workspace.""" + cmd = ['cargo', 'test', '-p', package_name, '--release'] + + log.info('smoke_test', crate=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + +def _read_crate_name(crate_dir: Path) -> str | None: + """Read the crate name from ``Cargo.toml`` (best-effort). + + Does a simple text parse — no TOML library required for this + lightweight extraction. + """ + cargo_toml = crate_dir / 'Cargo.toml' + if not cargo_toml.is_file(): + return None + try: + in_package = False + for line in cargo_toml.read_text(encoding='utf-8').splitlines(): + stripped = line.strip() + if stripped == '[package]': + in_package = True + continue + if stripped.startswith('[') and in_package: + break + if in_package and stripped.startswith('name'): + # name = "foo" + _, _, value = stripped.partition('=') + return value.strip().strip('"').strip("'") + except OSError: + pass + return None + + +__all__ = [ + 'CargoBackend', +] diff --git a/py/tools/releasekit/src/releasekit/backends/pm/dart.py b/py/tools/releasekit/src/releasekit/backends/pm/dart.py new file mode 100644 index 0000000000..4f9a253283 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/dart.py @@ -0,0 +1,207 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Dart/Flutter package manager backend for releasekit. + +The :class:`DartBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol via the +``dart`` CLI (``dart pub publish``, ``dart pub get``, etc.). + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. 
+""" + +from __future__ import annotations + +import asyncio +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.dart') + + +class DartBackend: + """Dart :class:`~releasekit.backends.pm.PackageManager` implementation. + + Uses ``dart pub`` for package operations. Also supports Flutter + packages via the same ``dart pub`` interface. + + Args: + workspace_root: Path to the Dart workspace root. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the workspace root path.""" + self._root = workspace_root + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Dart packages don't have a separate build step for publishing. + + Runs ``dart pub get`` to ensure dependencies are resolved, which + is the closest equivalent to a pre-publish build step. + """ + cmd = ['dart', 'pub', 'get'] + + log.info('build', package=package_dir.name) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """Publish a Dart package using ``dart pub publish``. + + Args: + dist_dir: Path to the package directory (contains ``pubspec.yaml``). + check_url: Ignored (pub.dev has no check URL). + index_url: Custom pub server URL (``--server``). + dist_tag: Ignored (pub.dev has no dist-tag concept). + publish_branch: Ignored. + provenance: Ignored. + dry_run: If True, uses ``--dry-run`` flag. + """ + cmd = ['dart', 'pub', 'publish', '--force'] + if dry_run: + cmd.append('--dry-run') + if index_url: + cmd.extend(['--server', index_url]) + + log.info('publish', package=dist_dir.name, dry_run=dry_run) + return await asyncio.to_thread( + run_command, + cmd, + cwd=dist_dir, + dry_run=dry_run, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Run ``dart pub get`` or ``dart pub upgrade`` to resolve deps.""" + work_dir = cwd or self._root + if upgrade_package: + cmd = ['dart', 'pub', 'upgrade', upgrade_package] + elif check_only: + # dart pub get with --dry-run to check if deps are resolved. + cmd = ['dart', 'pub', 'get', '--dry-run'] + else: + cmd = ['dart', 'pub', 'get'] + + log.info('lock', check_only=check_only, cwd=str(work_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=work_dir, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Bump version in ``pubspec.yaml``. + + Dart doesn't have a built-in version bump command, so this + rewrites the ``version:`` field in ``pubspec.yaml`` directly. + The actual rewrite is handled by the workspace backend; this + method just validates the version is set. + """ + log.info( + 'version_bump', + package=package_dir.name, + version=new_version, + ) + # The workspace backend handles pubspec.yaml rewriting. + # This is a validation-only step. 
+ return CommandResult( + command=['dart', 'version-bump', new_version], + return_code=0, + stdout=f'Version {new_version} will be set in pubspec.yaml.', + stderr='', + duration=0.0, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a Dart package is available via ``dart pub cache add``.""" + cmd = ['dart', 'pub', 'cache', 'add', package_name, '--version', version] + + log.info('resolve_check', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a Dart package by adding it as a dependency.""" + cmd = ['dart', 'pub', 'cache', 'add', package_name, '--version', version] + + log.info('smoke_test', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + +__all__ = [ + 'DartBackend', +] diff --git a/py/tools/releasekit/src/releasekit/backends/pm/go.py b/py/tools/releasekit/src/releasekit/backends/pm/go.py new file mode 100644 index 0000000000..0d3f65a0ab --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/go.py @@ -0,0 +1,201 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Go package manager backend for releasekit. + +The :class:`GoBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol via the +``go`` CLI (``go build``, ``go test``, ``go mod tidy``, etc.). + +Go modules are published by tagging commits in VCS — there is no +separate ``publish`` step. The ``publish`` method is a no-op that +returns a success result with a note that Go uses VCS tags. + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. +""" + +from __future__ import annotations + +import asyncio +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.go') + + +class GoBackend: + """Go :class:`~releasekit.backends.pm.PackageManager` implementation. + + Args: + workspace_root: Path to the Go workspace root (contains ``go.work``). 
+ """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the workspace root path.""" + self._root = workspace_root + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Build a Go module using ``go build``.""" + cmd = ['go', 'build', './...'] + env_extra: dict[str, str] = {} + if output_dir: + env_extra['GOBIN'] = str(output_dir) + + log.info('build', package=package_dir.name) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """No-op: Go modules are published via VCS tags. + + Returns a synthetic success result. The actual publishing happens + when the VCS backend pushes tags — the Go module proxy + (``proxy.golang.org``) fetches modules from VCS automatically. + """ + log.info( + 'publish_noop', + reason='Go modules are published via VCS tags, not a registry upload.', + ) + return CommandResult( + command=['go', 'publish', '(noop)'], + return_code=0, + stdout='Go modules are published via VCS tags.', + stderr='', + duration=0.0, + dry_run=dry_run, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Run ``go mod tidy`` to synchronize go.sum.""" + work_dir = cwd or self._root + if check_only: + # go mod tidy doesn't have a check-only mode, but we can + # verify by running tidy and checking for changes. + cmd = ['go', 'mod', 'tidy'] + elif upgrade_package: + cmd = ['go', 'get', '-u', upgrade_package] + else: + cmd = ['go', 'mod', 'tidy'] + + log.info('lock', check_only=check_only, cwd=str(work_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=work_dir, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """No-op: Go module versions are determined by VCS tags. + + Go modules don't have a version field in ``go.mod`` — the version + is derived from the git tag. Returns a synthetic success result. 
+ """ + log.info( + 'version_bump_noop', + package=package_dir.name, + version=new_version, + reason='Go module versions are set by VCS tags.', + ) + return CommandResult( + command=['go', 'version-bump', '(noop)'], + return_code=0, + stdout=f'Go version {new_version} will be set by VCS tag.', + stderr='', + duration=0.0, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a Go module version is fetchable via ``go list``.""" + module_version = f'{package_name}@v{version}' + cmd = ['go', 'list', '-m', module_version] + + log.info('resolve_check', module=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a Go module by fetching it with ``go get``.""" + module_version = f'{package_name}@v{version}' + cmd = ['go', 'get', module_version] + + log.info('smoke_test', module=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + +__all__ = [ + 'GoBackend', +] diff --git a/py/tools/releasekit/src/releasekit/backends/pm/maturin.py b/py/tools/releasekit/src/releasekit/backends/pm/maturin.py new file mode 100644 index 0000000000..1a51871e79 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/maturin.py @@ -0,0 +1,281 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Maturin package manager backend for releasekit. + +The :class:`MaturinBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol for Python +packages that use `maturin `_ as their build +backend (i.e. Rust+Python hybrid packages via PyO3). + +Maturin handles: + +- **Building**: ``maturin build --release`` produces platform-specific + wheels containing compiled Rust extensions. +- **Publishing**: ``uv publish`` uploads the built wheels to PyPI + (maturin itself delegates to twine/uv for upload). +- **Locking**: ``uv lock`` manages the Python-side lockfile. + +Typical project layout:: + + my-package/ + ├── pyproject.toml ← build-backend = "maturin" + ├── Cargo.toml ← Rust crate metadata + ├── src/ + │ ├── lib.rs ← Rust source (PyO3 bindings) + │ └── my_package/ + │ ├── __init__.py + │ └── _native.pyi + └── tests/ + └── test_basic.py + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. 
+""" + +from __future__ import annotations + +import asyncio +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.maturin') + + +class MaturinBackend: + """Maturin :class:`~releasekit.backends.pm.PackageManager` implementation. + + Uses ``maturin build`` for building platform wheels and ``uv publish`` + for uploading to PyPI. Version bumps and lockfile management are + handled via ``uv`` since the Python-side metadata lives in + ``pyproject.toml``. + + Args: + workspace_root: Path to the workspace root (contains the root + ``pyproject.toml`` with ``[tool.uv.workspace]``). + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the workspace root path.""" + self._root = workspace_root + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Build a maturin package using ``maturin build --release``. + + Produces platform-specific wheels containing compiled Rust + extensions linked to the Python module. + + Args: + package_dir: Path to the package directory containing both + ``pyproject.toml`` (with ``build-backend = "maturin"``) + and ``Cargo.toml``. + output_dir: Directory to place built wheels. Defaults to + ``target/wheels/`` within the package directory. + no_sources: Unused for maturin (kept for protocol compat). + dry_run: Log the command without executing. + """ + cmd = ['maturin', 'build', '--release'] + + if output_dir: + cmd.extend(['--out', str(output_dir)]) + + # Build an sdist alongside the wheel for source distribution. + cmd.append('--sdist') + + log.info('build', package=package_dir.name) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """Publish built wheels using ``uv publish``. + + Maturin builds produce wheels in the output directory. We use + ``uv publish`` to upload them to PyPI (same as the uv backend). + + Args: + dist_dir: Directory containing built ``.whl`` and ``.tar.gz`` + files from ``maturin build``. + check_url: Index URL to check for existing files (skips + duplicates). + index_url: Upload endpoint URL. Mapped to + ``uv publish --publish-url``. + dist_tag: Ignored (npm-only, accepted for protocol compat). + publish_branch: Ignored (npm-only, accepted for protocol compat). + provenance: Ignored (npm-only, accepted for protocol compat). + dry_run: Perform a dry run without uploading. + """ + cmd = ['uv', 'publish'] + if check_url: + cmd.extend(['--check-url', check_url]) + if index_url: + cmd.extend(['--publish-url', index_url]) + cmd.append(str(dist_dir)) + + log.info('publish', dist_dir=str(dist_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Update or verify the lock file using ``uv lock``. + + Maturin packages still use ``uv lock`` for Python-side + dependency management. + + Args: + check_only: Only verify the lock file is up-to-date. 
+ upgrade_package: Upgrade a specific package in the lock file. + cwd: Working directory override. + dry_run: Log the command without executing. + """ + cmd = ['uv', 'lock'] + if check_only: + cmd.append('--check') + if upgrade_package: + cmd.extend(['--upgrade-package', upgrade_package]) + + effective_cwd = cwd or self._root + log.info('lock', check_only=check_only, upgrade_package=upgrade_package) + return await asyncio.to_thread( + run_command, + cmd, + cwd=effective_cwd, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Bump the version in both ``pyproject.toml`` and ``Cargo.toml``. + + Uses ``uv version`` for the Python-side version. The Rust-side + ``Cargo.toml`` version is handled by the workspace backend's + ``rewrite_version`` method (or ``cargo set-version`` if + ``cargo-edit`` is installed). + + Args: + package_dir: Path to the package directory. + new_version: New version string (PEP 440). + dry_run: Log the command without executing. + """ + cmd = ['uv', 'version', '--frozen', new_version] + + log.info( + 'version_bump', + package=package_dir.name, + version=new_version, + ) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a published package resolves using ``uv pip install --dry-run``.""" + cmd = ['uv', 'pip', 'install', '--dry-run', f'{package_name}=={version}'] + if index_url: + cmd.extend(['--default-index', index_url]) + + log.info('resolve_check', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a published maturin package via ``uv run --with``. + + Verifies the native extension loads correctly by importing the + package and printing its version. + """ + import_name = package_name.replace('-', '_') + cmd = [ + 'uv', + 'run', + '--no-project', + '--with', + f'{package_name}=={version}', + 'python', + '-c', + f'import {import_name}; print({import_name}.__version__)', + ] + + log.info('smoke_test', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + +__all__ = [ + 'MaturinBackend', +] diff --git a/py/tools/releasekit/src/releasekit/backends/pm/maven.py b/py/tools/releasekit/src/releasekit/backends/pm/maven.py new file mode 100644 index 0000000000..5e67dfdcc6 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/pm/maven.py @@ -0,0 +1,269 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Maven/Gradle package manager backend for releasekit. 
+ +The :class:`MavenBackend` implements the +:class:`~releasekit.backends.pm.PackageManager` protocol via the +``mvn`` CLI (``mvn package``, ``mvn deploy``, etc.). + +Also supports Gradle-based projects by detecting ``build.gradle`` +and using ``./gradlew`` instead of ``mvn``. + +All methods are async — blocking subprocess calls are dispatched to +``asyncio.to_thread()`` to avoid blocking the event loop. +""" + +from __future__ import annotations + +import asyncio +from pathlib import Path + +from releasekit.backends._run import CommandResult, run_command +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.pm.maven') + + +class MavenBackend: + """Java :class:`~releasekit.backends.pm.PackageManager` implementation. + + Supports both Maven (``pom.xml``) and Gradle (``build.gradle``) + projects. Detects the build tool based on the presence of + ``pom.xml`` or ``build.gradle`` in the package directory. + + Args: + workspace_root: Path to the Java workspace root. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the workspace root path.""" + self._root = workspace_root + + @staticmethod + def _is_gradle(package_dir: Path) -> bool: + """Check if the package uses Gradle.""" + return (package_dir / 'build.gradle').is_file() or (package_dir / 'build.gradle.kts').is_file() + + @staticmethod + def _gradle_cmd(package_dir: Path) -> str: + """Return the Gradle wrapper command if available, else 'gradle'.""" + wrapper = package_dir / 'gradlew' + if wrapper.is_file(): + return str(wrapper) + return 'gradle' + + async def build( + self, + package_dir: Path, + *, + output_dir: Path | None = None, + no_sources: bool = True, + dry_run: bool = False, + ) -> CommandResult: + """Build a Java package using Maven or Gradle.""" + if self._is_gradle(package_dir): + gradle = self._gradle_cmd(package_dir) + cmd = [gradle, 'build', '-x', 'test'] + else: + cmd = ['mvn', 'package', '-DskipTests'] + + log.info('build', package=package_dir.name) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def publish( + self, + dist_dir: Path, + *, + check_url: str | None = None, + index_url: str | None = None, + dist_tag: str | None = None, + publish_branch: str | None = None, + provenance: bool = False, + dry_run: bool = False, + ) -> CommandResult: + """Publish a Java package using Maven deploy or Gradle publish. + + Args: + dist_dir: Path to the package directory. + check_url: Ignored. + index_url: Custom Maven repository URL. For Gradle projects + this is passed as ``-PmavenUrl=`` (the ``build.gradle`` + must read the property to configure the repository). + dist_tag: Ignored (Maven has no dist-tag concept). + publish_branch: Ignored. + provenance: Ignored. + dry_run: If True, logs the command without executing. + """ + if self._is_gradle(dist_dir): + gradle = self._gradle_cmd(dist_dir) + cmd = [gradle, 'publish'] + if index_url: + cmd.append(f'-PmavenUrl={index_url}') + else: + cmd = ['mvn', 'deploy', '-DskipTests'] + if index_url: + cmd.append(f'-DaltDeploymentRepository=releasekit::default::{index_url}') + + log.info('publish', package=dist_dir.name, dry_run=dry_run) + return await asyncio.to_thread( + run_command, + cmd, + cwd=dist_dir, + dry_run=dry_run, + ) + + async def lock( + self, + *, + check_only: bool = False, + upgrade_package: str | None = None, + cwd: Path | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Resolve dependencies using Maven or Gradle. 
+ + Args: + check_only: For Gradle, run ``dependencies`` without + ``--refresh-dependencies`` (a read-only check). + For Maven, run ``dependency:resolve`` either way. + upgrade_package: Refresh a specific dependency. For Gradle + this is ignored (Gradle refreshes all or nothing). + For Maven, this is ignored (Maven resolves all). + cwd: Working directory. + dry_run: Log the command without executing. + """ + work_dir = cwd or self._root + if self._is_gradle(work_dir): + gradle = self._gradle_cmd(work_dir) + if check_only: + cmd = [gradle, 'dependencies'] + else: + cmd = [gradle, 'dependencies', '--refresh-dependencies'] + else: + cmd = ['mvn', 'dependency:resolve'] + + log.info('lock', check_only=check_only, cwd=str(work_dir)) + return await asyncio.to_thread( + run_command, + cmd, + cwd=work_dir, + dry_run=dry_run, + ) + + async def version_bump( + self, + package_dir: Path, + new_version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Bump version in ``pom.xml`` using Maven versions plugin. + + For Gradle projects, version bumping is handled by the workspace + backend rewriting ``build.gradle`` directly. + """ + if self._is_gradle(package_dir): + log.info( + 'version_bump_gradle', + package=package_dir.name, + version=new_version, + reason='Gradle version set via workspace backend.', + ) + return CommandResult( + command=['gradle', 'version-bump', new_version], + return_code=0, + stdout=f'Version {new_version} will be set in build.gradle.', + stderr='', + duration=0.0, + dry_run=dry_run, + ) + + cmd = [ + 'mvn', + 'versions:set', + f'-DnewVersion={new_version}', + '-DgenerateBackupPoms=false', + ] + + log.info('version_bump', package=package_dir.name, version=new_version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=package_dir, + dry_run=dry_run, + ) + + async def resolve_check( + self, + package_name: str, + version: str, + *, + index_url: str | None = None, + dry_run: bool = False, + ) -> CommandResult: + """Verify a Maven/Gradle artifact is resolvable. + + For Maven, uses ``mvn dependency:get``. + For Gradle, uses ``gradle dependencyInsight`` to query the + dependency from the configured repositories. 
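+
+        For example, for the (hypothetical) coordinates
+        ``com.example:demo-lib`` at version ``1.2.3``, the Maven form is::
+
+            mvn dependency:get -Dartifact=com.example:demo-lib:1.2.3 \
+                -Dtransitive=false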
+ """ + if self._is_gradle(self._root): + gradle = self._gradle_cmd(self._root) + cmd = [ + gradle, + 'dependencyInsight', + f'--dependency={package_name}', + ] + else: + cmd = [ + 'mvn', + 'dependency:get', + f'-Dartifact={package_name}:{version}', + '-Dtransitive=false', + ] + + log.info('resolve_check', package=package_name, version=version) + return await asyncio.to_thread( + run_command, + cmd, + cwd=self._root, + dry_run=dry_run, + ) + + async def smoke_test( + self, + package_name: str, + version: str, + *, + dry_run: bool = False, + ) -> CommandResult: + """Smoke-test a Maven/Gradle artifact by resolving it.""" + return await self.resolve_check( + package_name, + version, + dry_run=dry_run, + ) + + +__all__ = [ + 'MavenBackend', +] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/__init__.py b/py/tools/releasekit/src/releasekit/backends/registry/__init__.py index 791d553704..ef84eb74c4 100644 --- a/py/tools/releasekit/src/releasekit/backends/registry/__init__.py +++ b/py/tools/releasekit/src/releasekit/backends/registry/__init__.py @@ -22,6 +22,10 @@ - :class:`~releasekit.backends.registry.pypi.PyPIBackend` — PyPI JSON API - :class:`~releasekit.backends.registry.npm.NpmRegistry` — npm registry API +- :class:`~releasekit.backends.registry.goproxy.GoProxyCheck` — Go module proxy +- :class:`~releasekit.backends.registry.pubdev.PubDevRegistry` — pub.dev API +- :class:`~releasekit.backends.registry.maven_central.MavenCentralRegistry` — Maven Central Search API +- :class:`~releasekit.backends.registry.crates_io.CratesIoRegistry` — crates.io API Operations are async because they involve network I/O with potential latency and rate limiting. @@ -32,12 +36,20 @@ from typing import Protocol, runtime_checkable from releasekit.backends.registry._types import ChecksumResult as ChecksumResult +from releasekit.backends.registry.crates_io import CratesIoRegistry as CratesIoRegistry +from releasekit.backends.registry.goproxy import GoProxyCheck as GoProxyCheck +from releasekit.backends.registry.maven_central import MavenCentralRegistry as MavenCentralRegistry from releasekit.backends.registry.npm import NpmRegistry as NpmRegistry +from releasekit.backends.registry.pubdev import PubDevRegistry as PubDevRegistry from releasekit.backends.registry.pypi import PyPIBackend as PyPIBackend __all__ = [ 'ChecksumResult', + 'CratesIoRegistry', + 'GoProxyCheck', + 'MavenCentralRegistry', 'NpmRegistry', + 'PubDevRegistry', 'PyPIBackend', 'Registry', ] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/crates_io.py b/py/tools/releasekit/src/releasekit/backends/registry/crates_io.py new file mode 100644 index 0000000000..66832db74e --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/registry/crates_io.py @@ -0,0 +1,202 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""crates.io registry backend for releasekit. 
+ +The :class:`CratesIoRegistry` implements the +:class:`~releasekit.backends.registry.Registry` protocol using the +`crates.io API `_. + +API endpoints used:: + + GET /api/v1/crates/{name} → crate metadata + versions + GET /api/v1/crates/{name}/{version} → specific version info + +All methods are async because they involve network I/O with potential +latency and rate limiting. +""" + +from __future__ import annotations + +import asyncio +import time + +from releasekit.backends.registry._types import ChecksumResult +from releasekit.logging import get_logger +from releasekit.net import DEFAULT_POOL_SIZE, DEFAULT_TIMEOUT, http_client, request_with_retry + +log = get_logger('releasekit.backends.registry.crates_io') + + +class CratesIoRegistry: + """crates.io :class:`~releasekit.backends.registry.Registry` implementation. + + Queries the crates.io API to check crate publication status, + poll for availability, and retrieve version metadata. + + Args: + base_url: Base URL for the crates.io API. Defaults to + ``crates.io``. Use :data:`TEST_BASE_URL` for a local + Alexandrie or similar test registry. + pool_size: HTTP connection pool size. + timeout: HTTP request timeout in seconds. + """ + + #: Base URL for the production crates.io registry. + DEFAULT_BASE_URL: str = 'https://crates.io' + #: Base URL for a local Alexandrie test registry (common default). + TEST_BASE_URL: str = 'http://localhost:3000' + + def __init__( + self, + *, + base_url: str = 'https://crates.io', + pool_size: int = DEFAULT_POOL_SIZE, + timeout: float = DEFAULT_TIMEOUT, + ) -> None: + """Initialize with crates.io base URL, pool size, and timeout.""" + self._base_url = base_url.rstrip('/') + self._pool_size = pool_size + self._timeout = timeout + + async def check_published(self, package_name: str, version: str) -> bool: + """Check if a specific crate version is published on crates.io. + + Args: + package_name: Crate name (e.g. ``serde``). + version: Version string (e.g. ``1.0.200``). + """ + url = f'{self._base_url}/api/v1/crates/{package_name}/{version}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + available = response.status_code == 200 + if available: + log.info( + 'crate_version_available', + crate=package_name, + version=version, + ) + else: + log.debug( + 'crate_version_not_found', + crate=package_name, + version=version, + status=response.status_code, + ) + return available + + async def poll_available( + self, + package_name: str, + version: str, + *, + timeout: float = 300.0, + interval: float = 10.0, + ) -> bool: + """Poll crates.io until a version appears or timeout is reached. + + After ``cargo publish``, there may be a short delay before the + version is visible on the API. 
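+
+        A minimal usage sketch (the crate name is illustrative)::
+
+            registry = CratesIoRegistry()
+            ok = await registry.poll_available('my-crate', '1.0.0', timeout=120.0)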
+ """ + interval = max(1.0, min(interval, 60.0)) + timeout = max(10.0, min(timeout, 3600.0)) + + deadline = time.monotonic() + timeout + attempt = 0 + + while time.monotonic() < deadline: + attempt += 1 + available = await self.check_published(package_name, version) + if available: + log.info( + 'version_available', + crate=package_name, + version=version, + attempts=attempt, + ) + return True + + remaining = deadline - time.monotonic() + wait = min(interval, remaining) + if wait > 0: + log.debug( + 'poll_waiting', + crate=package_name, + version=version, + attempt=attempt, + wait=wait, + ) + await asyncio.sleep(wait) + + log.warning( + 'poll_timeout', + crate=package_name, + version=version, + timeout=timeout, + attempts=attempt, + ) + return False + + async def project_exists(self, package_name: str) -> bool: + """Check if a crate exists on crates.io (any version).""" + url = f'{self._base_url}/api/v1/crates/{package_name}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + return response.status_code == 200 + + async def latest_version(self, package_name: str) -> str | None: + """Query crates.io for the latest non-yanked version of a crate.""" + url = f'{self._base_url}/api/v1/crates/{package_name}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return None + try: + data = response.json() + # The crate object has a max_stable_version field. + version = data.get('crate', {}).get('max_stable_version') + if version: + return str(version) + # Fallback: newest_version includes pre-releases. + version = data.get('crate', {}).get('newest_version') + return str(version) if version else None + except (ValueError, KeyError): + log.warning('crates_io_parse_error', crate=package_name) + return None + + async def verify_checksum( + self, + package_name: str, + version: str, + local_checksums: dict[str, str], + ) -> ChecksumResult: + """Checksum verification for crates.io. + + crates.io provides a ``cksum`` (SHA-256) for each published + version in the version metadata. Per-file checksums are not + available — returns missing for all local files. + """ + log.info( + 'checksum_noop', + crate=package_name, + reason='crates.io provides per-crate checksums, not per-file.', + ) + return ChecksumResult(missing=list(local_checksums.keys())) + + +__all__ = [ + 'CratesIoRegistry', +] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/goproxy.py b/py/tools/releasekit/src/releasekit/backends/registry/goproxy.py new file mode 100644 index 0000000000..f7e8c38918 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/registry/goproxy.py @@ -0,0 +1,208 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Go module proxy registry backend for releasekit. 
+ +The :class:`GoProxyCheck` implements the +:class:`~releasekit.backends.registry.Registry` protocol as a read-only +check against the Go module proxy (``proxy.golang.org``). + +Go modules are not uploaded to a registry — they are fetched from VCS +by the proxy on first request. This backend verifies that a tagged +version is available on the proxy by querying its HTTP API:: + + GET https://proxy.golang.org/{module}/@v/{version}.info + +A 200 response means the version is cached and available. +""" + +from __future__ import annotations + +import asyncio +import time + +from releasekit.backends.registry._types import ChecksumResult +from releasekit.logging import get_logger +from releasekit.net import DEFAULT_POOL_SIZE, DEFAULT_TIMEOUT, http_client, request_with_retry + +log = get_logger('releasekit.backends.registry.goproxy') + + +class GoProxyCheck: + """Read-only :class:`~releasekit.backends.registry.Registry` for Go modules. + + Queries the Go module proxy to verify that a tagged version is + available for download. + + Args: + base_url: Base URL for the Go module proxy. Defaults to + ``proxy.golang.org``. Use :data:`TEST_BASE_URL` for the + Go sum test database or a local Athens proxy. + pool_size: HTTP connection pool size. + timeout: HTTP request timeout in seconds. + """ + + #: Base URL for the production Go module proxy. + DEFAULT_BASE_URL: str = 'https://proxy.golang.org' + #: Base URL for a local Athens proxy (common test setup). + TEST_BASE_URL: str = 'http://localhost:3000' + + def __init__( + self, + *, + base_url: str = 'https://proxy.golang.org', + pool_size: int = DEFAULT_POOL_SIZE, + timeout: float = DEFAULT_TIMEOUT, + ) -> None: + """Initialize with proxy base URL, pool size, and timeout.""" + self._base_url = base_url.rstrip('/') + self._pool_size = pool_size + self._timeout = timeout + + async def check_published(self, package_name: str, version: str) -> bool: + """Check if a Go module version is available on the proxy. + + Args: + package_name: Full Go module path (e.g. + ``github.com/firebase/genkit/go/genkit``). + version: Version string without ``v`` prefix (e.g. ``0.5.0``). + """ + url = f'{self._base_url}/{package_name}/@v/v{version}.info' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + available = response.status_code == 200 + if available: + log.info( + 'module_available', + module=package_name, + version=version, + ) + else: + log.debug( + 'module_not_found', + module=package_name, + version=version, + status=response.status_code, + ) + return available + + async def poll_available( + self, + package_name: str, + version: str, + *, + timeout: float = 300.0, + interval: float = 10.0, + ) -> bool: + """Poll the Go proxy until a version appears or timeout is reached. + + The Go proxy may take a few minutes to cache a newly tagged + version from VCS. This method polls until it appears. 
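+
+        Usage sketch (the module path is illustrative; note the version is
+        passed without the ``v`` prefix)::
+
+            proxy = GoProxyCheck()
+            ok = await proxy.poll_available(
+                'github.com/example/mymodule', '0.5.0',
+            )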
+ """ + interval = max(1.0, min(interval, 60.0)) + timeout = max(10.0, min(timeout, 3600.0)) + + deadline = time.monotonic() + timeout + attempt = 0 + + while time.monotonic() < deadline: + attempt += 1 + available = await self.check_published(package_name, version) + if available: + log.info( + 'version_available', + module=package_name, + version=version, + attempts=attempt, + ) + return True + + remaining = deadline - time.monotonic() + wait = min(interval, remaining) + if wait > 0: + log.debug( + 'poll_waiting', + module=package_name, + version=version, + attempt=attempt, + wait=wait, + ) + await asyncio.sleep(wait) + + log.warning( + 'poll_timeout', + module=package_name, + version=version, + timeout=timeout, + attempts=attempt, + ) + return False + + async def project_exists(self, package_name: str) -> bool: + """Check if a Go module exists on the proxy (any version). + + Queries the ``/@v/list`` endpoint which returns known versions. + """ + url = f'{self._base_url}/{package_name}/@v/list' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return False + # The list endpoint returns one version per line. + return bool(response.text.strip()) + + async def latest_version(self, package_name: str) -> str | None: + """Query the Go proxy for the latest version of a module. + + Uses the ``/@latest`` endpoint. + """ + url = f'{self._base_url}/{package_name}/@latest' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return None + try: + data = response.json() + version = data.get('Version', '') + # Strip leading 'v' prefix. + return version.lstrip('v') if version else None + except (ValueError, KeyError): + log.warning('goproxy_parse_error', module=package_name) + return None + + async def verify_checksum( + self, + package_name: str, + version: str, + local_checksums: dict[str, str], + ) -> ChecksumResult: + """Checksum verification is not applicable for Go modules. + + Go modules use ``go.sum`` for integrity verification, which is + handled by the ``go`` toolchain itself. Returns a result with + all files marked as missing (not checked). + """ + log.info( + 'checksum_noop', + module=package_name, + reason='Go modules use go.sum for integrity, not registry checksums.', + ) + return ChecksumResult(missing=list(local_checksums.keys())) + + +__all__ = [ + 'GoProxyCheck', +] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/maven_central.py b/py/tools/releasekit/src/releasekit/backends/registry/maven_central.py new file mode 100644 index 0000000000..7f2146397a --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/registry/maven_central.py @@ -0,0 +1,217 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Maven Central registry backend for releasekit. + +The :class:`MavenCentralRegistry` implements the +:class:`~releasekit.backends.registry.Registry` protocol using the +Maven Central Search API (``https://search.maven.org``). + +Maven artifact coordinates use the format ``groupId:artifactId``. +The ``package_name`` parameter should be in this format. + +All methods are async because they involve network I/O. +""" + +from __future__ import annotations + +import asyncio +import time + +from releasekit.backends.registry._types import ChecksumResult +from releasekit.logging import get_logger +from releasekit.net import DEFAULT_POOL_SIZE, DEFAULT_TIMEOUT, http_client, request_with_retry + +log = get_logger('releasekit.backends.registry.maven_central') + + +class MavenCentralRegistry: + """Maven Central :class:`~releasekit.backends.registry.Registry` implementation. + + Args: + base_url: Base URL for the Maven Central Search API. Defaults + to ``search.maven.org``. Use :data:`TEST_BASE_URL` for a + local Nexus or Reposilite staging instance. + pool_size: HTTP connection pool size. + timeout: HTTP request timeout in seconds. + """ + + #: Base URL for the production Maven Central Search API. + DEFAULT_BASE_URL: str = 'https://search.maven.org' + #: Base URL for a local Nexus/Reposilite test registry (common default). + TEST_BASE_URL: str = 'http://localhost:8081' + + def __init__( + self, + *, + base_url: str = 'https://search.maven.org', + pool_size: int = DEFAULT_POOL_SIZE, + timeout: float = DEFAULT_TIMEOUT, + ) -> None: + """Initialize with Maven Central base URL, pool size, and timeout.""" + self._base_url = base_url.rstrip('/') + self._pool_size = pool_size + self._timeout = timeout + + @staticmethod + def _parse_coordinates(package_name: str) -> tuple[str, str]: + """Parse ``groupId:artifactId`` into a tuple. + + Args: + package_name: Maven coordinates (e.g. ``com.google.genkit:genkit-core``). + + Returns: + Tuple of (groupId, artifactId). + """ + parts = package_name.split(':', 1) + if len(parts) == 2: + return parts[0], parts[1] + # If no colon, treat the whole string as artifactId. + return '', parts[0] + + async def check_published(self, package_name: str, version: str) -> bool: + """Check if a specific version exists on Maven Central. + + Args: + package_name: Maven coordinates ``groupId:artifactId``. + version: Version string to check. + """ + group_id, artifact_id = self._parse_coordinates(package_name) + query = f'g:"{group_id}" AND a:"{artifact_id}" AND v:"{version}"' + url = f'{self._base_url}/solrsearch/select?q={query}&rows=1&wt=json' + + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return False + try: + data = response.json() + num_found = data.get('response', {}).get('numFound', 0) + return num_found > 0 + except (ValueError, KeyError): + return False + + async def poll_available( + self, + package_name: str, + version: str, + *, + timeout: float = 600.0, + interval: float = 30.0, + ) -> bool: + """Poll Maven Central until the version appears or timeout is reached. + + Maven Central indexing can take 10-30 minutes after deployment, + so the default timeout and interval are longer than PyPI/npm. 
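+
+        Usage sketch (the coordinates are illustrative)::
+
+            registry = MavenCentralRegistry()
+            ok = await registry.poll_available(
+                'com.example:demo-lib', '1.2.3', timeout=1800.0,
+            )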
+ """ + interval = max(5.0, min(interval, 120.0)) + timeout = max(30.0, min(timeout, 7200.0)) + + deadline = time.monotonic() + timeout + attempt = 0 + + while time.monotonic() < deadline: + attempt += 1 + available = await self.check_published(package_name, version) + if available: + log.info( + 'version_available', + artifact=package_name, + version=version, + attempts=attempt, + ) + return True + + remaining = deadline - time.monotonic() + wait = min(interval, remaining) + if wait > 0: + log.debug( + 'poll_waiting', + artifact=package_name, + version=version, + attempt=attempt, + wait=wait, + ) + await asyncio.sleep(wait) + + log.warning( + 'poll_timeout', + artifact=package_name, + version=version, + timeout=timeout, + attempts=attempt, + ) + return False + + async def project_exists(self, package_name: str) -> bool: + """Check if the artifact exists on Maven Central (any version).""" + group_id, artifact_id = self._parse_coordinates(package_name) + query = f'g:"{group_id}" AND a:"{artifact_id}"' + url = f'{self._base_url}/solrsearch/select?q={query}&rows=1&wt=json' + + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return False + try: + data = response.json() + return data.get('response', {}).get('numFound', 0) > 0 + except (ValueError, KeyError): + return False + + async def latest_version(self, package_name: str) -> str | None: + """Query Maven Central for the latest version of an artifact.""" + group_id, artifact_id = self._parse_coordinates(package_name) + query = f'g:"{group_id}" AND a:"{artifact_id}"' + url = f'{self._base_url}/solrsearch/select?q={query}&rows=1&wt=json&core=gav' + + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return None + try: + data = response.json() + docs = data.get('response', {}).get('docs', []) + if docs: + return docs[0].get('v') + return None + except (ValueError, KeyError): + log.warning('maven_central_parse_error', artifact=package_name) + return None + + async def verify_checksum( + self, + package_name: str, + version: str, + local_checksums: dict[str, str], + ) -> ChecksumResult: + """Verify checksums against Maven Central. + + Maven Central provides SHA-1 checksums for artifacts. For now, + this returns all files as missing since the search API doesn't + expose per-file SHA-256 checksums directly. + """ + log.info( + 'checksum_noop', + artifact=package_name, + reason='Maven Central checksum verification requires direct repo access.', + ) + return ChecksumResult(missing=list(local_checksums.keys())) + + +__all__ = [ + 'MavenCentralRegistry', +] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/npm.py b/py/tools/releasekit/src/releasekit/backends/registry/npm.py index 3ab9b80761..9811657374 100644 --- a/py/tools/releasekit/src/releasekit/backends/registry/npm.py +++ b/py/tools/releasekit/src/releasekit/backends/registry/npm.py @@ -68,12 +68,18 @@ class NpmRegistry: protocol using the npm registry JSON API. Args: - base_url: Base URL for the registry API. Defaults to - ``https://registry.npmjs.org``. + base_url: Base URL for the npm registry API. Defaults to + public npm. Use :data:`TEST_BASE_URL` for a local + Verdaccio or similar test registry. pool_size: HTTP connection pool size. timeout: HTTP request timeout in seconds. 
""" + #: Base URL for the production npm registry. + DEFAULT_BASE_URL: str = 'https://registry.npmjs.org' + #: Base URL for a local Verdaccio test registry (common default). + TEST_BASE_URL: str = 'http://localhost:4873' + def __init__( self, *, diff --git a/py/tools/releasekit/src/releasekit/backends/registry/pubdev.py b/py/tools/releasekit/src/releasekit/backends/registry/pubdev.py new file mode 100644 index 0000000000..456c1bf3ee --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/registry/pubdev.py @@ -0,0 +1,165 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""pub.dev registry backend for releasekit. + +The :class:`PubDevRegistry` implements the +:class:`~releasekit.backends.registry.Registry` protocol using the +pub.dev API (``https://pub.dev/api/packages/``). + +All methods are async because they involve network I/O. +""" + +from __future__ import annotations + +import asyncio +import time + +from releasekit.backends.registry._types import ChecksumResult +from releasekit.logging import get_logger +from releasekit.net import DEFAULT_POOL_SIZE, DEFAULT_TIMEOUT, http_client, request_with_retry + +log = get_logger('releasekit.backends.registry.pubdev') + + +class PubDevRegistry: + """pub.dev :class:`~releasekit.backends.registry.Registry` implementation. + + Args: + base_url: Base URL for the pub.dev API. Defaults to public + pub.dev. Use :data:`TEST_BASE_URL` for a local + ``dart_pub_server`` or similar test registry. + pool_size: HTTP connection pool size. + timeout: HTTP request timeout in seconds. + """ + + #: Base URL for the production pub.dev registry. + DEFAULT_BASE_URL: str = 'https://pub.dev' + #: Base URL for a local dart_pub_server test registry (common default). 
+ TEST_BASE_URL: str = 'http://localhost:8080' + + def __init__( + self, + *, + base_url: str = 'https://pub.dev', + pool_size: int = DEFAULT_POOL_SIZE, + timeout: float = DEFAULT_TIMEOUT, + ) -> None: + """Initialize with pub.dev base URL, pool size, and timeout.""" + self._base_url = base_url.rstrip('/') + self._pool_size = pool_size + self._timeout = timeout + + async def check_published(self, package_name: str, version: str) -> bool: + """Check if a specific version exists on pub.dev.""" + url = f'{self._base_url}/api/packages/{package_name}/versions/{version}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + return response.status_code == 200 + + async def poll_available( + self, + package_name: str, + version: str, + *, + timeout: float = 300.0, + interval: float = 5.0, + ) -> bool: + """Poll pub.dev until the version appears or timeout is reached.""" + interval = max(1.0, min(interval, 60.0)) + timeout = max(10.0, min(timeout, 3600.0)) + + deadline = time.monotonic() + timeout + attempt = 0 + + while time.monotonic() < deadline: + attempt += 1 + available = await self.check_published(package_name, version) + if available: + log.info( + 'version_available', + package=package_name, + version=version, + attempts=attempt, + ) + return True + + remaining = deadline - time.monotonic() + wait = min(interval, remaining) + if wait > 0: + log.debug( + 'poll_waiting', + package=package_name, + version=version, + attempt=attempt, + wait=wait, + ) + await asyncio.sleep(wait) + + log.warning( + 'poll_timeout', + package=package_name, + version=version, + timeout=timeout, + attempts=attempt, + ) + return False + + async def project_exists(self, package_name: str) -> bool: + """Check if the package exists on pub.dev (any version).""" + url = f'{self._base_url}/api/packages/{package_name}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + return response.status_code == 200 + + async def latest_version(self, package_name: str) -> str | None: + """Query pub.dev for the latest version of a package.""" + url = f'{self._base_url}/api/packages/{package_name}' + async with http_client(pool_size=self._pool_size, timeout=self._timeout) as client: + response = await request_with_retry(client, 'GET', url) + if response.status_code != 200: + return None + try: + data = response.json() + return data.get('latest', {}).get('version') + except (ValueError, KeyError): + log.warning('pubdev_parse_error', package=package_name) + return None + + async def verify_checksum( + self, + package_name: str, + version: str, + local_checksums: dict[str, str], + ) -> ChecksumResult: + """Checksum verification against pub.dev. + + pub.dev provides SHA-256 checksums in the archive URL response. + For now, this returns all files as missing (not checked) since + pub.dev's API doesn't expose per-file checksums in the same way + as PyPI. 
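+
+        For example (with an illustrative file name), passing
+        ``{'demo-1.0.0.tar.gz': 'abc123'}`` yields a result whose
+        ``missing`` list is ``['demo-1.0.0.tar.gz']``.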
+ """ + log.info( + 'checksum_noop', + package=package_name, + reason='pub.dev does not expose per-file checksums via API.', + ) + return ChecksumResult(missing=list(local_checksums.keys())) + + +__all__ = [ + 'PubDevRegistry', +] diff --git a/py/tools/releasekit/src/releasekit/backends/registry/pypi.py b/py/tools/releasekit/src/releasekit/backends/registry/pypi.py index b3c2e644fd..48d4bef44e 100644 --- a/py/tools/releasekit/src/releasekit/backends/registry/pypi.py +++ b/py/tools/releasekit/src/releasekit/backends/registry/pypi.py @@ -40,10 +40,16 @@ class PyPIBackend: Args: base_url: Base URL for the PyPI JSON API. Defaults to public PyPI. + Use :data:`TEST_BASE_URL` for Test PyPI. pool_size: HTTP connection pool size. timeout: HTTP request timeout in seconds. """ + #: Base URL for the production PyPI registry. + DEFAULT_BASE_URL: str = 'https://pypi.org' + #: Base URL for Test PyPI (staging registry). + TEST_BASE_URL: str = 'https://test.pypi.org' + def __init__( self, *, diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/__init__.py b/py/tools/releasekit/src/releasekit/backends/vcs/__init__.py index 9ae02009e3..d2a7688eb7 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/__init__.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/__init__.py @@ -195,6 +195,21 @@ async def list_tags(self, *, pattern: str = '') -> list[str]: """ ... + async def tag_commit_sha(self, tag_name: str) -> str: + """Return the commit SHA that a tag points to. + + For annotated tags, this dereferences to the underlying commit. + For lightweight tags, this returns the tagged commit directly. + + Args: + tag_name: Tag name to resolve. + + Returns: + The full commit SHA, or an empty string if the tag does + not exist. + """ + ... + async def current_branch(self) -> str: """Return the name of the currently checked-out branch. 
diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/git.py b/py/tools/releasekit/src/releasekit/backends/vcs/git.py index 83bb174a14..e3eb2437e4 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/git.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/git.py @@ -167,10 +167,9 @@ async def commit( ) -> CommandResult: """Create a commit, staging specified paths first.""" if paths: - if not dry_run: - await asyncio.to_thread(self._git, 'add', *paths) - elif not dry_run: - await asyncio.to_thread(self._git, 'add', '-A') + await asyncio.to_thread(self._git, 'add', *paths, dry_run=dry_run) + else: + await asyncio.to_thread(self._git, 'add', '-A', dry_run=dry_run) log.info('commit', message=message[:80]) return await asyncio.to_thread(self._git, 'commit', '-m', message, dry_run=dry_run) @@ -245,6 +244,11 @@ async def push( log.info('push', remote=remote, tags=tags, set_upstream=set_upstream) return await asyncio.to_thread(self._git, *cmd_parts, dry_run=dry_run) + async def tag_commit_sha(self, tag_name: str) -> str: + """Return the commit SHA that a tag points to.""" + result = await asyncio.to_thread(self._git, 'rev-list', '-1', tag_name) + return result.stdout.strip() if result.ok else '' + async def list_tags(self, *, pattern: str = '') -> list[str]: """Return all tags, optionally filtered by a glob pattern.""" cmd_parts = ['tag', '--list', '--sort=version:refname'] diff --git a/py/tools/releasekit/src/releasekit/backends/vcs/mercurial.py b/py/tools/releasekit/src/releasekit/backends/vcs/mercurial.py index de2076ba74..b5f1be2c74 100644 --- a/py/tools/releasekit/src/releasekit/backends/vcs/mercurial.py +++ b/py/tools/releasekit/src/releasekit/backends/vcs/mercurial.py @@ -281,6 +281,18 @@ async def push( log.info('push', remote=hg_remote, tags=tags, set_upstream=set_upstream) return await asyncio.to_thread(self._hg, *cmd_parts, dry_run=dry_run) + async def tag_commit_sha(self, tag_name: str) -> str: + """Return the commit SHA that a tag points to.""" + result = await asyncio.to_thread( + self._hg, + 'log', + '-r', + f'tag({tag_name!r})', + '--template', + '{node}', + ) + return result.stdout.strip() if result.ok else '' + async def list_tags(self, *, pattern: str = '') -> list[str]: """Return all tags, optionally filtered by a glob pattern. 
diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/__init__.py b/py/tools/releasekit/src/releasekit/backends/workspace/__init__.py index b96a23fb6b..4499e247b3 100644 --- a/py/tools/releasekit/src/releasekit/backends/workspace/__init__.py +++ b/py/tools/releasekit/src/releasekit/backends/workspace/__init__.py @@ -22,6 +22,11 @@ - :class:`~releasekit.backends.workspace.uv.UvWorkspace` — ``pyproject.toml`` + ``[tool.uv]`` - :class:`~releasekit.backends.workspace.pnpm.PnpmWorkspace` — ``pnpm-workspace.yaml`` + ``package.json`` +- :class:`~releasekit.backends.workspace.go.GoWorkspace` — ``go.work`` + ``go.mod`` +- :class:`~releasekit.backends.workspace.dart.DartWorkspace` — ``pubspec.yaml`` + ``melos.yaml`` +- :class:`~releasekit.backends.workspace.maven.MavenWorkspace` — ``pom.xml`` / ``settings.gradle`` +- :class:`~releasekit.backends.workspace.cargo.CargoWorkspace` — ``Cargo.toml`` + ``[workspace]`` +- :class:`~releasekit.backends.workspace.bazel.BazelWorkspace` — ``MODULE.bazel`` / ``WORKSPACE`` + ``BUILD`` """ from __future__ import annotations @@ -30,10 +35,20 @@ from typing import Protocol, runtime_checkable from releasekit.backends.workspace._types import Package as Package +from releasekit.backends.workspace.bazel import BazelWorkspace as BazelWorkspace +from releasekit.backends.workspace.cargo import CargoWorkspace as CargoWorkspace +from releasekit.backends.workspace.dart import DartWorkspace as DartWorkspace +from releasekit.backends.workspace.go import GoWorkspace as GoWorkspace +from releasekit.backends.workspace.maven import MavenWorkspace as MavenWorkspace from releasekit.backends.workspace.pnpm import PnpmWorkspace as PnpmWorkspace from releasekit.backends.workspace.uv import UvWorkspace as UvWorkspace __all__ = [ + 'BazelWorkspace', + 'CargoWorkspace', + 'DartWorkspace', + 'GoWorkspace', + 'MavenWorkspace', 'Package', 'PnpmWorkspace', 'UvWorkspace', diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/_io.py b/py/tools/releasekit/src/releasekit/backends/workspace/_io.py new file mode 100644 index 0000000000..f16f24f198 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/workspace/_io.py @@ -0,0 +1,57 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Shared async file I/O helpers for workspace backends. + +All workspace protocol methods are ``async def`` to avoid blocking +the event loop. These helpers wrap ``aiofiles`` with consistent +error handling so that each backend doesn't need to duplicate the +boilerplate. 
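+
+Usage sketch (the path is illustrative)::
+
+    text = await read_file(Path('pyproject.toml'))
+    await write_file(Path('pyproject.toml'), text.replace('0.1.0', '0.2.0'))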
+""" + +from __future__ import annotations + +from pathlib import Path + +import aiofiles + +from releasekit.errors import E, ReleaseKitError + + +async def read_file(path: Path) -> str: + """Read a UTF-8 text file asynchronously via aiofiles.""" + try: + async with aiofiles.open(path, encoding='utf-8') as f: + return await f.read() + except OSError as exc: + raise ReleaseKitError( + code=E.WORKSPACE_PARSE_ERROR, + message=f'Failed to read {path}: {exc}', + hint=f'Check that {path} exists and is readable.', + ) from exc + + +async def write_file(path: Path, content: str) -> None: + """Write a UTF-8 text file asynchronously via aiofiles.""" + try: + async with aiofiles.open(path, mode='w', encoding='utf-8') as f: + await f.write(content) + except OSError as exc: + raise ReleaseKitError( + code=E.WORKSPACE_PARSE_ERROR, + message=f'Failed to write {path}: {exc}', + hint=f'Check file permissions for {path}.', + ) from exc diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/bazel.py b/py/tools/releasekit/src/releasekit/backends/workspace/bazel.py new file mode 100644 index 0000000000..4991db83fa --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/workspace/bazel.py @@ -0,0 +1,400 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Bazel workspace backend for releasekit. + +The :class:`BazelWorkspace` implements the +:class:`~releasekit.backends.workspace.Workspace` protocol by parsing +``MODULE.bazel`` and ``BUILD`` / ``BUILD.bazel`` files. + +Bazel workspace layout (Bzlmod):: + + repo/ + ├── MODULE.bazel ← root module (version, deps) + ├── MODULE.bazel.lock ← lockfile + ├── core/ + │ ├── BUILD.bazel ← package: java_library, java_export, etc. + │ └── src/... + ├── plugins/ + │ ├── google/ + │ │ ├── BUILD.bazel + │ │ └── src/... + │ └── vertex/ + │ ├── BUILD.bazel + │ └── src/... + └── samples/ + └── ... + +Legacy WORKSPACE layout:: + + repo/ + ├── WORKSPACE ← root (external deps) + ├── BUILD.bazel ← root package + ├── core/ + │ └── BUILD.bazel + └── ... + +Version handling: + + Bazel stores versions in: + - ``MODULE.bazel``: ``module(name = "...", version = "x.y.z")`` + - ``version.bzl``: ``VERSION = "x.y.z"`` + - Build rules: ``version = "x.y.z"`` attribute + + The ``rewrite_version`` method handles ``MODULE.bazel`` and + ``version.bzl`` formats. For build rule attributes, the workspace + backend rewrites the version in the BUILD file directly. +""" + +from __future__ import annotations + +import fnmatch +import re +from pathlib import Path + +from releasekit.backends.workspace._io import read_file, write_file +from releasekit.backends.workspace._types import Package +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.workspace.bazel') + +# Regex to parse module() call in MODULE.bazel. 
+_MODULE_NAME_RE = re.compile( + r'module\s*\(\s*name\s*=\s*"([^"]+)"', + re.MULTILINE, +) +_MODULE_VERSION_RE = re.compile( + r'(module\s*\(\s*(?:[^)]*?,\s*)?version\s*=\s*")([^"]+)(")', + re.MULTILINE | re.DOTALL, +) + +# Regex to parse version.bzl: VERSION = "x.y.z" +_VERSION_BZL_RE = re.compile( + r'^(VERSION\s*=\s*["\'])([^"\']+)(["\'])', + re.MULTILINE, +) + +# Regex to parse bazel_dep() calls in MODULE.bazel. +_BAZEL_DEP_RE = re.compile( + r'bazel_dep\s*\(\s*name\s*=\s*"([^"]+)"\s*,\s*version\s*=\s*"([^"]+)"', + re.MULTILINE, +) + +# Regex to find BUILD/BUILD.bazel files with publishable targets. +# Matches: java_export, kt_jvm_export, npm_package, py_wheel, +# dart_pub_publish, oci_push, publish_binary. +_PUBLISH_RULE_RE = re.compile( + r'(?:java_export|kt_jvm_export|npm_package|py_wheel' + r'|dart_pub_publish|oci_push|publish_binary)\s*\(', + re.MULTILINE, +) + +# Regex to extract name = "..." from a BUILD rule. +_BUILD_NAME_RE = re.compile(r'name\s*=\s*"([^"]+)"') + +# Regex to extract version = "..." from a BUILD rule. +_BUILD_VERSION_RE = re.compile( + r'(version\s*=\s*")([^"]+)(")', +) + + +def _find_build_files(root: Path) -> list[Path]: + """Find all BUILD and BUILD.bazel files under root (non-recursive in each dir).""" + results: list[Path] = [] + for build_file in sorted(root.rglob('BUILD.bazel')): + results.append(build_file) + for build_file in sorted(root.rglob('BUILD')): + # Skip if BUILD.bazel already found in same directory. + if (build_file.parent / 'BUILD.bazel') not in results: + results.append(build_file) + return sorted(results) + + +def _parse_module_bazel(module_path: Path) -> dict[str, str]: + """Parse name and version from MODULE.bazel.""" + if not module_path.is_file(): + return {} + text = module_path.read_text(encoding='utf-8') + name_match = _MODULE_NAME_RE.search(text) + version_match = _MODULE_VERSION_RE.search(text) + return { + 'name': name_match.group(1) if name_match else '', + 'version': version_match.group(2) if version_match else '0.0.0', + } + + +def _parse_build_targets(build_path: Path) -> list[dict[str, str]]: + """Parse publishable targets from a BUILD file. + + Returns a list of dicts with 'name' and 'version' keys. + """ + if not build_path.is_file(): + return [] + text = build_path.read_text(encoding='utf-8') + + targets: list[dict[str, str]] = [] + for match in _PUBLISH_RULE_RE.finditer(text): + # Find the name = "..." within the rule block. + # Look ahead from the match position to find the closing paren. + start = match.start() + depth = 0 + block_end = len(text) + for i in range(start, len(text)): + if text[i] == '(': + depth += 1 + elif text[i] == ')': + depth -= 1 + if depth == 0: + block_end = i + break + block = text[start:block_end] + + name_match = _BUILD_NAME_RE.search(block) + version_match = _BUILD_VERSION_RE.search(block) + if name_match: + targets.append({ + 'name': name_match.group(1), + 'version': version_match.group(2) if version_match else '0.0.0', + }) + return targets + + +class BazelWorkspace: + """Bazel :class:`~releasekit.backends.workspace.Workspace` implementation. + + Supports both Bzlmod (``MODULE.bazel``) and legacy (``WORKSPACE``) + layouts. Discovers publishable packages by scanning BUILD files for + publish-capable rules. + + Args: + workspace_root: Path to the Bazel workspace root. 
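+
+    Example (a minimal discovery sketch; the path and pattern are
+    illustrative)::
+
+        ws = BazelWorkspace(Path('/path/to/repo'))
+        packages = await ws.discover(exclude_patterns=['*sample*'])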
+ """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the Bazel workspace root.""" + self._root = workspace_root.resolve() + + def _is_bzlmod(self) -> bool: + """Check if the workspace uses Bzlmod (MODULE.bazel).""" + return (self._root / 'MODULE.bazel').is_file() + + async def discover( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover all publishable packages in the Bazel workspace. + + Scans BUILD/BUILD.bazel files for publish-capable rules + (java_export, npm_package, py_wheel, oci_push, etc.). + + Args: + exclude_patterns: Glob patterns to exclude packages by name. + + Returns: + Sorted list of discovered packages. + """ + exclude = exclude_patterns or [] + build_files = _find_build_files(self._root) + + # Get root module metadata for fallback version. + root_meta = _parse_module_bazel(self._root / 'MODULE.bazel') + root_version = root_meta.get('version', '0.0.0') + + packages: list[Package] = [] + seen_names: set[str] = set() + + for build_path in build_files: + targets = _parse_build_targets(build_path) + if not targets: + continue + + pkg_dir = build_path.parent + for target in targets: + name = target['name'] + version = target['version'] if target['version'] != '0.0.0' else root_version + + if name in seen_names: + continue + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', target=name) + continue + + seen_names.add(name) + packages.append( + Package( + name=name, + version=version, + path=pkg_dir, + manifest_path=build_path, + internal_deps=[], + external_deps=[], + all_deps=[], + is_publishable=True, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered_bazel', + count=len(packages), + targets=[p.name for p in packages], + ) + return packages + + async def rewrite_version( + self, + manifest_path: Path, + new_version: str, + ) -> str: + """Rewrite the version in a Bazel manifest file. + + Handles three formats: + 1. ``MODULE.bazel``: ``module(name = "...", version = "x.y.z")`` + 2. ``version.bzl``: ``VERSION = "x.y.z"`` + 3. ``BUILD`` / ``BUILD.bazel``: ``version = "x.y.z"`` attribute + + Args: + manifest_path: Path to the manifest file. + new_version: New version string. + + Returns: + The old version string. + """ + text = await read_file(manifest_path) + + if manifest_path.name == 'MODULE.bazel': + return await self._rewrite_module_bazel(manifest_path, text, new_version) + if manifest_path.name == 'version.bzl': + return await self._rewrite_version_bzl(manifest_path, text, new_version) + # BUILD or BUILD.bazel — rewrite version attribute. + return await self._rewrite_build_version(manifest_path, text, new_version) + + async def rewrite_dependency_version( + self, + manifest_path: Path, + dep_name: str, + new_version: str, + ) -> None: + """Rewrite a dependency's version in a Bazel manifest. + + For ``MODULE.bazel``: rewrites ``bazel_dep(name = "dep", version = "old")``. + For BUILD files: rewrites ``"group:artifact:old"`` patterns. + + Args: + manifest_path: Path to the manifest file. + dep_name: Dependency name to update. + new_version: New version string. + """ + text = await read_file(manifest_path) + + if manifest_path.name == 'MODULE.bazel': + pattern = re.compile( + rf'(bazel_dep\s*\(\s*name\s*=\s*"{re.escape(dep_name)}"\s*,' + rf'\s*version\s*=\s*")([^"]+)(")', + re.MULTILINE, + ) + else: + # BUILD file: match "group:artifact:version" patterns. 
+            pattern = re.compile(
+                rf'("{re.escape(dep_name)}:)([\d.]+[-\w.]*)(")',
+            )
+
+        new_text = pattern.sub(rf'\g<1>{new_version}\g<3>', text)
+
+        if new_text != text:
+            await write_file(manifest_path, new_text)
+            log.info(
+                'dependency_rewritten',
+                manifest=str(manifest_path),
+                dep=dep_name,
+                version=new_version,
+            )
+        else:
+            log.debug(
+                'dependency_not_found',
+                manifest=str(manifest_path),
+                dep=dep_name,
+            )
+
+    # ------------------------------------------------------------------
+    # Private rewrite helpers
+    # ------------------------------------------------------------------
+
+    @staticmethod
+    async def _rewrite_module_bazel(
+        path: Path,
+        text: str,
+        new_version: str,
+    ) -> str:
+        """Rewrite version in MODULE.bazel."""
+        m = _MODULE_VERSION_RE.search(text)
+        old_version = m.group(2) if m else '0.0.0'
+        new_text = _MODULE_VERSION_RE.sub(rf'\g<1>{new_version}\g<3>', text, count=1)
+        if new_text != text:
+            await write_file(path, new_text)
+            log.info(
+                'version_rewritten',
+                manifest=str(path),
+                old=old_version,
+                new=new_version,
+            )
+        return old_version
+
+    @staticmethod
+    async def _rewrite_version_bzl(
+        path: Path,
+        text: str,
+        new_version: str,
+    ) -> str:
+        """Rewrite VERSION in version.bzl."""
+        m = _VERSION_BZL_RE.search(text)
+        old_version = m.group(2) if m else '0.0.0'
+        new_text = _VERSION_BZL_RE.sub(rf'\g<1>{new_version}\g<3>', text, count=1)
+        if new_text != text:
+            await write_file(path, new_text)
+            log.info(
+                'version_rewritten',
+                manifest=str(path),
+                old=old_version,
+                new=new_version,
+            )
+        return old_version
+
+    @staticmethod
+    async def _rewrite_build_version(
+        path: Path,
+        text: str,
+        new_version: str,
+    ) -> str:
+        """Rewrite version attribute in BUILD/BUILD.bazel."""
+        m = _BUILD_VERSION_RE.search(text)
+        old_version = m.group(2) if m else '0.0.0'
+        new_text = _BUILD_VERSION_RE.sub(rf'\g<1>{new_version}\g<3>', text, count=1)
+        if new_text != text:
+            await write_file(path, new_text)
+            log.info(
+                'version_rewritten',
+                manifest=str(path),
+                old=old_version,
+                new=new_version,
+            )
+        return old_version
+
+
+__all__ = [
+    'BazelWorkspace',
+]
diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/cargo.py b/py/tools/releasekit/src/releasekit/backends/workspace/cargo.py
new file mode 100644
index 0000000000..4bd7f73b22
--- /dev/null
+++ b/py/tools/releasekit/src/releasekit/backends/workspace/cargo.py
@@ -0,0 +1,389 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""Rust/Cargo workspace backend for releasekit.
+
+The :class:`CargoWorkspace` implements the
+:class:`~releasekit.backends.workspace.Workspace` protocol by parsing
+``Cargo.toml`` workspace and member manifests.
+ +Cargo workspace layout (directory name is arbitrary):: + + rust/ + ├── Cargo.toml ← workspace root ([workspace] with members) + ├── Cargo.lock ← shared lockfile + ├── core/ + │ └── Cargo.toml ← crate: my-core + ├── utils/ + │ └── Cargo.toml ← crate: my-utils (depends on my-core) + └── cli/ + └── Cargo.toml ← crate: my-cli (depends on my-core, my-utils) + +Version handling: + + Rust crates store their version in ``Cargo.toml`` under + ``[package].version``. Workspace-level version inheritance is + supported via ``version.workspace = true`` in member crates, + with the actual version in the root ``[workspace.package].version``. + + Dependencies between workspace members use ``{ workspace = true }`` + or explicit path/version references. +""" + +from __future__ import annotations + +import fnmatch +import re +from pathlib import Path + +import tomlkit +import tomlkit.exceptions + +from releasekit.backends.workspace._io import read_file, write_file +from releasekit.backends.workspace._types import Package +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.workspace.cargo') + +# Regex to extract workspace members from root Cargo.toml. +# Matches: members = ["core", "utils", "cli/*"] +_MEMBERS_RE = re.compile(r'members\s*=\s*\[([^\]]*)\]', re.DOTALL) + +# Regex to extract a quoted string value. +_QUOTED_RE = re.compile(r'"([^"]+)"') + +# Regex to extract package name from Cargo.toml. +_NAME_RE = re.compile(r'^\s*name\s*=\s*"([^"]+)"', re.MULTILINE) + +# Regex to extract package version from Cargo.toml. +_VERSION_RE = re.compile(r'^\s*version\s*=\s*"([^"]+)"', re.MULTILINE) + +# Regex to detect workspace version inheritance. +_VERSION_WORKSPACE_RE = re.compile( + r'^\s*version\.workspace\s*=\s*true', + re.MULTILINE, +) + + +class CargoWorkspace: + """Rust :class:`~releasekit.backends.workspace.Workspace` implementation. + + Parses the root ``Cargo.toml`` to discover workspace members and + each member's ``Cargo.toml`` for metadata and dependencies. + + Args: + workspace_root: Path to the directory containing the workspace + ``Cargo.toml`` with ``[workspace]``. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the Cargo workspace root.""" + self._root = workspace_root.resolve() + + async def discover( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover all crates in the Cargo workspace. + + Args: + exclude_patterns: Glob patterns to exclude crates by name. + + Returns: + Sorted list of discovered Rust crate packages. + """ + root_toml = self._root / 'Cargo.toml' + if not root_toml.is_file(): + log.warning('cargo_toml_not_found', root=str(self._root)) + return [] + + root_text = root_toml.read_text(encoding='utf-8') + if '[workspace]' not in root_text: + log.warning('not_a_workspace', root=str(self._root)) + return [] + + # Parse workspace-level version (for inheritance). + ws_version = _parse_workspace_version(root_text) + + # Extract member globs. + member_globs = _parse_member_globs(root_text) + if not member_globs: + log.warning('no_members', root=str(self._root)) + return [] + + # Expand globs to actual crate directories. + crate_dirs = _expand_member_globs(self._root, member_globs) + + # First pass: collect all crate names for internal dep classification. 
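+        # Maps crate name -> crate directory, e.g. {'my-core': root / 'core'} (illustrative).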
+ all_crate_names: dict[str, Path] = {} + for crate_dir in crate_dirs: + cargo_toml = crate_dir / 'Cargo.toml' + if cargo_toml.is_file(): + text = cargo_toml.read_text(encoding='utf-8') + m = _NAME_RE.search(text) + if m: + all_crate_names[m.group(1)] = crate_dir + + packages: list[Package] = [] + exclude = exclude_patterns or [] + + for name, crate_dir in sorted(all_crate_names.items()): + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', crate=name, pattern=exclude) + continue + + cargo_toml = crate_dir / 'Cargo.toml' + text = cargo_toml.read_text(encoding='utf-8') + + # Parse version (may inherit from workspace). + version = _parse_crate_version(text, ws_version) + + # Parse dependencies. + deps = _parse_dependencies(text) + internal_deps = [d for d in deps if d in all_crate_names] + external_deps = [d for d in deps if d not in all_crate_names] + + # Check if crate is publishable (publish != false). + is_publishable = 'publish = false' not in text + + packages.append( + Package( + name=name, + version=version, + path=crate_dir, + manifest_path=cargo_toml, + internal_deps=internal_deps, + external_deps=external_deps, + all_deps=deps, + is_publishable=is_publishable, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered', + count=len(packages), + crates=[p.name for p in packages], + ) + return packages + + async def rewrite_version( + self, + manifest_path: Path, + new_version: str, + ) -> str: + """Rewrite the ``version`` field in ``Cargo.toml``. + + Handles both direct version strings and workspace-inherited + versions (rewrites the workspace root in the latter case). + + Returns: + The old version string. + """ + text = await read_file(manifest_path) + + # Check for workspace version inheritance. + if _VERSION_WORKSPACE_RE.search(text): + # Rewrite the workspace root Cargo.toml instead. + root_toml = self._root / 'Cargo.toml' + return await self._rewrite_workspace_version(root_toml, new_version) + + m = _VERSION_RE.search(text) + old_version = m.group(1) if m else '0.0.0' + + new_text = _VERSION_RE.sub( + f'version = "{new_version}"', + text, + count=1, + ) + + if new_text != text: + await write_file(manifest_path, new_text) + log.info( + 'version_rewritten', + manifest=str(manifest_path), + old=old_version, + new=new_version, + ) + + return old_version + + async def _rewrite_workspace_version( + self, + root_toml: Path, + new_version: str, + ) -> str: + """Rewrite ``[workspace.package].version`` in the root Cargo.toml.""" + text = await read_file(root_toml) + + # Match version under [workspace.package]. + ws_ver_re = re.compile( + r'(^\s*version\s*=\s*)"([^"]+)"', + re.MULTILINE, + ) + m = ws_ver_re.search(text) + old_version = m.group(2) if m else '0.0.0' + + new_text = ws_ver_re.sub(rf'\g<1>"{new_version}"', text, count=1) + + if new_text != text: + await write_file(root_toml, new_text) + log.info( + 'workspace_version_rewritten', + manifest=str(root_toml), + old=old_version, + new=new_version, + ) + + return old_version + + async def rewrite_dependency_version( + self, + manifest_path: Path, + dep_name: str, + new_version: str, + ) -> None: + """Rewrite a dependency version in ``Cargo.toml``. 
+ + Handles both simple string versions and inline table versions:: + + foo = "1.0.0" → foo = "2.0.0" + foo = { version = "1.0" → foo = { version = "2.0" + """ + text = await read_file(manifest_path) + + # Pattern 1: simple string — foo = "1.0.0" + simple_re = re.compile( + rf'(^\s*{re.escape(dep_name)}\s*=\s*)"[^"]*"', + re.MULTILINE, + ) + # Pattern 2: inline table — foo = { version = "1.0.0" + table_re = re.compile( + rf'(^\s*{re.escape(dep_name)}\s*=\s*\{{[^}}]*version\s*=\s*)"[^"]*"', + re.MULTILINE, + ) + + new_text = simple_re.sub(rf'\g<1>"{new_version}"', text) + if new_text == text: + new_text = table_re.sub(rf'\g<1>"{new_version}"', text) + + if new_text != text: + await write_file(manifest_path, new_text) + log.info( + 'dependency_rewritten', + manifest=str(manifest_path), + dep=dep_name, + version=new_version, + ) + else: + log.debug( + 'dependency_not_found', + manifest=str(manifest_path), + dep=dep_name, + ) + + +def _parse_member_globs(root_text: str) -> list[str]: + """Extract workspace member globs from root Cargo.toml.""" + m = _MEMBERS_RE.search(root_text) + if not m: + return [] + return _QUOTED_RE.findall(m.group(1)) + + +def _expand_member_globs(root: Path, globs: list[str]) -> list[Path]: + """Expand member globs to actual directories.""" + dirs: list[Path] = [] + for pattern in globs: + if '*' in pattern or '?' in pattern: + for match in sorted(root.glob(pattern)): + if match.is_dir() and (match / 'Cargo.toml').is_file(): + dirs.append(match) + else: + candidate = root / pattern + if candidate.is_dir() and (candidate / 'Cargo.toml').is_file(): + dirs.append(candidate) + return dirs + + +def _parse_workspace_version(root_text: str) -> str: + """Extract ``[workspace.package].version`` from root Cargo.toml.""" + # Look for version after [workspace.package]. + in_ws_pkg = False + for line in root_text.splitlines(): + stripped = line.strip() + if stripped == '[workspace.package]': + in_ws_pkg = True + continue + if stripped.startswith('[') and in_ws_pkg: + break + if in_ws_pkg: + m = _VERSION_RE.match(line) + if m: + return m.group(1) + return '0.0.0' + + +def _parse_crate_version(text: str, ws_version: str) -> str: + """Extract the version from a crate's Cargo.toml. + + If the crate uses ``version.workspace = true``, returns the + workspace-level version. + """ + if _VERSION_WORKSPACE_RE.search(text): + return ws_version + m = _VERSION_RE.search(text) + return m.group(1) if m else '0.0.0' + + +def _parse_dependencies(text: str) -> list[str]: + """Extract dependency names from ``[dependencies]`` sections. + + Uses ``tomlkit`` to properly parse the TOML structure and inspect + only dependency tables (``[dependencies]``, ``[dev-dependencies]``, + ``[build-dependencies]``, and ``[target.*.dependencies]``), avoiding + false positives from unrelated sections like ``[package]``. + """ + try: + doc = tomlkit.parse(text) + except tomlkit.exceptions.ParseError: + return [] + + deps: set[str] = set() + dep_table_keys = ('dependencies', 'dev-dependencies', 'build-dependencies') + + for key in dep_table_keys: + table = doc.get(key) + if isinstance(table, dict): + deps.update(table.keys()) + + # Also check [target.*.dependencies] tables. 
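+    # For example (illustrative):
+    #
+    #   [target.'cfg(windows)'.dependencies]
+    #   winapi = "0.3"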
+ target = doc.get('target') + if isinstance(target, dict): + for _target_name, target_table in target.items(): + if isinstance(target_table, dict): + for key in dep_table_keys: + sub = target_table.get(key) + if isinstance(sub, dict): + deps.update(sub.keys()) + + return sorted(deps) + + +__all__ = [ + 'CargoWorkspace', +] diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/dart.py b/py/tools/releasekit/src/releasekit/backends/workspace/dart.py new file mode 100644 index 0000000000..1a981f5634 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/workspace/dart.py @@ -0,0 +1,289 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Dart workspace backend for releasekit. + +The :class:`DartWorkspace` implements the +:class:`~releasekit.backends.workspace.Workspace` protocol by parsing +``pubspec.yaml`` files and Dart/Melos workspace configurations. + +Dart workspace layout (directory name is arbitrary; Melos-style):: + + dart/ + ├── melos.yaml ← workspace root (lists package globs) + ├── pubspec.yaml ← root pubspec (optional) + ├── packages/ + │ ├── genkit/ + │ │ └── pubspec.yaml ← package: genkit + │ ├── genkit_google/ + │ │ └── pubspec.yaml ← package: genkit_google + │ └── genkit_vertex/ + │ └── pubspec.yaml ← package: genkit_vertex + └── examples/ + └── ... + +Alternatively, a single-package Dart project has just ``pubspec.yaml`` +at the root with no ``melos.yaml``. + +Version handling: + + Dart packages store their version in ``pubspec.yaml`` under the + ``version:`` key. The ``rewrite_version`` method updates this field. + Dependencies are listed under ``dependencies:`` and + ``dev_dependencies:`` with version constraints. +""" + +from __future__ import annotations + +import fnmatch +import re +from pathlib import Path + +from releasekit.backends.workspace._io import read_file, write_file +from releasekit.backends.workspace._types import Package +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.workspace.dart') + +# Regex to parse the package name from pubspec.yaml. +_NAME_RE = re.compile(r'^name:\s*(\S+)', re.MULTILINE) + +# Regex to parse the version from pubspec.yaml. +_VERSION_RE = re.compile(r'^version:\s*(\S+)', re.MULTILINE) + +# Regex to detect publish_to: none (private package). +_PUBLISH_TO_NONE_RE = re.compile(r'^publish_to:\s*["\']?none["\']?', re.MULTILINE) + +# Regex to parse dependency names from pubspec.yaml. +_DEP_RE = re.compile(r'^\s{2}(\w[\w_-]*):', re.MULTILINE) + + +def _parse_melos_packages(melos_path: Path) -> list[str]: + """Parse package glob patterns from ``melos.yaml``. + + Returns a list of glob patterns like ``['packages/*', 'plugins/*']``. + """ + if not melos_path.is_file(): + return [] + text = melos_path.read_text(encoding='utf-8') + # Simple YAML parsing for the packages: list. 
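+    # Expected shape (illustrative):
+    #
+    #   packages:
+    #     - packages/*
+    #     - plugins/*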
+ in_packages = False + patterns: list[str] = [] + for line in text.splitlines(): + stripped = line.strip() + if stripped == 'packages:': + in_packages = True + continue + if in_packages: + if stripped.startswith('- '): + patterns.append(stripped[2:].strip()) + elif stripped and not stripped.startswith('#'): + break + return patterns + + +class DartWorkspace: + """Dart :class:`~releasekit.backends.workspace.Workspace` implementation. + + Discovers packages via ``melos.yaml`` glob patterns or by scanning + for ``pubspec.yaml`` files in immediate subdirectories. + + Args: + workspace_root: Path to the Dart workspace root. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the Dart workspace root.""" + self._root = workspace_root.resolve() + + async def discover( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover all Dart packages in the workspace. + + If ``melos.yaml`` exists, uses its ``packages:`` globs. + Otherwise, scans for ``pubspec.yaml`` in immediate subdirectories. + + Args: + exclude_patterns: Glob patterns to exclude packages by name. + + Returns: + Sorted list of discovered Dart packages. + """ + melos_path = self._root / 'melos.yaml' + package_dirs: list[Path] = [] + + if melos_path.is_file(): + patterns = _parse_melos_packages(melos_path) + for pattern in patterns: + for match in sorted(self._root.glob(pattern)): + if match.is_dir() and (match / 'pubspec.yaml').is_file(): + package_dirs.append(match) + else: + # Fallback: scan for pubspec.yaml in subdirectories. + for child in sorted(self._root.iterdir()): + if child.is_dir() and (child / 'pubspec.yaml').is_file(): + package_dirs.append(child) + + # Also check the root itself if it has a pubspec.yaml. + if (self._root / 'pubspec.yaml').is_file() and self._root not in package_dirs: + package_dirs.insert(0, self._root) + + # First pass: collect all package names for internal dep classification. + all_names: set[str] = set() + for pkg_dir in package_dirs: + pubspec = pkg_dir / 'pubspec.yaml' + text = pubspec.read_text(encoding='utf-8') + m = _NAME_RE.search(text) + if m: + all_names.add(m.group(1)) + + exclude = exclude_patterns or [] + packages: list[Package] = [] + + for pkg_dir in package_dirs: + pubspec = pkg_dir / 'pubspec.yaml' + text = pubspec.read_text(encoding='utf-8') + + name_match = _NAME_RE.search(text) + if not name_match: + log.debug('no_name', path=str(pubspec)) + continue + name = name_match.group(1) + + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', package=name) + continue + + version_match = _VERSION_RE.search(text) + version = version_match.group(1) if version_match else '0.0.0' + + is_private = bool(_PUBLISH_TO_NONE_RE.search(text)) + + # Parse dependencies. 
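+            # Each extracted section looks like (illustrative; names are examples):
+            #
+            #   dependencies:
+            #     genkit: ^0.5.0
+            #     http: ^1.2.0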
+ deps_section = self._extract_deps_section(text, 'dependencies') + dev_deps_section = self._extract_deps_section(text, 'dev_dependencies') + all_dep_names = _DEP_RE.findall(deps_section + '\n' + dev_deps_section) + + internal_deps = [d for d in all_dep_names if d in all_names and d != name] + external_deps = [d for d in all_dep_names if d not in all_names] + + packages.append( + Package( + name=name, + version=version, + path=pkg_dir, + manifest_path=pubspec, + internal_deps=internal_deps, + external_deps=external_deps, + all_deps=all_dep_names, + is_publishable=not is_private, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered', + count=len(packages), + packages=[p.name for p in packages], + ) + return packages + + @staticmethod + def _extract_deps_section(text: str, section_name: str) -> str: + """Extract a YAML section (dependencies/dev_dependencies) from pubspec.""" + lines = text.splitlines() + in_section = False + section_lines: list[str] = [] + for line in lines: + if line.rstrip() == f'{section_name}:': + in_section = True + continue + if in_section: + if line and not line[0].isspace() and not line.startswith('#'): + break + section_lines.append(line) + return '\n'.join(section_lines) + + async def rewrite_version( + self, + manifest_path: Path, + new_version: str, + ) -> str: + """Rewrite the ``version:`` field in ``pubspec.yaml``. + + Args: + manifest_path: Path to ``pubspec.yaml``. + new_version: New version string. + + Returns: + The old version string. + """ + text = await read_file(manifest_path) + m = _VERSION_RE.search(text) + old_version = m.group(1) if m else '0.0.0' + + new_text = _VERSION_RE.sub(f'version: {new_version}', text, count=1) + if new_text != text: + await write_file(manifest_path, new_text) + log.info( + 'version_rewritten', + manifest=str(manifest_path), + old=old_version, + new=new_version, + ) + return old_version + + async def rewrite_dependency_version( + self, + manifest_path: Path, + dep_name: str, + new_version: str, + ) -> None: + """Rewrite a dependency version in ``pubspec.yaml``. + + Updates simple version constraints (e.g. ``^1.0.0``) to use + the new version with a caret constraint. + """ + text = await read_file(manifest_path) + + # Match: dep_name: ^X.Y.Z or dep_name: "^X.Y.Z" or dep_name: X.Y.Z + pattern = re.compile( + rf'(\s+{re.escape(dep_name)}:\s*)["\']?\^?[\d.]+[-\w.]*["\']?', + ) + new_text = pattern.sub(rf'\g<1>^{new_version}', text) + + if new_text != text: + await write_file(manifest_path, new_text) + log.info( + 'dependency_rewritten', + manifest=str(manifest_path), + dep=dep_name, + version=new_version, + ) + else: + log.debug( + 'dependency_not_found', + manifest=str(manifest_path), + dep=dep_name, + ) + + +__all__ = [ + 'DartWorkspace', +] diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/go.py b/py/tools/releasekit/src/releasekit/backends/workspace/go.py new file mode 100644 index 0000000000..32f62b8150 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/workspace/go.py @@ -0,0 +1,222 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Go workspace backend for releasekit. + +The :class:`GoWorkspace` implements the +:class:`~releasekit.backends.workspace.Workspace` protocol by parsing +``go.work`` and ``go.mod`` files. + +Go workspace layout (directory name is arbitrary):: + + go/ + ├── go.work ← workspace root (lists module dirs) + ├── genkit/ + │ └── go.mod ← module: github.com/firebase/genkit/go/genkit + ├── plugins/ + │ ├── googleai/ + │ │ └── go.mod ← module: github.com/firebase/genkit/go/plugins/googleai + │ └── vertexai/ + │ └── go.mod ← module: github.com/firebase/genkit/go/plugins/vertexai + └── samples/ + └── ... + +Version handling: + + Go modules don't store versions in ``go.mod``. The version is derived + from the VCS tag (e.g. ``go/genkit/v0.5.0``). The ``rewrite_version`` + method is a no-op — version bumps happen via VCS tags. + + Dependencies between workspace modules are declared in ``go.mod`` + with ``require`` directives and resolved via ``go.work``'s ``use`` + directives during development. +""" + +from __future__ import annotations + +import fnmatch +import re +from pathlib import Path + +from releasekit.backends.workspace._io import read_file, write_file +from releasekit.backends.workspace._types import Package +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.workspace.go') + +# Regex to parse ``use`` directives from go.work. +# Matches both single-line ``use ./foo`` and block ``use ( ./foo \n ./bar )``. +_USE_RE = re.compile(r'^\s*(?:use\s+)?(\./\S+)', re.MULTILINE) + +# Regex to parse the module path from go.mod. +_MODULE_RE = re.compile(r'^module\s+(\S+)', re.MULTILINE) + +# Regex to parse Go version from go.mod (informational only). +_GO_VERSION_RE = re.compile(r'^go\s+(\S+)', re.MULTILINE) + +# Regex to parse require directives from go.mod. +_REQUIRE_RE = re.compile(r'^\s*(\S+)\s+v[\d.]+', re.MULTILINE) + + +class GoWorkspace: + """Go :class:`~releasekit.backends.workspace.Workspace` implementation. + + Parses ``go.work`` to discover workspace modules and ``go.mod`` for + module metadata and dependencies. + + Args: + workspace_root: Path to the directory containing ``go.work``. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the Go workspace root.""" + self._root = workspace_root.resolve() + + async def discover( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover all Go modules listed in ``go.work``. + + Args: + exclude_patterns: Glob patterns to exclude modules by name. + + Returns: + Sorted list of discovered Go module packages. + """ + go_work = self._root / 'go.work' + if not go_work.is_file(): + log.warning('go_work_not_found', root=str(self._root)) + return [] + + text = go_work.read_text(encoding='utf-8') + use_dirs = _USE_RE.findall(text) + + # Collect all module paths first for internal dep classification. + all_module_paths: dict[str, Path] = {} + for use_dir in use_dirs: + mod_dir = (self._root / use_dir).resolve() + go_mod = mod_dir / 'go.mod' + if go_mod.is_file(): + mod_text = go_mod.read_text(encoding='utf-8') + m = _MODULE_RE.search(mod_text) + if m: + all_module_paths[m.group(1)] = mod_dir + + packages: list[Package] = [] + exclude = exclude_patterns or [] + + for module_path, mod_dir in sorted(all_module_paths.items()): + # Use the last path component as the short name. 
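+            # e.g. 'github.com/firebase/genkit/go/plugins/googleai' -> 'googleai'.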
+ name = module_path.rsplit('/', 1)[-1] + + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', module=module_path, pattern=exclude) + continue + + go_mod = mod_dir / 'go.mod' + mod_text = go_mod.read_text(encoding='utf-8') + + # Parse Go version (informational — not a package version). + go_ver_match = _GO_VERSION_RE.search(mod_text) + go_version = go_ver_match.group(1) if go_ver_match else '0.0.0' + + # Parse require directives. + requires = _REQUIRE_RE.findall(mod_text) + internal_deps = [r for r in requires if r in all_module_paths] + external_deps = [r for r in requires if r not in all_module_paths] + + # Go modules are always publishable (no private marker). + packages.append( + Package( + name=name, + version=go_version, + path=mod_dir, + manifest_path=go_mod, + internal_deps=[all_module_paths[dep].name for dep in internal_deps], + external_deps=external_deps, + all_deps=requires, + is_publishable=True, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered', + count=len(packages), + modules=[p.name for p in packages], + ) + return packages + + async def rewrite_version( + self, + manifest_path: Path, + new_version: str, + ) -> str: + """No-op: Go module versions are set by VCS tags, not go.mod. + + Returns the Go toolchain version from go.mod as a placeholder. + """ + text = await read_file(manifest_path) + m = _GO_VERSION_RE.search(text) + old_version = m.group(1) if m else '0.0.0' + log.info( + 'rewrite_version_noop', + manifest=str(manifest_path), + reason='Go versions are set by VCS tags.', + ) + return old_version + + async def rewrite_dependency_version( + self, + manifest_path: Path, + dep_name: str, + new_version: str, + ) -> None: + """Rewrite a dependency version in ``go.mod``. + + Updates the ``require`` directive for ``dep_name`` to use + ``new_version``. This is used for pinning workspace dependencies + before a release build. + """ + text = await read_file(manifest_path) + + # Match: dep_name vX.Y.Z + pattern = re.compile( + rf'(\s+{re.escape(dep_name)}\s+)v[\d.]+[-\w.]*', + ) + new_text = pattern.sub(rf'\g<1>v{new_version}', text) + + if new_text != text: + await write_file(manifest_path, new_text) + log.info( + 'dependency_rewritten', + manifest=str(manifest_path), + dep=dep_name, + version=new_version, + ) + else: + log.debug( + 'dependency_not_found', + manifest=str(manifest_path), + dep=dep_name, + ) + + +__all__ = [ + 'GoWorkspace', +] diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/maven.py b/py/tools/releasekit/src/releasekit/backends/workspace/maven.py new file mode 100644 index 0000000000..09de50e3ca --- /dev/null +++ b/py/tools/releasekit/src/releasekit/backends/workspace/maven.py @@ -0,0 +1,522 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Maven/Gradle workspace backend for releasekit. 
+ +The :class:`MavenWorkspace` implements the +:class:`~releasekit.backends.workspace.Workspace` protocol by parsing +``pom.xml`` (Maven) or ``settings.gradle`` / ``build.gradle`` (Gradle) +files. + +Maven multi-module layout (directory name is arbitrary):: + + java/ + ├── pom.xml ← parent POM (lists ) + ├── core/ + │ └── pom.xml ← module: com.example:core + ├── plugins/ + │ ├── google/ + │ │ └── pom.xml ← module: com.example:plugin-google + │ └── vertex/ + │ └── pom.xml ← module: com.example:plugin-vertex + └── samples/ + └── ... + +Gradle multi-project layout (directory name is arbitrary):: + + java/ + ├── settings.gradle ← root (lists include ':core', ':plugins:google') + ├── build.gradle ← root build file + ├── core/ + │ └── build.gradle ← subproject + └── plugins/ + ├── google/ + │ └── build.gradle + └── vertex/ + └── build.gradle + +Version handling: + + Maven stores versions in ``pom.xml`` under ````. + Gradle stores versions in ``build.gradle`` or ``gradle.properties``. + The ``rewrite_version`` method handles both formats. +""" + +from __future__ import annotations + +import fnmatch +import re +import xml.etree.ElementTree as ET # noqa: N817, S405 +from pathlib import Path + +from releasekit.backends.workspace._io import read_file, write_file +from releasekit.backends.workspace._types import Package +from releasekit.logging import get_logger + +log = get_logger('releasekit.backends.workspace.maven') + +# Maven POM namespace. +_POM_NS = '{http://maven.apache.org/POM/4.0.0}' + +# Regex to parse Gradle version from build.gradle or gradle.properties. +_GRADLE_VERSION_RE = re.compile(r"""^version\s*=\s*['"]?([^'"]+)['"]?""", re.MULTILINE) + +# Regex to parse Gradle group from build.gradle. +_GRADLE_GROUP_RE = re.compile(r"""^group\s*=\s*['"]?([^'"]+)['"]?""", re.MULTILINE) + +# Regex to parse include directives from settings.gradle / settings.gradle.kts. +# Handles both Groovy: include ':core' and Kotlin DSL: include(":core") +_SETTINGS_INCLUDE_RE = re.compile(r"""include\s*\(?['"]:([\w:/-]+)['"]""") + +# Regex to parse Gradle dependency declarations. +# Matches: implementation 'group:artifact:version' +# api "group:artifact:version" +# compileOnly 'group:artifact:version' +_GRADLE_DEP_RE = re.compile( + r"""(?:implementation|api|compileOnly|runtimeOnly|testImplementation)\s*""" + r"""['"]([^'"]+:[^'"]+)(?::[\d.]+[-\w.]*)['"]""", +) + + +def _parse_pom_modules(pom_path: Path) -> list[str]: + """Parse ```` from a Maven parent POM.""" + try: + tree = ET.parse(pom_path) # noqa: S314 + except ET.ParseError: + return [] + root = tree.getroot() + modules_elem = root.find(f'{_POM_NS}modules') + if modules_elem is None: + # Try without namespace. 
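+        # (POMs that omit the xmlns declaration expose tags without a namespace prefix.)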
+ modules_elem = root.find('modules') + if modules_elem is None: + return [] + return [m.text for m in modules_elem.findall(f'{_POM_NS}module') if m.text] + [ + m.text for m in modules_elem.findall('module') if m.text + ] + + +def _parse_pom_metadata(pom_path: Path) -> dict[str, str]: + """Parse groupId, artifactId, and version from a POM file.""" + try: + tree = ET.parse(pom_path) # noqa: S314 + except ET.ParseError: + return {} + root = tree.getroot() + + def _find(tag: str) -> str: + elem = root.find(f'{_POM_NS}{tag}') + if elem is None: + elem = root.find(tag) + return elem.text if elem is not None and elem.text else '' + + return { + 'groupId': _find('groupId'), + 'artifactId': _find('artifactId'), + 'version': _find('version'), + } + + +def _parse_pom_dependencies(pom_path: Path) -> list[str]: + """Parse dependency artifactIds from a POM file.""" + try: + tree = ET.parse(pom_path) # noqa: S314 + except ET.ParseError: + return [] + root = tree.getroot() + deps: list[str] = [] + for deps_elem in [ + root.find(f'{_POM_NS}dependencies'), + root.find('dependencies'), + ]: + if deps_elem is None: + continue + for dep in list(deps_elem): + aid = dep.find(f'{_POM_NS}artifactId') + if aid is None: + aid = dep.find('artifactId') + if aid is not None and aid.text: + deps.append(aid.text) + return deps + + +def _parse_gradle_dependencies(build_file: Path) -> list[str]: + """Parse dependency coordinates from a Gradle build file. + + Returns a list of ``group:artifact`` strings (without version). + """ + if not build_file.is_file(): + return [] + text = build_file.read_text(encoding='utf-8') + return _GRADLE_DEP_RE.findall(text) + + +def _parse_settings_gradle(settings_path: Path) -> list[str]: + """Parse project includes from ``settings.gradle``.""" + if not settings_path.is_file(): + return [] + text = settings_path.read_text(encoding='utf-8') + # Match include ':core', ':plugins:google', etc. + return _SETTINGS_INCLUDE_RE.findall(text) + + +class MavenWorkspace: + """Java :class:`~releasekit.backends.workspace.Workspace` implementation. + + Supports both Maven (``pom.xml``) and Gradle (``settings.gradle``) + multi-module/multi-project layouts. + + Args: + workspace_root: Path to the Java workspace root. + """ + + def __init__(self, workspace_root: Path) -> None: + """Initialize with the Java workspace root.""" + self._root = workspace_root.resolve() + + def _is_gradle(self) -> bool: + """Check if the workspace uses Gradle.""" + return (self._root / 'settings.gradle').is_file() or (self._root / 'settings.gradle.kts').is_file() + + async def discover( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover all Java modules/subprojects in the workspace. + + For Maven: parses ```` from the parent POM. + For Gradle: parses ``include`` from ``settings.gradle``. + + Args: + exclude_patterns: Glob patterns to exclude modules by name. + + Returns: + Sorted list of discovered Java packages. + """ + if self._is_gradle(): + return await self._discover_gradle(exclude_patterns=exclude_patterns) + return await self._discover_maven(exclude_patterns=exclude_patterns) + + async def _discover_maven( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover Maven modules from parent POM.""" + parent_pom = self._root / 'pom.xml' + if not parent_pom.is_file(): + log.warning('pom_not_found', root=str(self._root)) + return [] + + module_names = _parse_pom_modules(parent_pom) + + # Collect all artifact IDs for internal dep classification. 
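+        # Maps artifactId -> module directory, e.g. {'core': root / 'core'} (illustrative).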
+ all_artifacts: dict[str, Path] = {} + for mod_name in module_names: + mod_dir = self._root / mod_name + mod_pom = mod_dir / 'pom.xml' + if mod_pom.is_file(): + meta = _parse_pom_metadata(mod_pom) + aid = meta.get('artifactId', mod_name) + all_artifacts[aid] = mod_dir + + exclude = exclude_patterns or [] + packages: list[Package] = [] + + for mod_name in module_names: + mod_dir = self._root / mod_name + mod_pom = mod_dir / 'pom.xml' + if not mod_pom.is_file(): + continue + + meta = _parse_pom_metadata(mod_pom) + name = meta.get('artifactId', mod_name) + version = meta.get('version', '0.0.0') + + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', module=name) + continue + + dep_names = _parse_pom_dependencies(mod_pom) + internal_deps = [d for d in dep_names if d in all_artifacts and d != name] + external_deps = [d for d in dep_names if d not in all_artifacts] + + packages.append( + Package( + name=name, + version=version, + path=mod_dir, + manifest_path=mod_pom, + internal_deps=internal_deps, + external_deps=external_deps, + all_deps=dep_names, + is_publishable=True, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered_maven', + count=len(packages), + modules=[p.name for p in packages], + ) + return packages + + async def _discover_gradle( + self, + *, + exclude_patterns: list[str] | None = None, + ) -> list[Package]: + """Discover Gradle subprojects from settings.gradle.""" + settings = self._root / 'settings.gradle' + if not settings.is_file(): + settings = self._root / 'settings.gradle.kts' + if not settings.is_file(): + log.warning('settings_gradle_not_found', root=str(self._root)) + return [] + + includes = _parse_settings_gradle(settings) + exclude = exclude_patterns or [] + packages: list[Package] = [] + + # First pass: collect group:artifact identifiers for internal + # dependency classification. + all_names: set[str] = set() + artifact_to_name: dict[str, str] = {} + for inc in includes: + name = inc.replace(':', '-').lstrip('-') + all_names.add(name) + rel_path = inc.replace(':', '/') + proj_dir = self._root / rel_path + build_file = proj_dir / 'build.gradle' + if not build_file.is_file(): + build_file = proj_dir / 'build.gradle.kts' + if not build_file.is_file(): + continue + text = build_file.read_text(encoding='utf-8') + group_match = _GRADLE_GROUP_RE.search(text) + if group_match: + artifact_to_name[f'{group_match.group(1)}:{name}'] = name + + # Second pass: discover packages with dependency classification. 
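+        # e.g. an include captured as 'plugins:google' becomes the directory
+        # 'plugins/google' and the package name 'plugins-google' (illustrative).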
+ for inc in includes: + rel_path = inc.replace(':', '/') + proj_dir = self._root / rel_path + name = inc.replace(':', '-').lstrip('-') + + if any(fnmatch.fnmatch(name, pat) for pat in exclude): + log.debug('excluded', project=name) + continue + + build_file = proj_dir / 'build.gradle' + if not build_file.is_file(): + build_file = proj_dir / 'build.gradle.kts' + if not build_file.is_file(): + continue + + text = build_file.read_text(encoding='utf-8') + version_match = _GRADLE_VERSION_RE.search(text) + version = version_match.group(1) if version_match else '0.0.0' + + dep_coords = _parse_gradle_dependencies(build_file) + internal_deps: list[str] = [] + external_deps: list[str] = [] + for coord in dep_coords: + if coord in artifact_to_name and artifact_to_name[coord] != name: + internal_deps.append(artifact_to_name[coord]) + else: + external_deps.append(coord) + + packages.append( + Package( + name=name, + version=version, + path=proj_dir, + manifest_path=build_file, + internal_deps=internal_deps, + external_deps=external_deps, + all_deps=dep_coords, + is_publishable=True, + ) + ) + + packages.sort(key=lambda p: p.name) + log.info( + 'discovered_gradle', + count=len(packages), + projects=[p.name for p in packages], + ) + return packages + + async def rewrite_version( + self, + manifest_path: Path, + new_version: str, + ) -> str: + """Rewrite the version in a Maven POM or Gradle build file. + + Args: + manifest_path: Path to ``pom.xml`` or ``build.gradle``. + new_version: New version string. + + Returns: + The old version string. + """ + if manifest_path.name.startswith('build.gradle'): + return self._rewrite_gradle_version(manifest_path, new_version) + return self._rewrite_pom_version(manifest_path, new_version) + + @staticmethod + def _rewrite_pom_version(pom_path: Path, new_version: str) -> str: + """Rewrite ```` in a POM file. + + Uses :mod:`xml.etree.ElementTree` to locate the project-level + ```` element, then performs a targeted string + replacement to preserve the original file formatting. + """ + text = pom_path.read_text(encoding='utf-8') + try: + root = ET.fromstring(text) # noqa: S314 + except ET.ParseError: + return '0.0.0' + + # Find the project-level (with or without namespace). + version_elem = root.find(f'{_POM_NS}version') + if version_elem is None: + version_elem = root.find('version') + if version_elem is None or not version_elem.text: + return '0.0.0' + + old_version = version_elem.text + # Targeted replacement of the first occurrence of the old version tag. 
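+        # e.g. '<version>1.2.3</version>' -> '<version>1.3.0</version>' (illustrative).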
+        old_tag = f'<version>{old_version}</version>'
+        new_tag = f'<version>{new_version}</version>'
+        new_text = text.replace(old_tag, new_tag, 1)
+        if new_text != text:
+            pom_path.write_text(new_text, encoding='utf-8')
+            log.info(
+                'version_rewritten',
+                manifest=str(pom_path),
+                old=old_version,
+                new=new_version,
+            )
+        return old_version
+
+    @staticmethod
+    def _rewrite_gradle_version(build_file: Path, new_version: str) -> str:
+        """Rewrite ``version = '...'`` in a Gradle build file."""
+        text = build_file.read_text(encoding='utf-8')
+        m = _GRADLE_VERSION_RE.search(text)
+        old_version = m.group(1) if m else '0.0.0'
+        new_text = _GRADLE_VERSION_RE.sub(f"version = '{new_version}'", text, count=1)
+        if new_text != text:
+            build_file.write_text(new_text, encoding='utf-8')
+            log.info(
+                'version_rewritten',
+                manifest=str(build_file),
+                old=old_version,
+                new=new_version,
+            )
+        return old_version
+
+    async def rewrite_dependency_version(
+        self,
+        manifest_path: Path,
+        dep_name: str,
+        new_version: str,
+    ) -> None:
+        """Rewrite a dependency version in a POM or Gradle build file."""
+        text = await read_file(manifest_path)
+
+        if manifest_path.name.startswith('build.gradle'):
+            # Gradle: 'group:artifact:version' patterns — regex is
+            # unavoidable here since Gradle build files are Groovy/Kotlin.
+            pattern = re.compile(
+                rf"('{re.escape(dep_name)}:)[\d.]+[-\w.]*(')",
+            )
+            new_text = pattern.sub(rf'\g<1>{new_version}\g<2>', text)
+        else:
+            new_text = self._rewrite_pom_dependency(text, dep_name, new_version)
+
+        if new_text != text:
+            await write_file(manifest_path, new_text)
+            log.info(
+                'dependency_rewritten',
+                manifest=str(manifest_path),
+                dep=dep_name,
+                version=new_version,
+            )
+        else:
+            log.debug(
+                'dependency_not_found',
+                manifest=str(manifest_path),
+                dep=dep_name,
+            )
+
+    @staticmethod
+    def _rewrite_pom_dependency(text: str, dep_name: str, new_version: str) -> str:
+        """Rewrite a dependency version in POM XML text.
+
+        Uses :mod:`xml.etree.ElementTree` to locate the ``<dependency>``
+        element whose ``<artifactId>`` matches *dep_name*, reads its
+        ``<version>`` text, then performs a targeted string replacement
+        to preserve the original file formatting.
+        """
+        try:
+            root = ET.fromstring(text)  # noqa: S314
+        except ET.ParseError:
+            return text
+
+        # Search both namespaced and non-namespaced dependency blocks.
+        for deps_tag in (f'{_POM_NS}dependencies', 'dependencies'):
+            deps_elem = root.find(deps_tag)
+            if deps_elem is None:
+                continue
+            for dep in list(deps_elem):
+                # Find artifactId.
+                aid = dep.find(f'{_POM_NS}artifactId')
+                if aid is None:
+                    aid = dep.find('artifactId')
+                if aid is None or aid.text != dep_name:
+                    continue
+                # Find version.
+                ver = dep.find(f'{_POM_NS}version')
+                if ver is None:
+                    ver = dep.find('version')
+                if ver is None or not ver.text:
+                    continue
+                # Targeted replacement: replace the first occurrence of
+                # this specific <artifactId>...</artifactId><version>old</version>
+                # block with the new version.
+                old_tag = f'<version>{ver.text}</version>'
+                new_tag = f'<version>{new_version}</version>'
+                # Find the position of this artifactId in the text and
+                # replace the next <version> tag after it.
+ aid_pos = text.find(f'{dep_name}') + if aid_pos == -1: + continue + ver_pos = text.find(old_tag, aid_pos) + if ver_pos == -1: + continue + return text[:ver_pos] + new_tag + text[ver_pos + len(old_tag) :] + + return text + + +__all__ = [ + 'MavenWorkspace', +] diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/pnpm.py b/py/tools/releasekit/src/releasekit/backends/workspace/pnpm.py index 6a6540a5e1..e1583841f3 100644 --- a/py/tools/releasekit/src/releasekit/backends/workspace/pnpm.py +++ b/py/tools/releasekit/src/releasekit/backends/workspace/pnpm.py @@ -54,8 +54,7 @@ from pathlib import Path from typing import Any -import aiofiles - +from releasekit.backends.workspace._io import read_file as _read_file, write_file as _write_file from releasekit.backends.workspace._types import Package from releasekit.errors import E, ReleaseKitError from releasekit.logging import get_logger @@ -86,30 +85,6 @@ def _normalize_name(name: str) -> str: return name.lower() -async def _read_file(path: Path) -> str: - """Read a file asynchronously via aiofiles.""" - try: - async with aiofiles.open(path, encoding='utf-8') as f: - return await f.read() - except OSError as exc: - raise ReleaseKitError( - code=E.WORKSPACE_PARSE_ERROR, - message=f'Failed to read {path}: {exc}', - ) from exc - - -async def _write_file(path: Path, content: str) -> None: - """Write a file asynchronously via aiofiles.""" - try: - async with aiofiles.open(path, mode='w', encoding='utf-8') as f: - await f.write(content) - except OSError as exc: - raise ReleaseKitError( - code=E.WORKSPACE_PARSE_ERROR, - message=f'Failed to write {path}: {exc}', - ) from exc - - def _parse_json(text: str, path: Path) -> dict[str, Any]: # noqa: ANN401 - JSON dict values are inherently untyped """Parse JSON text, raising a ReleaseKitError on failure.""" try: @@ -118,11 +93,13 @@ def _parse_json(text: str, path: Path) -> dict[str, Any]: # noqa: ANN401 - JSON raise ReleaseKitError( code=E.WORKSPACE_PARSE_ERROR, message=f'Failed to parse {path}: {exc}', + hint=f'Check that {path} contains valid JSON.', ) from exc if not isinstance(data, dict): raise ReleaseKitError( code=E.WORKSPACE_PARSE_ERROR, message=f'{path} is not a JSON object', + hint=f'Expected a JSON object (dict) at the top level of {path}.', ) return data diff --git a/py/tools/releasekit/src/releasekit/backends/workspace/uv.py b/py/tools/releasekit/src/releasekit/backends/workspace/uv.py index 30c13faab8..4b62036187 100644 --- a/py/tools/releasekit/src/releasekit/backends/workspace/uv.py +++ b/py/tools/releasekit/src/releasekit/backends/workspace/uv.py @@ -26,65 +26,26 @@ from __future__ import annotations import fnmatch -import re from pathlib import Path from typing import Any -import aiofiles import tomlkit import tomlkit.exceptions -from packaging.requirements import InvalidRequirement, Requirement +from releasekit.backends.workspace._io import read_file as _read_file, write_file as _write_file from releasekit.backends.workspace._types import Package from releasekit.errors import E, ReleaseKitError from releasekit.logging import get_logger +from releasekit.utils.packaging import normalize_name as _normalize_name, parse_dep_name as _parse_dep_name log = get_logger('releasekit.backends.workspace.uv') -def _parse_dep_name(dep_spec: str) -> str: - """Extract the normalized package name from a PEP 508 dependency specifier.""" - try: - return Requirement(dep_spec).name.lower() - except InvalidRequirement: - name = re.split(r'[<>=!~,;\[]', dep_spec, maxsplit=1)[0].strip() - return 
name.lower() - - -def _normalize_name(name: str) -> str: - """Normalize a package name per PEP 503 (lowercase, underscores to hyphens).""" - return name.lower().replace('_', '-') - - def _is_publishable(classifiers: list[str]) -> bool: """Check if any classifier indicates the package is private.""" return not any('Private' in c and 'Do Not Upload' in c for c in classifiers) -async def _read_file(path: Path) -> str: - """Read a file asynchronously via aiofiles.""" - try: - async with aiofiles.open(path, encoding='utf-8') as f: - return await f.read() - except OSError as exc: - raise ReleaseKitError( - code=E.WORKSPACE_PARSE_ERROR, - message=f'Failed to read {path}: {exc}', - ) from exc - - -async def _write_file(path: Path, content: str) -> None: - """Write a file asynchronously via aiofiles.""" - try: - async with aiofiles.open(path, mode='w', encoding='utf-8') as f: - await f.write(content) - except OSError as exc: - raise ReleaseKitError( - code=E.WORKSPACE_PARSE_ERROR, - message=f'Failed to write {path}: {exc}', - ) from exc - - def _parse_toml(text: str, path: Path) -> tomlkit.TOMLDocument: """Parse TOML text, raising a ReleaseKitError on failure.""" try: @@ -93,6 +54,7 @@ def _parse_toml(text: str, path: Path) -> tomlkit.TOMLDocument: raise ReleaseKitError( code=E.WORKSPACE_PARSE_ERROR, message=f'Failed to parse {path}: {exc}', + hint=f'Check that {path} contains valid TOML.', ) from exc diff --git a/py/tools/releasekit/src/releasekit/bump.py b/py/tools/releasekit/src/releasekit/bump.py index 9727e3200e..a2f788b39a 100644 --- a/py/tools/releasekit/src/releasekit/bump.py +++ b/py/tools/releasekit/src/releasekit/bump.py @@ -110,6 +110,7 @@ def bump_pyproject(pyproject_path: Path, new_version: str) -> str: raise ReleaseKitError( code=E.VERSION_INVALID, message=f'Cannot read {pyproject_path}: {exc}', + hint=f'Check that {pyproject_path} exists and is readable.', ) from exc try: @@ -118,6 +119,7 @@ def bump_pyproject(pyproject_path: Path, new_version: str) -> str: raise ReleaseKitError( code=E.VERSION_INVALID, message=f'Cannot parse {pyproject_path}: {exc}', + hint=f'Check that {pyproject_path} contains valid TOML.', ) from exc project = doc.get('project') @@ -137,6 +139,7 @@ def bump_pyproject(pyproject_path: Path, new_version: str) -> str: raise ReleaseKitError( code=E.VERSION_INVALID, message=f'Cannot write {pyproject_path}: {exc}', + hint=f'Check file permissions for {pyproject_path}.', ) from exc logger.info( @@ -168,6 +171,7 @@ def bump_file(target: BumpTarget, new_version: str) -> str: raise ReleaseKitError( code=E.VERSION_INVALID, message=f'Cannot read {target.path}: {exc}', + hint=f'Check that {target.path} exists and is readable.', ) from exc compiled = re.compile(target.pattern, re.MULTILINE) @@ -202,6 +206,7 @@ def bump_file(target: BumpTarget, new_version: str) -> str: raise ReleaseKitError( code=E.VERSION_INVALID, message=f'Cannot write {target.path}: {exc}', + hint=f'Check file permissions for {target.path}.', ) from exc logger.info( diff --git a/py/tools/releasekit/src/releasekit/checks/__init__.py b/py/tools/releasekit/src/releasekit/checks/__init__.py index 2ff862df40..3043f159ce 100644 --- a/py/tools/releasekit/src/releasekit/checks/__init__.py +++ b/py/tools/releasekit/src/releasekit/checks/__init__.py @@ -50,10 +50,12 @@ │ │ CheckBackend (Protocol) │ Injected │ │ │ │ │ │ │ ┌───────────────────────┐ │ │ - │ │ │ PythonCheckBackend │ │ Default │ - │ │ │ GoCheckBackend │ │ Future │ - │ │ │ JsCheckBackend │ │ Future │ - │ │ │ PluginCheckBackend │ │ Future (plugins) │ + │ │ 
│ PythonCheckBackend │ │ python │ + │ │ │ JavaCheckBackend │ │ java/kotlin/clojure │ + │ │ │ GoCheckBackend │ │ go │ + │ │ │ RustCheckBackend │ │ rust │ + │ │ │ JsCheckBackend │ │ js │ + │ │ │ DartCheckBackend │ │ dart │ │ │ └───────────────────────┘ │ │ │ └─────────────────────────────┘ │ └───────────────────────────────────────────────────────┘ @@ -100,7 +102,12 @@ result = run_checks(packages, graph, backend=None) """ +from releasekit.checks._base import BaseCheckBackend from releasekit.checks._constants import DEPRECATED_CLASSIFIERS +from releasekit.checks._dart import DartCheckBackend +from releasekit.checks._go import GoCheckBackend +from releasekit.checks._java import JavaCheckBackend +from releasekit.checks._js import JsCheckBackend from releasekit.checks._protocol import CheckBackend from releasekit.checks._python import PythonCheckBackend from releasekit.checks._python_fixers import ( @@ -119,7 +126,8 @@ fix_type_markers, fix_version_field, ) -from releasekit.checks._runner import run_checks +from releasekit.checks._runner import run_checks, run_checks_async +from releasekit.checks._rust import RustCheckBackend from releasekit.checks._universal import ( fix_missing_license, fix_missing_readme, @@ -127,10 +135,48 @@ ) from releasekit.distro import fix_distro_deps +# Ecosystem → CheckBackend class mapping. +_BACKEND_MAP: dict[str, type] = { + 'python': PythonCheckBackend, + 'java': JavaCheckBackend, + 'kotlin': JavaCheckBackend, + 'clojure': JavaCheckBackend, + 'go': GoCheckBackend, + 'rust': RustCheckBackend, + 'js': JsCheckBackend, + 'dart': DartCheckBackend, +} + + +def get_check_backend( + ecosystem: str, + **kwargs: object, +) -> CheckBackend: + """Return the appropriate :class:`CheckBackend` for *ecosystem*. + + Falls back to :class:`BaseCheckBackend` (all checks pass) for + unknown ecosystems. + + Args: + ecosystem: Ecosystem identifier (e.g. ``'python'``, ``'java'``). + **kwargs: Forwarded to the backend constructor (e.g. + ``core_package``, ``plugin_prefix``). + """ + cls = _BACKEND_MAP.get(ecosystem, BaseCheckBackend) + return cls(**kwargs) + + __all__ = [ + 'BaseCheckBackend', 'CheckBackend', 'DEPRECATED_CLASSIFIERS', + 'DartCheckBackend', + 'GoCheckBackend', + 'JavaCheckBackend', + 'JsCheckBackend', 'PythonCheckBackend', + 'RustCheckBackend', + 'get_check_backend', 'fix_build_system', 'fix_changelog_url', 'fix_distro_deps', @@ -150,4 +196,5 @@ 'fix_type_markers', 'fix_version_field', 'run_checks', + 'run_checks_async', ] diff --git a/py/tools/releasekit/src/releasekit/checks/_base.py b/py/tools/releasekit/src/releasekit/checks/_base.py new file mode 100644 index 0000000000..1a91f69e92 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_base.py @@ -0,0 +1,314 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Base check backend with no-op defaults for all protocol methods. 
+ +Ecosystem-specific backends inherit from :class:`BaseCheckBackend` and +override only the checks that are relevant to their ecosystem. Methods +that are not overridden automatically pass. +""" + +from __future__ import annotations + +from releasekit.preflight import PreflightResult, run_check, run_version_consistency_check +from releasekit.workspace import Package + + +class BaseCheckBackend: + """Base implementation of the :class:`CheckBackend` protocol. + + Provides concrete implementations for universal checks that are + identical across all ecosystems (duplicate dependencies, self + dependencies, version field, version consistency). All other + methods record a pass by default. Subclasses override individual + methods to add ecosystem-specific logic. + + Subclasses should set ``_core_package`` and override + ``_manifest_path`` to customise the universal checks. + """ + + _core_package: str = '' + + def _manifest_path(self, pkg: Package) -> str: + """Return the manifest file path string for *pkg*. + + Subclasses override this to return the ecosystem-specific + manifest (e.g. ``go.mod``, ``Cargo.toml``, ``package.json``). + The default returns ``pkg.manifest_path``. + """ + return str(pkg.manifest_path) + + def check_type_markers( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('type_markers') + + def check_version_consistency( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that all packages share the same version as the core package.""" + run_version_consistency_check( + result, + 'version_consistency', + packages, + core_package=self._core_package, + manifest_path_fn=self._manifest_path, + ) + + def check_naming_convention( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('naming_convention') + + def check_metadata_completeness( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('metadata_completeness') + + def check_python_version_consistency( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('python_version_consistency') + + def check_python_classifiers( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('python_classifiers') + + def check_dependency_resolution( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('dependency_resolution') + + def check_namespace_init( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('namespace_init') + + def check_readme_field( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('readme_field') + + def check_changelog_url( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('changelog_url') + + def check_publish_classifier_consistency( + self, + packages: list[Package], + result: PreflightResult, + exclude_publish: list[str] | None = None, + ) -> None: + """No-op: pass.""" + result.add_pass('publish_classifier_consistency') + + def check_test_filename_collisions( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('test_filename_collisions') + + def check_build_system( + self, + packages: 
list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('build_system') + + def check_version_field( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that all packages declare a version.""" + run_check( + result, + 'version_field', + packages, + lambda pkg: [(pkg.name, self._manifest_path(pkg))] if not pkg.version or pkg.version == '0.0.0' else [], + message='Missing or default version', + hint='Declare a version in the package manifest.', + ) + + def check_duplicate_dependencies( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check for duplicate dependency declarations.""" + + def _probe(pkg: Package) -> list[tuple[str, str]]: + seen: set[str] = set() + dupes: list[tuple[str, str]] = [] + for dep in pkg.all_deps: + if dep in seen: + dupes.append((f'{pkg.name}: {dep}', self._manifest_path(pkg))) + seen.add(dep) + return dupes + + run_check( + result, + 'duplicate_dependencies', + packages, + _probe, + message='Duplicate deps', + hint='Remove duplicate dependency declarations.', + severity='warning', + ) + + def check_pinned_deps_in_libraries( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('pinned_deps_in_libraries') + + def check_requires_python( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('requires_python') + + def check_readme_content_type( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('readme_content_type') + + def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('version_pep440') + + def check_placeholder_urls( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('placeholder_urls') + + def check_legacy_setup_files( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('legacy_setup_files') + + def check_deprecated_classifiers( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('deprecated_classifiers') + + def check_license_classifier_mismatch( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('license_classifier_mismatch') + + def check_unreachable_extras( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('unreachable_extras') + + def check_self_dependencies( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that no package depends on itself.""" + run_check( + result, + 'self_dependencies', + packages, + lambda pkg: [(pkg.name, self._manifest_path(pkg))] if pkg.name in pkg.internal_deps else [], + message='Self-dependency', + hint='A package cannot depend on itself.', + ) + + def check_distro_deps( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """No-op: pass.""" + result.add_pass('distro_deps') + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """No-op: no 
fixes.""" + return [] diff --git a/py/tools/releasekit/src/releasekit/checks/_dart.py b/py/tools/releasekit/src/releasekit/checks/_dart.py new file mode 100644 index 0000000000..5868c5e722 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_dart.py @@ -0,0 +1,307 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Dart/Flutter-specific workspace check backend (``DartCheckBackend``).""" + +from __future__ import annotations + +import re + +from releasekit.checks._base import BaseCheckBackend +from releasekit.checks._dart_fixers import ( + fix_duplicate_dependencies, + fix_metadata_completeness, + fix_publish_to_consistency, +) +from releasekit.logging import get_logger +from releasekit.preflight import PreflightResult, SourceContext, run_check +from releasekit.workspace import Package + +logger = get_logger(__name__) + +# SemVer pattern for pub.dev packages. +_SEMVER_RE = re.compile(r'^\d+\.\d+\.\d+(-[\w.]+)?(\+[\w.]+)?$') + +# Dart package name pattern: lowercase + underscores. +_DART_NAME_RE = re.compile(r'^[a-z][a-z0-9_]*$') + + +def _pubspec(pkg: Package) -> str: + """Return the pubspec.yaml path string for a package.""" + return str(pkg.path / 'pubspec.yaml') + + +def _find_yaml_key_line(text: str, key: str) -> int: + """Find the 1-based line number of a top-level YAML key. + + Searches for ``key:`` at the start of a line. Returns 0 if not found. + """ + target = f'{key}:' + for i, line in enumerate(text.splitlines(), 1): + stripped = line.lstrip() + if stripped.startswith(target): + return i + return 0 + + +class DartCheckBackend(BaseCheckBackend): + """Dart/Flutter-specific workspace checks. + + Checks for: + - ``pubspec.yaml`` presence (build system) + - Package naming conventions (lowercase + underscores) + - Version field presence + - SemVer compliance + - Metadata completeness (name, version, description, environment) + - Self-dependencies + - Duplicate dependencies + - Version consistency across packages + + Args: + core_package: Name of the core package for version consistency. + plugin_prefix: Expected prefix for plugin package names. 
+ """ + + def __init__( + self, + *, + core_package: str = '', + plugin_prefix: str = '', + **_kwargs: object, + ) -> None: + """Initialize with optional project-specific configuration.""" + self._core_package = core_package + self._plugin_prefix = plugin_prefix + + def _manifest_path(self, pkg: Package) -> str: + """Return the pubspec.yaml path string for a package.""" + return _pubspec(pkg) + + def check_build_system( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that each package has a pubspec.yaml.""" + run_check( + result, + 'build_system', + packages, + lambda pkg: [(pkg.name, str(pkg.path))] if not (pkg.path / 'pubspec.yaml').is_file() else [], + message='Missing pubspec.yaml', + hint='Run `dart create` or `flutter create` to initialize.', + ) + + def check_naming_convention( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that package names follow Dart naming conventions.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if _DART_NAME_RE.match(pkg.name): + return [] + pubspec_path = pkg.path / 'pubspec.yaml' + try: + text = pubspec_path.read_text(encoding='utf-8') + line = _find_yaml_key_line(text, 'name') + except Exception: + line = 0 + return [ + ( + pkg.name, + SourceContext( + path=str(pubspec_path), + line=line, + key='name', + label=f'non-standard: {pkg.name!r}', + ), + ) + ] + + run_check( + result, + 'naming_convention', + packages, + _probe, + message='Non-standard names', + hint='Dart package names should be lowercase_with_underscores.', + severity='warning', + ) + + def check_metadata_completeness( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that pubspec.yaml has required fields for pub.dev.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.is_publishable: + return [] + pubspec = pkg.path / 'pubspec.yaml' + if not pubspec.is_file(): + return [] + try: + text = pubspec.read_text(encoding='utf-8') + except OSError: + return [] + missing: list[str] = [] + for field in ('description', 'repository', 'environment'): + if f'{field}:' not in text: + missing.append(field) + if missing: + line = _find_yaml_key_line(text, 'name') or 1 + return [ + ( + f'{pkg.name}: missing {", ".join(missing)}', + SourceContext( + path=str(pubspec), + line=line, + label=f'missing: {", ".join(missing)}', + ), + ) + ] + return [] + + run_check( + result, + 'metadata_completeness', + packages, + _probe, + message='Incomplete metadata', + hint='pub.dev requires description, repository, and environment in pubspec.yaml.', + severity='warning', + joiner='; ', + ) + + def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that versions are valid SemVer.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.version or pkg.version == '0.0.0' or _SEMVER_RE.match(pkg.version): + return [] + pubspec_path = pkg.path / 'pubspec.yaml' + try: + text = pubspec_path.read_text(encoding='utf-8') + line = _find_yaml_key_line(text, 'version') + except Exception: + line = 0 + return [ + ( + f'{pkg.name}=={pkg.version}', + SourceContext( + path=str(pubspec_path), + line=line, + key='version', + label=f'not SemVer: {pkg.version!r}', + ), + ) + ] + + run_check( + result, + 'version_semver', + packages, + _probe, + message='Non-SemVer versions', + hint='Dart packages must use SemVer (e.g. 
1.2.3).', + severity='warning', + ) + + def check_publish_classifier_consistency( + self, + packages: list[Package], + result: PreflightResult, + exclude_publish: list[str] | None = None, + ) -> None: + """Check that ``publish_to: none`` is consistent with publish config.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + pubspec = pkg.path / 'pubspec.yaml' + if not pubspec.is_file(): + return [] + try: + text = pubspec.read_text(encoding='utf-8') + except OSError: + return [] + has_publish_none = 'publish_to: none' in text or "publish_to: 'none'" in text + line = _find_yaml_key_line(text, 'publish_to') or _find_yaml_key_line(text, 'name') or 1 + if pkg.is_publishable and has_publish_none: + return [ + ( + f'{pkg.name}: publishable but publish_to:none', + SourceContext( + path=str(pubspec), + line=line, + key='publish_to', + label='publish_to:none but publishable', + ), + ) + ] + if not pkg.is_publishable and not has_publish_none: + return [ + ( + f'{pkg.name}: excluded but missing publish_to:none', + SourceContext( + path=str(pubspec), + line=line, + key='publish_to', + label='missing publish_to:none', + ), + ) + ] + return [] + + run_check( + result, + 'publish_to_consistency', + packages, + _probe, + message='publish_to mismatch', + hint='Set publish_to: none for non-publishable packages.', + severity='warning', + joiner='; ', + ) + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """Run all Dart-specific auto-fixers.""" + changes: list[str] = [] + changes.extend(fix_publish_to_consistency(packages, dry_run=dry_run)) + changes.extend(fix_metadata_completeness(packages, dry_run=dry_run)) + changes.extend(fix_duplicate_dependencies(packages, dry_run=dry_run)) + return changes + + +__all__ = [ + 'DartCheckBackend', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_dart_fixers.py b/py/tools/releasekit/src/releasekit/checks/_dart_fixers.py new file mode 100644 index 0000000000..a521c91d3b --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_dart_fixers.py @@ -0,0 +1,228 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Dart/Flutter-specific auto-fixer functions for ``pubspec.yaml`` files.""" + +from __future__ import annotations + +from releasekit.logging import get_logger +from releasekit.workspace import Package + +logger = get_logger(__name__) + + +def fix_publish_to_consistency( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add ``publish_to: none`` to non-publishable packages missing it. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. 
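+
+    Example (illustrative; ``packages`` is assumed to come from workspace
+    discovery)::
+
+        for change in fix_publish_to_consistency(packages, dry_run=True):
+            print(change)  # e.g. "my_pkg: added publish_to: none"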
+ """ + changes: list[str] = [] + + for pkg in packages: + if pkg.is_publishable: + continue + pubspec = pkg.path / 'pubspec.yaml' + if not pubspec.is_file(): + continue + try: + text = pubspec.read_text(encoding='utf-8') + except OSError: + continue + + has_publish_none = 'publish_to: none' in text or "publish_to: 'none'" in text + if has_publish_none: + continue + + # Insert publish_to: none after the name: line. + lines = text.splitlines(keepends=True) + new_lines: list[str] = [] + inserted = False + for line in lines: + new_lines.append(line) + if not inserted and line.strip().startswith('name:'): + new_lines.append('publish_to: none\n') + inserted = True + + if not inserted: + # Fallback: prepend. + new_lines.insert(0, 'publish_to: none\n') + + action = f'{pkg.name}: added publish_to: none' + changes.append(action) + if not dry_run: + pubspec.write_text(''.join(new_lines), encoding='utf-8') + logger.info('fix_dart_publish_to', action=action, path=str(pubspec)) + else: + logger.info('fix_dart_publish_to_dry_run', action=action, path=str(pubspec)) + + return changes + + +def fix_duplicate_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove duplicate dependency entries from ``pubspec.yaml``. + + This is a best-effort fixer that detects and removes duplicate + keys within ``dependencies:`` and ``dev_dependencies:`` blocks. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + # Use the all_deps field to detect duplicates. + seen: set[str] = set() + has_dupes = False + for dep in pkg.all_deps: + if dep in seen: + has_dupes = True + break + seen.add(dep) + + if not has_dupes: + continue + + pubspec = pkg.path / 'pubspec.yaml' + if not pubspec.is_file(): + continue + try: + text = pubspec.read_text(encoding='utf-8') + except OSError: + continue + + lines = text.splitlines(keepends=True) + new_lines: list[str] = [] + in_deps_block = False + deps_indent = 0 + seen_in_block: set[str] = set() + removed: list[str] = [] + + for line in lines: + stripped = line.strip() + + # Detect start of dependencies/dev_dependencies block. + if stripped in ('dependencies:', 'dev_dependencies:'): + in_deps_block = True + deps_indent = len(line) - len(line.lstrip()) + seen_in_block = set() + new_lines.append(line) + continue + + # Detect end of block (line at same or lower indent, non-empty). 
+ if in_deps_block and stripped and not stripped.startswith('#'): + line_indent = len(line) - len(line.lstrip()) + if line_indent <= deps_indent and ':' in stripped: + in_deps_block = False + seen_in_block = set() + + if in_deps_block and stripped and not stripped.startswith('#'): + dep_name = stripped.split(':')[0].strip() + if dep_name in seen_in_block: + removed.append(dep_name) + continue + seen_in_block.add(dep_name) + + new_lines.append(line) + + if removed: + new_text = ''.join(new_lines) + action = f'{pkg.name}: removed duplicate deps: {", ".join(removed)}' + changes.append(action) + if not dry_run: + pubspec.write_text(new_text, encoding='utf-8') + logger.info('fix_dart_duplicate_deps', action=action, path=str(pubspec)) + else: + logger.info('fix_dart_duplicate_deps_dry_run', action=action, path=str(pubspec)) + + return changes + + +def fix_metadata_completeness( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add missing required fields to ``pubspec.yaml``. + + Adds stub ``description``, ``repository``, and ``environment`` + fields when they are absent from publishable packages. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if not pkg.is_publishable: + continue + pubspec = pkg.path / 'pubspec.yaml' + if not pubspec.is_file(): + continue + try: + text = pubspec.read_text(encoding='utf-8') + except OSError: + continue + + additions: list[str] = [] + if 'description:' not in text: + additions.append("description: 'TODO: Add package description'") + if 'repository:' not in text: + additions.append("repository: 'TODO: Add repository URL'") + if 'environment:' not in text: + additions.append("environment:\n sdk: '>=3.0.0 <4.0.0'") + + if not additions: + continue + + new_text = text.rstrip('\n') + '\n' + '\n'.join(additions) + '\n' + added_fields = [a.split(':')[0] for a in additions] + action = f'{pkg.name}: added {", ".join(added_fields)} to pubspec.yaml' + changes.append(action) + if not dry_run: + pubspec.write_text(new_text, encoding='utf-8') + logger.info('fix_dart_metadata', action=action, path=str(pubspec)) + else: + logger.info('fix_dart_metadata_dry_run', action=action, path=str(pubspec)) + + return changes + + +__all__ = [ + 'fix_duplicate_dependencies', + 'fix_metadata_completeness', + 'fix_publish_to_consistency', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_go.py b/py/tools/releasekit/src/releasekit/checks/_go.py new file mode 100644 index 0000000000..f44ef4b6a4 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_go.py @@ -0,0 +1,256 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Go-specific workspace check backend (``GoCheckBackend``).""" + +from __future__ import annotations + +import re + +from releasekit.checks._base import BaseCheckBackend +from releasekit.checks._go_fixers import ( + fix_build_system, + fix_duplicate_dependencies, +) +from releasekit.logging import get_logger +from releasekit.preflight import PreflightResult, SourceContext, run_check, run_version_consistency_check +from releasekit.workspace import Package + +logger = get_logger(__name__) + +# SemVer pattern for Go modules (v-prefixed). +_GO_SEMVER_RE = re.compile(r'^v?\d+\.\d+\.\d+(-[\w.]+)?$') + +# Go module path pattern. +_GO_MODULE_RE = re.compile(r'^[a-z][a-z0-9./_-]+$') + + +def _go_mod(pkg: Package) -> str: + """Return the go.mod path string for a package.""" + return str(pkg.path / 'go.mod') + + +def _find_go_mod_line(text: str, directive: str) -> int: + """Find the 1-based line number of a go.mod directive. + + Searches for lines starting with ``directive `` (e.g. ``module ``, + ``go ``). Returns 0 if not found. + """ + target = f'{directive} ' + for i, line in enumerate(text.splitlines(), 1): + if line.startswith(target): + return i + return 0 + + +class GoCheckBackend(BaseCheckBackend): + """Go-specific workspace checks. + + Checks for: + - ``go.mod`` presence (build system) + - ``go.sum`` presence (lockfile) + - Module path naming conventions + - Version field presence + - SemVer compliance (Go modules require vX.Y.Z) + - Self-dependencies + - Duplicate dependencies + - Version consistency across modules + + Args: + core_package: Name of the core module for version consistency. + plugin_prefix: Expected prefix for plugin module names. + """ + + def __init__( + self, + *, + core_package: str = '', + plugin_prefix: str = '', + **_kwargs: object, + ) -> None: + """Initialize with optional project-specific configuration.""" + self._core_package = core_package + self._plugin_prefix = plugin_prefix + + def _manifest_path(self, pkg: Package) -> str: + """Return the go.mod path string for a package.""" + return _go_mod(pkg) + + def check_build_system( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that each module has a go.mod file.""" + run_check( + result, + 'build_system', + packages, + lambda pkg: [(pkg.name, str(pkg.path))] if not (pkg.path / 'go.mod').is_file() else [], + message='Missing go.mod', + hint='Run `go mod init` to create go.mod.', + ) + + def check_dependency_resolution( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that go.sum exists alongside go.mod.""" + run_check( + result, + 'go_sum_present', + packages, + lambda pkg: ( + [(pkg.name, _go_mod(pkg))] + if (pkg.path / 'go.mod').is_file() and not (pkg.path / 'go.sum').is_file() + else [] + ), + message='Missing go.sum', + hint='Run `go mod tidy` to generate go.sum.', + severity='warning', + ) + + def check_naming_convention( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that module paths follow Go naming conventions.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if _GO_MODULE_RE.match(pkg.name): + return [] + go_mod_path = pkg.path / 'go.mod' + try: + text = go_mod_path.read_text(encoding='utf-8') + line = _find_go_mod_line(text, 'module') + except Exception: + line = 0 + return [ + ( + pkg.name, + SourceContext( + path=str(go_mod_path), + line=line, + key='module', + label=f'non-standard: {pkg.name!r}', + ), + ) + ] + + 
run_check( + result, + 'naming_convention', + packages, + _probe, + message='Non-standard module paths', + hint='Go module paths should be lowercase with / separators.', + severity='warning', + ) + + def check_version_field( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that all modules declare a version.""" + run_check( + result, + 'version_field', + packages, + lambda pkg: [(pkg.name, _go_mod(pkg))] if not pkg.version or pkg.version == '0.0.0' else [], + message='Missing version', + hint='Go module versions are set via git tags (e.g. v1.2.3).', + severity='warning', + ) + + def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that versions are valid Go SemVer (vX.Y.Z).""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.version or pkg.version == '0.0.0' or _GO_SEMVER_RE.match(pkg.version): + return [] + go_mod_path = pkg.path / 'go.mod' + try: + text = go_mod_path.read_text(encoding='utf-8') + line = _find_go_mod_line(text, 'module') + except Exception: + line = 0 + return [ + ( + f'{pkg.name}=={pkg.version}', + SourceContext( + path=str(go_mod_path), + line=line, + key='module', + label=f'not SemVer: {pkg.version!r}', + ), + ) + ] + + run_check( + result, + 'version_semver', + packages, + _probe, + message='Non-SemVer versions', + hint='Go modules require SemVer tags (e.g. v1.2.3).', + severity='warning', + ) + + def check_version_consistency( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that all modules share the same version.""" + run_version_consistency_check( + result, + 'version_consistency', + packages, + core_package=self._core_package, + manifest_path_fn=_go_mod, + hint_template='All modules should use version {version}.', + filter_fn=lambda pkg: bool(pkg.version), + ) + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """Run all Go-specific auto-fixers.""" + changes: list[str] = [] + changes.extend(fix_build_system(packages, dry_run=dry_run)) + changes.extend(fix_duplicate_dependencies(packages, dry_run=dry_run)) + return changes + + +__all__ = [ + 'GoCheckBackend', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_go_fixers.py b/py/tools/releasekit/src/releasekit/checks/_go_fixers.py new file mode 100644 index 0000000000..601c333cea --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_go_fixers.py @@ -0,0 +1,137 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Go-specific auto-fixer functions for ``go.mod`` files.""" + +from __future__ import annotations + +from releasekit.logging import get_logger +from releasekit.workspace import Package + +logger = get_logger(__name__) + + +def fix_duplicate_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove duplicate ``require`` directives from ``go.mod``. + + Scans each package's ``go.mod`` for duplicate module paths in + ``require`` blocks and removes the later occurrences. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + go_mod = pkg.path / 'go.mod' + if not go_mod.is_file(): + continue + try: + text = go_mod.read_text(encoding='utf-8') + except OSError: + continue + + lines = text.splitlines(keepends=True) + seen: set[str] = set() + new_lines: list[str] = [] + removed: list[str] = [] + in_require = False + + for line in lines: + stripped = line.strip() + + if stripped == 'require (' or stripped.startswith('require ('): + in_require = True + new_lines.append(line) + continue + if in_require and stripped == ')': + in_require = False + new_lines.append(line) + continue + + if in_require and stripped and not stripped.startswith('//'): + # Lines inside require block look like: module/path v1.2.3 + parts = stripped.split() + if parts: + mod_path = parts[0] + if mod_path in seen: + removed.append(mod_path) + continue + seen.add(mod_path) + + new_lines.append(line) + + if removed: + new_text = ''.join(new_lines) + action = f'{pkg.name}: removed duplicate require directives: {", ".join(removed)}' + changes.append(action) + if not dry_run: + go_mod.write_text(new_text, encoding='utf-8') + logger.info('fix_go_duplicate_deps', action=action, path=str(go_mod)) + else: + logger.info('fix_go_duplicate_deps_dry_run', action=action, path=str(go_mod)) + + return changes + + +def fix_build_system( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Create a minimal ``go.mod`` for packages missing one. + + The generated ``go.mod`` uses the package name as the module path + and defaults to ``go 1.21``. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + go_mod = pkg.path / 'go.mod' + if go_mod.is_file(): + continue + + content = f'module {pkg.name}\n\ngo 1.21\n' + action = f'{pkg.name}: created go.mod' + changes.append(action) + if not dry_run: + go_mod.write_text(content, encoding='utf-8') + logger.info('fix_go_build_system', action=action, path=str(go_mod)) + else: + logger.info('fix_go_build_system_dry_run', action=action, path=str(go_mod)) + + return changes + + +__all__ = [ + 'fix_build_system', + 'fix_duplicate_dependencies', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_java.py b/py/tools/releasekit/src/releasekit/checks/_java.py new file mode 100644 index 0000000000..183aea5643 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_java.py @@ -0,0 +1,490 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Java/Kotlin-specific workspace check backend (``JavaCheckBackend``).""" + +from __future__ import annotations + +import re +import xml.etree.ElementTree as ET # noqa: N817, S405 +from pathlib import Path + +from releasekit.checks._base import BaseCheckBackend +from releasekit.checks._java_fixers import ( + fix_duplicate_dependencies, + fix_metadata_completeness, + fix_placeholder_urls, +) +from releasekit.logging import get_logger +from releasekit.preflight import PreflightResult, SourceContext, run_check +from releasekit.workspace import Package + +logger = get_logger(__name__) + +_POM_NS = '{http://maven.apache.org/POM/4.0.0}' + +# SemVer pattern (loose): major.minor.patch with optional pre-release. +_SEMVER_RE = re.compile(r'^\d+\.\d+\.\d+(-[\w.]+)?$') + +# SNAPSHOT version pattern. +_SNAPSHOT_RE = re.compile(r'-SNAPSHOT$', re.IGNORECASE) + +# Maven Central required POM elements (project-level). +_MAVEN_CENTRAL_REQUIRED = ('name', 'description', 'url', 'licenses', 'developers', 'scm') + + +def _find_xml_tag_line(text: str, tag: str) -> int: + """Find the 1-based line number of an XML tag in file content. + + Searches for ```` or ``', f'<{tag} ') + for i, line in enumerate(text.splitlines(), 1): + for target in targets: + if target in line: + return i + return 0 + + +def _find_gradle_key_line(text: str, key: str) -> int: + """Find the 1-based line number of a Gradle key assignment. + + Searches for ``key =`` or ``key=`` at the start of a line. + Returns 0 if not found. + """ + for i, line in enumerate(text.splitlines(), 1): + stripped = line.strip() + if stripped.startswith(f'{key} =') or stripped.startswith(f'{key}='): + return i + return 0 + + +def _pom_find(root: ET.Element, tag: str) -> ET.Element | None: + """Find a POM element with or without namespace.""" + elem = root.find(f'{_POM_NS}{tag}') + if elem is None: + elem = root.find(tag) + return elem + + +def _read_pom(manifest_path: Path) -> ET.Element | None: + """Parse a POM file, returning the root element or None.""" + if not manifest_path.name.endswith('.xml'): + return None + try: + tree = ET.parse(manifest_path) # noqa: S314 + return tree.getroot() + except (ET.ParseError, OSError): + return None + + +def _read_gradle(manifest_path: Path) -> str: + """Read a Gradle build file, returning its text or empty string.""" + if not manifest_path.name.startswith('build.gradle'): + return '' + try: + return manifest_path.read_text(encoding='utf-8') + except OSError: + return '' + + +class JavaCheckBackend(BaseCheckBackend): + """Java/Kotlin-specific workspace checks. + + Checks for: + - SNAPSHOT dependencies in release builds + - Maven Central metadata requirements + - Version consistency across modules + - Version field presence + - Duplicate dependencies + - Build system presence (pom.xml or build.gradle) + - SemVer compliance + - Self-dependencies + - Metadata completeness (groupId, artifactId, version) + + Args: + core_package: Name of the core module for version consistency. + plugin_prefix: Expected prefix for plugin module names. 
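+
+    A minimal usage sketch (illustrative only; ``packages`` and ``result``
+    are assumed to be a discovered workspace and a ``PreflightResult``;
+    the core-package name is hypothetical)::
+
+        backend = JavaCheckBackend(core_package='my-core')
+        backend.check_dependency_resolution(packages, result)  # SNAPSHOT deps
+        backend.check_changelog_url(packages, result)  # Maven Central metadata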
+ """ + + def __init__( + self, + *, + core_package: str = '', + plugin_prefix: str = '', + **_kwargs: object, + ) -> None: + """Initialize with optional project-specific configuration.""" + self._core_package = core_package + self._plugin_prefix = plugin_prefix + + def check_metadata_completeness( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that POM files have groupId, artifactId, version. + + For Gradle projects, checks that build.gradle has ``group`` + and ``version`` declarations. + """ + check_name = 'metadata_completeness' + incomplete: list[str] = [] + locations: list[str | SourceContext] = [] + + for pkg in packages: + root = _read_pom(pkg.manifest_path) + if root is not None: + missing: list[str] = [] + for field in ('groupId', 'artifactId', 'version'): + elem = _pom_find(root, field) + if elem is None or not elem.text: + missing.append(field) + if missing: + incomplete.append(f'{pkg.name}: missing {", ".join(missing)}') + try: + text = pkg.manifest_path.read_text(encoding='utf-8') + line = _find_xml_tag_line(text, 'project') or 1 + except Exception: + line = 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + label=f'missing: {", ".join(missing)}', + ) + ) + continue + + text = _read_gradle(pkg.manifest_path) + if text: + missing_g: list[str] = [] + if not re.search(r'^group\s*=', text, re.MULTILINE): + missing_g.append('group') + if not re.search(r'^version\s*=', text, re.MULTILINE): + missing_g.append('version') + if missing_g: + incomplete.append(f'{pkg.name}: missing {", ".join(missing_g)}') + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=1, + label=f'missing: {", ".join(missing_g)}', + ) + ) + + if incomplete: + result.add_failure( + check_name, + f'Incomplete metadata: {"; ".join(incomplete)}', + hint='Ensure all modules declare groupId/group, artifactId, and version.', + context=locations, + ) + else: + result.add_pass(check_name) + + def check_build_system( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that each module has a pom.xml or build.gradle.""" + run_check( + result, + 'build_system', + packages, + lambda pkg: ( + [(pkg.name, str(pkg.path))] + if not (pkg.path / 'pom.xml').is_file() + and not (pkg.path / 'build.gradle').is_file() + and not (pkg.path / 'build.gradle.kts').is_file() + else [] + ), + message='No build file', + hint='Each module needs pom.xml or build.gradle.', + ) + + def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that versions are valid SemVer (Maven convention).""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.version or _SEMVER_RE.match(pkg.version): + return [] + try: + text = pkg.manifest_path.read_text(encoding='utf-8') + line = _find_xml_tag_line(text, 'version') or _find_gradle_key_line(text, 'version') or 0 + except Exception: + line = 0 + return [ + ( + f'{pkg.name}=={pkg.version}', + SourceContext( + path=str(pkg.manifest_path), + line=line, + key='version', + label=f'not SemVer: {pkg.version!r}', + ), + ) + ] + + run_check( + result, + 'version_semver', + packages, + _probe, + message='Non-SemVer versions', + hint='Maven artifacts should use SemVer (e.g. 1.2.3 or 1.2.3-beta.1).', + severity='warning', + ) + + def check_dependency_resolution( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check for -SNAPSHOT dependencies (release blocker). 
+ + Release builds must not depend on -SNAPSHOT versions. + This is the #1 cause of broken Maven releases. + """ + check_name = 'snapshot_dependencies' + snapshots: list[str] = [] + locations: list[str | SourceContext] = [] + for pkg in packages: + root = _read_pom(pkg.manifest_path) + if root is None: + continue + try: + pom_text = pkg.manifest_path.read_text(encoding='utf-8') + except Exception: + pom_text = '' + for deps_tag in (f'{_POM_NS}dependencies', 'dependencies'): + deps_elem = root.find(deps_tag) + if deps_elem is None: + continue + for dep in list(deps_elem): + ver_elem = dep.find(f'{_POM_NS}version') + if ver_elem is None: + ver_elem = dep.find('version') + if ver_elem is not None and ver_elem.text and _SNAPSHOT_RE.search(ver_elem.text): + aid = dep.find(f'{_POM_NS}artifactId') + if aid is None: + aid = dep.find('artifactId') + dep_name = aid.text if aid is not None and aid.text else '?' + snapshots.append(f'{pkg.name} → {dep_name}:{ver_elem.text}') + line = _find_xml_tag_line(pom_text, 'dependencies') if pom_text else 0 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + label=f'SNAPSHOT: {dep_name}:{ver_elem.text}', + ) + ) + + if snapshots: + result.add_failure( + check_name, + f'SNAPSHOT deps: {"; ".join(snapshots)}', + hint='Replace -SNAPSHOT versions with release versions before publishing.', + context=locations, + ) + else: + result.add_pass(check_name) + + def check_readme_field( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that POM has or Gradle has description.""" + check_name = 'description_field' + missing_pkgs: list[str] = [] + locations: list[str | SourceContext] = [] + for pkg in packages: + if not pkg.is_publishable: + continue + root = _read_pom(pkg.manifest_path) + if root is not None: + desc = _pom_find(root, 'description') + if desc is None or not desc.text: + missing_pkgs.append(pkg.name) + try: + text = pkg.manifest_path.read_text(encoding='utf-8') + line = _find_xml_tag_line(text, 'project') or 1 + except Exception: + line = 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + label=' missing', + ) + ) + continue + text = _read_gradle(pkg.manifest_path) + if text and not re.search(r'description\s*=', text): + missing_pkgs.append(pkg.name) + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=1, + label='description missing', + ) + ) + + if missing_pkgs: + result.add_warning( + check_name, + f'Missing description: {", ".join(missing_pkgs)}', + hint='Add to pom.xml or description to build.gradle.', + context=locations, + ) + else: + result.add_pass(check_name) + + def check_changelog_url( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that POM has and for Maven Central.""" + check_name = 'maven_central_metadata' + incomplete: list[str] = [] + locations: list[str | SourceContext] = [] + for pkg in packages: + if not pkg.is_publishable: + continue + root = _read_pom(pkg.manifest_path) + if root is None: + continue + missing_tags: list[str] = [] + for tag in _MAVEN_CENTRAL_REQUIRED: + if _pom_find(root, tag) is None: + missing_tags.append(tag) + if missing_tags: + incomplete.append(f'{pkg.name}: missing {", ".join(missing_tags)}') + try: + text = pkg.manifest_path.read_text(encoding='utf-8') + line = _find_xml_tag_line(text, 'project') or 1 + except Exception: + line = 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + label=f'missing: {", ".join(missing_tags)}', + 
) + ) + + if incomplete: + result.add_warning( + check_name, + f'Maven Central requirements: {"; ".join(incomplete)}', + hint='Maven Central requires name, description, url, licenses, developers, scm.', + context=locations, + ) + else: + result.add_pass(check_name) + + def check_placeholder_urls( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check for placeholder URLs in POM or .""" + check_name = 'placeholder_urls' + placeholders: list[str] = [] + locations: list[str | SourceContext] = [] + for pkg in packages: + root = _read_pom(pkg.manifest_path) + if root is None: + continue + url_elem = _pom_find(root, 'url') + if url_elem is not None and url_elem.text: + if 'example.com' in url_elem.text or not url_elem.text.strip(): + placeholders.append(f'{pkg.name}: ') + try: + text = pkg.manifest_path.read_text(encoding='utf-8') + line = _find_xml_tag_line(text, 'url') or 1 + except Exception: + line = 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + key='url', + label='placeholder URL', + ) + ) + + if placeholders: + result.add_warning( + check_name, + f'Placeholder URLs: {", ".join(placeholders)}', + hint='Replace placeholder URLs with real project URLs.', + context=locations, + ) + else: + result.add_pass(check_name) + + def check_legacy_setup_files( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check for mixed build systems (pom.xml + build.gradle).""" + run_check( + result, + 'mixed_build_systems', + packages, + lambda pkg: ( + [(pkg.name, str(pkg.path))] + if (pkg.path / 'pom.xml').is_file() + and ((pkg.path / 'build.gradle').is_file() or (pkg.path / 'build.gradle.kts').is_file()) + else [] + ), + message='Both pom.xml and build.gradle', + hint='Use one build system per module. Remove the unused build file.', + severity='warning', + ) + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """Run all Java-specific auto-fixers.""" + changes: list[str] = [] + changes.extend(fix_placeholder_urls(packages, dry_run=dry_run)) + changes.extend(fix_metadata_completeness(packages, dry_run=dry_run)) + changes.extend(fix_duplicate_dependencies(packages, dry_run=dry_run)) + return changes + + +__all__ = [ + 'JavaCheckBackend', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_java_fixers.py b/py/tools/releasekit/src/releasekit/checks/_java_fixers.py new file mode 100644 index 0000000000..d436f56a40 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_java_fixers.py @@ -0,0 +1,210 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Java/Kotlin-specific auto-fixer functions for POM and Gradle files.""" + +from __future__ import annotations + +import re +import xml.etree.ElementTree as ET # noqa: N817, S405 + +from releasekit.logging import get_logger +from releasekit.workspace import Package + +logger = get_logger(__name__) + +_POM_NS = '{http://maven.apache.org/POM/4.0.0}' + + +def _pom_find(root: ET.Element, tag: str) -> ET.Element | None: + """Find a POM element with or without namespace.""" + elem = root.find(f'{_POM_NS}{tag}') + if elem is None: + elem = root.find(tag) + return elem + + +def fix_placeholder_urls( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove placeholder ``example.com`` URLs from POM ```` elements. + + Replaces ```` elements containing ``example.com`` with an + empty string to signal that the URL needs to be filled in. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if not pkg.manifest_path.name.endswith('.xml'): + continue + try: + tree = ET.parse(pkg.manifest_path) # noqa: S314 + except (ET.ParseError, OSError): + continue + + root = tree.getroot() + url_elem = _pom_find(root, 'url') + if url_elem is None or not url_elem.text: + continue + if 'example.com' not in url_elem.text: + continue + + url_elem.text = '' + action = f'{pkg.name}: cleared placeholder in pom.xml' + changes.append(action) + if not dry_run: + tree.write(str(pkg.manifest_path), xml_declaration=True, encoding='unicode') + logger.info('fix_java_placeholder_url', action=action, path=str(pkg.manifest_path)) + else: + logger.info('fix_java_placeholder_url_dry_run', action=action, path=str(pkg.manifest_path)) + + return changes + + +def fix_duplicate_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove duplicate ```` entries from POM files. + + Detects duplicate dependencies by ``groupId:artifactId`` and + removes later occurrences. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if not pkg.manifest_path.name.endswith('.xml'): + continue + try: + tree = ET.parse(pkg.manifest_path) # noqa: S314 + except (ET.ParseError, OSError): + continue + + root = tree.getroot() + modified = False + removed: list[str] = [] + + for deps_tag in (f'{_POM_NS}dependencies', 'dependencies'): + deps_elem = root.find(deps_tag) + if deps_elem is None: + continue + + seen: set[str] = set() + to_remove: list[ET.Element] = [] + for dep in list(deps_elem): + gid = dep.find(f'{_POM_NS}groupId') + if gid is None: + gid = dep.find('groupId') + aid = dep.find(f'{_POM_NS}artifactId') + if aid is None: + aid = dep.find('artifactId') + gid_text = gid.text if gid is not None and gid.text else '?' + aid_text = aid.text if aid is not None and aid.text else '?' 
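+                # Deduplicate on Maven coordinates (groupId:artifactId);
+                # the first occurrence is kept, later duplicates are removed.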
+ key = f'{gid_text}:{aid_text}' + if key in seen: + to_remove.append(dep) + removed.append(key) + else: + seen.add(key) + + for dep in to_remove: + deps_elem.remove(dep) + modified = True + + if modified and removed: + action = f'{pkg.name}: removed duplicate deps: {", ".join(removed)}' + changes.append(action) + if not dry_run: + tree.write(str(pkg.manifest_path), xml_declaration=True, encoding='unicode') + logger.info('fix_java_duplicate_deps', action=action, path=str(pkg.manifest_path)) + else: + logger.info('fix_java_duplicate_deps_dry_run', action=action, path=str(pkg.manifest_path)) + + return changes + + +def fix_metadata_completeness( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add missing ``group`` and ``version`` to ``build.gradle`` files. + + For Gradle projects missing ``group`` or ``version`` declarations, + appends stub declarations. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + manifest = pkg.manifest_path + if not manifest.name.startswith('build.gradle'): + continue + try: + text = manifest.read_text(encoding='utf-8') + except OSError: + continue + + additions: list[str] = [] + if not re.search(r'^group\s*=', text, re.MULTILINE): + additions.append("group = 'TODO'") + if not re.search(r'^version\s*=', text, re.MULTILINE): + additions.append("version = '0.0.1'") + + if not additions: + continue + + new_text = text.rstrip('\n') + '\n' + '\n'.join(additions) + '\n' + added_fields = [a.split(' =')[0].strip() for a in additions] + action = f'{pkg.name}: added {", ".join(added_fields)} to {manifest.name}' + changes.append(action) + if not dry_run: + manifest.write_text(new_text, encoding='utf-8') + logger.info('fix_java_metadata', action=action, path=str(manifest)) + else: + logger.info('fix_java_metadata_dry_run', action=action, path=str(manifest)) + + return changes + + +__all__ = [ + 'fix_duplicate_dependencies', + 'fix_metadata_completeness', + 'fix_placeholder_urls', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_js.py b/py/tools/releasekit/src/releasekit/checks/_js.py new file mode 100644 index 0000000000..1bbd5f4f4e --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_js.py @@ -0,0 +1,369 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""JavaScript/TypeScript-specific workspace check backend (``JsCheckBackend``).""" + +from __future__ import annotations + +import json +import re + +from releasekit.checks._base import BaseCheckBackend +from releasekit.checks._js_fixers import ( + fix_duplicate_dependencies, + fix_metadata_completeness, + fix_private_field_consistency, +) +from releasekit.logging import get_logger +from releasekit.preflight import PreflightResult, SourceContext, run_check +from releasekit.workspace import Package + +logger = get_logger(__name__) + +# SemVer pattern for npm packages. +_SEMVER_RE = re.compile(r'^\d+\.\d+\.\d+(-[\w.]+)?$') + +# npm scoped package pattern: @scope/name. +_SCOPED_RE = re.compile(r'^@[\w-]+/[\w.-]+$') + +# npm package name pattern (unscoped). +_NPM_NAME_RE = re.compile(r'^[a-z][\w.-]*$') + + +def _read_package_json(pkg: Package) -> dict[str, object] | None: + """Read and parse package.json for a package.""" + pj = pkg.path / 'package.json' + if not pj.is_file(): + return None + try: + return json.loads(pj.read_text(encoding='utf-8')) + except (json.JSONDecodeError, OSError): + return None + + +def _pkg_json(pkg: Package) -> str: + """Return the package.json path string for a package.""" + return str(pkg.path / 'package.json') + + +def _find_json_key_line(text: str, key: str) -> int: + """Find the 1-based line number of a JSON key in file content. + + Searches for ``"key":`` patterns. Returns 0 if not found. + """ + target = f'"{key}"' + for i, line in enumerate(text.splitlines(), 1): + if target in line and ':' in line[line.index(target) :]: + return i + return 0 + + +class JsCheckBackend(BaseCheckBackend): + """JavaScript/TypeScript-specific workspace checks. + + Checks for: + - ``package.json`` presence (build system) + - TypeScript ``d.ts`` type declarations (type markers) + - npm naming conventions (``@scope/name``) + - Version field presence + - SemVer compliance + - Metadata completeness (name, version, description, license, repository) + - Self-dependencies + - Duplicate dependencies + - Version consistency across packages + - ``private: true`` consistency with publish config + + Args: + core_package: Name of the core package for version consistency. + plugin_prefix: Expected prefix (e.g. ``@genkit/``). 
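+
+    A minimal usage sketch (illustrative only; ``packages`` and ``result``
+    are assumed to be a discovered workspace and a ``PreflightResult``)::
+
+        backend = JsCheckBackend(plugin_prefix='@genkit/')
+        backend.check_type_markers(packages, result)  # "types" in package.json
+        backend.check_naming_convention(packages, result)  # @scope/name rules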
+ """ + + def __init__( + self, + *, + core_package: str = '', + plugin_prefix: str = '', + **_kwargs: object, + ) -> None: + """Initialize with optional project-specific configuration.""" + self._core_package = core_package + self._plugin_prefix = plugin_prefix + + def _manifest_path(self, pkg: Package) -> str: + """Return the package.json path string for a package.""" + return _pkg_json(pkg) + + def check_build_system( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that each package has a package.json.""" + run_check( + result, + 'build_system', + packages, + lambda pkg: [(pkg.name, str(pkg.path))] if not (pkg.path / 'package.json').is_file() else [], + message='Missing package.json', + hint='Run `npm init` or `pnpm init` to create package.json.', + ) + + def check_type_markers( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that TypeScript packages have ``types`` field in package.json.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.is_publishable: + return [] + pj = _read_package_json(pkg) + if pj is None: + return [] + if 'types' not in pj and 'typings' not in pj: + if (pkg.path / 'tsconfig.json').is_file(): + pj_path = pkg.path / 'package.json' + try: + text = pj_path.read_text(encoding='utf-8') + line = _find_json_key_line(text, 'name') or 1 + except Exception: + line = 1 + return [ + ( + pkg.name, + SourceContext( + path=str(pj_path), + line=line, + key='types', + label='types field missing', + ), + ) + ] + return [] + + run_check( + result, + 'type_declarations', + packages, + _probe, + message='Missing types field', + hint='Add "types" field to package.json for TypeScript packages.', + severity='warning', + ) + + def check_naming_convention( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that package names follow npm naming conventions.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if _SCOPED_RE.match(pkg.name) or _NPM_NAME_RE.match(pkg.name): + return [] + pj_path = pkg.path / 'package.json' + try: + text = pj_path.read_text(encoding='utf-8') + line = _find_json_key_line(text, 'name') + except Exception: + line = 0 + return [ + ( + pkg.name, + SourceContext( + path=str(pj_path), + line=line, + key='name', + label=f'non-standard: {pkg.name!r}', + ), + ) + ] + + run_check( + result, + 'naming_convention', + packages, + _probe, + message='Non-standard names', + hint='npm names should be lowercase. Scoped: @scope/name. 
Unscoped: name.', + severity='warning', + ) + + def check_metadata_completeness( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that package.json has required fields for npm.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.is_publishable: + return [] + pj = _read_package_json(pkg) + if pj is None: + return [] + missing: list[str] = [] + for field in ('name', 'version', 'description', 'license', 'repository'): + if field not in pj or not pj[field]: + missing.append(field) + if missing: + pj_path = pkg.path / 'package.json' + try: + text = pj_path.read_text(encoding='utf-8') + line = _find_json_key_line(text, 'name') or 1 + except Exception: + line = 1 + return [ + ( + f'{pkg.name}: missing {", ".join(missing)}', + SourceContext( + path=str(pj_path), + line=line, + label=f'missing: {", ".join(missing)}', + ), + ) + ] + return [] + + run_check( + result, + 'metadata_completeness', + packages, + _probe, + message='Incomplete metadata', + hint='npm requires name, version, description, license, repository.', + severity='warning', + joiner='; ', + ) + + def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that versions are valid SemVer.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.version or pkg.version == '0.0.0' or _SEMVER_RE.match(pkg.version): + return [] + pj_path = pkg.path / 'package.json' + try: + text = pj_path.read_text(encoding='utf-8') + line = _find_json_key_line(text, 'version') + except Exception: + line = 0 + return [ + ( + f'{pkg.name}=={pkg.version}', + SourceContext( + path=str(pj_path), + line=line, + key='version', + label=f'not SemVer: {pkg.version!r}', + ), + ) + ] + + run_check( + result, + 'version_semver', + packages, + _probe, + message='Non-SemVer versions', + hint='npm packages must use SemVer (e.g. 
1.2.3).', + severity='warning', + ) + + def check_publish_classifier_consistency( + self, + packages: list[Package], + result: PreflightResult, + exclude_publish: list[str] | None = None, + ) -> None: + """Check that ``private: true`` is consistent with publish config.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + pj = _read_package_json(pkg) + if pj is None: + return [] + is_private = pj.get('private', False) is True + pj_path = pkg.path / 'package.json' + try: + text = pj_path.read_text(encoding='utf-8') + line = _find_json_key_line(text, 'private') or _find_json_key_line(text, 'name') or 1 + except Exception: + line = 1 + if pkg.is_publishable and is_private: + return [ + ( + f'{pkg.name}: publishable but private:true', + SourceContext( + path=str(pj_path), + line=line, + key='private', + label='private:true but publishable', + ), + ) + ] + if not pkg.is_publishable and not is_private: + return [ + ( + f'{pkg.name}: excluded but missing private:true', + SourceContext( + path=str(pj_path), + line=line, + key='private', + label='missing private:true', + ), + ) + ] + return [] + + run_check( + result, + 'private_field_consistency', + packages, + _probe, + message='Private field mismatch', + hint='Set "private": true for non-publishable packages.', + severity='warning', + joiner='; ', + ) + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """Run all JavaScript-specific auto-fixers.""" + changes: list[str] = [] + changes.extend(fix_private_field_consistency(packages, dry_run=dry_run)) + changes.extend(fix_metadata_completeness(packages, dry_run=dry_run)) + changes.extend(fix_duplicate_dependencies(packages, dry_run=dry_run)) + return changes + + +__all__ = [ + 'JsCheckBackend', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_js_fixers.py b/py/tools/releasekit/src/releasekit/checks/_js_fixers.py new file mode 100644 index 0000000000..329bbb4b16 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_js_fixers.py @@ -0,0 +1,194 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""JavaScript/TypeScript-specific auto-fixer functions for ``package.json`` files.""" + +from __future__ import annotations + +import json +from typing import cast + +from releasekit.logging import get_logger +from releasekit.workspace import Package + +logger = get_logger(__name__) + + +def _read_package_json(pkg: Package) -> tuple[dict[str, object] | None, str]: + """Read and parse package.json, returning (data, raw_text).""" + pj = pkg.path / 'package.json' + if not pj.is_file(): + return None, '' + try: + text = pj.read_text(encoding='utf-8') + return json.loads(text), text + except (json.JSONDecodeError, OSError): + return None, '' + + +def _write_package_json(pkg: Package, data: dict[str, object]) -> None: + """Write package.json with 2-space indent and trailing newline.""" + pj = pkg.path / 'package.json' + pj.write_text(json.dumps(data, indent=2, ensure_ascii=False) + '\n', encoding='utf-8') + + +def fix_private_field_consistency( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add ``"private": true`` to non-publishable packages missing it. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if pkg.is_publishable: + continue + data, _ = _read_package_json(pkg) + if data is None: + continue + if data.get('private', False) is True: + continue + + data['private'] = True + action = f'{pkg.name}: added "private": true to package.json' + changes.append(action) + if not dry_run: + _write_package_json(pkg, data) + logger.info('fix_js_private_field', action=action, path=str(pkg.path / 'package.json')) + else: + logger.info('fix_js_private_field_dry_run', action=action, path=str(pkg.path / 'package.json')) + + return changes + + +def fix_metadata_completeness( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add missing required fields to ``package.json``. + + Adds stub ``description``, ``license``, and ``repository`` fields + when they are absent from publishable packages. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if not pkg.is_publishable: + continue + data, _ = _read_package_json(pkg) + if data is None: + continue + + added: list[str] = [] + if not data.get('description'): + data['description'] = 'TODO: Add package description' + added.append('description') + if not data.get('license'): + data['license'] = 'Apache-2.0' + added.append('license') + if not data.get('repository'): + data['repository'] = {'type': 'git', 'url': 'TODO: Add repository URL'} + added.append('repository') + + if not added: + continue + + action = f'{pkg.name}: added {", ".join(added)} to package.json' + changes.append(action) + if not dry_run: + _write_package_json(pkg, data) + logger.info('fix_js_metadata', action=action, path=str(pkg.path / 'package.json')) + else: + logger.info('fix_js_metadata_dry_run', action=action, path=str(pkg.path / 'package.json')) + + return changes + + +def fix_duplicate_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove duplicate dependency entries from ``package.json``. 
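A hedged sketch (not part of the patch) of what the metadata fixer above leaves behind for a publishable package.json missing the three stub fields; the package name is hypothetical and the serialization mirrors ``_write_package_json``::

    import json

    data: dict[str, object] = {'name': '@example/plugin', 'version': '0.1.0'}
    if not data.get('description'):
        data['description'] = 'TODO: Add package description'
    if not data.get('license'):
        data['license'] = 'Apache-2.0'
    if not data.get('repository'):
        data['repository'] = {'type': 'git', 'url': 'TODO: Add repository URL'}

    # Same 2-space indent and trailing newline as _write_package_json.
    print(json.dumps(data, indent=2, ensure_ascii=False) + '\n')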
+ + Checks ``dependencies``, ``devDependencies``, and + ``peerDependencies`` for duplicate keys. Since JSON objects + cannot have duplicate keys (later values win), this fixer + detects when the same package appears in multiple dep sections + and removes it from ``devDependencies`` if also in + ``dependencies``. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + data, _ = _read_package_json(pkg) + if data is None: + continue + + raw_deps = data.get('dependencies', {}) + raw_dev = data.get('devDependencies', {}) + if not isinstance(raw_deps, dict) or not isinstance(raw_dev, dict): + continue + deps = cast(dict[str, object], raw_deps) + dev_deps = cast(dict[str, object], raw_dev) + + # Remove from devDependencies if already in dependencies. + overlap = set(deps.keys()) & set(dev_deps.keys()) + if not overlap: + continue + + for dep_name in overlap: + del dev_deps[dep_name] + + action = f'{pkg.name}: removed {", ".join(sorted(overlap))} from devDependencies (already in dependencies)' + changes.append(action) + if not dry_run: + _write_package_json(pkg, data) + logger.info('fix_js_duplicate_deps', action=action, path=str(pkg.path / 'package.json')) + else: + logger.info('fix_js_duplicate_deps_dry_run', action=action, path=str(pkg.path / 'package.json')) + + return changes + + +__all__ = [ + 'fix_duplicate_dependencies', + 'fix_metadata_completeness', + 'fix_private_field_consistency', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_python.py b/py/tools/releasekit/src/releasekit/checks/_python.py index b2e8722bfa..4075419605 100644 --- a/py/tools/releasekit/src/releasekit/checks/_python.py +++ b/py/tools/releasekit/src/releasekit/checks/_python.py @@ -53,7 +53,7 @@ fix_distro_deps as _fix_distro_deps_for_package, ) from releasekit.logging import get_logger -from releasekit.preflight import PreflightResult +from releasekit.preflight import PreflightResult, SourceContext, find_key_line from releasekit.workspace import Package logger = get_logger(__name__) @@ -149,6 +149,7 @@ def check_type_markers( """ check_name = 'type_markers' missing: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: continue @@ -160,10 +161,13 @@ def check_type_markers( py_typed_files = list(src_dir.rglob('py.typed')) if not py_typed_files: missing.append(pkg.name) + locations.append(str(src_dir)) if missing: result.add_warning( check_name, f'Missing py.typed marker: {", ".join(missing)}', + hint="Create an empty py.typed file in each package's src// directory (PEP 561).", + context=locations, ) else: result.add_pass(check_name) @@ -184,11 +188,13 @@ def check_version_consistency( result.add_warning( check_name, f'Core "{self._core_package}" package not found; cannot verify versions.', + hint=f'Ensure a package named "{self._core_package}" exists in the workspace.', ) return core_version = core_pkg.version mismatches: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.name.startswith(self._plugin_prefix): continue @@ -196,11 +202,14 @@ def check_version_consistency( mismatches.append( f'{pkg.name}=={pkg.version} (expected {core_version})', ) + locations.append(str(pkg.manifest_path)) if mismatches: result.add_warning( check_name, f'Plugin version mismatches: {", ".join(mismatches)}', + hint=f'Run `releasekit bump` to align all plugin versions with 
{self._core_package}.', + context=locations, ) else: result.add_pass(check_name) @@ -222,6 +231,7 @@ def check_naming_convention( return mismatches: list[str] = [] + locations: list[str] = [] for pkg in packages: dir_name = pkg.path.name parent_name = pkg.path.parent.name @@ -234,11 +244,14 @@ def check_naming_convention( mismatches.append( f'{dir_name}/ → {pkg.name} (expected {expected})', ) + locations.append(str(pkg.manifest_path)) if mismatches: result.add_warning( check_name, f'Naming mismatches: {", ".join(mismatches)}', + hint=f'Rename the package in pyproject.toml to match the pattern {self._plugin_prefix}.', + context=locations, ) else: result.add_pass(check_name) @@ -252,6 +265,7 @@ def check_metadata_completeness( check_name = 'metadata_completeness' required_fields = ['description', 'authors', 'license'] issues: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: @@ -266,6 +280,7 @@ def check_metadata_completeness( error=str(exc), ) issues.append(f'{pkg.name}: cannot parse pyproject.toml') + locations.append(str(pkg.manifest_path)) continue project: dict[str, object] = data.get('project', {}) @@ -278,11 +293,20 @@ def check_metadata_completeness( issues.append( f'{pkg.name}: missing {", ".join(missing_fields)}', ) + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='project') or 1, + label=f'missing: {", ".join(missing_fields)}', + ) + ) if issues: result.add_warning( check_name, f'Incomplete metadata: {"; ".join(issues)}', + hint='Add description, authors, and license fields to [project] in pyproject.toml.', + context=locations, ) else: result.add_pass(check_name) @@ -326,9 +350,29 @@ def check_python_version_consistency( parts: list[str] = [] for ver, pkgs in sorted(versions.items()): parts.append(f'{ver}: {len(pkgs)} packages') + # Collect manifest paths with line numbers for context. + all_locs: list[str | SourceContext] = [] + for pkgs_list in versions.values(): + for pname in pkgs_list: + p = next((x for x in packages if x.name == pname), None) + if p: + try: + c = p.manifest_path.read_text(encoding='utf-8') + line = find_key_line(c, 'requires-python') + except Exception: + line = 0 + all_locs.append( + SourceContext( + path=str(p.manifest_path), + line=line, + key='requires-python', + ) + ) result.add_warning( check_name, f'Inconsistent requires-python: {"; ".join(parts)}', + hint='Align requires-python across all packages. Run `releasekit check --fix`.', + context=all_locs, ) def check_python_classifiers( @@ -345,6 +389,7 @@ def check_python_classifiers( check_name = 'python_classifiers' expected_versions = {'3.10', '3.11', '3.12', '3.13', '3.14'} issues: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: @@ -374,11 +419,14 @@ def check_python_classifiers( issues.append( f'{pkg.name}: missing classifiers for Python {", ".join(sorted(missing))}', ) + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Missing Python classifiers: {"; ".join(issues)}', + hint='Add Programming Language :: Python :: 3.{10..14} classifiers. 
Run `releasekit check --fix`.', + context=locations, ) else: result.add_pass(check_name) @@ -405,12 +453,14 @@ def check_dependency_resolution( result.add_warning( check_name, 'uv not found; skipping dependency resolution check.', + hint='Install uv: `curl -LsSf https://astral.sh/uv/install.sh | sh`.', ) return except subprocess.TimeoutExpired: result.add_warning( check_name, 'uv pip check timed out after 60 seconds.', + hint='Check for slow network or large dependency trees.', ) return @@ -424,6 +474,7 @@ def check_dependency_resolution( result.add_warning( check_name, f'Dependency issues: {output}', + hint='Run `uv pip install -e .` to fix missing deps, or update version constraints.', ) def check_namespace_init( @@ -448,6 +499,7 @@ def check_namespace_init( return offenders: list[str] = [] + locations: list[str] = [] for pkg in packages: if self._plugin_dirs and pkg.path.parent.name not in self._plugin_dirs: @@ -461,11 +513,14 @@ def check_namespace_init( if init_file.exists(): relative = init_file.relative_to(pkg.path) offenders.append(f'{pkg.name}: {relative}') + locations.append(str(init_file)) if offenders: result.add_failure( check_name, f'Namespace dirs must not have __init__.py: {", ".join(offenders)}', + hint='Delete the __init__.py files in namespace directories. Run `releasekit check --fix`.', + context=locations, ) else: result.add_pass(check_name) @@ -483,6 +538,7 @@ def check_readme_field( """ check_name = 'readme_field' missing: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: @@ -492,17 +548,27 @@ def check_readme_field( data = tomlkit.parse(content) except Exception: missing.append(f'{pkg.name}: cannot parse pyproject.toml') + locations.append(str(pkg.manifest_path)) continue project: dict[str, object] = data.get('project', {}) if 'readme' not in project or not project['readme']: missing.append(pkg.name) + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='project') or 1, + key='readme', + label='readme field missing', + ) + ) if missing: result.add_warning( check_name, f'Missing readme field: {", ".join(missing)}', hint='Add readme = "README.md" to the [project] section in pyproject.toml.', + context=locations, ) else: result.add_pass(check_name) @@ -520,6 +586,7 @@ def check_changelog_url( """ check_name = 'changelog_url' missing: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: @@ -529,6 +596,7 @@ def check_changelog_url( data = tomlkit.parse(content) except Exception: missing.append(f'{pkg.name}: cannot parse pyproject.toml') + locations.append(str(pkg.manifest_path)) continue project: dict[str, object] = data.get('project', {}) @@ -537,12 +605,23 @@ def check_changelog_url( has_changelog = any(key.lower() == 'changelog' for key in urls) if not has_changelog: missing.append(pkg.name) + line = find_key_line(content, '', section='project.urls') + if not line: + line = find_key_line(content, '', section='project') or 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + label='Changelog URL missing', + ) + ) if missing: result.add_warning( check_name, f'Missing Changelog URL in [project.urls]: {", ".join(missing)}', hint='Add Changelog = "https://github.com/.../CHANGELOG.md" to [project.urls] in pyproject.toml.', + context=locations, ) else: result.add_pass(check_name) @@ -567,6 +646,7 @@ def check_publish_classifier_consistency( return issues: list[str] = [] + 
locations: list[str] = [] for pkg in packages: is_excluded = any(fnmatch.fnmatch(pkg.name, pat) for pat in exclude_publish) has_private_classifier = not pkg.is_publishable @@ -575,16 +655,19 @@ def check_publish_classifier_consistency( issues.append( f'{pkg.name}: has Private :: Do Not Upload but is NOT in exclude_publish', ) + locations.append(str(pkg.manifest_path)) elif is_excluded and not has_private_classifier: issues.append( f'{pkg.name}: in exclude_publish but missing Private :: Do Not Upload classifier', ) + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Publish classifier mismatch: {"; ".join(issues)}', hint='Ensure exclude_publish patterns and Private :: Do Not Upload classifiers agree.', + context=locations, ) else: result.add_pass(check_name) @@ -626,11 +709,20 @@ def check_test_filename_collisions( collisions.append(f'{rel_path} in {", ".join(sorted(pkg_names))}') if collisions: + # Collect the actual colliding file paths for context. + collision_locs: list[str] = [] + for rel_path, pkg_names in sorted(seen.items()): + if len(pkg_names) > 1: + for pname in pkg_names: + p = next((x for x in packages if x.name == pname), None) + if p: + collision_locs.append(str(p.path / rel_path)) result.add_warning( check_name, f'Test file collisions (pytest may shadow): {"; ".join(collisions)}', hint='Rename colliding test files to be unique across packages, ' 'e.g. tests/pkgname_utils_test.py instead of tests/utils_test.py.', + context=collision_locs, ) else: result.add_pass(check_name) @@ -644,6 +736,20 @@ def _parse_pyproject(pkg: Package) -> tomlkit.TOMLDocument | None: except Exception: return None + @staticmethod + def _parse_pyproject_with_content(pkg: Package) -> tuple[tomlkit.TOMLDocument | None, str]: + """Parse a package's pyproject.toml, returning ``(doc, raw_content)``. + + The raw content is needed by :func:`find_key_line` to locate + TOML keys by line number (tomlkit does not expose positions). + Returns ``(None, '')`` on parse failure. 
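A small sketch (not part of the patch) of why the raw TOML text is kept next to the parsed document: tomlkit provides the values, and a plain line scan over the same text provides positions for ``SourceContext``. The inline ``key_line`` helper and pyproject snippet are illustrative; releasekit's own ``find_key_line`` may differ in detail::

    import tomlkit

    content = '[project]\nname = "example-pkg"\nrequires-python = ">=3.10"\n'
    doc = tomlkit.parse(content)                 # structured access to values
    assert doc['project']['name'] == 'example-pkg'

    def key_line(text: str, key: str) -> int:
        # Positions come from the raw text, since tomlkit does not expose them.
        for i, line in enumerate(text.splitlines(), 1):
            if line.strip().startswith((f'{key} ', f'{key}=')):
                return i
        return 0

    assert key_line(content, 'requires-python') == 3   # feeds SourceContext(line=...)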
+ """ + try: + content = pkg.manifest_path.read_text(encoding='utf-8') + return tomlkit.parse(content), content + except Exception: + return None, '' + def check_build_system( self, packages: list[Package], @@ -652,25 +758,43 @@ def check_build_system( """Check that ``[build-system]`` is present and has ``build-backend``.""" check_name = 'build_system' issues: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: continue - doc = self._parse_pyproject(pkg) + doc, content = self._parse_pyproject_with_content(pkg) if doc is None: issues.append(f'{pkg.name}: cannot parse pyproject.toml') + locations.append(str(pkg.manifest_path)) continue build_system = doc.get('build-system') if not isinstance(build_system, dict): issues.append(f'{pkg.name}: missing [build-system]') + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='project') or 1, + label='[build-system] section missing', + ) + ) elif 'build-backend' not in build_system: issues.append(f'{pkg.name}: [build-system] missing build-backend') + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='build-system'), + key='build-backend', + label='build-backend key missing', + ) + ) if issues: result.add_failure( check_name, f'Build system issues: {"; ".join(issues)}', hint='Add [build-system] with requires and build-backend to pyproject.toml.', + context=locations, ) else: result.add_pass(check_name) @@ -683,11 +807,12 @@ def check_version_field( """Check that ``[project].version`` is present or declared dynamic.""" check_name = 'version_field' issues: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: continue - doc = self._parse_pyproject(pkg) + doc, content = self._parse_pyproject_with_content(pkg) if doc is None: continue project = doc.get('project') @@ -698,12 +823,21 @@ def check_version_field( has_dynamic_version = isinstance(dynamic, list) and 'version' in dynamic if not has_version and not has_dynamic_version: issues.append(pkg.name) + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='project') or 1, + key='version', + label='version key missing', + ) + ) if issues: result.add_warning( check_name, f'Missing version field (will build as 0.0.0): {", ".join(issues)}', hint='Add version = "x.y.z" to [project] or add "version" to dynamic.', + context=locations, ) else: result.add_pass(check_name) @@ -716,6 +850,7 @@ def check_duplicate_dependencies( """Check for duplicate entries in ``[project.dependencies]``.""" check_name = 'duplicate_dependencies' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: doc = self._parse_pyproject(pkg) @@ -740,12 +875,14 @@ def check_duplicate_dependencies( dupes = [n for n, count in seen_names.items() if count > 1] if dupes: issues.append(f'{pkg.name}: {", ".join(sorted(dupes))}') + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Duplicate dependencies: {"; ".join(issues)}', hint='Remove duplicate entries from [project.dependencies].', + context=locations, ) else: result.add_pass(check_name) @@ -758,11 +895,12 @@ def check_pinned_deps_in_libraries( """Check that publishable library packages don't pin deps with ``==``.""" check_name = 'pinned_deps_in_libraries' issues: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: 
continue - doc = self._parse_pyproject(pkg) + doc, content = self._parse_pyproject_with_content(pkg) if doc is None: continue project = doc.get('project') @@ -775,12 +913,26 @@ def check_pinned_deps_in_libraries( pinned = [d for d in deps if isinstance(d, str) and '==' in d] if pinned: issues.append(f'{pkg.name}: {", ".join(pinned)}') + # Point at the first pinned dep line. + first_pinned = pinned[0].split('==')[0].strip() + line = find_key_line(content, first_pinned) + if not line: + line = find_key_line(content, 'dependencies') or 1 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + key=pinned[0], + label='pinned with == (use >= for libraries)', + ) + ) if issues: result.add_warning( check_name, f'Pinned dependencies in libraries (use >= instead): {"; ".join(issues)}', hint='Libraries should use >= version specifiers, not ==, to avoid breaking downstream users.', + context=locations, ) else: result.add_pass(check_name) @@ -793,11 +945,12 @@ def check_requires_python( """Check that publishable packages declare ``requires-python``.""" check_name = 'requires_python' missing: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: continue - doc = self._parse_pyproject(pkg) + doc, content = self._parse_pyproject_with_content(pkg) if doc is None: continue project = doc.get('project') @@ -805,12 +958,21 @@ def check_requires_python( continue if 'requires-python' not in project or not project['requires-python']: missing.append(pkg.name) + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=find_key_line(content, '', section='project') or 1, + key='requires-python', + label='requires-python missing', + ) + ) if missing: result.add_warning( check_name, f'Missing requires-python: {", ".join(missing)}', hint='Add requires-python = ">=3.10" (or appropriate version) to [project].', + context=locations, ) else: result.add_pass(check_name) @@ -823,6 +985,7 @@ def check_readme_content_type( """Check that readme file extension matches content-type declaration.""" check_name = 'readme_content_type' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: @@ -850,14 +1013,17 @@ def check_readme_content_type( ext = Path(file_path).suffix.lower() if ext == '.md' and 'rst' in content_type.lower(): issues.append(f'{pkg.name}: {file_path} is Markdown but content-type is {content_type}') + locations.append(str(pkg.manifest_path)) elif ext == '.rst' and 'markdown' in content_type.lower(): issues.append(f'{pkg.name}: {file_path} is RST but content-type is {content_type}') + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Readme content-type mismatch: {"; ".join(issues)}', hint='Ensure readme file extension matches content-type (text/markdown for .md, text/x-rst for .rst).', + context=locations, ) else: result.add_pass(check_name) @@ -870,18 +1036,33 @@ def check_version_pep440( """Check that package versions are PEP 440 compliant.""" check_name = 'version_pep440' invalid: list[str] = [] + locations: list[str | SourceContext] = [] for pkg in packages: if not pkg.is_publishable: continue if pkg.version and not _PEP440_RE.match(pkg.version): invalid.append(f'{pkg.name}: {pkg.version!r}') + try: + content = pkg.manifest_path.read_text(encoding='utf-8') + line = find_key_line(content, 'version') + except Exception: + line = 0 + locations.append( + SourceContext( + path=str(pkg.manifest_path), + line=line, + key='version', + label=f'not 
PEP 440: {pkg.version!r}', + ) + ) if invalid: result.add_failure( check_name, f'Non-PEP 440 versions (PyPI will reject): {"; ".join(invalid)}', hint='Use PEP 440 compliant versions like 1.0.0, 1.0.0a1, 1.0.0.post1, etc.', + context=locations, ) else: result.add_pass(check_name) @@ -894,6 +1075,7 @@ def check_placeholder_urls( """Check for placeholder or empty URLs in ``[project.urls]``.""" check_name = 'placeholder_urls' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: @@ -914,14 +1096,17 @@ def check_placeholder_urls( url_lower = url.lower().strip() if not url_lower: issues.append(f'{pkg.name}: [{label}] is empty') + locations.append(str(pkg.manifest_path)) elif any(p.lower() in url_lower for p in _PLACEHOLDER_URL_PATTERNS): issues.append(f'{pkg.name}: [{label}] = {url!r} looks like a placeholder') + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Placeholder URLs: {"; ".join(issues)}', hint='Replace placeholder URLs in [project.urls] with real values.', + context=locations, ) else: result.add_pass(check_name) @@ -934,17 +1119,20 @@ def check_legacy_setup_files( """Check for leftover ``setup.py`` or ``setup.cfg`` files.""" check_name = 'legacy_setup_files' found: list[str] = [] + locations: list[str] = [] for pkg in packages: for legacy in ('setup.py', 'setup.cfg'): if (pkg.path / legacy).exists(): found.append(f'{pkg.name}: {legacy}') + locations.append(str(pkg.path / legacy)) if found: result.add_warning( check_name, f'Legacy setup files found: {"; ".join(found)}', hint='Remove setup.py/setup.cfg and use pyproject.toml exclusively.', + context=locations, ) else: result.add_pass(check_name) @@ -957,6 +1145,7 @@ def check_deprecated_classifiers( """Check for deprecated trove classifiers.""" check_name = 'deprecated_classifiers' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: doc = self._parse_pyproject(pkg) @@ -976,14 +1165,17 @@ def check_deprecated_classifiers( replacement = DEPRECATED_CLASSIFIERS[clf] if replacement: issues.append(f'{pkg.name}: {clf!r} → {replacement!r}') + locations.append(str(pkg.manifest_path)) else: issues.append(f'{pkg.name}: {clf!r} (remove)') + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Deprecated classifiers: {"; ".join(issues)}', hint='Run with --fix to auto-replace deprecated classifiers.', + context=locations, ) else: result.add_pass(check_name) @@ -996,6 +1188,7 @@ def check_license_classifier_mismatch( """Check that license classifiers match the LICENSE file content.""" check_name = 'license_classifier_mismatch' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: @@ -1039,12 +1232,14 @@ def check_license_classifier_mismatch( f'{pkg.name}: LICENSE file looks like {detected_classifier.split("::")[-1].strip()}' f' but classifiers say {", ".join(license_classifiers)}', ) + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'License mismatch: {"; ".join(issues)}', hint='Ensure license classifiers in pyproject.toml match the LICENSE file.', + context=locations, ) else: result.add_pass(check_name) @@ -1057,6 +1252,7 @@ def check_unreachable_extras( """Check that optional-dependencies reference known packages.""" check_name = 'unreachable_extras' issues: list[str] = [] + locations: list[str] = [] # Build set of all known workspace package names. 
workspace_names = {pkg.name.lower().replace('-', '_').replace('.', '_') for pkg in packages} @@ -1085,6 +1281,7 @@ def check_unreachable_extras( match = _DEP_NAME_RE.match(dep_stripped) if not match: issues.append(f'{pkg.name}[{extra_name}]: unparseable dep {dep!r}') + locations.append(str(pkg.manifest_path)) continue dep_name = match.group(1).lower().replace('-', '_').replace('.', '_') # Only flag if it looks like a workspace package reference that doesn't exist. @@ -1100,6 +1297,7 @@ def check_unreachable_extras( check_name, f'Unreachable extras: {"; ".join(issues)}', hint='Check that optional-dependencies reference valid package names.', + context=locations, ) else: result.add_pass(check_name) @@ -1112,6 +1310,7 @@ def check_self_dependencies( """Check that no package lists itself in its own dependencies.""" check_name = 'self_dependencies' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: doc = self._parse_pyproject(pkg) @@ -1134,6 +1333,7 @@ def check_self_dependencies( dep_name = match.group(1).lower().replace('-', '_').replace('.', '_') if dep_name == pkg_norm: issues.append(f'{pkg.name}: lists itself ({dep.strip()!r})') + locations.append(str(pkg.manifest_path)) break if issues: @@ -1141,6 +1341,7 @@ def check_self_dependencies( check_name, f'Self-dependencies found: {"; ".join(issues)}', hint='Remove the package from its own [project].dependencies.', + context=locations, ) else: result.add_pass(check_name) @@ -1153,6 +1354,7 @@ def check_distro_deps( """Check that distro packaging deps match ``pyproject.toml``.""" check_name = 'distro_deps' issues: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: @@ -1172,12 +1374,14 @@ def check_distro_deps( if diff.version_mismatch: parts.append(f'version mismatch: {", ".join(diff.version_mismatch)}') issues.append(f'{pkg.name} ({diff.distro}): {"; ".join(parts)}') + locations.append(str(pkg.manifest_path)) if issues: result.add_warning( check_name, f'Distro packaging deps out of sync: {"; ".join(issues)}', hint='Run "releasekit check --fix" to update distro packaging files.', + context=locations, ) else: result.add_pass(check_name) diff --git a/py/tools/releasekit/src/releasekit/checks/_runner.py b/py/tools/releasekit/src/releasekit/checks/_runner.py index ac905c3340..4dc55f6109 100644 --- a/py/tools/releasekit/src/releasekit/checks/_runner.py +++ b/py/tools/releasekit/src/releasekit/checks/_runner.py @@ -14,10 +14,17 @@ # # SPDX-License-Identifier: Apache-2.0 -"""Orchestrator that runs all workspace health checks.""" +"""Orchestrator that runs all workspace health checks. + +Individual checks are dispatched concurrently via +:func:`asyncio.gather` + :func:`asyncio.to_thread`. +:class:`~releasekit.preflight.PreflightResult` is thread-safe, +so concurrent writes from multiple checks are safe. +""" from __future__ import annotations +import asyncio from pathlib import Path from releasekit.checks._protocol import CheckBackend @@ -42,7 +49,7 @@ _USE_DEFAULT = object() -def run_checks( +async def run_checks_async( packages: list[Package], graph: DependencyGraph, backend: CheckBackend | object = _USE_DEFAULT, @@ -56,7 +63,7 @@ def run_checks( library_dirs: list[str] | None = None, plugin_dirs: list[str] | None = None, ) -> PreflightResult: - """Run all workspace health checks. + """Run all workspace health checks concurrently. **Universal checks** always run (cycles, self_deps, orphan_deps, missing_license, missing_readme, stale_artifacts). 
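A runnable sketch (not part of the patch) of the fan-out pattern the docstring describes: synchronous check functions dispatched with ``asyncio.to_thread`` and gathered, all appending into a lock-guarded result object. ``MiniResult`` is a stand-in; releasekit's ``PreflightResult`` may be implemented differently::

    import asyncio
    import threading

    class MiniResult:
        def __init__(self) -> None:
            self._lock = threading.Lock()
            self.passed: list[str] = []

        def add_pass(self, name: str) -> None:
            with self._lock:  # guards concurrent writes from worker threads
                self.passed.append(name)

    def slow_check(name: str, result: MiniResult) -> None:
        result.add_pass(name)  # a real check would do blocking I/O here

    async def run_all() -> MiniResult:
        result = MiniResult()
        await asyncio.gather(*(asyncio.to_thread(slow_check, f'check_{i}', result) for i in range(5)))
        return result

    print(sorted(asyncio.run(run_all()).passed))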
@@ -65,6 +72,10 @@ def run_checks( If no backend is specified, defaults to :class:`PythonCheckBackend`. Pass ``backend=None`` to skip language-specific checks entirely. + All checks are dispatched concurrently via :func:`asyncio.gather` + and :func:`asyncio.to_thread` since the check functions are + synchronous. :class:`PreflightResult` is thread-safe. + Args: packages: All discovered workspace packages. graph: The workspace dependency graph. @@ -92,6 +103,156 @@ def run_checks( """ result = PreflightResult() + # Collect all check tasks for concurrent execution. + tasks: list[asyncio.Task[None]] = [] + + # Universal checks. + tasks.append(asyncio.create_task(asyncio.to_thread(_check_cycles, graph, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_self_deps, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_orphan_deps, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_missing_license, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_missing_readme, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_stale_artifacts, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(_check_ungrouped_packages, packages, groups or {}, result))) + if workspace_root is not None: + tasks.append(asyncio.create_task(asyncio.to_thread(_check_lockfile_staleness, workspace_root, result))) + + if backend is _USE_DEFAULT: + backend = PythonCheckBackend( + core_package=core_package, + plugin_prefix=plugin_prefix, + namespace_dirs=namespace_dirs, + library_dirs=library_dirs, + plugin_dirs=plugin_dirs, + ) + + if backend is not None and isinstance(backend, CheckBackend): + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_type_markers, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_version_consistency, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_naming_convention, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_metadata_completeness, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_python_version_consistency, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_python_classifiers, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_dependency_resolution, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_namespace_init, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_readme_field, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_changelog_url, packages, result))) + tasks.append( + asyncio.create_task( + asyncio.to_thread( + backend.check_publish_classifier_consistency, + packages, + result, + exclude_publish, + ) + ) + ) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_test_filename_collisions, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_build_system, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_version_field, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_duplicate_dependencies, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_pinned_deps_in_libraries, packages, result))) + 
tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_requires_python, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_readme_content_type, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_version_pep440, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_placeholder_urls, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_legacy_setup_files, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_deprecated_classifiers, packages, result))) + tasks.append( + asyncio.create_task( + asyncio.to_thread( + backend.check_license_classifier_mismatch, + packages, + result, + ) + ) + ) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_unreachable_extras, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_self_dependencies, packages, result))) + tasks.append(asyncio.create_task(asyncio.to_thread(backend.check_distro_deps, packages, result))) + + await asyncio.gather(*tasks) + + logger.info('checks_complete', summary=result.summary()) + return result + + +def run_checks( + packages: list[Package], + graph: DependencyGraph, + backend: CheckBackend | object = _USE_DEFAULT, + exclude_publish: list[str] | None = None, + groups: dict[str, list[str]] | None = None, + workspace_root: Path | None = None, + *, + core_package: str = '', + plugin_prefix: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, +) -> PreflightResult: + """Synchronous wrapper around :func:`run_checks_async`. + + Creates a new event loop if none is running, otherwise uses + the existing loop. This preserves backward compatibility for + callers that don't use ``await``. + """ + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop is not None and loop.is_running(): + # Already inside an event loop (e.g. pytest-asyncio). + # Fall back to sequential execution to avoid nested loop issues. 
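A minimal sketch (not part of the patch) of the sync-over-async wrapper pattern used here: own an event loop with ``asyncio.run()`` when none is active, and take the degraded path when one already is. The names are illustrative::

    import asyncio

    async def _work() -> str:
        return 'done'

    def work_sync() -> str:
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            return asyncio.run(_work())   # no running loop: safe to create one
        return 'fallback'                 # a nested asyncio.run() would raise

    print(work_sync())   # prints 'done' when called from plain synchronous code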
+ return _run_checks_sync( + packages, + graph, + backend, + exclude_publish, + groups, + workspace_root, + core_package=core_package, + plugin_prefix=plugin_prefix, + namespace_dirs=namespace_dirs, + library_dirs=library_dirs, + plugin_dirs=plugin_dirs, + ) + + return asyncio.run( + run_checks_async( + packages, + graph, + backend, + exclude_publish, + groups, + workspace_root, + core_package=core_package, + plugin_prefix=plugin_prefix, + namespace_dirs=namespace_dirs, + library_dirs=library_dirs, + plugin_dirs=plugin_dirs, + ) + ) + + +def _run_checks_sync( + packages: list[Package], + graph: DependencyGraph, + backend: CheckBackend | object = _USE_DEFAULT, + exclude_publish: list[str] | None = None, + groups: dict[str, list[str]] | None = None, + workspace_root: Path | None = None, + *, + core_package: str = '', + plugin_prefix: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, +) -> PreflightResult: + """Sequential fallback when already inside a running event loop.""" + result = PreflightResult() + _check_cycles(graph, result) _check_self_deps(packages, result) _check_orphan_deps(packages, result) diff --git a/py/tools/releasekit/src/releasekit/checks/_rust.py b/py/tools/releasekit/src/releasekit/checks/_rust.py new file mode 100644 index 0000000000..dcd96ebec4 --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_rust.py @@ -0,0 +1,307 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +"""Rust/Cargo-specific workspace check backend (``RustCheckBackend``).""" + +from __future__ import annotations + +import re + +from releasekit.checks._base import BaseCheckBackend +from releasekit.checks._rust_fixers import ( + fix_duplicate_dependencies, + fix_metadata_completeness, + fix_wildcard_dependencies, +) +from releasekit.logging import get_logger +from releasekit.preflight import PreflightResult, SourceContext, find_key_line, run_check +from releasekit.workspace import Package + +logger = get_logger(__name__) + +# SemVer pattern for Cargo crates. +_SEMVER_RE = re.compile(r'^\d+\.\d+\.\d+(-[\w.]+)?$') + +# Crate name pattern: lowercase alphanumeric + hyphens/underscores. +_CRATE_NAME_RE = re.compile(r'^[a-z][a-z0-9_-]*$') + + +def _cargo_toml(pkg: Package) -> str: + """Return the Cargo.toml path string for a package.""" + return str(pkg.path / 'Cargo.toml') + + +class RustCheckBackend(BaseCheckBackend): + """Rust/Cargo-specific workspace checks. + + Checks for: + - ``Cargo.toml`` presence (build system) + - ``Cargo.lock`` presence (lockfile) + - Crate naming conventions + - Version field presence + - SemVer compliance + - Metadata completeness (name, version, edition, description, license) + - Self-dependencies + - Duplicate dependencies + - Version consistency across crates + - Wildcard dependency versions (``*``) + + Args: + core_package: Name of the core crate for version consistency. 
+ plugin_prefix: Expected prefix for plugin crate names. + """ + + def __init__( + self, + *, + core_package: str = '', + plugin_prefix: str = '', + **_kwargs: object, + ) -> None: + """Initialize with optional project-specific configuration.""" + self._core_package = core_package + self._plugin_prefix = plugin_prefix + + def _manifest_path(self, pkg: Package) -> str: + """Return the Cargo.toml path string for a package.""" + return _cargo_toml(pkg) + + def check_build_system( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that each crate has a Cargo.toml.""" + run_check( + result, + 'build_system', + packages, + lambda pkg: [(pkg.name, str(pkg.path))] if not (pkg.path / 'Cargo.toml').is_file() else [], + message='Missing Cargo.toml', + hint='Run `cargo init` to create Cargo.toml.', + ) + + def check_dependency_resolution( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that Cargo.lock exists in the workspace root.""" + check_name = 'cargo_lock_present' + # Only check the first package's parent for workspace-level lock. + if packages: + ws_root = packages[0].path.parent + # Walk up to find workspace root (where Cargo.lock lives). + for parent in [packages[0].path, *packages[0].path.parents]: + if (parent / 'Cargo.lock').is_file(): + result.add_pass(check_name) + return + if (parent / 'Cargo.toml').is_file(): + ws_root = parent + result.add_warning( + check_name, + f'Missing Cargo.lock in {ws_root}', + hint='Run `cargo generate-lockfile` to create Cargo.lock.', + context=[str(ws_root)], + ) + else: + result.add_pass(check_name) + + def check_naming_convention( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that crate names follow Rust naming conventions.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if _CRATE_NAME_RE.match(pkg.name): + return [] + cargo_path = pkg.path / 'Cargo.toml' + try: + content = cargo_path.read_text(encoding='utf-8') + line = find_key_line(content, 'name') + except Exception: + line = 0 + return [ + ( + pkg.name, + SourceContext( + path=str(cargo_path), + line=line, + key='name', + label=f'non-standard: {pkg.name!r}', + ), + ) + ] + + run_check( + result, + 'naming_convention', + packages, + _probe, + message='Non-standard crate names', + hint='Crate names should be lowercase with hyphens or underscores.', + severity='warning', + ) + + def check_metadata_completeness( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that Cargo.toml has required fields for crates.io.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.is_publishable: + return [] + cargo_toml = pkg.path / 'Cargo.toml' + if not cargo_toml.is_file(): + return [] + try: + text = cargo_toml.read_text(encoding='utf-8') + except OSError: + return [] + missing: list[str] = [] + for field in ('description', 'license', 'repository'): + if f'{field} =' not in text and f'{field}=' not in text: + missing.append(field) + if missing: + line = find_key_line(text, 'name', section='package') or 1 + return [ + ( + f'{pkg.name}: missing {", ".join(missing)}', + SourceContext( + path=str(cargo_toml), + line=line, + label=f'missing: {", ".join(missing)}', + ), + ) + ] + return [] + + run_check( + result, + 'metadata_completeness', + packages, + _probe, + message='Incomplete metadata', + hint='crates.io requires description, license, and repository in Cargo.toml.', + severity='warning', + joiner='; ', + ) + + 
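A short sketch (not part of the patch) of the parent-directory walk the Cargo.lock check above performs; the starting directory is just an example::

    from pathlib import Path

    def find_workspace_lockfile(crate_dir: Path) -> Path | None:
        # Nearest enclosing directory that holds the workspace lockfile.
        for parent in [crate_dir, *crate_dir.parents]:
            candidate = parent / 'Cargo.lock'
            if candidate.is_file():
                return candidate
        return None

    print(find_workspace_lockfile(Path.cwd()))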
def check_version_pep440( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check that versions are valid SemVer.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + if not pkg.version or pkg.version == '0.0.0' or _SEMVER_RE.match(pkg.version): + return [] + cargo_path = pkg.path / 'Cargo.toml' + try: + content = cargo_path.read_text(encoding='utf-8') + line = find_key_line(content, 'version') + except Exception: + line = 0 + return [ + ( + f'{pkg.name}=={pkg.version}', + SourceContext( + path=str(cargo_path), + line=line, + key='version', + label=f'not SemVer: {pkg.version!r}', + ), + ) + ] + + run_check( + result, + 'version_semver', + packages, + _probe, + message='Non-SemVer versions', + hint='Cargo crates must use SemVer (e.g. 1.2.3).', + severity='warning', + ) + + def check_pinned_deps_in_libraries( + self, + packages: list[Package], + result: PreflightResult, + ) -> None: + """Check for wildcard (*) dependency versions.""" + + def _probe(pkg: Package) -> list[tuple[str, str | SourceContext]]: + cargo_toml = pkg.path / 'Cargo.toml' + if not cargo_toml.is_file(): + return [] + try: + text = cargo_toml.read_text(encoding='utf-8') + except OSError: + return [] + match = re.search(r'=\s*"\*"', text) + if match: + line = text[: match.start()].count('\n') + 1 + return [ + ( + pkg.name, + SourceContext( + path=str(cargo_toml), + line=line, + label='wildcard version "*"', + ), + ) + ] + return [] + + run_check( + result, + 'wildcard_dependencies', + packages, + _probe, + message='Wildcard deps', + hint='Avoid version = "*" in dependencies. Use specific version ranges.', + severity='warning', + ) + + def run_fixes( + self, + packages: list[Package], + *, + exclude_publish: list[str] | None = None, + repo_owner: str = '', + repo_name: str = '', + namespace_dirs: list[str] | None = None, + library_dirs: list[str] | None = None, + plugin_dirs: list[str] | None = None, + dry_run: bool = False, + ) -> list[str]: + """Run all Rust-specific auto-fixers.""" + changes: list[str] = [] + changes.extend(fix_metadata_completeness(packages, dry_run=dry_run)) + changes.extend(fix_wildcard_dependencies(packages, dry_run=dry_run)) + changes.extend(fix_duplicate_dependencies(packages, dry_run=dry_run)) + return changes + + +__all__ = [ + 'RustCheckBackend', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_rust_fixers.py b/py/tools/releasekit/src/releasekit/checks/_rust_fixers.py new file mode 100644 index 0000000000..0852b8f5ed --- /dev/null +++ b/py/tools/releasekit/src/releasekit/checks/_rust_fixers.py @@ -0,0 +1,215 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 + +"""Rust/Cargo-specific auto-fixer functions for ``Cargo.toml`` files.""" + +from __future__ import annotations + +import re + +from releasekit.logging import get_logger +from releasekit.workspace import Package + +logger = get_logger(__name__) + + +def fix_metadata_completeness( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Add missing required fields to ``Cargo.toml``. + + Adds stub ``description``, ``license``, and ``repository`` fields + to the ``[package]`` section when they are absent from publishable + crates. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + + for pkg in packages: + if not pkg.is_publishable: + continue + cargo_toml = pkg.path / 'Cargo.toml' + if not cargo_toml.is_file(): + continue + try: + text = cargo_toml.read_text(encoding='utf-8') + except OSError: + continue + + additions: list[str] = [] + if 'description =' not in text and 'description=' not in text: + additions.append('description = "TODO: Add crate description"') + if 'license =' not in text and 'license=' not in text: + additions.append('license = "Apache-2.0"') + if 'repository =' not in text and 'repository=' not in text: + additions.append('repository = "TODO: Add repository URL"') + + if not additions: + continue + + # Insert after [package] section header. + lines = text.splitlines(keepends=True) + new_lines: list[str] = [] + inserted = False + for line in lines: + new_lines.append(line) + if not inserted and line.strip() == '[package]': + for addition in additions: + new_lines.append(addition + '\n') + inserted = True + + if not inserted: + # Fallback: append to end. + for addition in additions: + new_lines.append(addition + '\n') + + added_fields = [a.split(' =')[0].split('=')[0].strip() for a in additions] + action = f'{pkg.name}: added {", ".join(added_fields)} to Cargo.toml' + changes.append(action) + if not dry_run: + cargo_toml.write_text(''.join(new_lines), encoding='utf-8') + logger.info('fix_rust_metadata', action=action, path=str(cargo_toml)) + else: + logger.info('fix_rust_metadata_dry_run', action=action, path=str(cargo_toml)) + + return changes + + +def fix_wildcard_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + r"""Replace wildcard (``*``) dependency versions with ``>=0``. + + Finds lines matching ``= "*"`` and replaces them with ``= ">=0"``. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. 
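A tiny sketch (not part of the patch) of the wildcard rewrite this fixer performs, using the same ``re.subn`` pattern on a made-up Cargo.toml fragment::

    import re

    text = '[dependencies]\nserde = "*"\nanyhow = { version = "*", features = ["std"] }\n'
    new_text, count = re.subn(r'=\s*"\*"', '= ">=0"', text)
    assert count == 2
    assert 'serde = ">=0"' in new_text
    assert 'version = ">=0"' in new_text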
+ """ + changes: list[str] = [] + + for pkg in packages: + cargo_toml = pkg.path / 'Cargo.toml' + if not cargo_toml.is_file(): + continue + try: + text = cargo_toml.read_text(encoding='utf-8') + except OSError: + continue + + new_text, count = re.subn(r'=\s*"\*"', '= ">=0"', text) + if count == 0: + continue + + action = f'{pkg.name}: replaced {count} wildcard dep version(s) with ">=0"' + changes.append(action) + if not dry_run: + cargo_toml.write_text(new_text, encoding='utf-8') + logger.info('fix_rust_wildcard_deps', action=action, path=str(cargo_toml)) + else: + logger.info('fix_rust_wildcard_deps_dry_run', action=action, path=str(cargo_toml)) + + return changes + + +def fix_duplicate_dependencies( + packages: list[Package], + *, + dry_run: bool = False, +) -> list[str]: + """Remove duplicate dependency entries from ``Cargo.toml``. + + Scans ``[dependencies]``, ``[dev-dependencies]``, and + ``[build-dependencies]`` sections for duplicate crate names + and removes later occurrences. + + Args: + packages: All discovered workspace packages. + dry_run: If ``True``, report what would change without writing. + + Returns: + List of human-readable descriptions of changes made. + """ + changes: list[str] = [] + _dep_section_re = re.compile(r'^\[((?:dev-|build-)?dependencies)\]') + + for pkg in packages: + cargo_toml = pkg.path / 'Cargo.toml' + if not cargo_toml.is_file(): + continue + try: + text = cargo_toml.read_text(encoding='utf-8') + except OSError: + continue + + lines = text.splitlines(keepends=True) + new_lines: list[str] = [] + in_dep_section = False + seen_in_section: set[str] = set() + removed: list[str] = [] + + for line in lines: + stripped = line.strip() + + # Detect section headers. + if stripped.startswith('['): + in_dep_section = bool(_dep_section_re.match(stripped)) + seen_in_section = set() + new_lines.append(line) + continue + + if in_dep_section and stripped and not stripped.startswith('#'): + # Lines look like: crate_name = "version" or crate_name = { ... 
} + dep_name = stripped.split('=')[0].strip() if '=' in stripped else '' + if dep_name: + if dep_name in seen_in_section: + removed.append(dep_name) + continue + seen_in_section.add(dep_name) + + new_lines.append(line) + + if removed: + new_text = ''.join(new_lines) + action = f'{pkg.name}: removed duplicate deps: {", ".join(removed)}' + changes.append(action) + if not dry_run: + cargo_toml.write_text(new_text, encoding='utf-8') + logger.info('fix_rust_duplicate_deps', action=action, path=str(cargo_toml)) + else: + logger.info('fix_rust_duplicate_deps_dry_run', action=action, path=str(cargo_toml)) + + return changes + + +__all__ = [ + 'fix_duplicate_dependencies', + 'fix_metadata_completeness', + 'fix_wildcard_dependencies', +] diff --git a/py/tools/releasekit/src/releasekit/checks/_universal.py b/py/tools/releasekit/src/releasekit/checks/_universal.py index df4fa03688..41ce9edb7e 100644 --- a/py/tools/releasekit/src/releasekit/checks/_universal.py +++ b/py/tools/releasekit/src/releasekit/checks/_universal.py @@ -66,14 +66,17 @@ def _check_self_deps( """ check_name = 'self_deps' offenders: list[str] = [] + locations: list[str] = [] for pkg in packages: if pkg.name in pkg.internal_deps: offenders.append(pkg.name) + locations.append(str(pkg.manifest_path)) if offenders: result.add_failure( check_name, f'Packages depend on themselves: {", ".join(offenders)}', hint='Remove the package from its own [project.dependencies] list.', + context=locations, ) else: result.add_pass(check_name) @@ -92,14 +95,18 @@ def _check_orphan_deps( check_name = 'orphan_deps' known_names = {pkg.name for pkg in packages} orphans: list[str] = [] + locations: list[str] = [] for pkg in packages: for dep in pkg.internal_deps: if dep not in known_names: orphans.append(f'{pkg.name} → {dep}') + locations.append(str(pkg.manifest_path)) if orphans: result.add_warning( check_name, f'Internal deps not found in workspace: {", ".join(orphans)}', + hint='Add the missing packages to the workspace or remove the stale dependency.', + context=locations, ) else: result.add_pass(check_name) @@ -117,17 +124,20 @@ def _check_missing_license( """ check_name = 'missing_license' missing: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: continue license_path = pkg.path / 'LICENSE' if not license_path.exists(): missing.append(pkg.name) + locations.append(str(pkg.path)) if missing: result.add_failure( check_name, f'Missing LICENSE file: {", ".join(missing)}', hint='Copy the Apache 2.0 LICENSE file into each listed package directory.', + context=locations, ) else: result.add_pass(check_name) @@ -145,17 +155,20 @@ def _check_missing_readme( """ check_name = 'missing_readme' missing: list[str] = [] + locations: list[str] = [] for pkg in packages: if not pkg.is_publishable: continue readme_path = pkg.path / 'README.md' if not readme_path.exists(): missing.append(pkg.name) + locations.append(str(pkg.path)) if missing: result.add_failure( check_name, f'Missing README.md file: {", ".join(missing)}', hint='Create a README.md in each listed package directory describing the package.', + context=locations, ) else: result.add_pass(check_name) @@ -175,22 +188,26 @@ def _check_stale_artifacts( """ check_name = 'stale_artifacts' stale: list[str] = [] + locations: list[str] = [] for pkg in packages: bak_files = list(pkg.path.glob('*.bak')) if bak_files: stale.append(f'{pkg.name}: {len(bak_files)} .bak file(s)') + locations.extend(str(f) for f in bak_files) dist_dir = pkg.path / 'dist' if dist_dir.is_dir(): dist_files = 
list(dist_dir.iterdir()) if dist_files: stale.append(f'{pkg.name}: dist/ has {len(dist_files)} file(s)') + locations.append(str(dist_dir)) if stale: result.add_warning( check_name, f'Stale artifacts: {"; ".join(stale)}', hint='Remove stale files with: rm -f *.bak && rm -rf dist/', + context=locations, ) else: result.add_pass(check_name) @@ -225,15 +242,18 @@ def _check_ungrouped_packages( all_patterns.append(pat) ungrouped: list[str] = [] + locations: list[str] = [] for pkg in packages: if not any(fnmatch.fnmatch(pkg.name, pat) for pat in all_patterns): ungrouped.append(pkg.name) + locations.append(str(pkg.manifest_path)) if ungrouped: result.add_warning( check_name, f'Packages not in any config group: {", ".join(sorted(ungrouped))}', hint='Add each package to a [groups] entry in releasekit.toml so it is covered by exclusion rules.', + context=locations, ) else: result.add_pass(check_name) @@ -277,6 +297,7 @@ def _check_lockfile_staleness( check_name, 'uv.lock is out of date with pyproject.toml dependencies.', hint="Run 'uv lock' to regenerate the lockfile.", + context=[str(workspace_root / 'uv.lock')], ) except subprocess.TimeoutExpired: result.add_warning( diff --git a/py/tools/releasekit/src/releasekit/cli.py b/py/tools/releasekit/src/releasekit/cli.py index 3bd657c635..63c98810ec 100644 --- a/py/tools/releasekit/src/releasekit/cli.py +++ b/py/tools/releasekit/src/releasekit/cli.py @@ -29,6 +29,8 @@ releasekit version Show computed version bumps releasekit explain Explain an error code releasekit doctor Diagnose release state consistency + releasekit sign Sign artifacts with Sigstore (keyless) + releasekit verify Verify Sigstore bundles Usage:: @@ -50,6 +52,8 @@ import argparse import asyncio +import concurrent.futures +import dataclasses import fnmatch import json import sys @@ -62,8 +66,26 @@ from releasekit.backends.forge import Forge, GitHubAPIBackend, GitHubCLIBackend from releasekit.backends.forge.bitbucket import BitbucketAPIBackend from releasekit.backends.forge.gitlab import GitLabCLIBackend -from releasekit.backends.pm import PackageManager, PnpmBackend, UvBackend -from releasekit.backends.registry import NpmRegistry, PyPIBackend, Registry +from releasekit.backends.pm import ( + BazelBackend, + CargoBackend, + DartBackend, + GoBackend, + MaturinBackend, + MavenBackend, + PackageManager, + PnpmBackend, + UvBackend, +) +from releasekit.backends.registry import ( + CratesIoRegistry, + GoProxyCheck, + MavenCentralRegistry, + NpmRegistry, + PubDevRegistry, + PyPIBackend, + Registry, +) from releasekit.backends.vcs import GitCLIBackend from releasekit.checks import ( PythonCheckBackend, @@ -84,11 +106,18 @@ from releasekit.formatters import FORMATTERS, format_graph from releasekit.graph import build_graph, topo_sort from releasekit.groups import filter_by_group -from releasekit.init import print_scaffold_preview, scaffold_config +from releasekit.init import ( + print_scaffold_preview, + print_tag_scan_report, + scaffold_config, + scaffold_multi_config, + scan_and_bootstrap, +) from releasekit.lock import release_lock from releasekit.logging import get_logger +from releasekit.migrate import MIGRATION_SOURCES, migrate_from_source from releasekit.plan import build_plan -from releasekit.preflight import run_preflight +from releasekit.preflight import PreflightResult, SourceContext, read_source_snippet, run_preflight from releasekit.prepare import prepare_release from releasekit.publisher import PublishConfig, publish_workspace from releasekit.release import tag_release @@ -155,7 +184,7 
@@ def _get_ecosystem_filter(args: argparse.Namespace) -> Ecosystem | None: eco_str = getattr(args, 'ecosystem', None) if eco_str is None: return None - for member in list(Ecosystem): + for member in Ecosystem: if member.value == eco_str: return member return None @@ -186,7 +215,7 @@ class _NullForge: workflows). """ - _NOOP = CommandResult(command=[], returncode=0, stdout='', stderr='') + _NOOP = CommandResult(command=[], return_code=0, stdout='', stderr='') async def is_available(self) -> bool: return False @@ -297,6 +326,16 @@ def _create_backends( - ``"uv"`` → :class:`UvBackend` + :class:`PyPIBackend` - ``"pnpm"`` → :class:`PnpmBackend` + :class:`NpmRegistry` + - ``"go"`` → :class:`GoBackend` + :class:`GoProxyCheck` + - ``"pub"`` → :class:`DartBackend` + :class:`PubDevRegistry` + - ``"gradle"`` / ``"maven"`` → :class:`MavenBackend` + :class:`MavenCentralRegistry` + - ``"bazel"`` → :class:`BazelBackend` + :class:`MavenCentralRegistry` + - ``"cargo"`` → :class:`CargoBackend` + :class:`CratesIoRegistry` + - ``"maturin"`` → :class:`MaturinBackend` + :class:`PyPIBackend` + + When ``ws_config.registry_url`` is set, it overrides the default + base URL for the registry backend (e.g. for Test PyPI, a local + Verdaccio, or a staging crates.io). Args: config_root: Directory containing ``releasekit.toml`` (repo root). @@ -314,14 +353,39 @@ def _create_backends( vcs = GitCLIBackend(config_root) tool = ws_config.tool if ws_config else 'uv' + registry_url = ws_config.registry_url if ws_config else '' + pool = config.http_pool_size + pm: PackageManager registry: Registry + registry_kw: dict[str, object] = {'pool_size': pool} + if registry_url: + registry_kw['base_url'] = registry_url + if tool == 'pnpm': pm = PnpmBackend(effective_root) - registry = NpmRegistry(pool_size=config.http_pool_size) + registry = NpmRegistry(**registry_kw) # type: ignore[arg-type] + elif tool == 'go': + pm = GoBackend(effective_root) + registry = GoProxyCheck(**registry_kw) # type: ignore[arg-type] + elif tool == 'pub': + pm = DartBackend(effective_root) + registry = PubDevRegistry(**registry_kw) # type: ignore[arg-type] + elif tool in ('gradle', 'maven'): + pm = MavenBackend(effective_root) + registry = MavenCentralRegistry(**registry_kw) # type: ignore[arg-type] + elif tool == 'bazel': + pm = BazelBackend(effective_root) + registry = MavenCentralRegistry(**registry_kw) # type: ignore[arg-type] + elif tool == 'cargo': + pm = CargoBackend(effective_root) + registry = CratesIoRegistry(**registry_kw) # type: ignore[arg-type] + elif tool == 'maturin': + pm = MaturinBackend(effective_root) + registry = PyPIBackend(**registry_kw) # type: ignore[arg-type] else: pm = UvBackend(effective_root) - registry = PyPIBackend(pool_size=config.http_pool_size) + registry = PyPIBackend(**registry_kw) # type: ignore[arg-type] owner = config.repo_owner repo = config.repo_name @@ -397,6 +461,12 @@ async def _cmd_publish(args: argparse.Namespace) -> int: config_root = _find_workspace_root() config = load_config(config_root) ws_config = _resolve_ws_config(config, getattr(args, 'workspace', None)) + + # CLI --registry-url overrides the config-file registry_url. 
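+    # dataclasses.replace() returns an updated copy (ws_config is presumably a
+    # dataclass here), so the config object loaded from releasekit.toml is left
+    # untouched while the CLI flag takes effect for this invocation only.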
+ cli_registry_url = getattr(args, 'registry_url', None) + if cli_registry_url: + ws_config = dataclasses.replace(ws_config, registry_url=cli_registry_url) + ws_root = _effective_workspace_root(config_root, ws_config) forge_backend = getattr(args, 'forge_backend', 'cli') vcs, pm, forge, registry = _create_backends( @@ -743,11 +813,19 @@ def _cmd_graph(args: argparse.Namespace) -> int: return 0 -def _cmd_check(args: argparse.Namespace) -> int: - """Handle the ``check`` subcommand.""" - config_root = _find_workspace_root() - config = load_config(config_root) - ws_config = _resolve_ws_config(config, getattr(args, 'workspace', None)) +def _check_one_workspace( + config_root: Path, + config: ReleaseConfig, + ws_config: WorkspaceConfig, + *, + fix: bool = False, +) -> tuple[str, PreflightResult]: + """Run checks for a single workspace and return ``(label, result)``. + + This is the core logic extracted from ``_cmd_check`` so that + multiple workspaces can be checked concurrently. + """ + label = ws_config.label or ws_config.ecosystem or 'default' ws_root = _effective_workspace_root(config_root, ws_config) packages = discover_packages( ws_root, @@ -759,7 +837,7 @@ def _cmd_check(args: argparse.Namespace) -> int: resolved_exclude_publish = resolve_group_refs(ws_config.exclude_publish, ws_config.groups) # --fix: auto-fix issues before running checks. - if getattr(args, 'fix', False): + if fix: all_changes: list[str] = [] # Universal fixers (ecosystem-agnostic). @@ -789,7 +867,7 @@ def _cmd_check(args: argparse.Namespace) -> int: if all_changes: for change in all_changes: - print(f' 🔧 {change}') # noqa: T201 - CLI output + print(f' 🔧 [{label}] {change}') # noqa: T201 - CLI output print() # noqa: T201 - CLI output # Re-discover packages so checks see the updated state. packages = discover_packages( @@ -811,30 +889,134 @@ def _cmd_check(args: argparse.Namespace) -> int: library_dirs=ws_config.library_dirs, plugin_dirs=ws_config.plugin_dirs, ) + return label, result + + +def _print_source_context(loc: str | SourceContext) -> None: + """Print a single location annotation with optional source snippet. - # Print detailed results. + For plain strings, prints ``--> path``. For :class:`SourceContext` + with a line number, prints the surrounding source lines with the + offending line highlighted:: + + --> plugins/foo/pyproject.toml:5 + | + 3 | version = "1.0.0" + 4 | description = "A test" + 5 | requires-python = ">=3.10" + | ^^^^^^^^^^^^^^^^ missing here + 6 | + | + """ + if isinstance(loc, SourceContext) and loc.line > 0: + print(f' --> {loc}') # noqa: T201 - CLI output + snippet = read_source_snippet(loc.path, loc.line) + if snippet: + gutter_width = len(str(snippet[-1][0])) + print(f' {" " * gutter_width} |') # noqa: T201 - CLI output + for lineno, text in snippet: + marker = '>' if lineno == loc.line else ' ' + print(f' {lineno:>{gutter_width}} |{marker} {text}') # noqa: T201 - CLI output + if loc.label: + # Underline the key on the offending line. 
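+                # If the key is not present on the offending line, fall back to
+                # printing just the label without a caret underline.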
+ offending_text = next((t for n, t in snippet if n == loc.line), '') + if loc.key and loc.key in offending_text: + col = offending_text.index(loc.key) + underline = ' ' * col + '^' * len(loc.key) + ' ' + loc.label + else: + underline = loc.label + print(f' {" " * gutter_width} | {underline}') # noqa: T201 - CLI output + print(f' {" " * gutter_width} |') # noqa: T201 - CLI output + else: + print(f' --> {loc}') # noqa: T201 - CLI output + + +def _print_check_result(label: str, result: PreflightResult, *, show_label: bool = False) -> None: + """Print the check results for one workspace. + + Output follows Rust-style diagnostic formatting with source context:: + + ✅ check_name + ⚠️ warning[check_name]: message + --> path/to/file.toml:5 + | + 3 | version = "1.0.0" + 4 | description = "A test" + 5 |> requires-python = ">=3.10" + | ^^^^^^^^^^^^^^^^ missing here + 6 | + | + = hint: actionable suggestion + ❌ error[check_name]: message + --> path/to/file.toml + = hint: actionable suggestion + """ + prefix = f'[{label}] ' if show_label else '' if result.passed: for name in result.passed: - print(f' ✅ {name}') # noqa: T201 - CLI output + print(f' ✅ {prefix}{name}') # noqa: T201 - CLI output if result.warnings: for name in result.warnings: msg = result.warning_messages.get(name, '') - print(f' ⚠️ {name}: {msg}') # noqa: T201 - CLI output + print(f' ⚠️ {prefix}warning[{name}]: {msg}') # noqa: T201 - CLI output + for loc in result.context.get(name, []): + _print_source_context(loc) hint = result.hints.get(name, '') if hint: print(f' = hint: {hint}') # noqa: T201 - CLI output if result.failed: for name in result.failed: msg = result.errors.get(name, '') - print(f' ❌ {name}: {msg}') # noqa: T201 - CLI output + print(f' ❌ {prefix}error[{name}]: {msg}') # noqa: T201 - CLI output + for loc in result.context.get(name, []): + _print_source_context(loc) hint = result.hints.get(name, '') if hint: print(f' = hint: {hint}') # noqa: T201 - CLI output print() # noqa: T201 - CLI output - print(f' {result.summary()}') # noqa: T201 - CLI output + print(f' {prefix}{result.summary()}') # noqa: T201 - CLI output + + +def _cmd_check(args: argparse.Namespace) -> int: + """Handle the ``check`` subcommand. + + When ``--workspace`` is specified, checks that single workspace. + Otherwise, checks **all** configured workspaces in parallel using + :class:`concurrent.futures.ThreadPoolExecutor`. + """ + config_root = _find_workspace_root() + config = load_config(config_root) + fix = getattr(args, 'fix', False) + explicit_ws = getattr(args, 'workspace', None) - return 0 if result.ok else 1 + # Single workspace: original behaviour. + if explicit_ws or len(config.workspaces) <= 1: + ws_config = _resolve_ws_config(config, explicit_ws) + label, result = _check_one_workspace(config_root, config, ws_config, fix=fix) + _print_check_result(label, result) + return 0 if result.ok else 1 + + # Multiple workspaces: run in parallel. + ws_configs = list(config.workspaces.values()) + results: list[tuple[str, PreflightResult]] = [] + + with concurrent.futures.ThreadPoolExecutor(max_workers=len(ws_configs)) as pool: + futures = {pool.submit(_check_one_workspace, config_root, config, wsc, fix=fix): wsc for wsc in ws_configs} + for future in concurrent.futures.as_completed(futures): + results.append(future.result()) + + # Sort by label for deterministic output. 
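+    # as_completed() yields futures in completion order, which varies between
+    # runs, so sort by workspace label before printing.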
+ results.sort(key=lambda r: r[0]) + + all_ok = True + for label, result in results: + print(f'\n--- workspace: {label} ---') # noqa: T201 - CLI output + _print_check_result(label, result, show_label=False) + if not result.ok: + all_ok = False + + return 0 if all_ok else 1 async def _cmd_version(args: argparse.Namespace) -> int: @@ -905,10 +1087,9 @@ async def _cmd_changelog(args: argparse.Namespace) -> int: Generates per-package changelogs from Conventional Commits and writes them to ``CHANGELOG.md`` in each package directory. """ - from datetime import datetime, timezone - from releasekit.changelog import generate_changelog, render_changelog, write_changelog from releasekit.tags import format_tag + from releasekit.utils.date import utc_today config_root = _find_workspace_root() config = load_config(config_root) @@ -925,7 +1106,7 @@ async def _cmd_changelog(args: argparse.Namespace) -> int: packages = _maybe_filter_group(all_packages, ws_config, group) dry_run = getattr(args, 'dry_run', False) - today = datetime.now(tz=timezone.utc).strftime('%Y-%m-%d') + today = utc_today() written = 0 skipped = 0 @@ -973,13 +1154,14 @@ def _cmd_explain(args: argparse.Namespace) -> int: return 0 -def _cmd_init(args: argparse.Namespace) -> int: +async def _cmd_init(args: argparse.Namespace) -> int: """Handle the ``init`` subcommand. Detects all ecosystems in the monorepo and scaffolds a ``releasekit.toml`` at the monorepo root. If ``--ecosystem`` is specified, only that ecosystem is included in the generated - config. + config. After scaffolding, scans existing git tags and writes + ``bootstrap_sha`` for mid-stream adoption. """ monorepo_root, ecosystems = _resolve_ecosystems(args) dry_run = getattr(args, 'dry_run', False) @@ -989,16 +1171,26 @@ def _cmd_init(args: argparse.Namespace) -> int: eco_summary = ', '.join(f'{e.ecosystem.value} ({e.root.relative_to(monorepo_root)})' for e in ecosystems) logger.info('init_detected_ecosystems', ecosystems=eco_summary) - # Use the first uv workspace root for backward compatibility - # with the single-ecosystem scaffold_config. - uv_ecosystems = [e for e in ecosystems if e.ecosystem == Ecosystem.PYTHON] - workspace_root = uv_ecosystems[0].root if uv_ecosystems else monorepo_root - - toml_fragment = scaffold_config( - workspace_root, - dry_run=dry_run, - force=force, - ) + # Multi-ecosystem path: generate one [workspace.