diff --git a/.github/.release-please-manifest.json b/.github/.release-please-manifest.json new file mode 100644 index 00000000..37fcefaa --- /dev/null +++ b/.github/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.0.0" +} diff --git a/.github/release-please-config.json b/.github/release-please-config.json new file mode 100644 index 00000000..bb93a2bc --- /dev/null +++ b/.github/release-please-config.json @@ -0,0 +1,10 @@ +{ + "packages": { + ".": { + "release-type": "ruby", + "package-name": "html2rss-web", + "version-file": "config/version.rb", + "changelog-path": "CHANGELOG.md" + } + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1297cf6a..76c8f23e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,96 +172,3 @@ jobs: DOCKER_SMOKE_SKIP_BUILD: "true" SMOKE_AUTO_SOURCE_ENABLED: ${{ matrix.smoke_auto_source_enabled }} run: bundle exec rake - - docker-publish: - if: github.event_name == 'push' && github.ref == 'refs/heads/main' - needs: - - docker-test - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - env: - IMAGE_NAME: html2rss/web - TAG_SHA: ${{ github.sha }} - steps: - - name: Checkout code - uses: actions/checkout@v6 - - - name: Setup pnpm - uses: pnpm/action-setup@v6 - with: - cache: true - cache_dependency_path: frontend/pnpm-lock.yaml - package_json_file: frontend/package.json - - - name: Setup Node.js for Docker build - uses: actions/setup-node@v6 - with: - node-version-file: ".tool-versions" - - - name: Install frontend dependencies - run: pnpm install --frozen-lockfile - working-directory: frontend - - - name: Build frontend static assets - run: pnpm run build - working-directory: frontend - - - name: Set up QEMU - uses: docker/setup-qemu-action@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v4 - - - name: Get Git commit timestamps - run: echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV - - - name: Extract metadata - id: meta - uses: 
docker/metadata-action@v6 - with: - images: ${{ env.IMAGE_NAME }} - - - name: Log in to DockerHub - uses: docker/login-action@v4 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Cache Docker layers - uses: actions/cache@v5 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-buildx- - - - name: Build and push Docker image - uses: docker/build-push-action@v7 - env: - SOURCE_DATE_EPOCH: ${{ env.TIMESTAMP }} - with: - context: . - push: true - tags: | - html2rss/web:latest - html2rss/web:${{ github.sha }} - ${{ steps.meta.outputs.tags }} - platforms: linux/amd64,linux/arm64 - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache-new - provenance: true - sbom: true - labels: | - org.opencontainers.image.source=https://github.com/${{ github.repository }} - org.opencontainers.image.created=${{ github.event.head_commit.timestamp }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.title=html2rss-web - org.opencontainers.image.description=Generates RSS feeds of any website & serves to the web! - org.opencontainers.image.sbom=https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts - - - name: Move updated cache into place - run: | - rm -rf /tmp/.buildx-cache - mv /tmp/.buildx-cache-new /tmp/.buildx-cache diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..a944c296 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,204 @@ +name: release + +on: + # Release only after the CI workflow succeeds on main so Docker publishes + # are tied to a CI-validated commit instead of any direct branch push. 
+ workflow_run: + workflows: + - ci + types: + - completed + branches: + - main + workflow_dispatch: + +permissions: + contents: read + +concurrency: + group: release-${{ github.event.workflow_run.head_sha || github.sha }} + cancel-in-progress: true + +jobs: + guard: + runs-on: ubuntu-latest + outputs: + target_sha: ${{ steps.resolve.outputs.target_sha }} + target_ref: ${{ steps.resolve.outputs.target_ref }} + steps: + - name: Validate release trigger and resolve target + id: resolve + env: + EVENT_NAME: ${{ github.event_name }} + WORKFLOW_CONCLUSION: ${{ github.event.workflow_run.conclusion }} + WORKFLOW_HEAD_SHA: ${{ github.event.workflow_run.head_sha }} + WORKFLOW_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }} + GITHUB_REF_VALUE: ${{ github.ref }} + GITHUB_SHA_VALUE: ${{ github.sha }} + run: | + if [ "$EVENT_NAME" = "workflow_run" ]; then + if [ "$WORKFLOW_CONCLUSION" != "success" ]; then + echo "Release requires successful CI on main; got conclusion=$WORKFLOW_CONCLUSION" >&2 + exit 1 + fi + + if [ -z "$WORKFLOW_HEAD_SHA" ] || [ -z "$WORKFLOW_HEAD_BRANCH" ]; then + echo "workflow_run payload missing head SHA or branch" >&2 + exit 1 + fi + + echo "target_sha=$WORKFLOW_HEAD_SHA" >> "$GITHUB_OUTPUT" + echo "target_ref=refs/heads/$WORKFLOW_HEAD_BRANCH" >> "$GITHUB_OUTPUT" + exit 0 + fi + + if [ "$EVENT_NAME" = "workflow_dispatch" ]; then + if [ "$GITHUB_REF_VALUE" != "refs/heads/main" ]; then + echo "Manual release is restricted to refs/heads/main; got $GITHUB_REF_VALUE" >&2 + exit 1 + fi + + echo "target_sha=$GITHUB_SHA_VALUE" >> "$GITHUB_OUTPUT" + echo "target_ref=$GITHUB_REF_VALUE" >> "$GITHUB_OUTPUT" + exit 0 + fi + + echo "Unsupported event: $EVENT_NAME" >&2 + exit 1 + + release: + needs: + - guard + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + outputs: + release_created: ${{ steps.release.outputs.release_created }} + tag_name: ${{ steps.release.outputs.tag_name }} + steps: + - uses: actions/checkout@v6 + with: + 
ref: ${{ needs.guard.outputs.target_sha }} + fetch-depth: 0 + + - name: Run release-please + id: release + uses: googleapis/release-please-action@v4 + with: + token: ${{ secrets.RELEASE_PLEASE_TOKEN || github.token }} + config-file: .github/release-please-config.json + manifest-file: .github/.release-please-manifest.json + + - name: Summarize release outcome + env: + RELEASE_CREATED: ${{ steps.release.outputs.release_created }} + RELEASE_TAG: ${{ steps.release.outputs.tag_name }} + run: | + { + echo "## Release outcome" + echo + echo "- Release created: ${RELEASE_CREATED:-false}" + if [ -n "${RELEASE_TAG}" ]; then + echo "- Release tag: ${RELEASE_TAG}" + else + echo "- Release tag: none" + fi + } >> "$GITHUB_STEP_SUMMARY" + + docker-publish: + if: needs.release.outputs.release_created == 'true' + needs: + - guard + - release + runs-on: ubuntu-latest + env: + IMAGE_NAME: html2rss/web + TAG_SHA: ${{ needs.guard.outputs.target_sha }} + RELEASE_TAG: ${{ needs.release.outputs.tag_name }} + steps: + - uses: actions/checkout@v6 + with: + ref: ${{ needs.guard.outputs.target_sha }} + fetch-depth: 0 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v4 + + - name: Get Git commit timestamp + run: | + echo "TIMESTAMP_EPOCH=$(git log -1 --format=%ct)" >> "$GITHUB_ENV" + echo "TIMESTAMP_ISO=$(git log -1 --format=%cI)" >> "$GITHUB_ENV" + + - name: Compute Docker tags + id: tags + run: | + release_version="${RELEASE_TAG#v}" + echo "RELEASE_VERSION=${release_version}" >> "$GITHUB_ENV" + major="${release_version%%.*}" + { + echo "tags<<EOF" + echo "${IMAGE_NAME}:latest" + echo "${IMAGE_NAME}:${release_version}" + echo "${IMAGE_NAME}:${major}" + echo "${IMAGE_NAME}:${TAG_SHA}" + echo "EOF" + } >> "$GITHUB_OUTPUT" + + - name: Log in to DockerHub + uses: docker/login-action@v4 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Cache Docker layers + uses: actions/cache@v4 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: Build 
and push Docker image + uses: docker/build-push-action@v7 + env: + SOURCE_DATE_EPOCH: ${{ env.TIMESTAMP_EPOCH }} + with: + context: . + push: true + tags: ${{ steps.tags.outputs.tags }} + build-args: | + BUILD_TAG=${{ env.RELEASE_VERSION }} + GIT_SHA=${{ needs.guard.outputs.target_sha }} + platforms: linux/amd64,linux/arm64 + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new + provenance: true + sbom: true + labels: | + org.opencontainers.image.created=${{ env.TIMESTAMP_ISO }} + org.opencontainers.image.description=Generates RSS feeds of any website & serves to the web! + org.opencontainers.image.ref.name=${{ env.RELEASE_TAG }} + org.opencontainers.image.revision=${{ needs.guard.outputs.target_sha }} + org.opencontainers.image.source=https://github.com/${{ github.repository }} + org.opencontainers.image.title=html2rss-web + org.opencontainers.image.url=https://github.com/${{ github.repository }}/releases/tag/${{ env.RELEASE_TAG }} + org.opencontainers.image.version=${{ env.RELEASE_VERSION }} + + - name: Move updated cache into place + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache + + - name: Summarize published image tags + run: | + { + echo "## Docker publish" + echo + echo "- Release tag: ${RELEASE_TAG}" + echo "- Docker tags pushed:" + echo "${{ steps.tags.outputs.tags }}" | sed 's/^/ - /' + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/release_artifacts.yml b/.github/workflows/release_artifacts.yml new file mode 100644 index 00000000..cb4e9728 --- /dev/null +++ b/.github/workflows/release_artifacts.yml @@ -0,0 +1,107 @@ +name: refresh release artifacts + +on: + pull_request: + types: + - opened + - reopened + - synchronize + branches: + - main + push: + branches: + - release-please--branches--main + +permissions: + contents: read + +concurrency: + group: release-artifacts-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + 
verify-generated-artifacts: + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && startsWith(github.event.pull_request.head.ref, 'release-please--branches--') + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + with: + ref: ${{ github.event.pull_request.head.ref }} + fetch-depth: 0 + + - uses: ruby/setup-ruby@v1 + with: + bundler-cache: true + + - name: Setup pnpm + uses: pnpm/action-setup@v6 + with: + cache: true + cache_dependency_path: frontend/pnpm-lock.yaml + package_json_file: frontend/package.json + + - uses: actions/setup-node@v6 + with: + node-version-file: ".tool-versions" + + - name: Install frontend dependencies + run: pnpm install --frozen-lockfile + working-directory: frontend + + - name: Refresh OpenAPI artifacts + run: | + make openapi + make openapi-client + + - name: Assert generated artifacts are current + run: git diff --exit-code -- public/openapi.yaml frontend/src/api/generated + + refresh-generated-artifacts: + if: github.event_name == 'push' && github.repository == 'html2rss/html2rss-web' && github.ref == 'refs/heads/release-please--branches--main' && github.actor != 'github-actions[bot]' + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - uses: actions/checkout@v6 + with: + ref: ${{ github.ref_name }} + fetch-depth: 0 + + - uses: ruby/setup-ruby@v1 + with: + bundler-cache: true + + - name: Setup pnpm + uses: pnpm/action-setup@v6 + with: + cache: true + cache_dependency_path: frontend/pnpm-lock.yaml + package_json_file: frontend/package.json + + - uses: actions/setup-node@v6 + with: + node-version-file: ".tool-versions" + + - name: Install frontend dependencies + run: pnpm install --frozen-lockfile + working-directory: frontend + + - name: Refresh OpenAPI artifacts + run: | + make openapi + make openapi-client + + - name: Commit generated artifacts + run: | + if git diff --quiet -- public/openapi.yaml frontend/src/api/generated; then + echo 
"Generated artifacts already up to date" + exit 0 + fi + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git add public/openapi.yaml frontend/src/api/generated + git commit -m "chore: refresh release artifacts" + git push diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..6361e43e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +# Changelog + +All notable changes to this project will be documented in this file. diff --git a/Dockerfile b/Dockerfile index 31f53eb7..492082b3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -45,9 +45,14 @@ LABEL maintainer="Gil Desmarais " SHELL ["/bin/ash", "-o", "pipefail", "-c"] +ARG BUILD_TAG +ARG GIT_SHA + ENV PORT=4000 \ RACK_ENV=production \ - RUBY_YJIT_ENABLE=1 + RUBY_YJIT_ENABLE=1 \ + BUILD_TAG=${BUILD_TAG} \ + GIT_SHA=${GIT_SHA} EXPOSE $PORT diff --git a/config/version.rb b/config/version.rb new file mode 100644 index 00000000..8b39128e --- /dev/null +++ b/config/version.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +module Html2rss + module Web + VERSION = '1.0.0' + public_constant :VERSION + end +end diff --git a/docs/README.md b/docs/README.md index ba66490a..7329c3a2 100644 --- a/docs/README.md +++ b/docs/README.md @@ -95,6 +95,15 @@ make ready --- +## Release Automation + +- Official releases run only after the `ci` GitHub Actions workflow completes successfully for a commit on `main`. +- Manual `release` workflow dispatch is an emergency/manual replay path and is restricted to `main`. +- Docker publish uses the exact CI-validated commit SHA for release metadata, OCI labels, and `BUILD_TAG` / `GIT_SHA` wiring. +- Branch protection on `main` must continue to require the `ci` workflow even though the release workflow also gates on successful CI. + +--- + ## Backend Structure Rules - `app/` is the Zeitwerk root for `Html2rss`. 
diff --git a/spec/html2rss/web_spec.rb b/spec/html2rss/web_spec.rb new file mode 100644 index 00000000..ea102279 --- /dev/null +++ b/spec/html2rss/web_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_relative '../../config/version' + +RSpec.describe Html2rss::Web do + describe 'VERSION' do + it 'defines the canonical application release version' do + expect(described_class::VERSION).to match(/\A\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?\z/) + end + end +end diff --git a/spec/support/openapi.rb b/spec/support/openapi.rb index 5705a7d5..4eaef8a0 100644 --- a/spec/support/openapi.rb +++ b/spec/support/openapi.rb @@ -1,206 +1,208 @@ # frozen_string_literal: true -return unless ENV['OPENAPI'] - -require 'rspec/openapi' - -RSpec::OpenAPI.path = 'public/openapi.yaml' -RSpec::OpenAPI.title = 'html2rss-web API' -RSpec::OpenAPI.application_version = '1.0.0' -RSpec::OpenAPI.enable_example = false -RSpec::OpenAPI.enable_example_summary = false -RSpec::OpenAPI.example_types = [:request] -RSpec::OpenAPI.request_headers = ['Authorization'] -RSpec::OpenAPI.servers = [ - { url: 'https://api.html2rss.dev/api/v1', description: 'Production server' }, - { url: 'http://127.0.0.1:4000/api/v1', description: 'Development server' } -] -RSpec::OpenAPI.info = { - description: 'RESTful API for converting websites to RSS feeds.', - contact: { - name: 'html2rss-web Support', - url: 'https://github.com/html2rss/html2rss-web' - }, - license: { - name: 'MIT', - url: 'https://opensource.org/licenses/MIT' +if ENV['OPENAPI'] + require 'rspec/openapi' + require_relative '../../config/version' + + RSpec::OpenAPI.path = 'public/openapi.yaml' + RSpec::OpenAPI.title = 'html2rss-web API' + RSpec::OpenAPI.application_version = Html2rss::Web::VERSION + RSpec::OpenAPI.enable_example = false + RSpec::OpenAPI.enable_example_summary = false + RSpec::OpenAPI.example_types = [:request] + RSpec::OpenAPI.request_headers = ['Authorization'] + 
RSpec::OpenAPI.servers = [ + { url: 'https://api.html2rss.dev/api/v1', description: 'Production server' }, + { url: 'http://127.0.0.1:4000/api/v1', description: 'Development server' } + ] + RSpec::OpenAPI.info = { + description: 'RESTful API for converting websites to RSS feeds.', + contact: { + name: 'html2rss-web Support', + url: 'https://github.com/html2rss/html2rss-web' + }, + license: { + name: 'MIT', + url: 'https://opensource.org/licenses/MIT' + } } -} -RSpec::OpenAPI.security_schemes = { - 'BearerAuth' => { - description: 'Bearer token authentication for API access.', - type: 'http', - scheme: 'bearer', - bearerFormat: 'JWT' + RSpec::OpenAPI.security_schemes = { + 'BearerAuth' => { + description: 'Bearer token authentication for API access.', + type: 'http', + scheme: 'bearer', + bearerFormat: 'JWT' + } } -} - -RSpec::OpenAPI.summary_builder = lambda { |example| - example.metadata.dig(:example_group, :openapi, :summary) || example.metadata[:summary] -} -RSpec::OpenAPI.tags_builder = lambda { |example| - example.metadata.dig(:example_group, :openapi, :tags) || example.metadata[:tags] -} -RSpec::OpenAPI.description_builder = lambda { |example| - example.metadata.dig(:example_group, :openapi, :description) || example.metadata[:description] || example.description -} - -# Keep path keys relative to /api/v1 because servers include the versioned base path. 
-RSpec::OpenAPI.post_process_hook = lambda do |_path, _records, spec| - token_feed_error_statuses = %w[401 403 500].freeze - - stringify = lambda do |value| - case value - when Hash - value.each_with_object({}) { |(key, nested_value), mapped| mapped[key.to_s] = stringify.call(nested_value) } - when Array - value.map { |item| stringify.call(item) } - else - value + + RSpec::OpenAPI.summary_builder = lambda { |example| + example.metadata.dig(:example_group, :openapi, :summary) || example.metadata[:summary] + } + RSpec::OpenAPI.tags_builder = lambda { |example| + example.metadata.dig(:example_group, :openapi, :tags) || example.metadata[:tags] + } + RSpec::OpenAPI.description_builder = lambda { |example| + example.metadata.dig(:example_group, :openapi, + :description) || example.metadata[:description] || example.description + } + + # Keep path keys relative to /api/v1 because servers include the versioned base path. + RSpec::OpenAPI.post_process_hook = lambda do |_path, _records, spec| + token_feed_error_statuses = %w[401 403 500].freeze + + stringify = lambda do |value| + case value + when Hash + value.each_with_object({}) { |(key, nested_value), mapped| mapped[key.to_s] = stringify.call(nested_value) } + when Array + value.map { |item| stringify.call(item) } + else + value + end end - end - deep_sort = lambda do |value| - case value - when Hash - value.keys.sort_by(&:to_s).to_h { |key| [key, deep_sort.call(value[key])] } - when Array - value.map { |item| deep_sort.call(item) } - else - value + deep_sort = lambda do |value| + case value + when Hash + value.keys.sort_by(&:to_s).to_h { |key| [key, deep_sort.call(value[key])] } + when Array + value.map { |item| deep_sort.call(item) } + else + value + end end - end - merge_responses = lambda do |existing_responses, new_responses| - canonical_description = lambda do |*responses| - descriptions = responses - .filter_map { |response| response['description']&.to_s&.strip } - .reject(&:empty?) 
- .uniq + merge_responses = lambda do |existing_responses, new_responses| + canonical_description = lambda do |*responses| + descriptions = responses + .filter_map { |response| response['description']&.to_s&.strip } + .reject(&:empty?) + .uniq - next nil if descriptions.empty? + next nil if descriptions.empty? - # Prefer the most generic/canonical wording when duplicate examples define - # the same status differently. - descriptions.min_by { |description| [description.length, description] } - end + # Prefer the most generic/canonical wording when duplicate examples define + # the same status differently. + descriptions.min_by { |description| [description.length, description] } + end - statuses = existing_responses.keys | new_responses.keys - - statuses.each_with_object({}) do |status, merged_responses| - current = existing_responses[status] || {} - incoming = new_responses[status] || {} - merged_response = current.merge(incoming) - - current_content = current['content'] || {} - incoming_content = incoming['content'] || {} - if current_content.any? || incoming_content.any? - content_types = current_content.keys | incoming_content.keys - merged_response['content'] = content_types.to_h do |content_type| - current_entry = current_content[content_type] || {} - incoming_entry = incoming_content[content_type] || {} - [content_type, current_entry.merge(incoming_entry)] + statuses = existing_responses.keys | new_responses.keys + + statuses.each_with_object({}) do |status, merged_responses| + current = existing_responses[status] || {} + incoming = new_responses[status] || {} + merged_response = current.merge(incoming) + + current_content = current['content'] || {} + incoming_content = incoming['content'] || {} + if current_content.any? || incoming_content.any? 
+ content_types = current_content.keys | incoming_content.keys + merged_response['content'] = content_types.to_h do |content_type| + current_entry = current_content[content_type] || {} + incoming_entry = incoming_content[content_type] || {} + [content_type, current_entry.merge(incoming_entry)] + end end - end - current_headers = current['headers'] || {} - incoming_headers = incoming['headers'] || {} - if current_headers.any? || incoming_headers.any? - merged_response['headers'] = current_headers.merge(incoming_headers) - end + current_headers = current['headers'] || {} + incoming_headers = incoming['headers'] || {} + if current_headers.any? || incoming_headers.any? + merged_response['headers'] = current_headers.merge(incoming_headers) + end - merged_response['description'] = canonical_description.call(current, incoming) - merged_responses[status] = merged_response + merged_response['description'] = canonical_description.call(current, incoming) + merged_responses[status] = merged_response + end end - end - token_feed_error_examples = { - 'application/xml' => { - 'example' => <<~XML.strip - - ErrorInternal Server Error - XML - }, - 'application/feed+json' => { - 'example' => '{"version":"https://jsonfeed.org/version/1.1","title":"Error"}' + token_feed_error_examples = { + 'application/xml' => { + 'example' => <<~XML.strip + + ErrorInternal Server Error + XML + }, + 'application/feed+json' => { + 'example' => '{"version":"https://jsonfeed.org/version/1.1","title":"Error"}' + } } - } - path_map = spec['paths'] || spec[:paths] - next unless path_map.is_a?(Hash) - - normalized_paths = {} - path_map.each do |raw_path, operation| - original_path = raw_path.to_s - normalized = if original_path.match?(%r{\A/api/v1/feeds/[^/]+\z}) - '/feeds/{token}' - elsif original_path.start_with?('/api/v1') - original_path.delete_prefix('/api/v1') - else - original_path - end - normalized = '/' if normalized.empty? 
- normalized_paths[normalized] ||= {} - - stringify.call(operation).each do |verb, operation_doc| - existing = normalized_paths[normalized][verb] - - if existing - merged = existing.merge(operation_doc) - merged['responses'] = merge_responses.call(existing['responses'] || {}, operation_doc['responses'] || {}) - merged['parameters'] = [*(existing['parameters'] || []), *(operation_doc['parameters'] || [])] - merged['parameters'].uniq! { |parameter| [parameter['name'], parameter['in']] } - normalized_paths[normalized][verb] = deep_sort.call(merged) - else - normalized_paths[normalized][verb] = deep_sort.call(operation_doc) - end + path_map = spec['paths'] || spec[:paths] + next unless path_map.is_a?(Hash) + + normalized_paths = {} + path_map.each do |raw_path, operation| + original_path = raw_path.to_s + normalized = if original_path.match?(%r{\A/api/v1/feeds/[^/]+\z}) + '/feeds/{token}' + elsif original_path.start_with?('/api/v1') + original_path.delete_prefix('/api/v1') + else + original_path + end + normalized = '/' if normalized.empty? + normalized_paths[normalized] ||= {} + + stringify.call(operation).each do |verb, operation_doc| + existing = normalized_paths[normalized][verb] + + if existing + merged = existing.merge(operation_doc) + merged['responses'] = merge_responses.call(existing['responses'] || {}, operation_doc['responses'] || {}) + merged['parameters'] = [*(existing['parameters'] || []), *(operation_doc['parameters'] || [])] + merged['parameters'].uniq! 
{ |parameter| [parameter['name'], parameter['in']] } + normalized_paths[normalized][verb] = deep_sort.call(merged) + else + normalized_paths[normalized][verb] = deep_sort.call(operation_doc) + end - normalized_paths[normalized][verb]['description'] ||= normalized_paths[normalized][verb]['summary'] + normalized_paths[normalized][verb]['description'] ||= normalized_paths[normalized][verb]['summary'] - next unless normalized == '/feeds/{token}' + next unless normalized == '/feeds/{token}' - normalized_paths[normalized][verb]['parameters'] ||= [] - has_token_param = normalized_paths[normalized][verb]['parameters'].any? do |parameter| - parameter['name'] == 'token' && parameter['in'] == 'path' - end - unless has_token_param - normalized_paths[normalized][verb]['parameters'] << { - 'name' => 'token', - 'in' => 'path', - 'required' => true, - 'schema' => { 'type' => 'string' } - } - end + normalized_paths[normalized][verb]['parameters'] ||= [] + has_token_param = normalized_paths[normalized][verb]['parameters'].any? 
do |parameter| + parameter['name'] == 'token' && parameter['in'] == 'path' + end + unless has_token_param + normalized_paths[normalized][verb]['parameters'] << { + 'name' => 'token', + 'in' => 'path', + 'required' => true, + 'schema' => { 'type' => 'string' } + } + end - token_feed_error_statuses.each do |status| - response = normalized_paths[normalized][verb].dig('responses', status) - next unless response + token_feed_error_statuses.each do |status| + response = normalized_paths[normalized][verb].dig('responses', status) + next unless response - response['content'] ||= {} - token_feed_error_examples.each do |content_type, example| - response['content'][content_type] ||= { 'schema' => { 'type' => 'string' } } - response['content'][content_type].merge!(example) + response['content'] ||= {} + token_feed_error_examples.each do |content_type, example| + response['content'][content_type] ||= { 'schema' => { 'type' => 'string' } } + response['content'][content_type].merge!(example) + end end end end - end - if spec.key?('paths') - spec['paths'] = deep_sort.call(normalized_paths) - else - spec[:paths] = deep_sort.call(normalized_paths) - end + if spec.key?('paths') + spec['paths'] = deep_sort.call(normalized_paths) + else + spec[:paths] = deep_sort.call(normalized_paths) + end - tags = [ - { 'name' => 'Root', 'description' => 'API metadata and service-level information.' }, - { 'name' => 'Health', 'description' => 'Health and readiness endpoints.' }, - { 'name' => 'Strategies', 'description' => 'Feed extraction strategy discovery.' }, - { 'name' => 'Feeds', 'description' => 'Feed creation and feed rendering operations.' } - ] + tags = [ + { 'name' => 'Root', 'description' => 'API metadata and service-level information.' }, + { 'name' => 'Health', 'description' => 'Health and readiness endpoints.' }, + { 'name' => 'Strategies', 'description' => 'Feed extraction strategy discovery.' }, + { 'name' => 'Feeds', 'description' => 'Feed creation and feed rendering operations.' 
} + ] - if spec.key?('tags') - spec['tags'] = tags - else - spec[:tags] = tags + if spec.key?('tags') + spec['tags'] = tags + else + spec[:tags] = tags + end end end