diff --git a/.github/workflows/accept-size-increase.yml b/.github/workflows/accept-size-increase.yml new file mode 100644 index 000000000000..f5b9a910e0f0 --- /dev/null +++ b/.github/workflows/accept-size-increase.yml @@ -0,0 +1,101 @@ +name: 'Automation: Accept Bundlesize Increase' +on: + pull_request: + types: [labeled] + +concurrency: + group: accept-size-increase-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + bump-size-limits: + if: github.event.label.name == 'Accept Bundlesize Increase' + runs-on: ubuntu-24.04 + permissions: + contents: write + pull-requests: write + actions: read + steps: + - name: Generate GitHub App token + id: app-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ vars.GITFLOW_APP_ID }} + private-key: ${{ secrets.GITFLOW_APP_PRIVATE_KEY }} + + - uses: actions/checkout@v6 + with: + ref: ${{ github.head_ref }} + token: ${{ steps.app-token.outputs.token }} + + - uses: actions/setup-node@v6 + with: + node-version-file: 'package.json' + + - name: Install dependencies + run: yarn install --ignore-engines --frozen-lockfile + + - name: Find latest CI run for this PR + id: find-run + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + RUN_JSON=$(gh run list \ + --workflow "CI: Build & Test" \ + --branch "$GITHUB_HEAD_REF" \ + --status completed \ + --limit 1 \ + --json databaseId,headSha) + RUN_ID=$(echo "$RUN_JSON" | jq -r '.[0].databaseId') + RUN_SHA=$(echo "$RUN_JSON" | jq -r '.[0].headSha') + if [ -z "$RUN_ID" ] || [ "$RUN_ID" = "null" ]; then + echo "::error::No completed CI run found. Wait for CI to finish, then re-add the label." + exit 1 + fi + HEAD_SHA=$(git rev-parse HEAD) + if [ "$RUN_SHA" != "$HEAD_SHA" ]; then + echo "::error::CI run ($RUN_SHA) does not match current HEAD ($HEAD_SHA). Wait for the latest CI run to complete, then re-add the label."
+ exit 1 + fi + echo "run_id=$RUN_ID" >> "$GITHUB_OUTPUT" + + - name: Download build artifacts + uses: actions/download-artifact@v6 + with: + name: build-output + run-id: ${{ steps.find-run.outputs.run_id }} + github-token: ${{ secrets.GITHUB_TOKEN }} + + - name: Run size-limit and bump failing entries + id: bump + run: node scripts/bump-size-limits.mjs + + - name: Format + run: yarn format + + - name: Commit and push + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add .size-limit.js + git diff --cached --quiet && echo "No changes to commit" && exit 0 + git commit -m "chore: Bump size limits" + git push + + - name: Comment on PR + if: steps.bump.outputs.summary != '' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SUMMARY: ${{ steps.bump.outputs.summary }} + run: | + echo "$SUMMARY" > /tmp/pr-comment.md + gh pr comment "${{ github.event.pull_request.number }}" \ + --body-file /tmp/pr-comment.md + + - name: Remove label + if: always() + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh pr edit "${{ github.event.pull_request.number }}" \ + --remove-label "Accept Bundlesize Increase" diff --git a/.size-limit.js b/.size-limit.js index 38a83445d021..84153bd6aa0a 100644 --- a/.size-limit.js +++ b/.size-limit.js @@ -178,13 +178,13 @@ module.exports = [ name: 'CDN Bundle', path: createCDNPath('bundle.min.js'), gzip: true, - limit: '29 KB', + limit: '27 KB', }, { name: 'CDN Bundle (incl. Tracing)', path: createCDNPath('bundle.tracing.min.js'), gzip: true, - limit: '44 KB', + limit: '42 KB', }, { name: 'CDN Bundle (incl. Logs, Metrics)', @@ -283,14 +283,14 @@ module.exports = [ path: createCDNPath('bundle.tracing.replay.feedback.min.js'), gzip: false, brotli: false, - limit: '264 KB', + limit: '263 KB', }, { name: 'CDN Bundle (incl. 
Tracing, Replay, Feedback, Logs, Metrics) - uncompressed', path: createCDNPath('bundle.tracing.replay.feedback.logs.metrics.min.js'), gzip: false, brotli: false, - limit: '264 KB', + limit: '200 KB', }, // Next.js SDK (ESM) { @@ -333,7 +333,7 @@ module.exports = [ path: 'packages/node/build/esm/index.js', import: createImport('initWithoutDefaultIntegrations', 'getDefaultIntegrationsWithoutPerformance'), gzip: true, - limit: '98 KB', + limit: '90 KB', ignore: [...builtinModules, ...nodePrefixedBuiltinModules], modifyWebpackConfig: function (config) { const webpack = require('webpack'); @@ -356,7 +356,7 @@ module.exports = [ import: createImport('init'), ignore: [...builtinModules, ...nodePrefixedBuiltinModules], gzip: true, - limit: '114 KB', + limit: '1 KB', }, ]; diff --git a/dev-packages/size-limit-gh-action/index.mjs b/dev-packages/size-limit-gh-action/index.mjs index 3dac81a3f080..6e80046f9054 100644 --- a/dev-packages/size-limit-gh-action/index.mjs +++ b/dev-packages/size-limit-gh-action/index.mjs @@ -182,24 +182,25 @@ async function run() { if (status > 0) { try { - const results = limit.parseResults(output); - const failedResults = results - .filter(result => result.passed || false) - .map(result => ({ - name: result.name, - size: +result.size, - sizeLimit: +result.sizeLimit, - })); + const results = JSON.parse(output); + const failedResults = results.filter(result => !result.passed); if (failedResults.length > 0) { - // eslint-disable-next-line no-console - console.log('Exceeded size-limits:', failedResults); + const lines = failedResults.map(r => { + const size = (r.size / 1024).toFixed(1); + const max = (r.sizeLimit / 1024).toFixed(1); + const over = ((r.size - r.sizeLimit) / 1024).toFixed(1); + return ` ${r.name}: ${size} KB (limit: ${max} KB, +${over} KB over)`; + }); + core.error(`Size limit exceeded:\n${lines.join('\n')}`); } } catch { // noop } - setFailed('Size limit has been exceeded.'); + setFailed( + 'Size limit has been exceeded. 
To accept this increase, add the "Accept Bundlesize Increase" label to this PR or update `.size-limit.js` manually.', + ); } } catch (error) { core.error(error); diff --git a/scripts/bump-size-limits.mjs b/scripts/bump-size-limits.mjs new file mode 100644 index 000000000000..46a66ef8fc69 --- /dev/null +++ b/scripts/bump-size-limits.mjs @@ -0,0 +1,84 @@ +/* oxlint-disable no-console */ +import { execSync } from 'node:child_process'; +import { readFileSync, writeFileSync, appendFileSync } from 'node:fs'; + +const SIZE_LIMIT_CONFIG = '.size-limit.js'; + +function roundUpToHalfKB(bytes) { + return Math.ceil((bytes / 1024) * 2) / 2; +} + +function run() { + let output; + try { + output = execSync('yarn run --silent size-limit --json', { + encoding: 'utf-8', + maxBuffer: 10 * 1024 * 1024, + // size-limit exits with non-zero when limits are exceeded, which is expected + stdio: ['pipe', 'pipe', 'pipe'], + }); + } catch (error) { + // size-limit exits with code 1 when limits are exceeded, but still writes JSON to stdout + output = error.stdout; + if (!output) { + console.error('size-limit produced no output.'); + process.exit(1); + } + } + + let results; + try { + results = JSON.parse(output); + } catch { + console.error('Failed to parse size-limit JSON output.'); + console.error('Raw output:', output.slice(0, 500)); + process.exit(1); + } + + const failedEntries = results.filter(r => !r.passed); + + if (failedEntries.length === 0) { + console.log('All size-limit checks passed. 
Nothing to bump.'); + return; + } + + console.log(`Found ${failedEntries.length} failing size-limit entries:`); + + let config = readFileSync(SIZE_LIMIT_CONFIG, 'utf-8'); + const summaryLines = []; + + for (const entry of failedEntries) { + const actualSize = entry.size; + const margin = Math.min(actualSize * 0.1, 1024); + const newLimitKB = roundUpToHalfKB(actualSize + margin); + const newLimitStr = `${newLimitKB} KB`; + + const nameEscaped = entry.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const regex = new RegExp(`(name:\\s*'${nameEscaped}'[\\s\\S]*?limit:\\s*')([^']+)(')`); + + const match = config.match(regex); + if (!match) { + console.error(` WARNING: Could not find limit entry for "${entry.name}" in ${SIZE_LIMIT_CONFIG}`); + continue; + } + + const oldLimitStr = match[2]; + console.log(` ${entry.name}: ${oldLimitStr} -> ${newLimitStr}`); + summaryLines.push(`- \`${entry.name}\`: ${oldLimitStr} -> ${newLimitStr}`); + + config = config.replace(regex, `$1${newLimitStr}$3`); + } + + writeFileSync(SIZE_LIMIT_CONFIG, config, 'utf-8'); + console.log(`\nUpdated ${SIZE_LIMIT_CONFIG}`); + + // Write summary to $GITHUB_OUTPUT for the PR comment step (only if anything was actually bumped) + if (process.env.GITHUB_OUTPUT && summaryLines.length > 0) { + const summary = `Bumped size limits:\n${summaryLines.join('\n')}`; + // Multi-line output requires delimiter syntax + const delimiter = `EOF_${Date.now()}`; + appendFileSync(process.env.GITHUB_OUTPUT, `summary<<${delimiter}\n${summary}\n${delimiter}\n`); + } +} + +run();