diff --git a/.github/workflows/bump-size-limits.yml b/.github/workflows/bump-size-limits.yml
new file mode 100644
index 000000000000..c713a52bae4a
--- /dev/null
+++ b/.github/workflows/bump-size-limits.yml
@@ -0,0 +1,99 @@
name: 'Auto-bump size-limit thresholds'

on:
  schedule:
    - cron: '0 9 * * 5' # Friday 09:00 UTC
  workflow_dispatch:

# Needs write access to push the bot branch, open/update the PR,
# and file/comment on the failure-tracking issue.
permissions:
  contents: write
  pull-requests: write
  issues: write

# Only one bump run at a time; never cancel an in-flight run so a
# half-written PR branch is not left behind.
concurrency:
  group: bump-size-limits
  cancel-in-progress: false

jobs:
  bump:
    name: Bump size-limit thresholds
    runs-on: ubuntu-24.04
    timeout-minutes: 25
    steps:
      - name: Generate GitHub App token
        id: app-token
        uses: actions/create-github-app-token@v2
        with:
          app-id: ${{ vars.GITFLOW_APP_ID }}
          private-key: ${{ secrets.GITFLOW_APP_PRIVATE_KEY }}

      - name: Checkout develop
        uses: actions/checkout@v6
        with:
          ref: develop
          token: ${{ steps.app-token.outputs.token }}

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version-file: 'package.json'

      - name: Install dependencies
        uses: ./.github/actions/install-dependencies

      - name: Build packages
        run: yarn build

      - name: Run bumper
        # Capture stdout AND exit code without failing the step on exit-2 (no-op).
        # The script writes .size-limit.js in place; create-pull-request handles
        # commit/branch/PR — if there's no diff, it skips opening a PR.
        run: |
          set +e
          node scripts/bump-size-limits.mjs > /tmp/bump-summary.md
          code=$?
+ set -e + if [ "$code" -ne 0 ] && [ "$code" -ne 2 ]; then + echo "::error::bump script failed with exit code $code" + cat /tmp/bump-summary.md || true + exit "$code" + fi + cat /tmp/bump-summary.md + + - name: Create or update PR + uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 + with: + token: ${{ steps.app-token.outputs.token }} + commit-message: 'chore(size-limit): auto-bump weekly drift' + title: 'chore(size-limit): weekly auto-bump' + body-path: /tmp/bump-summary.md + branch: bot/bump-size-limits + base: develop + labels: 'Dev: CI' + add-paths: '.size-limit.js' + delete-branch: true + + - name: Open or comment on failure issue + if: failure() + env: + GH_TOKEN: ${{ steps.app-token.outputs.token }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + run: | + title='Weekly size-limit auto-bump failure' + existing=$(gh issue list --search "in:title \"$title\"" --state open --json number,title --jq ".[] | select(.title == \"$title\") | .number" | head -n1) + if [ -n "$existing" ]; then + gh issue comment "$existing" --body "Auto-bump workflow failed again: $RUN_URL" + else + body=$(cat <} + * @throws {TypeError | SyntaxError} on malformed input + */ +export function parseSizeLimitOutput(raw) { + const data = JSON.parse(raw); + if (!Array.isArray(data)) { + throw new TypeError(`size-limit output: expected array, got ${typeof data}`); + } + return data.map((entry, i) => { + if (!entry || typeof entry !== 'object') { + throw new TypeError(`size-limit entry [${i}]: expected object`); + } + if (typeof entry.name !== 'string' || entry.name.length === 0) { + throw new TypeError(`size-limit entry [${i}]: 'name' must be a non-empty string`); + } + if (typeof entry.size !== 'number' || !Number.isFinite(entry.size)) { + throw new TypeError(`size-limit entry [${i}] (${entry.name}): 'size' must be a finite number`); + } + if (typeof entry.sizeLimit !== 'number' || !Number.isFinite(entry.sizeLimit)) { 
// ---------------------------------------------------------------------------
// Constants. NOTE(review): HEADROOM_BYTES / BYTES_PER_KB / BYTES_PER_KIB and
// computeNewLimit were swallowed by a garbled hunk earlier in this diff; they
// are reconstructed from the expectations in bump-size-limits.test.ts
// (5000 / 1000 / 1024, ceil-to-next-KB) — verify against the original file.
// ---------------------------------------------------------------------------

/** Headroom added on top of each measured size, in bytes (5 KB). */
export const HEADROOM_BYTES = 5000;

/** Decimal kilobyte — used by entries whose limit string says 'KB'. */
export const BYTES_PER_KB = 1000;

/** Binary kibibyte — used by entries whose limit string says 'KiB'. */
export const BYTES_PER_KIB = 1024;

const execFileAsync = promisify(execFile);

// This script lives in `<repo>/scripts/`, so the repo root is one level up.
const REPO_ROOT = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '..');
const SIZE_LIMIT_FILE = path.join(REPO_ROOT, '.size-limit.js');

/**
 * New limit for a measured bundle size: current size plus 5 KB of headroom,
 * rounded up to the next whole decimal kilobyte.
 *
 * @param {number} currentSizeBytes - size reported by size-limit
 * @returns {number} new limit in bytes
 */
export function computeNewLimit(currentSizeBytes) {
  return Math.ceil((currentSizeBytes + HEADROOM_BYTES) / BYTES_PER_KB) * BYTES_PER_KB;
}

/**
 * Parse and validate the JSON emitted by `size-limit --json`.
 *
 * @param {string} raw - captured stdout
 * @returns {Array<{ name: string, size: number, sizeLimit: number }>}
 * @throws {TypeError | SyntaxError} on malformed input
 */
export function parseSizeLimitOutput(raw) {
  const data = JSON.parse(raw);
  if (!Array.isArray(data)) {
    throw new TypeError(`size-limit output: expected array, got ${typeof data}`);
  }
  return data.map((entry, i) => {
    if (!entry || typeof entry !== 'object') {
      throw new TypeError(`size-limit entry [${i}]: expected object`);
    }
    if (typeof entry.name !== 'string' || entry.name.length === 0) {
      throw new TypeError(`size-limit entry [${i}]: 'name' must be a non-empty string`);
    }
    if (typeof entry.size !== 'number' || !Number.isFinite(entry.size)) {
      throw new TypeError(`size-limit entry [${i}] (${entry.name}): 'size' must be a finite number`);
    }
    if (typeof entry.sizeLimit !== 'number' || !Number.isFinite(entry.sizeLimit)) {
      throw new TypeError(`size-limit entry [${i}] (${entry.name}): 'sizeLimit' must be a finite number`);
    }
    // Drop extra fields (`passed`, …) — callers rely on this exact shape.
    return { name: entry.name, size: entry.size, sizeLimit: entry.sizeLimit };
  });
}

/**
 * Escape a string for safe inclusion in a markdown table cell.
 * Replaces newlines with spaces, escapes pipes and backticks.
 *
 * @param {unknown} value
 * @returns {string}
 */
export function sanitizeMarkdownCell(value) {
  return String(value)
    .replace(/\r\n|\r|\n/g, ' ')
    .replace(/[|`]/g, m => `\\${m}`);
}

/**
 * Escape a string for literal use inside a RegExp.
 */
function reEscape(s) {
  return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

/**
 * Inspect the source for the current limit string of a given entry.
 * Returns null if no entry with that name is found.
 *
 * @param {string} src
 * @param {string} name
 * @returns {{ value: number, unit: 'KB' | 'KiB', raw: string } | null}
 */
export function extractCurrentLimit(src, name) {
  const namePattern = `name:\\s*'${reEscape(name)}'`;
  const limitPattern = `limit:\\s*'(\\d+(?:\\.\\d+)?)\\s*(KB|KiB)'`;
  // Lazy `[^]*?` stops at the first `limit:` after the (fully quoted) name,
  // so name-prefix collisions cannot bleed into a later entry.
  const re = new RegExp(`${namePattern}[^]*?${limitPattern}`);
  const m = re.exec(src);
  if (!m) return null;
  return { value: Number(m[1]), unit: /** @type {'KB' | 'KiB'} */ (m[2]), raw: `${m[1]} ${m[2]}` };
}

/**
 * Convert a numeric byte value into a whole-unit display value matching the
 * entry's existing unit. KB uses 1000, KiB uses 1024.
 *
 * @param {number} newBytes
 * @param {'KB' | 'KiB'} unit
 * @returns {number}
 */
function bytesToDisplay(newBytes, unit) {
  const divisor = unit === 'KiB' ? BYTES_PER_KIB : BYTES_PER_KB;
  return Math.ceil(newBytes / divisor);
}

/**
 * Rewrite `.size-limit.js` source to apply a list of limit updates.
 * Operates on plain text — never executes the source. For each change,
 * locates the entry by exact `name:` match and rewrites the next `limit:`
 * line in that window.
 *
 * @param {string} src - contents of .size-limit.js
 * @param {Array<{ name: string, newLimitKb: number, unit: 'KB' | 'KiB' }>} changes
 * @returns {string} updated source
 * @throws {Error} if any change's name doesn't match exactly one entry
 */
export function rewriteSizeLimitFile(src, changes) {
  let out = src;
  for (const { name, newLimitKb, unit } of changes) {
    const namePattern = `name:\\s*'${reEscape(name)}'`;
    const limitPattern = `limit:\\s*'(\\d+(?:\\.\\d+)?)\\s*(KB|KiB)'`;
    const re = new RegExp(`(${namePattern}[^]*?)${limitPattern}`);

    let matchCount = 0;
    const replaced = out.replace(re, (_full, prefix) => {
      matchCount++;
      return `${prefix}limit: '${newLimitKb} ${unit}'`;
    });

    if (matchCount === 0) {
      throw new Error(`rewriteSizeLimitFile: no entry matched for name='${name}'`);
    }
    out = replaced;
  }
  return out;
}

/**
 * Render a markdown summary of size-limit changes for the PR body.
 *
 * @param {Array<{ name: string, oldLimit: string, newLimit: string, delta: number, unit: 'KB' | 'KiB' }>} changes
 * @returns {string}
 */
export function renderSummary(changes) {
  const header = '## Size limit auto-bump\n';
  if (changes.length === 0) {
    return `${header}\nAll size limits already provide ≥5 KB headroom. No changes needed.\n`;
  }
  const lines = [header, '| Entry | Old limit | New limit | Δ |', '| --- | --- | --- | --- |'];
  for (const c of changes) {
    const sign = c.delta >= 0 ? '+' : '';
    const delta = `${sign}${c.delta} ${c.unit}`;
    lines.push(`| ${sanitizeMarkdownCell(c.name)} | ${c.oldLimit} | ${c.newLimit} | ${delta} |`);
  }
  return `${lines.join('\n')}\n`;
}

// CLI entrypoint
async function main() {
  // 1. Run size-limit. Capture JSON. execFile (no shell).
  let raw;
  try {
    // `--silent` suppresses yarn's `yarn run v…` header and `Done in …` footer,
    // which would otherwise break JSON.parse on the captured stdout.
    const { stdout } = await execFileAsync('yarn', ['--silent', 'size-limit', '--json'], {
      cwd: REPO_ROOT,
      maxBuffer: 16 * 1024 * 1024,
    });
    raw = stdout;
  } catch (err) {
    // size-limit exits non-zero when entries fail their existing limit. We still want the JSON.
    if (err && typeof err === 'object' && 'stdout' in err && err.stdout) {
      raw = /** @type {string} */ (err.stdout);
    } else {
      throw err;
    }
  }

  const measurements = parseSizeLimitOutput(raw);

  // 2. Read .size-limit.js as text. NEVER require() it.
  const src = await readFile(SIZE_LIMIT_FILE, 'utf8');

  // 3. Compute changes.
  const changes = [];
  const summaryRows = [];
  for (const m of measurements) {
    const newBytes = computeNewLimit(m.size);

    const cur = extractCurrentLimit(src, m.name);
    if (!cur) {
      throw new Error(`size-limit reported entry '${m.name}' but it was not found in .size-limit.js`);
    }

    const displayValue = bytesToDisplay(newBytes, cur.unit);
    const newLimitStr = `${displayValue} ${cur.unit}`;

    if (newLimitStr === cur.raw) {
      // After unit conversion the displayed value didn't move. Skip — avoids
      // no-op edits caused by KiB rounding.
      continue;
    }

    changes.push({ name: m.name, newLimitKb: displayValue, unit: cur.unit });
    summaryRows.push({
      name: m.name,
      oldLimit: cur.raw,
      newLimit: newLimitStr,
      delta: displayValue - cur.value,
      unit: cur.unit,
    });
  }

  // 4. Print summary regardless (workflow captures stdout).
  process.stdout.write(renderSummary(summaryRows));

  if (changes.length === 0) {
    process.exit(2);
  }

  // 5. Atomic write: temp file + rename.
  const updated = rewriteSizeLimitFile(src, changes);
  const tmpPath = `${SIZE_LIMIT_FILE}.tmp`;
  await writeFile(tmpPath, updated, 'utf8');
  await rename(tmpPath, SIZE_LIMIT_FILE);

  process.exit(0);
}
+ const updated = rewriteSizeLimitFile(src, changes); + const tmpPath = `${SIZE_LIMIT_FILE}.tmp`; + await writeFile(tmpPath, updated, 'utf8'); + await rename(tmpPath, SIZE_LIMIT_FILE); + + process.exit(0); +} + +const isMain = process.argv[1] && fileURLToPath(import.meta.url) === path.resolve(process.argv[1]); +if (isMain) { + main().catch(err => { + // oxlint-disable-next-line no-console + console.error(err.stack || err.message || err); + process.exit(1); + }); +} diff --git a/scripts/bump-size-limits.test.ts b/scripts/bump-size-limits.test.ts new file mode 100644 index 000000000000..ee046ea9f619 --- /dev/null +++ b/scripts/bump-size-limits.test.ts @@ -0,0 +1,241 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import { describe, expect, it } from 'vitest'; +// @ts-expect-error -- .mjs source has no declarations under `moduleResolution: "node"` +import * as bumpSizeLimits from './bump-size-limits.mjs'; + +const { + BYTES_PER_KB, + BYTES_PER_KIB, + computeNewLimit, + extractCurrentLimit, + HEADROOM_BYTES, + parseSizeLimitOutput, + renderSummary, + rewriteSizeLimitFile, + sanitizeMarkdownCell, +} = bumpSizeLimits; + +const FIXTURE_PATH = path.join(__dirname, '__fixtures__', 'size-limit-sample.js'); +function readFixture(): string { + return fs.readFileSync(FIXTURE_PATH, 'utf8'); +} + +describe('constants', () => { + it('exports the documented thresholds', () => { + expect(HEADROOM_BYTES).toBe(5000); + expect(BYTES_PER_KB).toBe(1000); + expect(BYTES_PER_KIB).toBe(1024); + }); +}); + +describe('computeNewLimit', () => { + it('always returns currentSize + 5 KB, rounded up to the next full KB', () => { + // current 27_500 → +5000 = 32_500 → ceil to 33_000 + expect(computeNewLimit(27_500)).toBe(33_000); + // current 21_000 → +5000 = 26_000 → already round → 26_000 + expect(computeNewLimit(21_000)).toBe(26_000); + }); + + it('rounds up to next full KB', () => { + // current 27_001 → +5000 = 32_001 → ceil to 33_000 + 
expect(computeNewLimit(27_001)).toBe(33_000); + // current 27_999 → +5000 = 32_999 → ceil to 33_000 + expect(computeNewLimit(27_999)).toBe(33_000); + // current 28_000 → +5000 = 33_000 → already round → 33_000 + expect(computeNewLimit(28_000)).toBe(33_000); + }); + + it('handles zero-size measurements safely', () => { + expect(computeNewLimit(0)).toBe(5_000); + }); +}); + +describe('parseSizeLimitOutput', () => { + it('accepts well-formed input and returns name/size/sizeLimit triples', () => { + const raw = JSON.stringify([ + { name: '@sentry/browser', size: 27_500, sizeLimit: 27_000, passed: false }, + { name: 'CDN Bundle', size: 28_000, sizeLimit: 29_000, passed: true }, + ]); + expect(parseSizeLimitOutput(raw)).toEqual([ + { name: '@sentry/browser', size: 27_500, sizeLimit: 27_000 }, + { name: 'CDN Bundle', size: 28_000, sizeLimit: 29_000 }, + ]); + }); + + it('rejects non-array root', () => { + expect(() => parseSizeLimitOutput('{}')).toThrow(/expected array/i); + expect(() => parseSizeLimitOutput('null')).toThrow(/expected array/i); + }); + + it('rejects malformed JSON', () => { + expect(() => parseSizeLimitOutput('not json')).toThrow(SyntaxError); + }); + + it('rejects entries missing required fields', () => { + expect(() => parseSizeLimitOutput(JSON.stringify([{ name: 'x', size: 1 }]))).toThrow(/sizeLimit/); + expect(() => parseSizeLimitOutput(JSON.stringify([{ size: 1, sizeLimit: 2 }]))).toThrow(/name/); + }); + + it('rejects entries with non-string name', () => { + expect(() => parseSizeLimitOutput(JSON.stringify([{ name: 42, size: 1, sizeLimit: 2 }]))).toThrow(/name/); + }); + + it('rejects entries with non-finite numbers', () => { + expect(() => parseSizeLimitOutput(JSON.stringify([{ name: 'x', size: 'one', sizeLimit: 2 }]))).toThrow(/size/); + expect(() => parseSizeLimitOutput('[{"name":"x","size":1e500,"sizeLimit":2}]')).toThrow(/size/); + }); + + it('ignores extra fields without complaint', () => { + const raw = JSON.stringify([{ name: 'x', size: 1, 
sizeLimit: 2, passed: true, extra: 'ok' }]); + expect(parseSizeLimitOutput(raw)).toEqual([{ name: 'x', size: 1, sizeLimit: 2 }]); + }); +}); + +describe('sanitizeMarkdownCell', () => { + it('passes plain text through unchanged', () => { + expect(sanitizeMarkdownCell('@sentry/browser')).toBe('@sentry/browser'); + }); + + it('escapes pipes', () => { + expect(sanitizeMarkdownCell('a|b')).toBe('a\\|b'); + }); + + it('escapes backticks', () => { + expect(sanitizeMarkdownCell('a`b')).toBe('a\\`b'); + }); + + it('replaces newlines with spaces', () => { + expect(sanitizeMarkdownCell('a\nb')).toBe('a b'); + expect(sanitizeMarkdownCell('a\r\nb')).toBe('a b'); + }); + + it('preserves parentheses, commas, periods', () => { + expect(sanitizeMarkdownCell('CDN Bundle (incl. Tracing, Replay)')).toBe('CDN Bundle (incl. Tracing, Replay)'); + }); +}); + +describe('renderSummary', () => { + it('renders an empty header when there are no changes', () => { + const out = renderSummary([]); + expect(out).toContain('## Size limit auto-bump'); + expect(out).toContain('All size limits already provide ≥5 KB headroom. 
No changes needed.'); + }); + + it('renders a markdown table for one change', () => { + const out = renderSummary([ + { name: '@sentry/browser', oldLimit: '27 KB', newLimit: '28 KB', delta: 1, unit: 'KB' }, + ]); + expect(out).toContain('| Entry | Old limit | New limit | Δ |'); + expect(out).toContain('| @sentry/browser | 27 KB | 28 KB | +1 KB |'); + }); + + it('formats negative deltas with a minus', () => { + const out = renderSummary([ + { name: '@sentry/node', oldLimit: '177 KB', newLimit: '175 KB', delta: -2, unit: 'KB' }, + ]); + expect(out).toContain('| @sentry/node | 177 KB | 175 KB | -2 KB |'); + }); + + it('uses the entry unit for the delta column (KiB)', () => { + const out = renderSummary([ + { + name: '@sentry/cloudflare (withSentry)', + oldLimit: '420 KiB', + newLimit: '425 KiB', + delta: 5, + unit: 'KiB', + }, + ]); + expect(out).toContain('| @sentry/cloudflare (withSentry) | 420 KiB | 425 KiB | +5 KiB |'); + }); + + it('escapes pipes in entry names', () => { + const out = renderSummary([{ name: 'evil|name', oldLimit: '1 KB', newLimit: '2 KB', delta: 1, unit: 'KB' }]); + expect(out).toContain('evil\\|name'); + }); +}); + +describe('rewriteSizeLimitFile', () => { + it('updates a single entry, preserving KB unit', () => { + const src = readFixture(); + const out = rewriteSizeLimitFile(src, [{ name: '@sentry/browser', newLimitKb: 28, unit: 'KB' }]); + expect(out).toMatch(/name: '@sentry\/browser',[\s\S]*?limit: '28 KB',/); + expect(out).toMatch(/name: '@sentry\/browser - with treeshaking flags',[\s\S]*?limit: '25 KB',/); + }); + + it('updates entries with name-prefix collision correctly', () => { + const src = readFixture(); + const out = rewriteSizeLimitFile(src, [ + { name: '@sentry/browser - with treeshaking flags', newLimitKb: 30, unit: 'KB' }, + ]); + expect(out).toMatch(/name: '@sentry\/browser',[\s\S]*?limit: '27 KB',/); + expect(out).toMatch(/name: '@sentry\/browser - with treeshaking flags',[\s\S]*?limit: '30 KB',/); + }); + + it('preserves KiB 
unit', () => { + const src = readFixture(); + const out = rewriteSizeLimitFile(src, [{ name: '@sentry/cloudflare (withSentry)', newLimitKb: 425, unit: 'KiB' }]); + expect(out).toMatch(/name: '@sentry\/cloudflare \(withSentry\)',[\s\S]*?limit: '425 KiB',/); + }); + + it('handles names with parentheses and decimals in original limit', () => { + const src = readFixture(); + const out = rewriteSizeLimitFile(src, [{ name: 'CDN Bundle (incl. Tracing)', newLimitKb: 50, unit: 'KB' }]); + expect(out).toMatch(/name: 'CDN Bundle \(incl\. Tracing\)',[\s\S]*?limit: '50 KB',/); + expect(out).not.toContain("limit: '46.5 KB'"); + }); + + it('applies multiple changes', () => { + const src = readFixture(); + const out = rewriteSizeLimitFile(src, [ + { name: '@sentry/browser', newLimitKb: 28, unit: 'KB' }, + { name: 'CDN Bundle (incl. Tracing)', newLimitKb: 50, unit: 'KB' }, + ]); + expect(out).toContain("limit: '28 KB'"); + expect(out).toContain("limit: '50 KB'"); + }); + + it('throws if a name does not match any entry', () => { + const src = readFixture(); + expect(() => rewriteSizeLimitFile(src, [{ name: '@sentry/nonexistent', newLimitKb: 1, unit: 'KB' }])).toThrow( + /@sentry\/nonexistent/, + ); + }); + + it('returns unchanged source when changes is empty', () => { + const src = readFixture(); + expect(rewriteSizeLimitFile(src, [])).toBe(src); + }); + + it('does not modify the input string in-place', () => { + const src = readFixture(); + const before = src; + rewriteSizeLimitFile(src, [{ name: '@sentry/browser', newLimitKb: 28, unit: 'KB' }]); + expect(src).toBe(before); + }); +}); + +describe('extractCurrentLimit', () => { + const FIXTURE_SRC = `module.exports = [ + { name: '@sentry/browser', limit: '27 KB' }, + { name: '@sentry/cloudflare (withSentry)', limit: '420 KiB' }, +];`; + + it('extracts the limit value and unit by name', () => { + expect(extractCurrentLimit(FIXTURE_SRC, '@sentry/browser')).toEqual({ + value: 27, + unit: 'KB', + raw: '27 KB', + }); + 
expect(extractCurrentLimit(FIXTURE_SRC, '@sentry/cloudflare (withSentry)')).toEqual({ + value: 420, + unit: 'KiB', + raw: '420 KiB', + }); + }); + + it('returns null when the name is not present', () => { + expect(extractCurrentLimit(FIXTURE_SRC, '@sentry/missing')).toBeNull(); + }); +});