diff --git a/.circleci/config.yml b/.circleci/config.yml index d2cdca6d379..98565956c4f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -49,6 +49,14 @@ executors: CACHE_VERSION: v1 working_directory: ~/react-spectrum + rsp-rust: + docker: + - image: cimg/rust:1.86.0 + resource_class: large + environment: + CACHE_VERSION: v1 + working_directory: ~/react-spectrum + commands: deploy-s3: parameters: @@ -395,6 +403,15 @@ jobs: steps: - restore_cache: key: react-spectrum-{{ .Environment.CACHE_VERSION }}-{{ .Environment.CIRCLE_SHA1 }} + # Bring in the rsp-api-check binary persisted by ts-build-binary so we + # can extract the branch API immediately after `make build`, while the + # freshly-built workspace (with its gitignored subpath stubs at paths + # like packages/react-aria/useButton/package.json) is still intact. + # Doing it here avoids the Node10 resolution breakage we saw when the + # stubs had to cross a persist_to_workspace boundary into a separate + # ts-build-branch job. + - attach_workspace: + at: ~/react-spectrum - run: name: test @@ -403,6 +420,33 @@ jobs: node --loader ./scripts/esm-support/loader.mjs ./scripts/esm-support/testESM.mjs node scripts/testCJS.cjs + - run: + name: install ts-extractor dependencies + command: cd rsp-api-checker/ts-extractor && npm ci + + - run: + name: extract branch API + command: | + cd rsp-api-checker + target/release/rsp-api-check get-local-api --repo-root .. --output ../dist/branch-api + + - run: + name: env-report (branch side) + command: | + cd rsp-api-checker + target/release/rsp-api-check env-report --repo-root .. --output ../dist/branch-api/env-report.json || true + + - store_artifacts: + path: dist/branch-api + destination: branch-api + + - persist_to_workspace: + root: . 
+ paths: + - 'packages/*/dist/' + - 'packages/@*/*/dist/' + - 'dist/branch-api/' + lint: executor: rsp-xlarge steps: @@ -415,55 +459,87 @@ jobs: yarn test:lint yarn lint - ts-build-branch: - executor: rsp-large + # Build the rsp-api-check Rust binary once and share it via workspace. + # Runs in parallel with the yarn `install` job — no dependency needed. + # The Cargo registry + target dir are cached by Cargo.lock hash so + # incremental rebuilds are fast when only Rust source changes. + ts-build-binary: + executor: rsp-rust steps: + - checkout - restore_cache: - key: react-spectrum-{{ .Environment.CACHE_VERSION }}-{{ .Environment.CIRCLE_SHA1 }} - + keys: + - rsp-api-check-v1-{{ checksum "rsp-api-checker/Cargo.lock" }} + - rsp-api-check-v1- - run: - name: build branch apis - command: yarn build:api-branch - + name: build rsp-api-check + command: cd rsp-api-checker && cargo build --release + - save_cache: + key: rsp-api-check-v1-{{ checksum "rsp-api-checker/Cargo.lock" }} + paths: + - ~/.cargo/registry + - ~/.cargo/git + - rsp-api-checker/target - persist_to_workspace: - root: dist + root: . paths: - - 'branch-api/' - - ts-build-fork-point: + - rsp-api-checker/target/release/rsp-api-check + + # Fetch the last published release from npm and extract its type API. + # Runs in parallel with test-build (which does the branch-side extraction + # right after `make build` completes). + # + # Future fork-point support: to compare against a specific git ref rather + # than the npm release, add a ts-build-main job that checks out that ref, + # runs `yarn build`, then `rsp-api-check get-local-api --output dist/base-api`, + # and substitute it for ts-get-published in the ts-diff requires list. 
+ ts-get-published: executor: rsp-large steps: - restore_cache: key: react-spectrum-{{ .Environment.CACHE_VERSION }}-{{ .Environment.CIRCLE_SHA1 }} - + - attach_workspace: + at: ~/react-spectrum + - run: + name: install ts-extractor dependencies + command: cd rsp-api-checker/ts-extractor && npm ci - run: - name: build fork-point apis + name: extract published API command: | - mkdir -p ~/.ssh - curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | sed -e 's/^/github.com /' >> ~/.ssh/known_hosts - yarn build:api-branch --githash="origin/main" --output="base-api" && yarn build:api-branch && yarn compare:apis - + cd rsp-api-checker + target/release/rsp-api-check get-published-api --repo-root .. --output ../dist/base-api + - run: + # Runs after extraction (the extraction wipes its output dir on start) + # so the report lands next to the api.json tree. Non-fatal: if env + # inspection itself errors, we still want the extracted api.json. + name: env-report (published side) + command: | + cd rsp-api-checker + target/release/rsp-api-check env-report --repo-root .. --output ../dist/base-api/env-report.json || true + - store_artifacts: + path: dist/base-api + destination: base-api - persist_to_workspace: - root: dist + root: . 
paths: - - 'base-api/' + - dist/base-api/ ts-diff: - executor: rsp-large + executor: rsp steps: - - restore_cache: - key: react-spectrum-{{ .Environment.CACHE_VERSION }}-{{ .Environment.CIRCLE_SHA1 }} - - attach_workspace: - at: /tmp/dist - + at: ~/react-spectrum - run: - name: compare api + name: compare APIs command: | - yarn test:parcel - mkdir -p dist - yarn compare:apis --isCI --branch-api-dir="/tmp/dist/branch-api" --base-api-dir="/tmp/dist/base-api" | tee dist/ts-diff.txt - + cd rsp-api-checker + target/release/rsp-api-check compare \ + --base-api-dir ../dist/base-api \ + --branch-api-dir ../dist/branch-api \ + --ci | tee ../dist/ts-diff.txt + - store_artifacts: + path: dist/ts-diff.txt + destination: ts-diff.txt - persist_to_workspace: root: dist paths: @@ -922,28 +998,29 @@ workflows: - test-ssr-18: requires: - install-18 + # ts-build-binary has to run on every branch (including main) because + # test-build now consumes it to extract the branch API. The cost is + # small: Cargo caches by Cargo.lock hash, so this is typically a + # seconds-long cache hit. + - ts-build-binary - test-build: requires: - install + - ts-build-binary - lint: requires: - install - - ts-build-fork-point: - requires: - - install - filters: - branches: - ignore: main - - ts-build-branch: + - ts-get-published: requires: - install + - ts-build-binary filters: branches: ignore: main - ts-diff: requires: - - ts-build-fork-point - - ts-build-branch + - ts-get-published + - test-build filters: branches: ignore: main diff --git a/packages/react-aria-components/src/Button.tsx b/packages/react-aria-components/src/Button.tsx index 34bc565a9d2..211a2468a56 100644 --- a/packages/react-aria-components/src/Button.tsx +++ b/packages/react-aria-components/src/Button.tsx @@ -76,7 +76,9 @@ export interface ButtonProps extends Omit`. +- Use this list as the authoritative set for both `get-published-api` + (resolves what to install from npm) and `get-local-api` (resolves what to + extract). 
+- Keep the fs-walk path as a fallback when `yarn` isn't available (fixtures, + CI without yarn). +- Remove the depth-4 assumption; remove the hardcoded `dev/` skip (yarn's + `private: true` handles it). + +### A2. Cache published api.json by version + +**Why:** `get-published-api` spawns a temp dir, runs `npm install` for ~80 +packages, then extracts. For an unchanged `latest`, this is pure waste. +Typical cost: tens of seconds per run. + +**Action:** + +- Compute a cache key: `sha256(sorted(name@resolved_version)) + extractor_version_sha`. +- Cache location: `~/.cache/rsp-api-check/published//` (respect + `XDG_CACHE_HOME`). +- On `get-published-api`: if key hit, copy/symlink cached api.json tree to + `--output-dir`. Otherwise do the install+extract, then populate cache. +- Add `--no-cache` flag to force refresh. + +### A3. Cache local api.json by source SHA + +**Why:** `get-local-api` re-runs the extractor even when nothing changed. For +large monorepos this is the slowest local step. + +**Action:** + +- Before extracting, compute a per-package SHA = + `sha256(sorted(d.ts file path + mtime))` across all `.d.ts` under the entry + point's resolution closure. (Or just the repo's `git rev-parse HEAD` + + `git status --porcelain` for a coarser key.) +- Store api.json alongside a `.cache-key` file. Skip extraction if key + matches. +- Invalidates automatically on any `.d.ts` mtime change (which is exactly + when extraction output can change). + +### A4. Stable sort of export keys and interface properties before diffing + +**Why:** `format_interface` (`interface_builder.rs:729`) iterates `properties` +in `IndexMap` insertion order. TS compiler export/property order is *mostly* +stable but not guaranteed (it depends on resolution order, which depends on +the program's file order, which depends on the `entryFiles` array in +`ts.createProgram`). A real-world symptom: properties appearing "reordered" +in a diff with no API change. 
+ +**Action:** + +- In `differ.rs`, sort `all_names` alphabetically before the diff loop. +- In `format_interface` (`interface_builder.rs:729`) and + `format_prop`/`render_properties`, sort properties alphabetically before + joining. Cross-reference with `type_renderer.rs` to catch all sites. +- Add a Rust test: feed two api.jsons identical except for key order → + assert zero diffs. + +--- + +## 🟠 Medium impact + +### B1. Fix silent version-mismatch fallback for React types + +**Why:** `local_installed_version` (`get_published.rs:26-34`) swallows errors +with `.ok()?`, and the call site falls back to `"latest"` on `None` (line 74). +If reading the local `react/package.json` fails for any reason, published +install uses React `latest` while local uses whatever's checked in — +producing spurious diffs on any type change in React. + +**Action:** + +- Change signature to return `Result` and bail with a clear message + if the version can't be read. +- Log the resolved version at the top of the `get-published-api` run. + +### B2. Conditional exports: environment-aware `resolveTypesField` + +**Why:** `resolveTypesField` (`utils.ts:50-78`) walks keys in fixed order +`["types", "import", "default", "require"]`. For packages using conditional +exports like `{ "react-native": "...", "default": "..." }`, this picks +arbitrarily. More concerning: if `types` isn't the first key in a nested +condition, we can pick the wrong branch. + +**Action:** + +- Prefer `types` strictly over all other keys at each object level. +- Then prefer `import` over `require` (modern ESM bias). +- Explicitly reject non-types keys like `react-native`, `node-addons`, + `worker` unless nothing else matches. +- Add unit tests for each conditional-exports shape we've seen in + react-spectrum + the common npm patterns. + +### B3. 
Symlinked workspace packages: correct owner attribution + +**Why:** `isExternalDeclaration` (`extract-api.ts:118-128`) derives package +ownership from the file path's `/node_modules///` slice. With +yarn/pnpm workspaces symlinking local packages into `node_modules`, a +symlinked package's files resolve to the *symlink target* (under +`packages/`), not the node_modules path — so the current check says "not +external" correctly, but the reverse case (a published package's file that +happens to live under a nested node_modules) may be mis-attributed. + +**Action:** + +- Also check `path.realpath()` of the declaration file; if the realpath lands + outside `packages/`, treat as external. +- Add a test fixture with a symlinked workspace package. + +### B4. Report partial npm install failures usefully + +**Why:** `workspace.rs:20-22` bails on any non-zero npm exit code, with no +breakdown of which package(s) failed. For a first-time user this is "the +whole thing exploded." + +**Action:** + +- Capture npm's stderr, grep for `404` / `ETARGET` lines, print a targeted + message: "These packages couldn't be resolved: X, Y, Z" before bailing. +- Suggest `--tag next` or listing available dist-tags when `latest` fails. + +### B5. Handle `workspace:*` / `*` version specifiers explicitly + +**Why:** If a dep uses `workspace:*` or `*`, the npm registry query returns +`None` and the package is silently dropped from the install set. This means +some local workspace packages never get compared against npm. + +**Action:** + +- Detect these version forms in `get-published-api`; resolve via the + registry's `latest` dist-tag with a logged warning ("Resolved workspace:* + to @foo/bar@1.2.3 from npm latest"). + +--- + +## 🟡 Lower impact (nice to have) + +### C1. Time-spent-per-phase reporting + +Add `--timing` flag. Print: discovery, install, extract, compare. Lets us +actually know where the time goes. + +### C2. 
Eliminate duplicate private-package filter in TS + +`extract-api.ts:2003-2005` skips private packages during *write*, but +discovery already excluded them at line 150 via the walk. Either: + +- Remove the redundant write-time check, OR +- Move the discovery filter into a single place and document it. + +### C3. Parallelize per-package api.json *write* (not extraction) + +The `ts.Program` must stay single-threaded, but serializing symbols + writing +JSON per package can use a worker pool. Modest gain (~20% on large +monorepos). + +### C4. Explicit error when zero types are resolvable + +When every export becomes `any` (because `@types/react` is missing or +resolution is broken), the tool currently produces a huge useless diff. Add a +threshold check: if `>50%` of exports serialize as `any`, fail with "type +resolution is broken — check @types/react is installed in packages-dir." + +### C5. Clean up extractor `findPackageJsonDirs` after A1 + +Once yarn workspaces is the primary discovery path, the fs-walk becomes a +narrow fallback — shrink it. + +--- + +## Correctness bugs found (not improvements) + +### D1. `findPackageJsonDirs` doesn't skip `node_modules` symlinks + +The walk (`extract-api.ts:312`) uses `readdirSync` with +`withFileTypes: true`, but a symlink to a directory reports +`isDirectory() === true`. If any `packages/` entry is symlinked out (unusual +but possible), the walk could follow it into unrelated trees. + +**Action:** use `entry.isDirectory() && !entry.isSymbolicLink()`. + +### D2. `newestMtimeInSources` walks `src/` but not monorepo-local deps + +If package A re-exports from package B and B's source changes without +rebuilding B, A's check passes (A's `src/` isn't newer than A's +`dist/types/`) but the resulting diff is still wrong. + +**Action:** either document this limitation explicitly, or extend the check +to include declared workspace deps. (Probably not worth extending — loudly +document in the error message.) + +### D3. 
Extractor succeeds when no packages are found + +If `discoverPackages` returns an empty list (wrong `--packages-dir`, bad +filter, etc.), the extractor prints "Found 0 packages" but still exits 0. +`compare` then finds zero pairs and reports "no changes." + +**Action:** exit 1 when zero packages are discovered, unless an explicit +`--allow-empty` flag is passed. + +--- + +## Proposed order of execution + +1. **D1, D3, B1** (small correctness wins, low risk) +2. **A4** (sort exports/properties — removes the largest source of spurious + diffs) +3. **A1** (yarn workspaces discovery — unblocks removing fragile fs-walk + code) +4. **A3** (local cache — biggest UX win for iterative use) +5. **A2** (published cache — biggest wall-clock savings) +6. **B2, B3, B4, B5** (accuracy edge cases) +7. **C1–C5** (polish) + +Minimum viable first pass: **D1, D3, B1, A4**. diff --git a/rsp-api-checker/Cargo.lock b/rsp-api-checker/Cargo.lock new file mode 100644 index 00000000000..d64b1f56b57 --- /dev/null +++ b/rsp-api-checker/Cargo.lock @@ -0,0 +1,1926 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "anstream" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" + +[[package]] +name = "anstyle-parse" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.11.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4512299f36f043ab09a583e57bceb5a5aab7a73db1805848e8fef3c9e8c78b3" + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c5703da9466b66a946814e1adf53ea2c90f10063b86290cc9eb67ce3478a20" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "clap" +version = "4.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ddb117e43bbf7dacf0a4190fef4d345b9bad68dfc649cb349e7d17d28428e51" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ce8604710f6733aa641a2b3731eaa1e8b3d9973d5e3565da11800813f997a9" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" + +[[package]] +name = 
"colorchoice" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" + +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f1f227452a390804cdb637b74a86990f2a7d7ba4b7d5693aac9b4dd6defd8d6" + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = 
"futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = 
"getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ca68d021ef39cf6463ab54c1d0f5daf03377b70561305bb89a8f83aab66e0f" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64", + "bytes", + 
"futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "icu_collections" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c" +dependencies = [ + "displaydoc", + "potential_utf", + "utf8_iter", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38" + +[[package]] +name = "icu_properties" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14" + +[[package]] +name = "icu_provider" +version = 
"2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" +dependencies = [ + "equivalent", + "hashbrown 0.17.0", + "serde", + "serde_core", +] + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "iri-string" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itoa" +version = "1.0.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "js-sys" +version = "0.3.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca" +dependencies = [ + "cfg-if", + "futures-util", + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.185" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ff2c0fe9bc6cb6b14a0592c2ff4fa9ceb83eea9db979b0487cd054946a2b8f" + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mio" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "native-tls" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "openssl" +version = "0.10.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfe4646e360ec77dff7dde40ed3d6c5fee52d156ef4a62f53973d38294dad87f" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.113" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad2f2c0eba47118757e4c6d2bff2838f3e0523380021356e7875e858372ce644" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pkg-config" +version = "0.3.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19f132c84eca552bf34cab8ec81f1c1dcc229b811638f9d283dceabe58c5569e" + +[[package]] +name = "potential_utf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + 
"libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsp-api-check" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "colored", + "futures", + "glob", + "indexmap", + "reqwest", + "serde", + "serde_json", + "similar", + "tempfile", + "tokio", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f9466fb2c14ea04357e91413efb882e2a6d4a406e625449bc0a5d360d53a21" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "schannel" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" +dependencies = 
[ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies 
= [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom 0.4.2", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "tinystr" +version = "0.8.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a91135f59b1cbf38c91e73cf3386fca9bb77915c45ce2771460c9d92f0f3d776" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version 
= "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.118" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f371d383f2fb139252e0bfac3b81b265689bf45b6874af544ffa4c975ac1ebf8" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f2dfbb17949fa2088e5d39408c48368947b86f7834484e87b73de55bc14d97d" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4" + +[[package]] +name = "yoke" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf" +dependencies = [ + "displaydoc", + 
"yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/rsp-api-checker/Cargo.toml b/rsp-api-checker/Cargo.toml new file mode 100644 index 00000000000..bc6d2ed2f79 --- /dev/null +++ b/rsp-api-checker/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "rsp-api-check" +version = "0.1.0" +edition = "2021" +description = "API comparison tool for the react-spectrum monorepo" + +[dependencies] +clap = { version = "4", features = ["derive"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +similar = { version = "2", features = ["text"] } +glob = "0.3" +tempfile = "3" +tokio = { version = "1", features = ["full"] } +reqwest = { version = "0.12", features = ["json"] } +colored = "2" +indexmap = { version = "2", features = ["serde"] } +anyhow = "1" +futures = "0.3" diff --git a/rsp-api-checker/README.md b/rsp-api-checker/README.md new file mode 100644 index 00000000000..a67f571d95f --- /dev/null +++ b/rsp-api-checker/README.md @@ -0,0 +1,175 @@ +# rsp-api-check + +API comparison tool for the [react-spectrum](https://github.com/adobe/react-spectrum) monorepo. Replaces the Node.js scripts (`buildPublishedAPI.js`, `buildBranchAPI.js`, `compareAPIs.js`) with a Rust CLI + a standalone TypeScript extractor. 
+ +## Architecture + +``` +┌─────────────────────┐ ┌──────────────────────┐ +│ get-published-api │ │ get-local-api │ +│ │ │ │ +│ npm registry HTTP │ │ reads local build │ +│ → npm install │ │ .d.ts files │ +│ → ts-extractor │ │ → ts-extractor │ +│ → dist/base-api/ │ │ → dist/branch-api/ │ +└────────┬────────────┘ └───────────┬───────────┘ + │ │ + └──────────┐ ┌──────────────┘ + ▼ ▼ + ┌─────────────────┐ + │ compare │ + │ │ + │ reads api.json │ + │ rebuilds types │ + │ diffs output │ + └─────────────────┘ +``` + +**Key difference from the old scripts:** the TypeScript type extraction now reads `.d.ts` files directly (via the TS compiler API in `ts-extractor/extract-api.ts`) instead of requiring a full Parcel build. This makes the published-API path dramatically faster since the `.d.ts` files already exist in the npm package. + +## Prerequisites + +- **Rust** (1.75+) — to build the CLI +- **Node.js** (18+) — needed by the TypeScript extractor +- **npm** — for installing packages in the published-API workflow + +## Build + +```sh +cargo build --release +``` + +The binary is at `target/release/rsp-api-check`. + +## Usage + +All commands assume you're running from the react-spectrum monorepo root. + +### 1. Extract the published (baseline) API + +```sh +rsp-api-check get-published-api --repo-root . +``` + +This queries the npm registry, downloads all published packages, and runs the TypeScript extractor on their `.d.ts` files. Output goes to `dist/base-api/` by default. + +### 2. Extract the local (branch) API + +First, build the project so `.d.ts` files are generated: + +```sh +yarn build +``` + +Then extract: + +```sh +rsp-api-check get-local-api --repo-root . +``` + +Output goes to `dist/branch-api/` by default. + +### 3. 
Compare + +```sh +rsp-api-check compare +``` + +Options: + +| Flag | Description | +|------|-------------| +| `--base-api-dir ` | Base API directory (default: `dist/base-api`) | +| `--branch-api-dir ` | Branch API directory (default: `dist/branch-api`) | +| `--package ` | Filter to a specific package (substring match) | +| `--interface ` | Filter to a specific interface/export name | +| `--ci` | Output GitHub-flavored markdown with collapsible sections | +| `--json` | Output machine-readable JSON | +| `--verbose` | Extra debug output | + +### All-in-one (published vs local) + +```sh +rsp-api-check get-published-api --repo-root . +rsp-api-check get-local-api --repo-root . +rsp-api-check compare +``` + +### All-in-one (main branch vs local) + +Build the main branch API first, then your branch: + +```sh +# On your branch: +rsp-api-check get-local-api --repo-root . --output dist/branch-api +git stash +git checkout main +yarn build +rsp-api-check get-local-api --repo-root . --output dist/base-api +git checkout - +git stash pop +rsp-api-check compare +``` + +## Output format + +The diff output uses a TypeScript-like syntax with `+`/`-` markers: + +``` +### react-aria-components + +#### react-aria-components:ComboBox + ComboBox { + allowsCustomValue?: boolean + ... +- onChange?: (T) => void ++ onChange?: (ChangeValueType) => void + ... + } +``` + +Export names are formatted as `(@scope/)?package:ExportName`. + +External types (React, DOM, etc.) are **not** flattened into the API surface. Instead they appear in `extends` clauses: + +``` + FieldButton extends HTMLAttributes { + isActive?: boolean + isQuiet?: boolean + ... + } +``` + +## TypeScript Extractor + +The `ts-extractor/` directory contains a standalone TypeScript script that uses the TS compiler API to walk `.d.ts` exports and produce `api.json` files. 
It handles: + +- Cross-package type resolution (via `node_modules`) +- Generic type parameters and constraints +- Interface inheritance (flattens internal types, preserves external extends) +- Component detection (functions returning JSX.Element/ReactNode) +- JSDoc `@default` tag extraction + +Install its dependencies once: + +```sh +cd ts-extractor && npm install +``` + +It can also be run directly: + +```sh +npx tsx ts-extractor/extract-api.ts --packages-dir ./packages --output-dir ./dist/branch-api +``` + +## Differences from the original scripts + +| Aspect | Old (Node.js) | New (Rust + TS extractor) | +|--------|---------------|---------------------------| +| Published API extraction | Parcel build on downloaded source | TS compiler on `.d.ts` (no Parcel) | +| Local API extraction | Parcel build in temp dir | TS compiler on local `.d.ts` | +| npm queries | `npm view` subprocesses (serial) | HTTP requests (parallel) | +| Type rendering | 3 copies of `processType()` | Single `render_type()` in Rust | +| Diff engine | JS `diff` library | Rust `similar` crate | +| Output modes | Terminal + partial CI markdown | Terminal, CI markdown, JSON | +| External types | Flattened (HTMLAttributes inlined) | Preserved as `extends` | diff --git a/rsp-api-checker/src/api_json.rs b/rsp-api-checker/src/api_json.rs new file mode 100644 index 00000000000..59e2ed5e9db --- /dev/null +++ b/rsp-api-checker/src/api_json.rs @@ -0,0 +1,601 @@ +//! Strongly-typed representation of the `api.json` format produced by +//! the TypeScript extractor (and previously by parcel-transformer-docs). + +use indexmap::IndexMap; +use serde::Deserialize; + +/// Top-level api.json structure. +#[derive(Debug, Deserialize, Default)] +pub struct ApiJson { + #[serde(default)] + pub exports: IndexMap, + #[serde(default)] + pub links: IndexMap, +} + +/// Every type node in the API JSON. Uses serde's internally-tagged enum +/// representation: `{ "type": "string", "value": "hello" }`. 
+#[derive(Debug, Clone, Deserialize)] +#[serde(tag = "type", rename_all = "camelCase")] +pub enum TypeNode { + // ── Primitives ────────────────────────────────────────────────────── + Any, + Null, + Undefined, + Void, + Unknown, + Never, + This, + Symbol, + + Boolean { + value: Option, + }, + String { + value: Option, + }, + Number { + value: Option, + }, + + // ── Composite types ───────────────────────────────────────────────── + Union { + elements: Vec, + }, + Intersection { + types: Vec, + }, + Array { + #[serde(rename = "elementType")] + element_type: Box, + }, + Tuple { + elements: Vec, + }, + Object { + properties: Option>, + #[serde(default)] + exact: bool, + }, + + // ── Generics / type-level constructs ──────────────────────────────── + Application { + base: Box, + #[serde(rename = "typeParameters", default)] + type_parameters: Vec, + }, + TypeParameter { + name: String, + constraint: Option>, + default: Option>, + }, + Conditional { + #[serde(rename = "checkType")] + check_type: Box, + #[serde(rename = "extendsType")] + extends_type: Box, + #[serde(rename = "trueType")] + true_type: Box, + #[serde(rename = "falseType")] + false_type: Box, + }, + IndexedAccess { + #[serde(rename = "objectType")] + object_type: Box, + #[serde(rename = "indexType")] + index_type: Box, + }, + #[serde(rename = "keyof")] + Keyof { + keyof: Box, + }, + TypeOperator { + operator: String, + value: Box, + }, + Mapped { + #[serde(rename = "typeParameter")] + type_parameter: Box, + #[serde(rename = "typeAnnotation")] + type_annotation: Box, + readonly: Option, + }, + Infer { + value: String, + }, + Template { + elements: Vec, + }, + + // ── Named references ──────────────────────────────────────────────── + Identifier { + name: String, + }, + Link { + id: Option, + }, + + // ── Declarations ──────────────────────────────────────────────────── + Interface { + #[serde(default)] + id: Option, + #[serde(default)] + name: Option, + #[serde(default)] + properties: IndexMap, + 
#[serde(rename = "typeParameters", default)] + type_parameters: Vec, + #[serde(default)] + extends: Vec, + #[serde(default)] + description: Option, + #[serde(default)] + access: Option, + }, + Component { + #[serde(default)] + id: Option, + #[serde(default)] + name: Option, + props: Option>, + #[serde(rename = "typeParameters", default)] + type_parameters: Vec, + #[serde(rename = "ref", default)] + ref_type: Option>, + #[serde(default)] + description: Option, + #[serde(default)] + access: Option, + }, + Function { + #[serde(default)] + id: Option, + #[serde(default)] + name: Option, + #[serde(default)] + parameters: ParameterMap, + #[serde(rename = "return", default)] + return_type: Option>, + #[serde(rename = "typeParameters", default)] + type_parameters: Vec, + #[serde(default)] + description: Option, + #[serde(default)] + access: Option, + }, + Alias { + #[serde(default)] + id: Option, + #[serde(default)] + name: Option, + value: Box, + #[serde(rename = "typeParameters", default)] + type_parameters: Vec, + #[serde(default)] + description: Option, + #[serde(default)] + access: Option, + }, + Property { + name: String, + #[serde(rename = "indexType", default)] + index_type: Option>, + value: Box, + #[serde(default)] + optional: bool, + #[serde(default)] + description: Option, + #[serde(default)] + access: Option, + #[serde(default)] + default: Option, + }, + Method { + name: String, + value: Box, + #[serde(default)] + optional: bool, + #[serde(default)] + access: Option, + #[serde(default)] + description: Option, + #[serde(default)] + default: Option, + #[serde(rename = "static", default)] + is_static: bool, + #[serde(rename = "abstract", default)] + is_abstract: bool, + }, + Parameter { + #[serde(default)] + name: Option, + value: Box, + #[serde(default)] + optional: bool, + #[serde(default)] + rest: bool, + }, +} + +/// Parameters can come as either `[param, param, ...]` or `{ name: param, ... }`. 
+#[derive(Debug, Clone)] +pub enum ParameterMap { + Map(IndexMap), + List(Vec), +} + +impl Default for ParameterMap { + fn default() -> Self { + Self::Map(IndexMap::new()) + } +} + +impl ParameterMap { + pub fn iter_ordered(&self) -> Vec<(&str, &TypeNode)> { + match self { + Self::Map(m) => m.iter().map(|(k, v)| (k.as_str(), v)).collect(), + Self::List(l) => l + .iter() + .enumerate() + .map(|(i, v)| { + // Try to extract the name from the parameter node + let name = match v { + TypeNode::Parameter { name: Some(n), .. } => n.as_str(), + _ => "", + }; + let _ = i; // suppress unused warning + (name, v) + }) + .collect(), + } + } +} + +impl<'de> Deserialize<'de> for ParameterMap { + fn deserialize>(d: D) -> Result { + use serde::de; + + struct Visitor; + impl<'de> de::Visitor<'de> for Visitor { + type Value = ParameterMap; + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "an object or array of parameters") + } + fn visit_seq>(self, mut seq: A) -> Result { + let mut list = Vec::new(); + while let Some(item) = seq.next_element()? { + list.push(item); + } + Ok(ParameterMap::List(list)) + } + fn visit_map>(self, mut map: A) -> Result { + let mut m = IndexMap::new(); + while let Some((k, v)) = map.next_entry()? { + m.insert(k, v); + } + Ok(ParameterMap::Map(m)) + } + } + + d.deserialize_any(Visitor) + } +} + +impl ApiJson { + pub fn load(path: &std::path::Path) -> anyhow::Result { + let data = std::fs::read_to_string(path)?; + Ok(serde_json::from_str(&data)?) 
+ } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn parse_node(s: &str) -> TypeNode { + serde_json::from_str(s).expect("failed to parse TypeNode") + } + + // ── Primitives ───────────────────────────────────────────────────────── + + #[test] + fn test_primitive_unit_variants() { + assert!(matches!(parse_node(r#"{"type":"any"}"#), TypeNode::Any)); + assert!(matches!(parse_node(r#"{"type":"null"}"#), TypeNode::Null)); + assert!(matches!(parse_node(r#"{"type":"undefined"}"#), TypeNode::Undefined)); + assert!(matches!(parse_node(r#"{"type":"void"}"#), TypeNode::Void)); + assert!(matches!(parse_node(r#"{"type":"unknown"}"#), TypeNode::Unknown)); + assert!(matches!(parse_node(r#"{"type":"never"}"#), TypeNode::Never)); + assert!(matches!(parse_node(r#"{"type":"this"}"#), TypeNode::This)); + assert!(matches!(parse_node(r#"{"type":"symbol"}"#), TypeNode::Symbol)); + } + + #[test] + fn test_boolean_variants() { + assert!(matches!(parse_node(r#"{"type":"boolean"}"#), TypeNode::Boolean { value: None })); + assert!( + matches!(parse_node(r#"{"type":"boolean","value":true}"#), TypeNode::Boolean { value: Some(true) }) + ); + assert!( + matches!(parse_node(r#"{"type":"boolean","value":false}"#), TypeNode::Boolean { value: Some(false) }) + ); + } + + #[test] + fn test_string_variants() { + assert!(matches!(parse_node(r#"{"type":"string"}"#), TypeNode::String { value: None })); + let node = parse_node(r#"{"type":"string","value":"hello"}"#); + assert!(matches!(node, TypeNode::String { value: Some(ref v) } if v == "hello")); + } + + #[test] + fn test_number_variants() { + assert!(matches!(parse_node(r#"{"type":"number"}"#), TypeNode::Number { value: None })); + let node = parse_node(r#"{"type":"number","value":42}"#); + assert!(matches!(node, TypeNode::Number { value: Some(_) })); + } + + // ── Composite types ──────────────────────────────────────────────────── + + #[test] + fn test_union() { + let node = 
parse_node(r#"{"type":"union","elements":[{"type":"string"},{"type":"null"}]}"#); + assert!(matches!(node, TypeNode::Union { elements } if elements.len() == 2)); + } + + #[test] + fn test_intersection() { + let node = parse_node(r#"{"type":"intersection","types":[{"type":"any"},{"type":"never"}]}"#); + assert!(matches!(node, TypeNode::Intersection { types } if types.len() == 2)); + } + + #[test] + fn test_array() { + let node = parse_node(r#"{"type":"array","elementType":{"type":"string"}}"#); + assert!(matches!(node, TypeNode::Array { element_type } if matches!(*element_type, TypeNode::String { .. }))); + } + + #[test] + fn test_tuple() { + let node = parse_node(r#"{"type":"tuple","elements":[{"type":"string"},{"type":"number"}]}"#); + assert!(matches!(node, TypeNode::Tuple { elements } if elements.len() == 2)); + } + + #[test] + fn test_object_with_properties() { + let json = r#"{"type":"object","properties":{"x":{"type":"property","name":"x","value":{"type":"number"}}}}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Object { properties: Some(_), exact: false })); + } + + #[test] + fn test_object_no_properties() { + let node = parse_node(r#"{"type":"object"}"#); + assert!(matches!(node, TypeNode::Object { properties: None, .. })); + } + + // ── Generic / type-level ─────────────────────────────────────────────── + + #[test] + fn test_application() { + let json = r#"{"type":"application","base":{"type":"identifier","name":"Promise"},"typeParameters":[{"type":"string"}]}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Application { .. })); + } + + #[test] + fn test_type_parameter() { + let json = r#"{"type":"typeParameter","name":"T","constraint":{"type":"string"}}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::TypeParameter { name, constraint: Some(_), .. 
} if name == "T")); + } + + #[test] + fn test_conditional() { + let json = r#"{ + "type": "conditional", + "checkType": {"type":"any"}, + "extendsType": {"type":"string"}, + "trueType": {"type":"string"}, + "falseType": {"type":"never"} + }"#; + assert!(matches!(parse_node(json), TypeNode::Conditional { .. })); + } + + #[test] + fn test_indexed_access() { + let json = r#"{"type":"indexedAccess","objectType":{"type":"identifier","name":"T"},"indexType":{"type":"string","value":"key"}}"#; + assert!(matches!(parse_node(json), TypeNode::IndexedAccess { .. })); + } + + #[test] + fn test_keyof() { + let json = r#"{"type":"keyof","keyof":{"type":"identifier","name":"Props"}}"#; + assert!(matches!(parse_node(json), TypeNode::Keyof { .. })); + } + + #[test] + fn test_mapped() { + let json = r#"{"type":"mapped","typeParameter":{"type":"typeParameter","name":"K"},"typeAnnotation":{"type":"string"}}"#; + assert!(matches!(parse_node(json), TypeNode::Mapped { .. })); + } + + #[test] + fn test_infer() { + let json = r#"{"type":"infer","value":"R"}"#; + assert!(matches!(parse_node(json), TypeNode::Infer { value } if value == "R")); + } + + #[test] + fn test_template() { + let json = r#"{"type":"template","elements":[{"type":"string","value":"prefix-"}]}"#; + assert!(matches!(parse_node(json), TypeNode::Template { elements } if elements.len() == 1)); + } + + // ── Named references ─────────────────────────────────────────────────── + + #[test] + fn test_identifier() { + let node = parse_node(r#"{"type":"identifier","name":"ReactNode"}"#); + assert!(matches!(node, TypeNode::Identifier { name } if name == "ReactNode")); + } + + #[test] + fn test_link_with_id() { + let node = parse_node(r#"{"type":"link","id":"@pkg:SomeType"}"#); + assert!(matches!(node, TypeNode::Link { id: Some(ref s) } if s == "@pkg:SomeType")); + } + + #[test] + fn test_link_no_id() { + let node = parse_node(r#"{"type":"link"}"#); + assert!(matches!(node, TypeNode::Link { id: None })); + } + + // ── Declarations 
────────────────────────────────────────────────────── + + #[test] + fn test_interface() { + let json = r#"{ + "type": "interface", + "name": "ButtonProps", + "properties": { + "isDisabled": { + "type": "property", + "name": "isDisabled", + "value": {"type": "boolean"}, + "optional": true + } + }, + "typeParameters": [], + "extends": [] + }"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Interface { name: Some(ref n), .. } if n == "ButtonProps")); + if let TypeNode::Interface { properties, .. } = node { + assert!(properties.contains_key("isDisabled")); + } + } + + #[test] + fn test_component() { + let json = r#"{"type":"component","name":"Button","props":{"type":"interface","properties":{}}}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Component { name: Some(ref n), .. } if n == "Button")); + } + + #[test] + fn test_function_with_map_parameters() { + let json = r#"{ + "type": "function", + "name": "myFn", + "parameters": { + "x": {"type":"parameter","name":"x","value":{"type":"string"}} + }, + "return": {"type":"void"} + }"#; + let node = parse_node(json); + if let TypeNode::Function { parameters, .. } = node { + assert!(matches!(parameters, ParameterMap::Map(_))); + assert_eq!(parameters.iter_ordered().len(), 1); + } else { + panic!("expected Function variant"); + } + } + + #[test] + fn test_function_with_list_parameters() { + let json = r#"{ + "type": "function", + "parameters": [ + {"type":"parameter","name":"a","value":{"type":"number"}}, + {"type":"parameter","name":"b","value":{"type":"string"}} + ] + }"#; + let node = parse_node(json); + if let TypeNode::Function { parameters, .. 
} = node { + assert!(matches!(parameters, ParameterMap::List(_))); + assert_eq!(parameters.iter_ordered().len(), 2); + } else { + panic!("expected Function variant"); + } + } + + #[test] + fn test_alias() { + let json = r#"{"type":"alias","name":"MyAlias","value":{"type":"string"},"typeParameters":[]}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Alias { name: Some(ref n), .. } if n == "MyAlias")); + } + + #[test] + fn test_property() { + let json = r#"{"type":"property","name":"foo","value":{"type":"string"},"optional":true}"#; + let node = parse_node(json); + assert!( + matches!(node, TypeNode::Property { ref name, optional: true, .. } if name == "foo") + ); + } + + #[test] + fn test_method() { + let json = r#"{"type":"method","name":"onClick","value":{"type":"function","parameters":[]},"optional":false}"#; + let node = parse_node(json); + assert!(matches!(node, TypeNode::Method { ref name, optional: false, .. } if name == "onClick")); + } + + #[test] + fn test_parameter() { + let json = r#"{"type":"parameter","name":"value","value":{"type":"string"},"optional":false,"rest":false}"#; + let node = parse_node(json); + assert!( + matches!(node, TypeNode::Parameter { name: Some(ref n), optional: false, rest: false, .. } if n == "value") + ); + } + + // ── ParameterMap ────────────────────────────────────────────────────── + + #[test] + fn test_parameter_map_iter_ordered_list_uses_param_name() { + let json = r#"{"type":"function","parameters":[{"type":"parameter","name":"arg1","value":{"type":"string"}}]}"#; + if let TypeNode::Function { parameters, .. } = parse_node(json) { + let ordered = parameters.iter_ordered(); + assert_eq!(ordered[0].0, "arg1"); + } + } + + #[test] + fn test_parameter_map_iter_ordered_map_uses_key() { + let json = r#"{"type":"function","parameters":{"myParam":{"type":"parameter","value":{"type":"number"}}}}"#; + if let TypeNode::Function { parameters, .. 
} = parse_node(json) { + let ordered = parameters.iter_ordered(); + assert_eq!(ordered[0].0, "myParam"); + } + } + + // ── ApiJson ──────────────────────────────────────────────────────────── + + #[test] + fn test_api_json_defaults() { + let json: ApiJson = serde_json::from_str(r#"{}"#).unwrap(); + assert!(json.exports.is_empty()); + assert!(json.links.is_empty()); + } + + #[test] + fn test_api_json_load_from_file() { + use std::io::Write; + let mut f = tempfile::NamedTempFile::new().unwrap(); + let content = r#"{"exports":{"Foo":{"type":"any"}},"links":{"Bar":{"type":"string"}}}"#; + f.write_all(content.as_bytes()).unwrap(); + let loaded = ApiJson::load(f.path()).unwrap(); + assert!(loaded.exports.contains_key("Foo")); + assert!(loaded.links.contains_key("Bar")); + } + + #[test] + fn test_api_json_load_nonexistent_returns_error() { + let result = ApiJson::load(std::path::Path::new("/nonexistent/path/api.json")); + assert!(result.is_err()); + } +} diff --git a/rsp-api-checker/src/commands/compare.rs b/rsp-api-checker/src/commands/compare.rs new file mode 100644 index 00000000000..5a0bf6a111d --- /dev/null +++ b/rsp-api-checker/src/commands/compare.rs @@ -0,0 +1,135 @@ +//! `compare` command: reads two directories of `api.json` files and produces +//! a human-readable diff of all API changes. + +use std::path::PathBuf; + +use anyhow::Result; + +use crate::differ::{diff_package, discover_pairs, format_output, PackageDiff}; + +#[derive(Debug)] +pub struct CompareOpts { + /// Directory containing the "base" (published / main) API files. + pub base_dir: PathBuf, + /// Directory containing the "branch" (local / PR) API files. + pub branch_dir: PathBuf, + /// Only diff this package (optional filter). + pub package_filter: Option, + /// Only diff this interface (optional filter). + pub interface_filter: Option, + /// Output GitHub-compatible markdown. + pub is_ci: bool, + /// Print extra debug info. + pub verbose: bool, + /// Output as JSON instead of text. 
+ pub json: bool, + /// Print per-phase timing breakdown on completion. + pub timing: bool, +} + +pub async fn execute(opts: CompareOpts) -> Result<()> { + let t_total = std::time::Instant::now(); + if !opts.base_dir.exists() { + anyhow::bail!( + "Base API directory not found: {}. \ + Run `get-published-api` or `get-local-api` first.", + opts.base_dir.display() + ); + } + if !opts.branch_dir.exists() { + anyhow::bail!( + "Branch API directory not found: {}. \ + Run `get-local-api` first.", + opts.branch_dir.display() + ); + } + + let t_discover = std::time::Instant::now(); + let pairs = discover_pairs(&opts.base_dir, &opts.branch_dir)?; + if pairs.is_empty() { + println!("No API files found to compare."); + return Ok(()); + } + let discover_elapsed = t_discover.elapsed(); + + if opts.verbose { + println!("Found {} package pairs to compare", pairs.len()); + } + + let mut all_diffs: Vec = Vec::new(); + + let t_diff = std::time::Instant::now(); + for pair in pairs { + if let Some(ref filter) = opts.package_filter { + if !pair.package_name.contains(filter.as_str()) { + continue; + } + } + + if opts.verbose { + println!("Diffing {}...", pair.package_name); + } + + let mut pkg_diff = diff_package( + &pair.package_name, + &pair.base, + &pair.branch, + opts.is_ci, + ); + + // Apply interface filter + if let Some(ref iface_filter) = opts.interface_filter { + pkg_diff.diffs.retain(|d| { + d.qualified_name + .rsplit(':') + .next() + .map(|n| n == iface_filter.as_str()) + .unwrap_or(false) + }); + } + + all_diffs.push(pkg_diff); + } + let diff_elapsed = t_diff.elapsed(); + + let t_format = std::time::Instant::now(); + if opts.json { + print_json(&all_diffs)?; + } else { + let output = format_output(&all_diffs, opts.is_ci); + if output.is_empty() { + println!("No API changes detected."); + } else { + print!("{output}"); + } + } + let format_elapsed = t_format.elapsed(); + + if opts.timing { + eprintln!( + "Timing: discover={:.2}s diff={:.2}s format={:.2}s total={:.2}s", + 
discover_elapsed.as_secs_f64(), + diff_elapsed.as_secs_f64(), + format_elapsed.as_secs_f64(), + t_total.elapsed().as_secs_f64(), + ); + } + + Ok(()) +} + +fn print_json(diffs: &[PackageDiff]) -> Result<()> { + let mut output = Vec::new(); + for pkg in diffs { + for diff in &pkg.diffs { + output.push(serde_json::json!({ + "package": pkg.package_name, + "export": diff.qualified_name, + "changedBy": diff.changed_by, + "affects": diff.affects, + })); + } + } + println!("{}", serde_json::to_string_pretty(&output)?); + Ok(()) +} diff --git a/rsp-api-checker/src/commands/env_report.rs b/rsp-api-checker/src/commands/env_report.rs new file mode 100644 index 00000000000..d26d3d466ce --- /dev/null +++ b/rsp-api-checker/src/commands/env_report.rs @@ -0,0 +1,579 @@ +//! `env-report` command: captures environment + per-package state so we can +//! diff CI vs local runs and pinpoint cross-package TS resolution failures. +//! +//! This never modifies anything — purely read-only inspection. The output is +//! meant to be persisted as a CI artifact and diffed against a local run. + +use std::path::{Path, PathBuf}; + +use anyhow::{Context, Result}; +use serde::Serialize; + +use crate::workspace::run_capture; + +#[derive(Debug)] +pub struct EnvReportOpts { + /// Root of the monorepo. + pub repo_root: PathBuf, + /// Where to write the JSON report. If None, stdout only. + pub output: Option, +} + +#[derive(Serialize)] +struct EnvReport { + /// ISO-8601 timestamp when the report was generated. + generated_at: String, + /// Tool versions collected via ` --version` subprocesses. + tools: ToolVersions, + /// git state of the repo_root. + git: GitInfo, + /// Per-package state: does its d.ts exist, mtimes, symlink resolution. + packages: Vec, + /// Aggregate counts across all packages. 
+ summary: Summary, +} + +#[derive(Serialize, Default)] +struct ToolVersions { + node: Option, + npm: Option, + yarn: Option, + tsc: Option, + tsx: Option, + rustc: Option, + os: Option, +} + +#[derive(Serialize, Default)] +struct GitInfo { + head_sha: Option, + branch: Option, + /// Number of modified / untracked / staged files by `git status --porcelain`. + working_tree_changes: usize, + /// When `working_tree_changes > 0`, the first ~20 lines of porcelain output + /// so CI runs on PR branches show roughly what's dirty. + working_tree_sample: Vec, +} + +#[derive(Serialize)] +struct PackageState { + name: String, + /// Path relative to repo_root. + dir: String, + private: bool, + has_types_entry: bool, + /// The `types` / `typings` / `exports["."]["types"]` path if declared. + types_entry: Option, + /// Whether the types entry actually exists on disk. + types_entry_exists: bool, + /// mtime (unix seconds) of the types entry file, if it exists. + types_entry_mtime: Option, + /// Newest mtime (unix seconds) among .ts/.tsx source files under src/. + newest_source_mtime: Option, + /// True when source is newer than the types entry — local build is stale. + source_newer_than_types: bool, + /// For each workspace dep listed in this package.json, where does + /// `realpath(this_pkg/node_modules/)` actually resolve to? A missing + /// entry means the dep isn't reachable from this package at all. + workspace_dep_resolution: Vec, + /// Every `.d.ts` file under `dist/types/`, sorted by relative path. Lets + /// us diff CI vs local to spot subpath types that weren't built/persisted + /// (e.g. `react-aria/useButton` resolving empty because + /// `dist/types/exports/useButton.d.ts` is missing). `None` when the + /// package has no `dist/types/` directory. + dist_types_files: Option>, +} + +#[derive(Serialize)] +struct DistTypesFile { + /// Path relative to the package dir, e.g. `dist/types/exports/useButton.d.ts`. + path: String, + /// Size in bytes. 
Trivially empty files (re-exports only) are still + /// meaningful — `0` would indicate an emit failure. + size: u64, + /// mtime in unix seconds. Matches `types_entry_mtime` for files built in + /// the same emit pass. + mtime: u64, +} + +#[derive(Serialize)] +struct WorkspaceDepResolution { + dep_name: String, + /// The path we looked up: `/node_modules/`. + lookup_path: String, + /// What realpath resolved to. None if the path doesn't exist. + resolved_to: Option, + /// True when resolved_to points somewhere under `packages/` (i.e. this is + /// a workspace symlink, not a separately-installed copy). + is_workspace_symlink: bool, + /// True when the resolved package has a types entry file on disk. + resolved_types_exist: Option, +} + +#[derive(Serialize)] +struct Summary { + total_packages: usize, + public_packages: usize, + packages_with_types_entry: usize, + packages_with_types_on_disk: usize, + packages_with_stale_types: usize, + /// Sum of workspace deps across all packages that failed to resolve. + unresolved_workspace_deps: usize, + /// Total `.d.ts` files found across every package's `dist/types/` tree. + /// A large CI-vs-local delta here is a smoking gun for partial builds / + /// partial workspace persistence. + total_dist_types_files: usize, + /// Number of packages that declare a `types` entry but whose `dist/types/` + /// directory is absent. 
+    packages_missing_dist_types_dir: usize,
+}
+
+/// Entry point for `env-report`: snapshot the toolchain, git state, and
+/// per-package build/types state, print a short summary, and optionally
+/// write the full JSON report to `opts.output`.
+pub async fn execute(opts: EnvReportOpts) -> Result<()> {
+    let repo_root = std::fs::canonicalize(&opts.repo_root)
+        .context(format!("resolving repo root: {}", opts.repo_root.display()))?;
+    let packages_dir = repo_root.join("packages");
+    if !packages_dir.exists() {
+        anyhow::bail!(
+            "packages/ directory not found at {}",
+            packages_dir.display()
+        );
+    }
+
+    println!("Collecting environment report from {}", repo_root.display());
+
+    let tools = collect_tool_versions(&repo_root).await;
+    let git = collect_git_info(&repo_root).await;
+    let packages = collect_package_states(&repo_root, &packages_dir)?;
+    let summary = summarize(&packages);
+
+    let report = EnvReport {
+        generated_at: chrono_like_now(),
+        tools,
+        git,
+        packages,
+        summary,
+    };
+
+    // Pretty-printed to stdout so it shows up in CI logs even without the artifact.
+    let json = serde_json::to_string_pretty(&report)?;
+    println!("\n===== env-report =====");
+    println!("{}", short_summary(&report));
+    println!("======================\n");
+
+    if let Some(output_path) = opts.output {
+        if let Some(parent) = output_path.parent() {
+            if !parent.as_os_str().is_empty() {
+                std::fs::create_dir_all(parent)
+                    .context(format!("creating {}", parent.display()))?;
+            }
+        }
+        std::fs::write(&output_path, &json)
+            .context(format!("writing {}", output_path.display()))?;
+        println!("env-report written to {}", output_path.display());
+    } else {
+        // Dump full JSON to stdout too so `| tee` captures it.
+        println!("{}", json);
+    }
+
+    Ok(())
+}
+
+/// One-screen human summary of the report, printed to CI logs.
+fn short_summary(r: &EnvReport) -> String {
+    let t = &r.tools;
+    let g = &r.git;
+    let s = &r.summary;
+    format!(
+        "node={} yarn={} tsc={} rustc={}\n\
+         git HEAD={} branch={} dirty files={}\n\
+         packages: {} total, {} public, {} with types entry, {} with types on disk, \
+         {} stale, {} unresolved workspace deps\n\
+         dist/types: {} files across all packages, {} packages declare types but have no dist/types/ dir",
+        t.node.as_deref().unwrap_or("?"),
+        t.yarn.as_deref().unwrap_or("?"),
+        t.tsc.as_deref().unwrap_or("?"),
+        t.rustc.as_deref().unwrap_or("?"),
+        g.head_sha.as_deref().unwrap_or("?"),
+        g.branch.as_deref().unwrap_or("?"),
+        g.working_tree_changes,
+        s.total_packages,
+        s.public_packages,
+        s.packages_with_types_entry,
+        s.packages_with_types_on_disk,
+        s.packages_with_stale_types,
+        s.unresolved_workspace_deps,
+        s.total_dist_types_files,
+        s.packages_missing_dist_types_dir,
+    )
+}
+
+/// Probe each tool's `--version`; any tool that is missing or fails simply
+/// reports as `None` rather than erroring out the whole report.
+async fn collect_tool_versions(repo_root: &Path) -> ToolVersions {
+    let mut v = ToolVersions::default();
+    v.node = first_line(run_capture("node", &["--version"], repo_root).await.ok());
+    v.npm = first_line(run_capture("npm", &["--version"], repo_root).await.ok());
+    v.yarn = first_line(run_capture("yarn", &["--version"], repo_root).await.ok());
+    v.tsc = first_line(
+        run_capture("npx", &["--no-install", "tsc", "--version"], repo_root)
+            .await
+            .ok(),
+    );
+    v.tsx = first_line(
+        run_capture("npx", &["--no-install", "tsx", "--version"], repo_root)
+            .await
+            .ok(),
+    );
+    v.rustc = first_line(run_capture("rustc", &["--version"], repo_root).await.ok());
+    v.os = Some(format!("{} {}", std::env::consts::OS, std::env::consts::ARCH));
+    v
+}
+
+/// First non-empty trimmed line of captured output, if any.
+fn first_line(s: Option<String>) -> Option<String> {
+    s.and_then(|t| t.lines().next().map(|l| l.trim().to_string()))
+        .filter(|s| !s.is_empty())
+}
+
+async fn collect_git_info(repo_root: &Path) -> GitInfo {
+    let mut g = GitInfo::default();
+    g.head_sha = first_line(run_capture("git", &["rev-parse", "HEAD"], repo_root).await.ok());
+    g.branch = first_line(
+        run_capture("git", &["rev-parse", "--abbrev-ref", "HEAD"], repo_root)
+            .await
+            .ok(),
+    );
+    if let Ok(porcelain) = run_capture("git", &["status", "--porcelain"], repo_root).await {
+        let lines: Vec<&str> = porcelain.lines().collect();
+        g.working_tree_changes = lines.len();
+        g.working_tree_sample = lines.iter().take(20).map(|l| l.to_string()).collect();
+    }
+    g
+}
+
+fn collect_package_states(repo_root: &Path, packages_dir: &Path) -> Result<Vec<PackageState>> {
+    let mut states = Vec::new();
+    let mut names = Vec::new();
+    // Reuse the shared walker so the package set matches what the extractor sees.
+    crate::npm::walk_for_package_dirs(packages_dir, 0, &mut names)?;
+    for pkg_dir in names {
+        if let Ok(state) = inspect_package(repo_root, &pkg_dir) {
+            states.push(state);
+        }
+    }
+    Ok(states)
+}
+
+fn inspect_package(repo_root: &Path, pkg_dir: &Path) -> Result<PackageState> {
+    let pkg_json_path = pkg_dir.join("package.json");
+    let contents = std::fs::read_to_string(&pkg_json_path)
+        .context(format!("reading {}", pkg_json_path.display()))?;
+    let pkg: serde_json::Value = serde_json::from_str(&contents)?;
+    let name = pkg
+        .get("name")
+        .and_then(|v| v.as_str())
+        .unwrap_or("")
+        .to_string();
+    let private = pkg.get("private").and_then(|v| v.as_bool()).unwrap_or(false);
+
+    let types_entry = resolve_types_entry(&pkg);
+    let (types_entry_exists, types_entry_mtime) = match &types_entry {
+        Some(rel) => {
+            let abs = pkg_dir.join(rel);
+            match std::fs::metadata(&abs) {
+                Ok(m) => (true, Some(mtime_secs(&m))),
+                Err(_) => (false, None),
+            }
+        }
+        None => (false, None),
+    };
+
+    let newest_source_mtime = newest_src_mtime(pkg_dir);
+    let source_newer_than_types = match (newest_source_mtime, types_entry_mtime) {
+        (Some(s), Some(t)) => s > t,
+        _ => false,
+    };
+
+    let workspace_dep_resolution = inspect_workspace_deps(&pkg, pkg_dir, repo_root);
+    let dist_types_files = collect_dist_types_files(pkg_dir);
+
+    let rel_dir = pkg_dir
+        .strip_prefix(repo_root)
+        .map(|p| p.to_string_lossy().to_string())
+        .unwrap_or_else(|_| pkg_dir.display().to_string());
+
+    Ok(PackageState {
+        name,
+        dir: rel_dir,
+        private,
+        has_types_entry: types_entry.is_some(),
+        types_entry,
+        types_entry_exists,
+        types_entry_mtime,
+        newest_source_mtime,
+        source_newer_than_types,
+        workspace_dep_resolution,
+        dist_types_files,
+    })
+}
+
+/// Enumerate every `.d.ts` file under `<pkg>/dist/types/`, sorted by
+/// relative path. Returns `None` if `dist/types/` doesn't exist — distinguishes
+/// "no types built at all" from "empty types tree".
+fn collect_dist_types_files(pkg_dir: &Path) -> Option<Vec<DistTypesFile>> {
+    let dist_types = pkg_dir.join("dist").join("types");
+    if !dist_types.exists() {
+        return None;
+    }
+    let mut files = Vec::new();
+    fn walk(root: &Path, cur: &Path, out: &mut Vec<DistTypesFile>, depth: usize) {
+        if depth > 10 {
+            return;
+        }
+        let Ok(entries) = std::fs::read_dir(cur) else {
+            return;
+        };
+        for entry in entries.flatten() {
+            let path = entry.path();
+            match entry.file_type() {
+                Ok(ft) if ft.is_dir() => walk(root, &path, out, depth + 1),
+                Ok(ft) if ft.is_file() => {
+                    if path.extension().and_then(|s| s.to_str()) == Some("ts")
+                        && path
+                            .to_string_lossy()
+                            .ends_with(".d.ts")
+                    {
+                        if let Ok(meta) = entry.metadata() {
+                            let rel = path
+                                .strip_prefix(root)
+                                .map(|p| p.to_string_lossy().to_string())
+                                .unwrap_or_else(|_| path.display().to_string());
+                            out.push(DistTypesFile {
+                                path: rel,
+                                size: meta.len(),
+                                mtime: mtime_secs(&meta),
+                            });
+                        }
+                    }
+                }
+                _ => {}
+            }
+        }
+    }
+    walk(pkg_dir, &dist_types, &mut files, 0);
+    // Sort so CI vs local diffs cleanly.
+    files.sort_by(|a, b| a.path.cmp(&b.path));
+    Some(files)
+}
+
+/// Modification time in whole seconds since the Unix epoch; 0 on any error.
+fn mtime_secs(m: &std::fs::Metadata) -> u64 {
+    m.modified()
+        .ok()
+        .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok())
+        .map(|d| d.as_secs())
+        .unwrap_or(0)
+}
+
+/// Mirror of the TS `resolveTypesField` — we need the same answer the
+/// extractor would get. Kept intentionally simple: walk nested objects/arrays
+/// looking for a string ending in `.d.ts`.
+fn resolve_types_entry(pkg: &serde_json::Value) -> Option<String> {
+    // Prefer `exports["."]["types"]`, then `types`, then `typings`.
+    if let Some(v) = pkg.pointer("/exports/./types") {
+        if let Some(s) = walk_for_dts(v) {
+            return Some(s);
+        }
+    }
+    if let Some(v) = pkg.pointer("/exports/.") {
+        if let Some(s) = walk_for_dts(v) {
+            return Some(s);
+        }
+    }
+    if let Some(v) = pkg.get("types").or_else(|| pkg.get("typings")) {
+        if let Some(s) = walk_for_dts(v) {
+            return Some(s);
+        }
+    }
+    None
+}
+
+/// Depth-first search for the first string value ending in `.d.ts`.
+fn walk_for_dts(v: &serde_json::Value) -> Option<String> {
+    match v {
+        serde_json::Value::String(s) if s.ends_with(".d.ts") => Some(s.clone()),
+        serde_json::Value::Array(arr) => arr.iter().find_map(walk_for_dts),
+        serde_json::Value::Object(map) => map.values().find_map(walk_for_dts),
+        _ => None,
+    }
+}
+
+/// Newest mtime of any `.ts`/`.tsx` under `src/`, skipping node_modules/.git/dist.
+/// `None` when the package has no `src/` directory.
+fn newest_src_mtime(pkg_dir: &Path) -> Option<u64> {
+    let src = pkg_dir.join("src");
+    if !src.exists() {
+        return None;
+    }
+    let mut newest: Option<u64> = None;
+    fn walk(dir: &Path, newest: &mut Option<u64>, depth: usize) {
+        if depth > 6 {
+            return;
+        }
+        let Ok(entries) = std::fs::read_dir(dir) else {
+            return;
+        };
+        for entry in entries.flatten() {
+            let path = entry.path();
+            let name = entry.file_name();
+            let name_str = name.to_string_lossy();
+            if name_str == "node_modules" || name_str == ".git" || name_str == "dist" {
+                continue;
+            }
+            match entry.file_type() {
+                Ok(ft) if ft.is_dir() => walk(&path, newest, depth + 1),
+                Ok(ft) if ft.is_file() => {
+                    let lossy = path.to_string_lossy();
+                    let is_src = lossy.ends_with(".ts") || lossy.ends_with(".tsx");
+                    if is_src {
+                        if let Ok(m) = entry.metadata() {
+                            let t = mtime_secs(&m);
+                            if newest.map_or(true, |n| t > n) {
+                                *newest = Some(t);
+                            }
+                        }
+                    }
+                }
+                _ => {}
+            }
+        }
+    }
+    walk(&src, &mut newest, 0);
+    newest
+}
+
+fn inspect_workspace_deps(
+    pkg: &serde_json::Value,
+    pkg_dir: &Path,
+    repo_root: &Path,
+) -> Vec<WorkspaceDepResolution> {
+    let mut out = Vec::new();
+    let packages_root = repo_root.join("packages");
+    for field in ["dependencies", "peerDependencies"] {
+        let Some(deps) = pkg.get(field).and_then(|v| v.as_object()) else {
+            continue;
+        };
+        for (dep_name, _version) in deps {
+            // We only care about deps that *could* be workspace deps — i.e.
+            // under the scopes we publish. Keep this in sync with `is_our_package`.
+            if !is_our_scope(dep_name) {
+                continue;
+            }
+            // Node-style upward resolution: walk up from pkg_dir looking for
+            // node_modules/<dep_name>. Yarn workspaces hoist to the root's
+            // node_modules, so a package-local lookup would spuriously miss
+            // everything.
+            let (lookup_path, resolved) = resolve_node_style(pkg_dir, dep_name);
+            let is_workspace_symlink = resolved
+                .as_ref()
+                .and_then(|p| {
+                    std::fs::canonicalize(&packages_root)
+                        .ok()
+                        .map(|r| p.starts_with(&r))
+                })
+                .unwrap_or(false);
+            let resolved_types_exist = resolved.as_ref().and_then(|r| {
+                let pkg_json = r.join("package.json");
+                let contents = std::fs::read_to_string(&pkg_json).ok()?;
+                let parsed: serde_json::Value = serde_json::from_str(&contents).ok()?;
+                let types_rel = resolve_types_entry(&parsed)?;
+                Some(r.join(types_rel).exists())
+            });
+            out.push(WorkspaceDepResolution {
+                dep_name: dep_name.clone(),
+                lookup_path,
+                resolved_to: resolved.map(|p| p.display().to_string()),
+                is_workspace_symlink,
+                resolved_types_exist,
+            });
+        }
+    }
+    out
+}
+
+/// Node-style resolution: starting at `start_dir`, walk up the tree looking
+/// for `node_modules/<dep_name>/package.json`. Returns (the path we used for the
+/// lookup_path field, the canonicalized resolution if found).
+fn resolve_node_style(start_dir: &Path, dep_name: &str) -> (String, Option<PathBuf>) {
+    let first = start_dir.join("node_modules").join(dep_name);
+    let mut cur = start_dir;
+    loop {
+        let candidate = cur.join("node_modules").join(dep_name);
+        if candidate.join("package.json").exists() {
+            if let Ok(real) = std::fs::canonicalize(&candidate) {
+                return (first.display().to_string(), Some(real));
+            }
+        }
+        match cur.parent() {
+            Some(p) => cur = p,
+            None => return (first.display().to_string(), None),
+        }
+    }
+}
+
+/// Scopes/names we publish; only deps under these can be workspace deps.
+fn is_our_scope(name: &str) -> bool {
+    name.starts_with("@react-spectrum/")
+        || name.starts_with("@react-aria/")
+        || name.starts_with("@react-stately/")
+        || name.starts_with("@react-types/")
+        || name.starts_with("@internationalized/")
+        || name.starts_with("@adobe/react-spectrum")
+        || name == "react-aria"
+        || name == "react-aria-components"
+        || name == "react-stately"
+}
+
+/// Roll the per-package states up into the counts reported in the summary.
+fn summarize(packages: &[PackageState]) -> Summary {
+    let total_packages = packages.len();
+    let public_packages = packages.iter().filter(|p| !p.private).count();
+    let packages_with_types_entry = packages.iter().filter(|p| p.has_types_entry).count();
+    let packages_with_types_on_disk =
+        packages.iter().filter(|p| p.types_entry_exists).count();
+    let packages_with_stale_types = packages.iter().filter(|p| p.source_newer_than_types).count();
+    let unresolved_workspace_deps = packages
+        .iter()
+        .flat_map(|p| p.workspace_dep_resolution.iter())
+        .filter(|d| d.resolved_to.is_none())
+        .count();
+    let total_dist_types_files = packages
+        .iter()
+        .filter_map(|p| p.dist_types_files.as_ref().map(|v| v.len()))
+        .sum();
+    // A package that explicitly points its `types` entry inside `dist/types/`
+    // but has no `dist/types/` directory at all is the most suspicious case —
+    // its subpath imports will silently resolve to empty types. We skip
+    // packages whose types live elsewhere (e.g. `@react-types/*` ship raw .ts
+    // via `src/`) and stub package.json files with no name.
+    let packages_missing_dist_types_dir = packages
+        .iter()
+        .filter(|p| {
+            p.name != ""
+                && p.types_entry
+                    .as_deref()
+                    .map(|t| t.contains("dist/types/"))
+                    .unwrap_or(false)
+                && p.dist_types_files.is_none()
+        })
+        .count();
+    Summary {
+        total_packages,
+        public_packages,
+        packages_with_types_entry,
+        packages_with_types_on_disk,
+        packages_with_stale_types,
+        unresolved_workspace_deps,
+        total_dist_types_files,
+        packages_missing_dist_types_dir,
+    }
+}
+
+fn chrono_like_now() -> String {
+    // Avoid pulling in `chrono` just for a timestamp. Unix seconds is enough.
+    let secs = std::time::SystemTime::now()
+        .duration_since(std::time::UNIX_EPOCH)
+        .map(|d| d.as_secs())
+        .unwrap_or(0);
+    format!("unix:{secs}")
+}
diff --git a/rsp-api-checker/src/commands/get_local.rs b/rsp-api-checker/src/commands/get_local.rs
new file mode 100644
index 00000000000..98294f0da3d
--- /dev/null
+++ b/rsp-api-checker/src/commands/get_local.rs
@@ -0,0 +1,118 @@
+//! `get-local-api` command: extracts the type API from the local build's
+//! `.d.ts` files. Assumes the build has already been run.
+
+use std::path::PathBuf;
+
+use anyhow::{Context, Result};
+
+use crate::workspace::run_extractor;
+use crate::workspaces::discover_workspaces;
+
+#[derive(Debug)]
+pub struct GetLocalOpts {
+    /// Root of the monorepo.
+    pub repo_root: PathBuf,
+    /// Where to write the extracted API files.
+    pub output_dir: PathBuf,
+    /// Print per-phase timing breakdown on completion.
+    pub timing: bool,
+}
+
+pub async fn execute(opts: GetLocalOpts) -> Result<()> {
+    let t_total = std::time::Instant::now();
+    let packages_dir = opts.repo_root.join("packages");
+    if !packages_dir.exists() {
+        anyhow::bail!(
+            "packages/ directory not found at {}",
+            packages_dir.display()
+        );
+    }
+
+    // Verify that at least some .d.ts files exist
+    let has_dts = has_declaration_files(&packages_dir);
+    if !has_dts {
+        anyhow::bail!(
+            "No .d.ts files found under {}/*/dist/. \
+             Run your build first (e.g. `yarn build`) to generate type declarations.",
+            packages_dir.display()
+        );
+    }
+
+    // Clean output directory
+    if opts.output_dir.exists() {
+        std::fs::remove_dir_all(&opts.output_dir)
+            .context("removing existing output directory")?;
+    }
+
+    // Ask yarn for the authoritative set of workspace packages and pass the
+    // list through to the extractor. This avoids the extractor's fragile
+    // depth-4 fs-walk, which silently drops any package published outside the
+    // assumed layout. Fall back to fs-walk (workspaces_file = None) when yarn
+    // isn't available.
+    std::fs::create_dir_all(&opts.output_dir)
+        .context("creating output directory")?;
+    let t_discover = std::time::Instant::now();
+    let workspaces_file = match discover_workspaces(&opts.repo_root).await? {
+        Some(workspaces) => {
+            println!(
+                "Using yarn workspaces list: {} public packages",
+                workspaces.len()
+            );
+            let path = opts.output_dir.join(".workspaces.json");
+            let json: Vec<serde_json::Value> = workspaces
+                .iter()
+                .map(|w| {
+                    serde_json::json!({
+                        "name": w.name,
+                        "location": w.location.to_string_lossy(),
+                    })
+                })
+                .collect();
+            std::fs::write(&path, serde_json::to_string(&json)?)
+                .context("writing workspaces file")?;
+            Some(path)
+        }
+        None => {
+            println!("yarn workspaces list unavailable — using extractor fs walk");
+            None
+        }
+    };
+    let discover_elapsed = t_discover.elapsed();
+
+    // Run the TypeScript extractor directly on the local packages.
+    // Pass `check_build_freshness = true` so we fail loudly if any package's
+    // src/ is newer than its dist/types/ — that means `yarn build` is overdue
+    // and the diff would silently drop newly-added props.
+    let t_extract = std::time::Instant::now();
+    run_extractor(
+        &packages_dir,
+        &opts.output_dir,
+        true,
+        workspaces_file.as_deref(),
+    )
+    .await?;
+    let extract_elapsed = t_extract.elapsed();
+
+    println!("\nLocal API extracted to {}", opts.output_dir.display());
+    if opts.timing {
+        println!(
+            "Timing: discover={:.2}s extract={:.2}s total={:.2}s",
+            discover_elapsed.as_secs_f64(),
+            extract_elapsed.as_secs_f64(),
+            t_total.elapsed().as_secs_f64(),
+        );
+    }
+    Ok(())
+}
+
+/// Check if any `.d.ts` files exist under packages/*/dist/.
+fn has_declaration_files(packages_dir: &std::path::Path) -> bool {
+    let pattern = packages_dir
+        .join("**")
+        .join("dist")
+        .join("**")
+        .join("*.d.ts");
+    glob::glob(&pattern.to_string_lossy())
+        .map(|mut g| g.next().is_some())
+        .unwrap_or(false)
+}
diff --git a/rsp-api-checker/src/commands/get_published.rs b/rsp-api-checker/src/commands/get_published.rs
new file mode 100644
index 00000000000..a64c3157c84
--- /dev/null
+++ b/rsp-api-checker/src/commands/get_published.rs
@@ -0,0 +1,208 @@
+//! `get-published-api` command: downloads the latest published versions of all
+//! packages from npm and extracts their type API from `.d.ts` files.
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{Context, Result};
+use tempfile::TempDir;
+
+use crate::npm::get_published_packages;
+use crate::workspace::{run_extractor, run_npm_install, write_package_json};
+use crate::workspaces::discover_workspaces;
+
+#[derive(Debug)]
+pub struct GetPublishedOpts {
+    /// Root of the monorepo (to discover which packages exist).
+    pub repo_root: PathBuf,
+    /// Where to write the extracted API files.
+    pub output_dir: PathBuf,
+    /// Max concurrent npm registry requests.
+    pub concurrency: usize,
+    /// npm dist-tag to install (e.g. "latest", "nightly").
+    pub tag: String,
+    /// Print per-phase timing breakdown on completion.
+    pub timing: bool,
+}
+
+/// Read the installed version of a package from `node_modules/<package_name>/package.json`.
+///
+/// Returns an error with full context if the file is missing or unreadable so
+/// the caller can bail instead of silently falling back to "latest" — a silent
+/// fallback would let React/ReactDOM type changes between the local and
+/// published runs produce spurious diffs that look like our API changed.
+fn local_installed_version(repo_root: &Path, package_name: &str) -> Result<String> {
+    let pkg_json = repo_root
+        .join("node_modules")
+        .join(package_name)
+        .join("package.json");
+    let contents = std::fs::read_to_string(&pkg_json)
+        .with_context(|| format!("reading {}", pkg_json.display()))?;
+    let parsed: serde_json::Value = serde_json::from_str(&contents)
+        .with_context(|| format!("parsing {}", pkg_json.display()))?;
+    parsed["version"]
+        .as_str()
+        .map(|s| s.to_string())
+        .ok_or_else(|| anyhow::anyhow!("no \"version\" field in {}", pkg_json.display()))
+}
+
+/// Recognise version specifiers that are NOT concrete semver: `workspace:*`,
+/// `workspace:^1.2`, `*`, empty string, etc. When a package.json resolves to
+/// one of these we can't hand it to `npm install` as-is — npm will bail with
+/// ETARGET or (for `*`) accept it but install arbitrary versions. Callers
+/// should resolve via the npm `latest` dist-tag instead, with a warning.
+fn is_wildcard_version(version: &str) -> bool {
+    let trimmed = version.trim();
+    trimmed.is_empty()
+        || trimmed == "*"
+        || trimmed == "latest"
+        || trimmed.starts_with("workspace:")
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn wildcard_version_detects_star_and_workspace_forms() {
+        assert!(is_wildcard_version("*"));
+        assert!(is_wildcard_version("workspace:*"));
+        assert!(is_wildcard_version("workspace:^1.2.3"));
+        assert!(is_wildcard_version("workspace:~1.0.0"));
+        assert!(is_wildcard_version(""));
+        assert!(is_wildcard_version(" "));
+        assert!(is_wildcard_version("latest"));
+    }
+
+    #[test]
+    fn wildcard_version_accepts_concrete_semver() {
+        assert!(!is_wildcard_version("1.2.3"));
+        assert!(!is_wildcard_version("^1.2.3"));
+        assert!(!is_wildcard_version("~1.2.3"));
+        assert!(!is_wildcard_version("18.0.0-beta.1"));
+    }
+}
+
+pub async fn execute(opts: GetPublishedOpts) -> Result<()> {
+    let t_total = std::time::Instant::now();
+    let packages_dir = opts.repo_root.join("packages");
+    if !packages_dir.exists() {
+        anyhow::bail!(
+            "packages/ directory not found at {}",
+            packages_dir.display()
+        );
+    }
+
+    // Clean output directory
+    if opts.output_dir.exists() {
+        std::fs::remove_dir_all(&opts.output_dir)
+            .context("removing existing output directory")?;
+    }
+
+    // 1. Discover workspace package names — ask yarn first (respects the
+    // repo's workspaces globs and private flag), fall back to fs-walk when
+    // yarn isn't installed.
+    let t_discover = std::time::Instant::now();
+    let preresolved_names = match discover_workspaces(&opts.repo_root).await? {
+        Some(workspaces) => {
+            println!(
+                "Using yarn workspaces list: {} public packages",
+                workspaces.len()
+            );
+            Some(workspaces.into_iter().map(|w| w.name).collect())
+        }
+        None => {
+            println!("yarn workspaces list unavailable — falling back to filesystem walk");
+            None
+        }
+    };
+
+    // 2. Query npm to find all published packages
+    let published =
+        get_published_packages(&packages_dir, opts.concurrency, &opts.tag, preresolved_names)
+            .await?;
+    if published.is_empty() {
+        anyhow::bail!("No published packages found");
+    }
+    let discover_elapsed = t_discover.elapsed();
+
+    // 3. Create temp workspace and install from npm
+    let tmp = TempDir::new().context("creating temp directory")?;
+    let tmp_dir = tmp.path();
+    println!("Working in {}", tmp_dir.display());
+
+    let tag = &opts.tag;
+    let mut deps: Vec<(String, String)> = published
+        .iter()
+        .map(|p| (p.name.clone(), tag.clone()))
+        .collect();
+
+    // React types are needed so the TS checker can resolve JSX.Element, ReactNode, etc.
+    // Pin to the SAME versions the local repo uses to avoid false diffs from
+    // external type definition changes between versions. Failing loudly here
+    // beats silently installing "latest" — a React types change between runs
+    // would look like our API changed.
+    for peer in ["react", "react-dom", "@types/react", "@types/react-dom"] {
+        if !deps.iter().any(|(n, _)| n == peer) {
+            let version = local_installed_version(&opts.repo_root, peer)
+                .with_context(|| format!(
+                    "could not resolve local version of `{peer}` — \
+                     this must be installed in the monorepo's node_modules/ \
+                     so the published run uses matching React types"
+                ))?;
+            // If the local version is non-concrete (e.g. `workspace:*` got
+            // copied in by mistake), fall through to npm latest. This should
+            // never happen for external React types, but we'd rather tell
+            // the user what we're doing than silently install a wildcard.
+            let resolved = if is_wildcard_version(&version) {
+                println!(
+                    "  warn: local version of {peer} is `{version}` — resolving via npm `latest` instead"
+                );
+                "latest".to_string()
+            } else {
+                version
+            };
+            println!("  Pinning {peer}@{resolved} (matching local)");
+            deps.push((peer.to_string(), resolved));
+        }
+    }
+
+    write_package_json(tmp_dir, &deps)?;
+
+    println!("Installing {} packages from npm...", deps.len());
+    let t_install = std::time::Instant::now();
+    run_npm_install(
+        &["install", "--no-audit", "--no-fund", "--ignore-scripts"],
+        tmp_dir,
+    )
+    .await?;
+    let install_elapsed = t_install.elapsed();
+
+    // 4. Run the TypeScript extractor on the installed packages
+    // The extractor looks for package.json files with `types` entries
+    // under the given directory. npm installs into node_modules/.
+    let nm_dir = tmp_dir.join("node_modules");
+    // check_build_freshness = false: published tarballs are immutable, so
+    // their src/ vs dist/types/ mtime relationship doesn't mean the build
+    // is stale — it's whatever npm chose to include.
+    //
+    // No workspaces_file: the tmp node_modules isn't a yarn workspace — it's
+    // whatever npm installed. Fall back to the extractor's fs-walk.
+    let t_extract = std::time::Instant::now();
+    run_extractor(&nm_dir, &opts.output_dir, false, None).await?;
+    let extract_elapsed = t_extract.elapsed();
+
+    println!(
+        "\nPublished API extracted to {}",
+        opts.output_dir.display()
+    );
+    if opts.timing {
+        println!(
+            "Timing: discover={:.2}s install={:.2}s extract={:.2}s total={:.2}s",
+            discover_elapsed.as_secs_f64(),
+            install_elapsed.as_secs_f64(),
+            extract_elapsed.as_secs_f64(),
+            t_total.elapsed().as_secs_f64(),
+        );
+    }
+    Ok(())
+}
diff --git a/rsp-api-checker/src/commands/mod.rs b/rsp-api-checker/src/commands/mod.rs
new file mode 100644
index 00000000000..6ef0169e170
--- /dev/null
+++ b/rsp-api-checker/src/commands/mod.rs
@@ -0,0 +1,4 @@
+pub mod compare;
+pub mod env_report;
+pub mod get_local;
+pub mod get_published;
diff --git a/rsp-api-checker/src/differ.rs b/rsp-api-checker/src/differ.rs
new file mode 100644
index 00000000000..13d27ea34ab
--- /dev/null
+++ b/rsp-api-checker/src/differ.rs
@@ -0,0 +1,695 @@
+//! Diff engine: compares two rebuilt interface sets and produces human-readable output.
+
+use std::collections::HashSet;
+use std::path::Path;
+
+use colored::*;
+use indexmap::IndexMap;
+use similar::{ChangeTag, TextDiff};
+
+use crate::api_json::ApiJson;
+use crate::interface_builder::{format_interface, rebuild_interfaces};
+use crate::type_renderer::{
+    follow_dependencies, follow_inverted_dependencies, invert_dependencies, RenderContext,
+};
+
+/// A single interface diff result.
+#[derive(Debug)]
+pub struct InterfaceDiff {
+    /// Fully qualified name: `@scope/package:ExportName`
+    pub qualified_name: String,
+    /// The formatted diff text (with +/- lines).
+    pub diff_text: String,
+    /// Other exports whose changes caused this one to change.
+    pub changed_by: Vec<String>,
+    /// Exports that are affected by this one changing.
+    pub affects: Vec<String>,
+}
+
+/// Result of diffing one package pair.
+pub struct PackageDiff {
+    /// Display name like `@react-aria/button`
+    pub package_name: String,
+    pub diffs: Vec<InterfaceDiff>,
+}
+
+/// Compare a base api.json against a branch api.json for a single package.
+pub fn diff_package(
+    package_name: &str,
+    base_json: &ApiJson,
+    branch_json: &ApiJson,
+    is_ci: bool,
+) -> PackageDiff {
+    let mut base_ctx = RenderContext::new();
+    let mut branch_ctx = RenderContext::new();
+
+    let base_interfaces = rebuild_interfaces(base_json, &mut base_ctx);
+    let branch_interfaces = rebuild_interfaces(branch_json, &mut branch_ctx);
+
+    // Union of all interface names, sorted alphabetically so diff output is
+    // stable regardless of the order the TS compiler walked the exports.
+    // (Compiler order depends on the entry-file array in `ts.createProgram`,
+    // which is stable across runs but not guaranteed — and adding/removing an
+    // export or re-ordering entry files would otherwise reshuffle the whole
+    // output.)
+    let all_names: Vec<String> = {
+        let mut names: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
+        for k in base_interfaces.keys() {
+            names.insert(k.clone());
+        }
+        for k in branch_interfaces.keys() {
+            names.insert(k.clone());
+        }
+        names.into_iter().collect()
+    };
+
+    // Build combined dependency graph
+    let mut all_deps = base_ctx.dependencies;
+    for (k, v) in branch_ctx.dependencies {
+        all_deps.entry(k).or_default().extend(v);
+    }
+
+    // First pass: find all changed exports
+    let mut all_changed = HashSet::new();
+    let mut raw_diffs: Vec<(String, String, String)> = Vec::new(); // (qualified, diff_text, base_text)
+
+    for name in &all_names {
+        let qualified = format!("{package_name}:{name}");
+
+        let base_formatted = base_interfaces
+            .get(name)
+            .map(|e| format_interface(name, e))
+            .unwrap_or_default();
+        let branch_formatted = branch_interfaces
+            .get(name)
+            .map(|e| format_interface(name, e))
+            .unwrap_or_default();
+
+        if base_formatted == branch_formatted {
+            raw_diffs.push((qualified, String::new(), base_formatted));
+            continue;
+        }
+
+        let diff_text = compute_diff(&base_formatted, &branch_formatted, is_ci);
+
+        if !diff_text.is_empty() {
+            all_changed.insert(qualified.clone());
+        }
+        raw_diffs.push((qualified, diff_text, base_formatted));
+    }
+
+    // Second pass: annotate with dependency info
+    let inverted = invert_dependencies(&all_deps);
+    let mut diffs = Vec::new();
+
+    for (qualified, diff_text, _) in raw_diffs {
+        if diff_text.is_empty() {
+            continue;
+        }
+        let changed_by = follow_dependencies(&qualified, &all_deps, &all_changed);
+        let affects = follow_inverted_dependencies(&qualified, &inverted);
+
+        diffs.push(InterfaceDiff {
+            qualified_name: qualified,
+            diff_text,
+            changed_by,
+            affects,
+        });
+    }
+
+    PackageDiff {
+        package_name: package_name.to_string(),
+        diffs,
+    }
+}
+
+/// How many unchanged lines to preserve on each side of a change.
+const DIFF_CONTEXT: usize = 2;
+
+/// Compute a colored/marked diff between two formatted interface strings.
+///
+/// Unchanged lines that are far from any change are collapsed to `...` so the
+/// output stays readable for large interfaces. The first line (interface header)
+/// and last line (closing `}`) are always shown so the output is self-contained.
+fn compute_diff(base: &str, branch: &str, is_ci: bool) -> String {
+    let text_diff = TextDiff::from_lines(base, branch);
+
+    let has_changes = text_diff
+        .iter_all_changes()
+        .any(|c| c.tag() != ChangeTag::Equal);
+
+    if !has_changes {
+        return String::new();
+    }
+
+    // Collect all (tag, formatted_line) pairs, dropping blank Equal lines
+    // (the blank line inserted between the interface header and its properties).
+    let all_lines: Vec<(ChangeTag, String)> = text_diff
+        .iter_all_changes()
+        .filter_map(|change| {
+            let line = change.to_string_lossy();
+            let line = line.trim_end_matches('\n');
+            if line.is_empty() && change.tag() == ChangeTag::Equal {
+                return None;
+            }
+            let formatted = match change.tag() {
+                ChangeTag::Delete => {
+                    if is_ci {
+                        format!("-{line}")
+                    } else {
+                        format!("-{line}").red().to_string()
+                    }
+                }
+                ChangeTag::Insert => {
+                    if is_ci {
+                        format!("+{line}")
+                    } else {
+                        format!("+{line}").green().to_string()
+                    }
+                }
+                ChangeTag::Equal => format!(" {line}"),
+            };
+            Some((change.tag(), formatted))
+        })
+        .collect();
+
+    let n = all_lines.len();
+    if n == 0 {
+        return String::new();
+    }
+
+    // Decide which lines to keep: every changed line, DIFF_CONTEXT lines around
+    // it, plus always the first (interface header) and last (closing brace).
+    let mut keep = vec![false; n];
+    keep[0] = true;
+    keep[n - 1] = true;
+    for i in 0..n {
+        if all_lines[i].0 != ChangeTag::Equal {
+            let lo = i.saturating_sub(DIFF_CONTEXT);
+            let hi = (i + DIFF_CONTEXT + 1).min(n);
+            for j in lo..hi {
+                keep[j] = true;
+            }
+        }
+    }
+
+    // Emit kept lines; collapse each contiguous run of omitted lines to "  ...".
+    let mut result_lines: Vec<String> = Vec::new();
+    let mut in_gap = false;
+    for i in 0..n {
+        if keep[i] {
+            in_gap = false;
+            result_lines.push(all_lines[i].1.clone());
+        } else if !in_gap {
+            result_lines.push("  ...".to_string());
+            in_gap = true;
+        }
+    }
+
+    if is_ci {
+        format!("```diff\n{}\n```", result_lines.join("\n"))
+    } else {
+        result_lines.join("\n")
+    }
+}
+
+// ── High-level comparison across all packages ───────────────────────────────
+
+/// A paired set of api.json files for one package.
+pub struct ApiPair {
+    pub package_name: String,
+    pub base: ApiJson,
+    pub branch: ApiJson,
+}
+
+/// Discover and pair api.json files from two directories.
+pub fn discover_pairs(base_dir: &Path, branch_dir: &Path) -> anyhow::Result<Vec<ApiPair>> {
+    let base_apis = find_api_jsons(base_dir)?;
+    let branch_apis = find_api_jsons(branch_dir)?;
+
+    let mut pairs = Vec::new();
+    let mut matched_branch: HashSet<String> = HashSet::new();
+
+    // Match base → branch
+    for (name, base_path) in &base_apis {
+        if let Some(branch_path) = branch_apis.get(name) {
+            matched_branch.insert(name.clone());
+            pairs.push(ApiPair {
+                package_name: name.clone(),
+                base: ApiJson::load(base_path)?,
+                branch: ApiJson::load(branch_path)?,
+            });
+        } else {
+            // Package removed in branch
+            pairs.push(ApiPair {
+                package_name: name.clone(),
+                base: ApiJson::load(base_path)?,
+                branch: ApiJson::default(),
+            });
+        }
+    }
+
+    // New packages in branch
+    for (name, branch_path) in &branch_apis {
+        if !matched_branch.contains(name) {
+            // Check if it's a private package
+            let pkg_json_path = branch_path
+                .parent()
+                .and_then(|p| p.parent())
+                .map(|p| p.join("package.json"));
+            let is_private = pkg_json_path
+                .as_ref()
+                .and_then(|p| std::fs::read_to_string(p).ok())
+                .and_then(|s| serde_json::from_str::<serde_json::Value>(&s).ok())
+                .and_then(|v| v.get("private")?.as_bool())
+                .unwrap_or(false);
+
+            if !is_private {
+                pairs.push(ApiPair {
+                    package_name: name.clone(),
+                    base: ApiJson::default(),
+                    branch: ApiJson::load(branch_path)?,
+                });
+            }
+        }
+    }
+
+    pairs.sort_by(|a, b| a.package_name.cmp(&b.package_name));
+    Ok(pairs)
+}
+
+/// Find all api.json files under a directory, keyed by package name.
+pub(crate) fn find_api_jsons(dir: &Path) -> anyhow::Result<IndexMap<String, std::path::PathBuf>> {
+    let mut result = IndexMap::new();
+    let pattern = dir.join("**").join("dist").join("api.json");
+    let pattern_str = pattern.to_string_lossy();
+
+    for entry in glob::glob(&pattern_str)? {
+        let path = entry?;
+        // Find the package.json next to the dist/ directory
+        let pkg_dir = path.parent().and_then(|p| p.parent());
+        if let Some(pkg_dir) = pkg_dir {
+            let pkg_json = pkg_dir.join("package.json");
+            if let Ok(contents) = std::fs::read_to_string(&pkg_json) {
+                if let Ok(v) = serde_json::from_str::<serde_json::Value>(&contents) {
+                    if let Some(name) = v.get("name").and_then(|n| n.as_str()) {
+                        result.insert(name.to_string(), path);
+                    }
+                }
+            }
+        }
+    }
+
+    Ok(result)
+}
+
+// ── Output formatting ───────────────────────────────────────────────────────
+
+/// Format the full comparison output.
+pub fn format_output(package_diffs: &[PackageDiff], is_ci: bool) -> String {
+    let mut sections = Vec::new();
+
+    for pkg in package_diffs {
+        if pkg.diffs.is_empty() {
+            continue;
+        }
+
+        let mut changes = Vec::new();
+        for diff in &pkg.diffs {
+            let mut section = String::new();
+            section.push_str(&format!("\n#### {}\n", diff.qualified_name));
+
+            if !diff.changed_by.is_empty() {
+                section.push_str("changed by:\n");
+                for dep in &diff.changed_by {
+                    section.push_str(&format!(" - {dep}\n"));
+                }
+                section.push('\n');
+            }
+
+            // Remove extra blank lines from the diff for readability
+            let cleaned: String = diff
+                .diff_text
+                .lines()
+                .filter(|l| !l.trim().is_empty() || l.starts_with("```"))
+                .collect::<Vec<_>>()
+                .join("\n");
+            section.push_str(&cleaned);
+
+            if !diff.affects.is_empty() {
+                if is_ci {
+                    // NOTE(review): this HTML span was garbled by extraction;
+                    // reconstructed as a collapsible GitHub <details> block —
+                    // confirm against the rendered PR comment.
+                    section.push_str("\n<details>\n<summary>it changed</summary>\n\n");
+                    for affected in &diff.affects {
+                        section.push_str(&format!("  • {affected}\n"));
+                    }
+                    section.push_str("\n</details>\n");
+                } else {
+                    section.push_str("\nit changed:\n");
+                    for affected in &diff.affects {
+                        section.push_str(&format!(" - {affected}\n"));
+                    }
+                }
+            }
+
+            changes.push(section);
+        }
+
+        if !changes.is_empty() {
+            let header = format!("\n### {}\n", pkg.package_name);
+            let body = changes.join("\n");
+            sections.push(format!("{header}{body}\n-----------------------------------\n"));
+        }
+    }
+
+    sections.join("\n")
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::api_json::{ApiJson, ParameterMap, TypeNode};
+    use indexmap::IndexMap;
+
+    fn prop_node(name: &str, optional: bool, value: TypeNode) -> TypeNode {
+        TypeNode::Property {
+            name: name.into(),
+            index_type: None,
+            value: Box::new(value),
+            optional,
+            description: None,
+            access: None,
+            default: None,
+        }
+    }
+
+    /// Build an Interface node. Pass `iface_name = None` to let the map key
+    /// become the display name (used for new/removed export tests).
+    fn iface_with_props(props: Vec<(&str, bool, TypeNode)>) -> TypeNode {
+        iface_named(Some("ButtonProps"), props)
+    }
+
+    fn iface_named(iface_name: Option<&str>, props: Vec<(&str, bool, TypeNode)>) -> TypeNode {
+        let mut map = IndexMap::new();
+        for (n, opt, ty) in props {
+            map.insert(n.into(), prop_node(n, opt, ty));
+        }
+        TypeNode::Interface {
+            id: None,
+            name: iface_name.map(str::to_string),
+            properties: map,
+            type_parameters: vec![],
+            extends: vec![],
+            description: None,
+            access: None,
+        }
+    }
+
+    fn api(key: &str, node: TypeNode) -> ApiJson {
+        let mut exports = IndexMap::new();
+        exports.insert(key.into(), node);
+        ApiJson { exports, links: IndexMap::new() }
+    }
+
+    fn void_fn() -> TypeNode {
+        TypeNode::Function {
+            id: None,
+            name: None,
+            parameters: ParameterMap::Map(IndexMap::new()),
+            return_type: Some(Box::new(TypeNode::Void)),
+            type_parameters: vec![],
+            description: None,
+            access: None,
+        }
+    }
+
+    // ── diff_package ───────────────────────────────────────────────────────
+
+    #[test]
+    fn test_identical_apis_produce_no_diffs() {
+        let node = iface_with_props(vec![("isDisabled", true, TypeNode::Boolean { value: None })]);
+        let base = api("@pkg:ButtonProps", node.clone());
+        let branch = api("@pkg:ButtonProps", node);
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert!(result.diffs.is_empty(), "expected no diffs for identical APIs");
+    }
+
+    #[test]
+    fn test_added_property_shows_plus_line() {
+        let base = api(
+            "@pkg:ButtonProps",
+            iface_with_props(vec![("isDisabled", true, TypeNode::Boolean { value: None })]),
+        );
+        let branch = api(
+            "@pkg:ButtonProps",
+            iface_with_props(vec![
+                ("isDisabled", true, TypeNode::Boolean { value: None }),
+                ("onPress", true, void_fn()),
+            ]),
+        );
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert_eq!(result.diffs.len(), 1);
+        assert!(result.diffs[0].diff_text.contains("onPress?:"), "expected onPress in diff");
+        assert!(result.diffs[0].diff_text.contains('+'), "expected + marker for addition");
+    }
+
+    #[test]
+    fn test_removed_property_shows_minus_line() {
+        let base = api(
+            "@pkg:ButtonProps",
+            iface_with_props(vec![
+                ("isDisabled", true, TypeNode::Boolean { value: None }),
+                ("onPress", true, void_fn()),
+            ]),
+        );
+        let branch = api(
+            "@pkg:ButtonProps",
+            iface_with_props(vec![("isDisabled", true, TypeNode::Boolean { value: None })]),
+        );
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert_eq!(result.diffs.len(), 1);
+        assert!(result.diffs[0].diff_text.contains("onPress?:"), "expected onPress in diff");
+        assert!(result.diffs[0].diff_text.contains('-'), "expected - marker for removal");
+    }
+
+    #[test]
+    fn test_changed_type_shows_both_old_and_new() {
+        let base = api(
+            "@pkg:Props",
+            iface_with_props(vec![("value", false, TypeNode::String { value: None })]),
+        );
+        let branch = api(
+            "@pkg:Props",
+            iface_with_props(vec![("value", false, TypeNode::Number { value: None })]),
+        );
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert_eq!(result.diffs.len(), 1);
+        let text = &result.diffs[0].diff_text;
+        assert!(text.contains("- value: string"), "expected old type removed");
+        assert!(text.contains("+ value: number"), "expected new type added");
+    }
+
+    #[test]
+    fn test_new_export_in_branch_shows_as_added() {
+        let base = ApiJson::default();
+        // Use name: None so export_name() uses the map key "NewExport"
+        let branch = api(
+            "@pkg:NewExport",
+            iface_named(None, vec![("label", false, TypeNode::String { value: None })]),
+        );
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert_eq!(result.diffs.len(), 1);
+        assert!(result.diffs[0].diff_text.contains('+'));
+        assert!(result.diffs[0].qualified_name.contains("NewExport"));
+    }
+
+    #[test]
+    fn test_reordered_exports_produce_no_diff() {
+        // Two api.jsons identical except export order must not diff.
+        let mut base_exports = IndexMap::new();
+        base_exports.insert(
+            "AProps".into(),
+            iface_named(Some("AProps"), vec![("a", false, TypeNode::String { value: None })]),
+        );
+        base_exports.insert(
+            "ZProps".into(),
+            iface_named(Some("ZProps"), vec![("z", false, TypeNode::Number { value: None })]),
+        );
+        let base = ApiJson { exports: base_exports, links: IndexMap::new() };
+
+        let mut branch_exports = IndexMap::new();
+        branch_exports.insert(
+            "ZProps".into(),
+            iface_named(Some("ZProps"), vec![("z", false, TypeNode::Number { value: None })]),
+        );
+        branch_exports.insert(
+            "AProps".into(),
+            iface_named(Some("AProps"), vec![("a", false, TypeNode::String { value: None })]),
+        );
+        let branch = ApiJson { exports: branch_exports, links: IndexMap::new() };
+
+        let result = diff_package("@pkg", &base, &branch, true);
+        assert!(
+            result.diffs.is_empty(),
+            "expected no diffs when only export order differs, got: {:?}",
+            result.diffs.iter().map(|d| &d.qualified_name).collect::<Vec<_>>()
+        );
+    }
+
+    #[test]
+    fn test_reordered_properties_produce_no_diff() {
+        // An interface with the same properties in different orders must not diff.
+ let base = api( + "@pkg:Props", + iface_named( + Some("Props"), + vec![ + ("zulu", false, TypeNode::String { value: None }), + ("alpha", false, TypeNode::Number { value: None }), + ], + ), + ); + let branch = api( + "@pkg:Props", + iface_named( + Some("Props"), + vec![ + ("alpha", false, TypeNode::Number { value: None }), + ("zulu", false, TypeNode::String { value: None }), + ], + ), + ); + let result = diff_package("@pkg", &base, &branch, true); + assert!( + result.diffs.is_empty(), + "expected no diffs when only property order differs" + ); + } + + #[test] + fn test_removed_export_in_branch_shows_as_deleted() { + // Use name: None so export_name() uses the map key "OldExport" + let base = api( + "@pkg:OldExport", + iface_named(None, vec![("label", false, TypeNode::String { value: None })]), + ); + let branch = ApiJson::default(); + let result = diff_package("@pkg", &base, &branch, true); + assert_eq!(result.diffs.len(), 1); + assert!(result.diffs[0].diff_text.contains('-')); + assert!(result.diffs[0].qualified_name.contains("OldExport")); + } + + #[test] + fn test_ci_mode_wraps_in_code_fence() { + let base = api( + "@pkg:Props", + iface_with_props(vec![("x", false, TypeNode::String { value: None })]), + ); + let branch = api( + "@pkg:Props", + iface_with_props(vec![("x", false, TypeNode::Number { value: None })]), + ); + let result = diff_package("@pkg", &base, &branch, true); + let text = &result.diffs[0].diff_text; + assert!(text.starts_with("```diff\n"), "CI output should start with code fence"); + assert!(text.ends_with("```"), "CI output should end with code fence"); + } + + #[test] + fn test_large_interface_elides_unchanged_middle() { + // Build an interface with many props where only one in the middle changes. 
+ let mut base_props: Vec<(&str, bool, TypeNode)> = Vec::new(); + let mut branch_props: Vec<(&str, bool, TypeNode)> = Vec::new(); + for i in 0..10_u32 { + let name = Box::leak(format!("prop{i}").into_boxed_str()) as &str; + base_props.push((name, true, TypeNode::String { value: None })); + // Change prop5 from string to number + let ty = if i == 5 { TypeNode::Number { value: None } } else { TypeNode::String { value: None } }; + branch_props.push((name, true, ty)); + } + let base = api("@pkg:BigProps", iface_with_props(base_props)); + let branch = api("@pkg:BigProps", iface_with_props(branch_props)); + let result = diff_package("@pkg", &base, &branch, false); + let text = &result.diffs[0].diff_text; + + // Changed line must be present + assert!(text.contains("- prop5?: string"), "expected removal of prop5 string"); + assert!(text.contains("+ prop5?: number"), "expected addition of prop5 number"); + + // Distant unchanged lines must be collapsed + assert!(text.contains("..."), "expected ellipsis for collapsed context"); + assert!(!text.contains("prop0"), "prop0 is far from the change and should be elided"); + assert!(!text.contains("prop9"), "prop9 is far from the change and should be elided"); + + // First and last lines of the interface must always be present. + // iface_with_props hard-codes the internal name as "ButtonProps". 
+ let lines: Vec<&str> = text.lines().collect(); + assert!(lines.first().unwrap().contains("ButtonProps {"), "first line must be the interface header"); + assert_eq!(lines.last().unwrap().trim(), "}", "last line must be the closing brace"); + } + + #[test] + fn test_non_ci_mode_no_code_fence() { + let base = api( + "@pkg:Props", + iface_with_props(vec![("x", false, TypeNode::String { value: None })]), + ); + let branch = api( + "@pkg:Props", + iface_with_props(vec![("x", false, TypeNode::Number { value: None })]), + ); + let result = diff_package("@pkg", &base, &branch, false); + let text = &result.diffs[0].diff_text; + assert!(!text.contains("```diff"), "non-CI output should not have markdown fences"); + } + + // ── format_output ────────────────────────────────────────────────────── + + #[test] + fn test_format_output_empty_diffs_produces_empty_string() { + let pkg = PackageDiff { package_name: "@pkg".into(), diffs: vec![] }; + assert!(format_output(&[pkg], false).is_empty()); + } + + #[test] + fn test_format_output_includes_package_and_export_headers() { + let diff = InterfaceDiff { + qualified_name: "@pkg:Props".into(), + diff_text: "```diff\n-old\n+new\n```".into(), + changed_by: vec![], + affects: vec![], + }; + let pkg = PackageDiff { package_name: "@pkg".into(), diffs: vec![diff] }; + let out = format_output(&[pkg], true); + assert!(out.contains("### @pkg"), "missing package header"); + assert!(out.contains("#### @pkg:Props"), "missing export header"); + } + + #[test] + fn test_format_output_ci_affects_renders_html_details() { + let diff = InterfaceDiff { + qualified_name: "@pkg:Props".into(), + diff_text: "```diff\n-old\n+new\n```".into(), + changed_by: vec![], + affects: vec!["@pkg:OtherProps".into()], + }; + let pkg = PackageDiff { package_name: "@pkg".into(), diffs: vec![diff] }; + let out = format_output(&[pkg], true); + assert!(out.contains("
"), "CI affects should use
"); + assert!(out.contains("@pkg:OtherProps")); + } + + #[test] + fn test_format_output_non_ci_affects_renders_plain_text() { + let diff = InterfaceDiff { + qualified_name: "@pkg:Props".into(), + diff_text: "-old\n+new".into(), + changed_by: vec![], + affects: vec!["@pkg:OtherProps".into()], + }; + let pkg = PackageDiff { package_name: "@pkg".into(), diffs: vec![diff] }; + let out = format_output(&[pkg], false); + assert!(out.contains("it changed:"), "non-CI affects should use plain text"); + assert!(!out.contains("
"), "non-CI should not use HTML"); + } +} diff --git a/rsp-api-checker/src/interface_builder.rs b/rsp-api-checker/src/interface_builder.rs new file mode 100644 index 00000000000..a6f9a36d23e --- /dev/null +++ b/rsp-api-checker/src/interface_builder.rs @@ -0,0 +1,741 @@ +//! Reconstructs a simplified interface representation from the raw [`ApiJson`] +//! exports. This is the Rust equivalent of `rebuildInterfaces()` in compareAPIs.js. + +use indexmap::IndexMap; +use serde_json::Value as JsonValue; + +use crate::api_json::{ApiJson, TypeNode}; +use crate::type_renderer::{render_type, RenderContext}; + +/// A single property in a rebuilt interface. +#[derive(Debug, Clone)] +pub struct PropertyData { + pub optional: bool, + pub default_val: Option, + pub value: String, +} + +/// A rebuilt interface/function/component export. +#[derive(Debug, Clone)] +pub enum RebuiltExport { + /// A structured interface with named properties. + Interface { + type_params: Option, + extends: Option, + properties: IndexMap, + }, + /// An untyped or unrecognized export. + Untyped, +} + +/// Rebuild all exports from an [`ApiJson`] into displayable interfaces. +/// +/// Returns a map from export name → rebuilt representation, plus populates +/// the `ctx.dependencies` graph. +pub fn rebuild_interfaces( + json: &ApiJson, + ctx: &mut RenderContext, +) -> IndexMap { + let mut result = IndexMap::new(); + + for (key, item) in &json.exports { + ctx.current_export = key.clone(); + + let rebuilt = rebuild_single(key, item, ctx); + let name = export_name(key, item); + result.insert(name, rebuilt); + } + + result +} + +/// Extract the display name for an export. +fn export_name(key: &str, node: &TypeNode) -> String { + match node { + TypeNode::Component { name: Some(n), .. } + | TypeNode::Function { name: Some(n), .. } + | TypeNode::Interface { name: Some(n), .. } + | TypeNode::Alias { name: Some(n), .. 
} => n.clone(), + _ => key.to_string(), + } +} + +fn rebuild_single(key: &str, item: &TypeNode, ctx: &mut RenderContext) -> RebuiltExport { + match item { + TypeNode::Component { + name, + props, + type_parameters, + .. + } => { + let mut properties = IndexMap::new(); + let display_name = name.as_deref().unwrap_or(key); + + if let Some(props_node) = props { + match props_node.as_ref() { + TypeNode::Interface { + properties: iface_props, + .. + } => { + collect_properties(iface_props, &mut properties, ctx); + } + TypeNode::Link { .. } | TypeNode::Identifier { .. } => { + let val = render_type(props_node, ctx); + properties.insert( + display_name.to_string(), + PropertyData { + optional: false, + default_val: None, + value: val, + }, + ); + } + TypeNode::Object { + properties: Some(obj_props), + .. + } => { + collect_properties(obj_props, &mut properties, ctx); + } + _ => {} + } + } + + let type_params = format_type_params(type_parameters, ctx); + let extends = extract_extends(props.as_deref(), ctx); + + RebuiltExport::Interface { + type_params, + extends, + properties, + } + } + + TypeNode::Function { + parameters, + return_type, + type_parameters, + .. + } => { + let mut properties = IndexMap::new(); + + for (_, param) in parameters.iter_ordered() { + if let TypeNode::Parameter { + name: param_name, + value, + optional, + .. 
+ } = param + { + let name = param_name.as_deref().unwrap_or("arg"); + if is_private(param) { + continue; + } + properties.insert( + name.to_string(), + PropertyData { + optional: *optional, + default_val: None, + value: render_type(value, ctx), + }, + ); + } + } + + if let Some(ret) = return_type { + properties.insert( + "returnVal".to_string(), + PropertyData { + optional: false, + default_val: None, + value: render_type(ret, ctx), + }, + ); + } else { + properties.insert( + "returnVal".to_string(), + PropertyData { + optional: false, + default_val: None, + value: "undefined".into(), + }, + ); + } + + RebuiltExport::Interface { + type_params: format_type_params(type_parameters, ctx), + extends: None, + properties, + } + } + + TypeNode::Interface { + properties: iface_props, + type_parameters, + extends, + .. + } => { + let mut properties = IndexMap::new(); + collect_sorted_properties(iface_props, &mut properties, ctx); + + let extends_str = if extends.is_empty() { + None + } else { + let parts: Vec = extends + .iter() + .map(|e| render_type(e, ctx)) + .collect(); + Some(format!("extends {}", parts.join(", "))) + }; + + RebuiltExport::Interface { + type_params: format_type_params(type_parameters, ctx), + extends: extends_str, + properties, + } + } + + TypeNode::Alias { + value, + type_parameters, + name, + .. + } => { + // If the alias resolves to an interface-like shape, rebuild it + match value.as_ref() { + TypeNode::Interface { + properties: iface_props, + type_parameters: inner_tp, + extends, + .. 
+ } => { + let mut properties = IndexMap::new(); + collect_sorted_properties(iface_props, &mut properties, ctx); + + let tp = if !type_parameters.is_empty() { + format_type_params(type_parameters, ctx) + } else { + format_type_params(inner_tp, ctx) + }; + + let extends_str = if extends.is_empty() { + None + } else { + let parts: Vec = extends + .iter() + .map(|e| render_type(e, ctx)) + .collect(); + Some(format!("extends {}", parts.join(", "))) + }; + + RebuiltExport::Interface { + type_params: tp, + extends: extends_str, + properties, + } + } + _ => { + let val = render_type(value, ctx); + let mut properties = IndexMap::new(); + properties.insert( + name.as_deref().unwrap_or(key).to_string(), + PropertyData { + optional: false, + default_val: None, + value: val, + }, + ); + RebuiltExport::Interface { + type_params: format_type_params(type_parameters, ctx), + extends: None, + properties, + } + } + } + } + + TypeNode::Link { id } => { + // Type alias that's just a link - render as a simple value + if let Some(id_str) = id { + let name = id_str.rsplit(':').next().unwrap_or(id_str); + let mut properties = IndexMap::new(); + properties.insert( + name.to_string(), + PropertyData { + optional: false, + default_val: None, + value: render_type(item, ctx), + }, + ); + RebuiltExport::Interface { + type_params: None, + extends: None, + properties, + } + } else { + RebuiltExport::Untyped + } + } + + // Identifiers with no type info + TypeNode::Identifier { .. } => RebuiltExport::Untyped, + + _ => RebuiltExport::Untyped, + } +} + +fn collect_properties( + props: &IndexMap, + out: &mut IndexMap, + ctx: &mut RenderContext, +) { + // Sort by key so diff output is stable no matter what order the TS + // compiler emitted properties in. Property ordering out of the compiler + // depends on resolution order, which depends on entry-file order — so + // two otherwise-identical api.jsons can reorder keys between runs. 
+ let mut sorted: Vec<_> = props.iter().collect(); + sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); + for (_, prop) in sorted { + add_property(prop, out, ctx); + } +} + +fn collect_sorted_properties( + props: &IndexMap, + out: &mut IndexMap, + ctx: &mut RenderContext, +) { + collect_properties(props, out, ctx); +} + +fn add_property(prop: &TypeNode, out: &mut IndexMap, ctx: &mut RenderContext) { + match prop { + TypeNode::Property { + name, + value, + optional, + default, + access, + .. + } => { + if is_access_private(access.as_deref()) { + return; + } + // Set depth to 2 to match property indentation (" ") so inline objects nest correctly + let saved_depth = ctx.depth; + ctx.depth = 2; + let rendered = render_type(value, ctx); + ctx.depth = saved_depth; + out.insert( + name.clone(), + PropertyData { + optional: *optional, + default_val: format_json_default(default), + value: rendered, + }, + ); + } + TypeNode::Method { + name, + value, + optional, + default, + access, + .. + } => { + if is_access_private(access.as_deref()) { + return; + } + let saved_depth = ctx.depth; + ctx.depth = 2; + let rendered = render_type(value, ctx); + ctx.depth = saved_depth; + out.insert( + name.clone(), + PropertyData { + optional: *optional, + default_val: format_json_default(default), + value: rendered, + }, + ); + } + _ => {} + } +} + +fn is_private(node: &TypeNode) -> bool { + match node { + TypeNode::Property { access, .. } + | TypeNode::Method { access, .. 
} => is_access_private(access.as_deref()), + _ => false, + } +} + +fn is_access_private(access: Option<&str>) -> bool { + matches!(access, Some("private") | Some("protected")) +} + +fn format_json_default(val: &Option) -> Option { + match val { + None | Some(JsonValue::Null) => None, + Some(JsonValue::String(s)) => Some(s.clone()), + Some(v) => Some(v.to_string()), + } +} + +fn format_type_params(params: &[TypeNode], ctx: &mut RenderContext) -> Option { + if params.is_empty() { + return None; + } + let rendered: Vec = params.iter().map(|p| render_type(p, ctx)).collect(); + // Sort for stable output + let mut sorted = rendered; + sorted.sort(); + Some(format!("<{}>", sorted.join(", "))) +} + +fn extract_extends(props: Option<&TypeNode>, ctx: &mut RenderContext) -> Option { + match props { + Some(TypeNode::Interface { extends, .. }) if !extends.is_empty() => { + let parts: Vec = extends.iter().map(|e| render_type(e, ctx)).collect(); + let mut sorted = parts; + sorted.sort(); + Some(format!("extends {}", sorted.join(", "))) + } + _ => None, + } +} + +// ── Formatting for diff output ────────────────────────────────────────────── + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_json::{ApiJson, ParameterMap, TypeNode}; + use crate::type_renderer::RenderContext; + use indexmap::IndexMap; + + fn make_property(name: &str, optional: bool, access: Option<&str>, value: TypeNode) -> TypeNode { + TypeNode::Property { + name: name.into(), + index_type: None, + value: Box::new(value), + optional, + description: None, + access: access.map(str::to_string), + default: None, + } + } + + fn make_iface(props: Vec<(&str, TypeNode)>) -> TypeNode { + let mut map = IndexMap::new(); + for (name, ty) in props { + map.insert(name.into(), make_property(name, false, None, ty)); + } + TypeNode::Interface { + id: None, + name: Some("MyInterface".into()), + properties: map, + type_parameters: vec![], + extends: vec![], + description: None, + access: None, + } + } + + fn rebuild(node: 
TypeNode) -> (String, RebuiltExport) { + let mut exports = IndexMap::new(); + exports.insert("TheExport".into(), node); + let json = ApiJson { exports, links: IndexMap::new() }; + let mut ctx = RenderContext::new(); + let mut result = rebuild_interfaces(&json, &mut ctx); + result.pop().unwrap() + } + + // ── Interface ───────────────────────────────────────────────────────── + + #[test] + fn test_interface_properties_sorted_alphabetically() { + let node = make_iface(vec![ + ("zebra", TypeNode::String { value: None }), + ("alpha", TypeNode::Number { value: None }), + ("middle", TypeNode::Boolean { value: None }), + ]); + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. } = rebuilt { + let keys: Vec<&str> = properties.keys().map(String::as_str).collect(); + assert_eq!(keys, ["alpha", "middle", "zebra"]); + } else { + panic!("expected Interface"); + } + } + + #[test] + fn test_interface_filters_private_properties() { + let mut props = IndexMap::new(); + props.insert("pub".into(), make_property("pub", false, None, TypeNode::String { value: None })); + props.insert("sec".into(), make_property("sec", false, Some("private"), TypeNode::String { value: None })); + props.insert("pro".into(), make_property("pro", false, Some("protected"), TypeNode::String { value: None })); + + let node = TypeNode::Interface { + id: None, + name: None, + properties: props, + type_parameters: vec![], + extends: vec![], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. 
} = rebuilt { + assert!(properties.contains_key("pub")); + assert!(!properties.contains_key("sec")); + assert!(!properties.contains_key("pro")); + } + } + + #[test] + fn test_interface_with_external_extends() { + let node = TypeNode::Interface { + id: None, + name: None, + properties: IndexMap::new(), + type_parameters: vec![], + extends: vec![TypeNode::Identifier { name: "HTMLAttributes".into() }], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { extends, .. } = rebuilt { + assert_eq!(extends.as_deref(), Some("extends HTMLAttributes")); + } + } + + // ── Component ───────────────────────────────────────────────────────── + + #[test] + fn test_component_extracts_inline_props() { + let mut prop_map = IndexMap::new(); + prop_map.insert("label".into(), make_property("label", false, None, TypeNode::String { value: None })); + + let node = TypeNode::Component { + id: None, + name: Some("Button".into()), + props: Some(Box::new(TypeNode::Interface { + id: None, + name: None, + properties: prop_map, + type_parameters: vec![], + extends: vec![], + description: None, + access: None, + })), + type_parameters: vec![], + ref_type: None, + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. 
} = rebuilt { + assert!(properties.contains_key("label")); + } + } + + // ── Function ────────────────────────────────────────────────────────── + + #[test] + fn test_function_parameters_plus_return_val() { + let mut params = IndexMap::new(); + params.insert( + "input".into(), + TypeNode::Parameter { + name: Some("input".into()), + value: Box::new(TypeNode::String { value: None }), + optional: false, + rest: false, + }, + ); + let node = TypeNode::Function { + id: None, + name: Some("myFn".into()), + parameters: ParameterMap::Map(params), + return_type: Some(Box::new(TypeNode::Boolean { value: None })), + type_parameters: vec![], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. } = rebuilt { + assert!(properties.contains_key("input")); + assert!(properties.contains_key("returnVal")); + assert_eq!(properties["returnVal"].value, "boolean"); + } + } + + #[test] + fn test_function_without_return_type_gets_undefined() { + let node = TypeNode::Function { + id: None, + name: None, + parameters: ParameterMap::Map(IndexMap::new()), + return_type: None, + type_parameters: vec![], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. 
} = rebuilt { + assert_eq!(properties["returnVal"].value, "undefined"); + } + } + + // ── Alias ───────────────────────────────────────────────────────────── + + #[test] + fn test_alias_wrapping_interface_is_structured() { + let mut inner_props = IndexMap::new(); + inner_props.insert("x".into(), make_property("x", false, None, TypeNode::Number { value: None })); + + let node = TypeNode::Alias { + id: None, + name: Some("MyAlias".into()), + value: Box::new(TypeNode::Interface { + id: None, + name: None, + properties: inner_props, + type_parameters: vec![], + extends: vec![], + description: None, + access: None, + }), + type_parameters: vec![], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. } = rebuilt { + assert!(properties.contains_key("x")); + } + } + + #[test] + fn test_alias_wrapping_primitive_is_interface_with_name_key() { + let node = TypeNode::Alias { + id: None, + name: Some("MyAlias".into()), + value: Box::new(TypeNode::String { value: None }), + type_parameters: vec![], + description: None, + access: None, + }; + let (_, rebuilt) = rebuild(node); + if let RebuiltExport::Interface { properties, .. 
} = rebuilt { + // key should be the alias name + assert!(properties.contains_key("MyAlias")); + assert_eq!(properties["MyAlias"].value, "string"); + } + } + + // ── format_prop ──────────────────────────────────────────────────────── + + #[test] + fn test_format_prop_required_no_default() { + let prop = PropertyData { optional: false, default_val: None, value: "string".into() }; + assert_eq!(format_prop("name", &prop), " name: string"); + } + + #[test] + fn test_format_prop_optional_no_default() { + let prop = PropertyData { optional: true, default_val: None, value: "number".into() }; + assert_eq!(format_prop("count", &prop), " count?: number"); + } + + #[test] + fn test_format_prop_with_default() { + let prop = PropertyData { + optional: true, + default_val: Some("42".into()), + value: "number".into(), + }; + assert_eq!(format_prop("count", &prop), " count?: number = 42"); + } + + // ── format_interface ────────────────────────────────────────────────── + + #[test] + fn test_format_interface_basic() { + let mut properties = IndexMap::new(); + properties.insert("foo".into(), PropertyData { optional: true, default_val: None, value: "string".into() }); + let export = RebuiltExport::Interface { type_params: None, extends: None, properties }; + let out = format_interface("MyInterface", &export); + assert!(out.starts_with("MyInterface {")); + assert!(out.contains("foo?: string")); + assert!(out.ends_with("}\n")); + } + + #[test] + fn test_format_interface_with_type_params_and_extends() { + let export = RebuiltExport::Interface { + type_params: Some("".into()), + extends: Some("extends Base".into()), + properties: IndexMap::new(), + }; + let out = format_interface("MyInterface", &export); + assert!(out.starts_with("MyInterface extends Base {")); + } + + #[test] + fn test_format_interface_untyped() { + let out = format_interface("Mystery", &RebuiltExport::Untyped); + assert!(out.contains("UNTYPED")); + assert!(out.contains("Mystery")); + } +} + +/// Format a single 
property as a display line. +pub fn format_prop(name: &str, prop: &PropertyData) -> String { + let opt = if prop.optional { "?" } else { "" }; + let def = match &prop.default_val { + Some(d) => format!(" = {d}"), + None => String::new(), + }; + format!(" {name}{opt}: {}{def}", prop.value) +} + +/// Format an entire interface for diffing. The format is: +/// ```text +/// Name extends Foo { +/// +/// propA?: string +/// propB: number = 42 +/// } +/// ``` +pub fn format_interface(name: &str, export: &RebuiltExport) -> String { + match export { + RebuiltExport::Interface { + type_params, + extends, + properties, + } => { + let mut header = name.to_string(); + if let Some(tp) = type_params { + header.push(' '); + header.push_str(tp); + } + if let Some(ext) = extends { + header.push(' '); + header.push_str(ext); + } + // Extra blank line after header so interface names always diff together + header.push_str(" {\n\n"); + + let props: Vec = properties + .iter() + .map(|(k, v)| format_prop(k, v)) + .collect(); + format!("{header}{}\n}}\n", props.join("\n")) + } + RebuiltExport::Untyped => { + format!("{name} {{\n\n UNTYPED\n}}\n") + } + } +} diff --git a/rsp-api-checker/src/lib.rs b/rsp-api-checker/src/lib.rs new file mode 100644 index 00000000000..e202e3cc6d5 --- /dev/null +++ b/rsp-api-checker/src/lib.rs @@ -0,0 +1,8 @@ +pub mod api_json; +pub mod commands; +pub mod differ; +pub mod interface_builder; +pub mod npm; +pub mod type_renderer; +pub mod workspace; +pub mod workspaces; diff --git a/rsp-api-checker/src/main.rs b/rsp-api-checker/src/main.rs new file mode 100644 index 00000000000..0fa4ad06a8c --- /dev/null +++ b/rsp-api-checker/src/main.rs @@ -0,0 +1,200 @@ +//! rsp-api-check: API comparison tool for the react-spectrum monorepo. +//! +//! Compares the public TypeScript API surface between published npm packages +//! and a local branch, producing a human-readable diff. +//! +//! # Commands +//! +//! 
- `get-published-api` — Download published packages from npm, extract `.d.ts` API +//! - `get-local-api` — Extract API from a local build's `.d.ts` files +//! - `compare` — Diff two API snapshots and show changes + +use rsp_api_check::commands; +use std::path::PathBuf; + +use clap::{Parser, Subcommand}; + +#[derive(Parser)] +#[command( + name = "rsp-api-check", + about = "API comparison tool for the react-spectrum monorepo", + version +)] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Download latest published packages from npm and extract their type API. + /// + /// This fetches each public package at its `latest` tag, then runs the + /// TypeScript extractor on the `.d.ts` files to produce api.json files. + #[command(name = "get-published-api")] + GetPublishedApi { + /// Root of the react-spectrum monorepo (used to discover package names). + #[arg(long, default_value = ".")] + repo_root: PathBuf, + + /// Output directory for the extracted API files. + #[arg(long, short, default_value = "dist/base-api")] + output: PathBuf, + + /// Max concurrent npm registry HTTP requests. + #[arg(long, default_value_t = 20)] + concurrency: usize, + + /// npm dist-tag to install (e.g. "latest", "nightly"). + #[arg(long, default_value = "latest")] + tag: String, + + /// Print a per-phase timing breakdown when the command finishes. + #[arg(long)] + timing: bool, + }, + + /// Extract the type API from a local build's `.d.ts` files. + /// + /// Assumes you've already run your build (e.g. `yarn build`). The tool + /// reads `.d.ts` entry points from each package's `package.json` and + /// produces api.json files. + #[command(name = "get-local-api")] + GetLocalApi { + /// Root of the react-spectrum monorepo. + #[arg(long, default_value = ".")] + repo_root: PathBuf, + + /// Output directory for the extracted API files. 
+ #[arg(long, short, default_value = "dist/branch-api")] + output: PathBuf, + + /// Print a per-phase timing breakdown when the command finishes. + #[arg(long)] + timing: bool, + }, + + /// Collect environment + per-package state so CI and local runs can be + /// diffed to pinpoint cross-package TS resolution failures. + /// + /// Reports tool versions, git state, each package's types-entry presence + /// and mtimes, and where workspace dep symlinks resolve. Write it as a + /// CI artifact and compare against a local run when the CI diff + /// disagrees with local. + #[command(name = "env-report")] + EnvReport { + /// Root of the react-spectrum monorepo. + #[arg(long, default_value = ".")] + repo_root: PathBuf, + + /// Write the report here. If omitted, dumps JSON to stdout. + #[arg(long, short)] + output: Option, + }, + + /// Compare two API snapshots and output a diff. + /// + /// By default, compares `dist/base-api` (published) against + /// `dist/branch-api` (local). The output format looks like TypeScript + /// interfaces with +/- diff markers. + #[command(name = "compare")] + Compare { + /// Directory containing the base (published) API files. + #[arg(long, default_value = "dist/base-api")] + base_api_dir: PathBuf, + + /// Directory containing the branch (local) API files. + #[arg(long, default_value = "dist/branch-api")] + branch_api_dir: PathBuf, + + /// Only compare a specific package (substring match). + #[arg(long)] + package: Option, + + /// Only compare a specific interface name. + #[arg(long, name = "interface")] + interface_filter: Option, + + /// Output GitHub-flavored markdown (for CI comments). + #[arg(long)] + ci: bool, + + /// Print extra debug information. + #[arg(long, short)] + verbose: bool, + + /// Output as JSON instead of text. + #[arg(long)] + json: bool, + + /// Print a per-phase timing breakdown when the command finishes. 
+ #[arg(long)] + timing: bool, + }, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = Cli::parse(); + + match cli.command { + Commands::GetPublishedApi { + repo_root, + output, + concurrency, + tag, + timing, + } => { + commands::get_published::execute(commands::get_published::GetPublishedOpts { + repo_root, + output_dir: output, + concurrency, + tag, + timing, + }) + .await?; + } + + Commands::GetLocalApi { repo_root, output, timing } => { + commands::get_local::execute(commands::get_local::GetLocalOpts { + repo_root, + output_dir: output, + timing, + }) + .await?; + } + + Commands::EnvReport { repo_root, output } => { + commands::env_report::execute(commands::env_report::EnvReportOpts { + repo_root, + output, + }) + .await?; + } + + Commands::Compare { + base_api_dir, + branch_api_dir, + package, + interface_filter, + ci, + verbose, + json, + timing, + } => { + commands::compare::execute(commands::compare::CompareOpts { + base_dir: base_api_dir, + branch_dir: branch_api_dir, + package_filter: package, + interface_filter, + is_ci: ci, + verbose, + json, + timing, + }) + .await?; + } + } + + Ok(()) +} diff --git a/rsp-api-checker/src/npm.rs b/rsp-api-checker/src/npm.rs new file mode 100644 index 00000000000..cb7cae62d9d --- /dev/null +++ b/rsp-api-checker/src/npm.rs @@ -0,0 +1,409 @@ +//! Query the npm registry to discover published packages and their latest versions. + +use std::collections::HashMap; +use std::path::Path; +use std::time::Duration; + +use anyhow::{Context, Result}; +use futures::stream::{self, StreamExt}; + +#[derive(Debug, Clone)] +pub struct PublishedPackage { + pub name: String, + pub version: String, +} + +/// Query the npm registry for a single package. Returns `None` if the package +/// is not published or doesn't have the requested tag. 
+///
+/// Uses the `/-/package/{name}/dist-tags` endpoint so the response is a tiny
+/// JSON object (just the dist-tags map) rather than the full package metadata
+/// which can be several megabytes for popular packages.
+///
+/// Retries up to 3 times with exponential back-off on transient errors
+/// (network failures, HTTP 429 rate-limit, HTTP 5xx server errors).
+async fn check_published(
+    client: &reqwest::Client,
+    name: &str,
+    tag: &str,
+) -> Result<Option<PublishedPackage>> {
+    let url = format!(
+        "https://registry.npmjs.org/-/package/{}/dist-tags",
+        name.replace('/', "%2f")
+    );
+    const MAX_ATTEMPTS: u32 = 3;
+    let mut delay_ms: u64 = 1_000;
+
+    let dist_tags: HashMap<String, String> = 'retry: {
+        for attempt in 0..MAX_ATTEMPTS {
+            let resp = client
+                .get(&url)
+                .header("Accept", "application/json")
+                .send()
+                .await;
+
+            let resp = match resp {
+                Ok(r) => r,
+                Err(e) => {
+                    if attempt + 1 < MAX_ATTEMPTS {
+                        eprintln!(" warn: network error for {name} (attempt {}/{MAX_ATTEMPTS}), retrying: {e}", attempt + 1);
+                        tokio::time::sleep(Duration::from_millis(delay_ms)).await;
+                        delay_ms *= 2;
+                        continue;
+                    }
+                    return Err(e).context(format!("network request for {name}"));
+                }
+            };
+
+            let status = resp.status();
+
+            // 404 → package is not published; return None immediately (no retry needed)
+            if status.as_u16() == 404 {
+                return Ok(None);
+            }
+
+            // 429 (rate-limit) or 5xx (server error) → retry with back-off
+            if status.as_u16() == 429 || status.is_server_error() {
+                if attempt + 1 < MAX_ATTEMPTS {
+                    eprintln!(" warn: HTTP {} for {name} (attempt {}/{MAX_ATTEMPTS}), retrying", status.as_u16(), attempt + 1);
+                    tokio::time::sleep(Duration::from_millis(delay_ms)).await;
+                    delay_ms *= 2;
+                    continue;
+                }
+                return Err(anyhow::anyhow!("HTTP {} from npm registry for {name}", status.as_u16()));
+            }
+
+            if !status.is_success() {
+                // Other non-success status (e.g.
+                // 401, 403) — treat as not published
+                return Ok(None);
+            }
+
+            match resp.json::<HashMap<String, String>>().await {
+                Ok(tags) => break 'retry tags,
+                Err(e) => {
+                    if attempt + 1 < MAX_ATTEMPTS {
+                        eprintln!(" warn: failed to parse npm response for {name} (attempt {}/{MAX_ATTEMPTS}), retrying: {e}", attempt + 1);
+                        tokio::time::sleep(Duration::from_millis(delay_ms)).await;
+                        delay_ms *= 2;
+                        continue;
+                    }
+                    return Err(e).context(format!("parsing npm response for {name}"));
+                }
+            }
+        }
+        // All attempts exhausted — should be unreachable because the last iteration
+        // always returns, but the compiler needs a value for this branch.
+        return Ok(None);
+    };
+
+    // Resolve the version for the requested tag
+    let version = if let Some(v) = dist_tags.get(tag) {
+        v.clone()
+    } else if tag == "nightly" {
+        // Fallback: if "nightly" tag doesn't exist, try "latest"
+        match dist_tags.get("latest") {
+            Some(v) => v.clone(),
+            None => return Ok(None),
+        }
+    } else {
+        return Ok(None);
+    };
+
+    // For the "latest" tag, skip packages whose latest version is itself a
+    // nightly build (i.e. the package has never had a stable release).
+    if tag == "latest" && version.contains("nightly") {
+        return Ok(None);
+    }
+
+    Ok(Some(PublishedPackage {
+        name: name.to_string(),
+        version,
+    }))
+}
+
+/// Discover all non-private packages under a directory and check which
+/// are published to npm. Returns the list of published packages.
+///
+/// If `preresolved_names` is `Some`, skips the fs-walk and uses those names
+/// directly. Callers should prefer yarn-workspaces-based discovery when
+/// available — see [`crate::workspaces::discover_workspaces`].
+pub async fn get_published_packages(
+    packages_dir: &Path,
+    concurrency: usize,
+    tag: &str,
+    preresolved_names: Option<Vec<String>>,
+) -> Result<Vec<PublishedPackage>> {
+    let local_packages = match preresolved_names {
+        Some(names) => names,
+        None => discover_local_packages(packages_dir)?,
+    };
+    let client = reqwest::Client::builder()
+        .user_agent("rsp-api-check")
+        .build()?;
+
+    println!(
+        "Checking {} packages against npm registry (concurrency: {concurrency})...",
+        local_packages.len()
+    );
+
+    let results: Vec<(String, Option<PublishedPackage>)> = stream::iter(local_packages)
+        .map(|name| {
+            let client = client.clone();
+            let tag = tag.to_string();
+            async move {
+                let name_for_log = name.clone();
+                let pkg = match check_published(&client, &name, &tag).await {
+                    Ok(pkg) => pkg,
+                    Err(e) => {
+                        eprintln!(" warn: failed to check {name}: {e}");
+                        None
+                    }
+                };
+                (name_for_log, pkg)
+            }
+        })
+        .buffer_unordered(concurrency)
+        .collect()
+        .await;
+
+    let mut published = Vec::new();
+    let mut dropped = Vec::new();
+    for (name, pkg) in results {
+        match pkg {
+            Some(p) => published.push(p),
+            None => dropped.push(name),
+        }
+    }
+
+    // Surface dropped packages loudly so "package missing from diff" doesn't
+    // silently hide behind a registry lookup miss.
+    if !dropped.is_empty() {
+        eprintln!(
+            " warn: {} package{} not published under tag `{tag}` — excluded from diff:",
+            dropped.len(),
+            if dropped.len() == 1 { "" } else { "s" }
+        );
+        for name in &dropped {
+            eprintln!(" • {name}");
+        }
+        if tag != "latest" {
+            eprintln!(" (consider retrying with --tag latest if these packages only have stable releases)");
+        }
+    }
+
+    println!("Found {} published packages", published.len());
+    Ok(published)
+}
+
+/// Walk a packages directory and return all non-private package names.
+fn discover_local_packages(dir: &Path) -> Result<Vec<String>> {
+    let mut names = Vec::new();
+    walk_for_packages(dir, 0, &mut names)?;
+    Ok(names)
+}
+
+/// Same traversal as `walk_for_packages`, but returns directory paths instead
+/// of names.
+/// Used by `env-report` to inspect each package's on-disk state.
+/// Includes private packages (the report wants to show them) — callers can
+/// filter after reading the package.json.
+pub(crate) fn walk_for_package_dirs(
+    dir: &Path,
+    depth: usize,
+    out: &mut Vec<std::path::PathBuf>,
+) -> Result<()> {
+    if depth > 4 {
+        return Ok(());
+    }
+    let entries = std::fs::read_dir(dir).context(format!("reading {}", dir.display()))?;
+    for entry in entries {
+        let entry = entry?;
+        let name = entry.file_name();
+        let name_str = name.to_string_lossy();
+        if name_str == "node_modules" || name_str == ".git" || name_str == "dev" {
+            continue;
+        }
+        let path = entry.path();
+        if path.is_dir() {
+            if path.join("package.json").exists() {
+                out.push(path.clone());
+            }
+            walk_for_package_dirs(&path, depth + 1, out)?;
+        }
+    }
+    Ok(())
+}
+
+pub(crate) fn walk_for_packages(dir: &Path, depth: usize, out: &mut Vec<String>) -> Result<()> {
+    if depth > 4 {
+        return Ok(());
+    }
+    let entries = std::fs::read_dir(dir).context(format!("reading {}", dir.display()))?;
+    for entry in entries {
+        let entry = entry?;
+        let name = entry.file_name();
+        let name_str = name.to_string_lossy();
+        if name_str == "node_modules" || name_str == ".git" || name_str == "dev" {
+            continue;
+        }
+        let path = entry.path();
+        if path.is_dir() {
+            let pkg_json = path.join("package.json");
+            if pkg_json.exists() {
+                if let Ok(contents) = std::fs::read_to_string(&pkg_json) {
+                    if let Ok(v) = serde_json::from_str::<serde_json::Value>(&contents) {
+                        let is_private = v.get("private").and_then(|p| p.as_bool()).unwrap_or(false);
+                        let pkg_name = v.get("name").and_then(|n| n.as_str());
+                        if let Some(name) = pkg_name {
+                            if !is_private {
+                                out.push(name.to_string());
+                            }
+                        }
+                    }
+                }
+            }
+            walk_for_packages(&path, depth + 1, out)?;
+        }
+    }
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::fs;
+    use tempfile::TempDir;
+
+    fn write_pkg_json(dir: &std::path::Path, name: &str, private: bool) {
+        let json = if private {
format!(r#"{{"name":"{name}","private":true}}"#) + } else { + format!(r#"{{"name":"{name}"}}"#) + }; + fs::write(dir.join("package.json"), json).unwrap(); + } + + // ── discover_local_packages ──────────────────────────────────────────── + + #[test] + fn test_discovers_public_package() { + let dir = TempDir::new().unwrap(); + let pkg = dir.path().join("button"); + fs::create_dir(&pkg).unwrap(); + write_pkg_json(&pkg, "@react-aria/button", false); + + let names = discover_local_packages(dir.path()).unwrap(); + assert!(names.contains(&"@react-aria/button".to_string())); + } + + #[test] + fn test_skips_private_package() { + let dir = TempDir::new().unwrap(); + let pkg = dir.path().join("internal"); + fs::create_dir(&pkg).unwrap(); + write_pkg_json(&pkg, "@internal/pkg", true); + + let names = discover_local_packages(dir.path()).unwrap(); + assert!(!names.contains(&"@internal/pkg".to_string())); + } + + // @adobe/react-spectrum is a public API package and must be included in + // the npm check so that the published and local extractions are symmetric. + // If it is excluded here but the TypeScript extractor finds it under + // packages/, every one of its exports will appear as "added" in the diff. 
+ #[test] + fn walk_for_packages_includes_adobe_react_spectrum() { + let dir = TempDir::new().unwrap(); + let pkg = dir.path().join("adobe-pkg"); + fs::create_dir(&pkg).unwrap(); + write_pkg_json(&pkg, "@adobe/react-spectrum", false); + + let mut names = Vec::new(); + walk_for_packages(dir.path(), 0, &mut names).unwrap(); + assert!( + names.contains(&"@adobe/react-spectrum".to_string()), + "@adobe/react-spectrum must be included in the npm package list: \ + excluding it causes a spurious diff because the local extractor \ + always finds it under packages/@adobe/react-spectrum/" + ); + } + + #[test] + fn test_skips_node_modules() { + let dir = TempDir::new().unwrap(); + let nm = dir.path().join("node_modules").join("some-dep"); + fs::create_dir_all(&nm).unwrap(); + write_pkg_json(&nm, "some-dep", false); + + let names = discover_local_packages(dir.path()).unwrap(); + assert!(!names.contains(&"some-dep".to_string())); + } + + #[test] + fn test_skips_git_directory() { + let dir = TempDir::new().unwrap(); + let git = dir.path().join(".git").join("hooks"); + fs::create_dir_all(&git).unwrap(); + // .git itself doesn't have a package.json, but ensure we don't walk into it + let names = discover_local_packages(dir.path()).unwrap(); + assert!(names.is_empty()); + } + + #[test] + fn test_discovers_nested_scoped_package() { + let dir = TempDir::new().unwrap(); + let nested = dir.path().join("packages").join("scope").join("widget"); + fs::create_dir_all(&nested).unwrap(); + write_pkg_json(&nested, "@scope/widget", false); + + let names = discover_local_packages(dir.path()).unwrap(); + assert!(names.contains(&"@scope/widget".to_string())); + } + + #[test] + fn test_depth_limit_stops_at_4() { + let dir = TempDir::new().unwrap(); + // The walker finds a package when it reads its parent directory. + // walk(parent, depth) reads children and checks their package.json. + // walk stops when depth > 4, i.e. at depth 5. 
+        // So a package whose parent would be processed at depth 5 is never found.
+        // Parent at depth 5 = dir/a/b/c/d/e → package in dir/a/b/c/d/e/f/
+        let deep = dir.path().join("a").join("b").join("c").join("d").join("e").join("f");
+        fs::create_dir_all(&deep).unwrap();
+        write_pkg_json(&deep, "@too/deep", false);
+
+        let names = discover_local_packages(dir.path()).unwrap();
+        assert!(!names.contains(&"@too/deep".to_string()));
+    }
+
+    // ── dist-tags endpoint deserialization ────────────────────────────────
+
+    #[test]
+    fn test_dist_tags_endpoint_parses_correctly() {
+        // The /-/package/{name}/dist-tags endpoint returns a flat map of tag → version.
+        let json = r#"{"latest":"1.2.3","nightly":"2.0.0-nightly.1"}"#;
+        let tags: HashMap<String, String> = serde_json::from_str(json).unwrap();
+        assert_eq!(tags.get("latest").unwrap(), "1.2.3");
+        assert_eq!(tags.get("nightly").unwrap(), "2.0.0-nightly.1");
+    }
+
+    #[test]
+    fn test_nightly_only_package_skipped_for_latest_tag() {
+        // A package whose "latest" dist-tag points to a nightly version
+        // should be excluded — it has never had a stable release.
+        let latest_version = "2.0.0-nightly.1";
+        assert!(latest_version.contains("nightly"));
+    }
+
+    #[test]
+    fn test_stable_latest_tag_not_skipped() {
+        // A package whose "latest" dist-tag points to a stable version is included.
+        let latest_version = "3.7.0";
+        assert!(!latest_version.contains("nightly"));
+    }
+
+    #[test]
+    fn test_dist_tags_empty_map_parses() {
+        let tags: HashMap<String, String> = serde_json::from_str(r#"{}"#).unwrap();
+        assert!(tags.is_empty());
+    }
+}
diff --git a/rsp-api-checker/src/type_renderer.rs b/rsp-api-checker/src/type_renderer.rs
new file mode 100644
index 00000000000..9cb6fc8e8f9
--- /dev/null
+++ b/rsp-api-checker/src/type_renderer.rs
@@ -0,0 +1,814 @@
+//! Renders [`TypeNode`] values into human-readable type strings that look like TypeScript.
+//!
+//! This is the single source of truth, replacing the triplicated `processType` functions
+//! in the original JS codebase.
+
+use std::collections::{HashMap, HashSet};
+
+use crate::api_json::TypeNode;
+
+/// Tracks state while rendering types: dependency links and indentation depth.
+pub struct RenderContext {
+    /// The fully-qualified name of the export currently being processed
+    /// (e.g. `/@react-aria/button:ButtonProps`).
+    pub current_export: String,
+    /// Maps `exportName → [dependency names]`.
+    pub dependencies: HashMap<String, Vec<String>>,
+    /// Current indentation depth for nested objects.
+    pub(crate) depth: usize,
+}
+
+impl RenderContext {
+    pub fn new() -> Self {
+        Self {
+            current_export: String::new(),
+            dependencies: HashMap::new(),
+            depth: 0,
+        }
+    }
+
+    /// Record that `current_export` depends on `dep_name`.
+    fn add_dependency(&mut self, dep_name: &str) {
+        if self.current_export.is_empty() {
+            return;
+        }
+        let deps = self
+            .dependencies
+            .entry(self.current_export.clone())
+            .or_default();
+        if !deps.contains(&dep_name.to_string()) {
+            deps.push(dep_name.to_string());
+        }
+    }
+}
+
+/// Render a [`TypeNode`] to a display string.
+pub fn render_type(node: &TypeNode, ctx: &mut RenderContext) -> String {
+    match node {
+        // ── Primitives ──────────────────────────────────────────────
+        TypeNode::Any => "any".into(),
+        TypeNode::Null => "null".into(),
+        TypeNode::Undefined => "undefined".into(),
+        TypeNode::Void => "void".into(),
+        TypeNode::Unknown => "unknown".into(),
+        TypeNode::Never => "never".into(),
+        TypeNode::This => "this".into(),
+        TypeNode::Symbol => "symbol".into(),
+
+        TypeNode::Boolean { ..
} => "boolean".into(), + TypeNode::Number { value: None } => "number".into(), + TypeNode::Number { value: Some(v) } => v.to_string(), + TypeNode::String { value: None } => "string".into(), + TypeNode::String { value: Some(v) } => format!("'{v}'"), + + // ── Composite ─────────────────────────────────────────────── + TypeNode::Union { elements } => elements + .iter() + .map(|e| render_type(e, ctx)) + .collect::>() + .join(" | "), + + TypeNode::Intersection { types } => { + let inner = types + .iter() + .map(|t| render_type(t, ctx)) + .collect::>() + .join(" & "); + format!("({inner})") + } + + TypeNode::Array { element_type } => { + format!("Array<{}>", render_type(element_type, ctx)) + } + + TypeNode::Tuple { elements } => { + let inner = elements + .iter() + .map(|e| render_type(e, ctx)) + .collect::>() + .join(", "); + format!("[{inner}]") + } + + TypeNode::Object { + properties: Some(props), + exact, + } => { + let open = if *exact { "{\\" } else { "{" }; + if props.is_empty() { + return "{}".into(); + } + let mut lines = Vec::new(); + ctx.depth += 2; + // Sort by key so inline object literals render deterministically — + // the TS compiler can emit property order that depends on entry-file + // order, which would otherwise produce spurious diff churn. + let mut sorted: Vec<_> = props.iter().collect(); + sorted.sort_by(|(a, _), (b, _)| a.cmp(b)); + for (_, prop) in sorted { + let indent = " ".repeat(ctx.depth); + let line = render_property(prop, ctx); + lines.push(format!("{indent}{line}")); + } + ctx.depth -= 2; + let close_indent = " ".repeat(ctx.depth); + let close = if *exact { format!("{close_indent}\\}}") } else { format!("{close_indent}}}") }; + format!("{open}\n{}\n{close}", lines.join("\n")) + } + TypeNode::Object { properties: None, .. 
} => "{}".into(), + + // ── Generics / type-level ─────────────────────────────────── + TypeNode::Application { + base, + type_parameters, + } => { + let base_name = match base.as_ref() { + TypeNode::Identifier { name } => name.clone(), + other => render_type(other, ctx), + }; + let args = type_parameters + .iter() + .map(|t| render_type(t, ctx)) + .collect::>() + .join(", "); + format!("{base_name}<{args}>") + } + + TypeNode::TypeParameter { + name, + constraint, + default, + } => { + let mut out = name.clone(); + if let Some(c) = constraint { + out.push_str(&format!(" extends {}", render_type(c, ctx))); + } + if let Some(d) = default { + out.push_str(&format!(" = {}", render_type(d, ctx))); + } + out + } + + TypeNode::Conditional { + check_type, + extends_type, + true_type, + false_type, + } => { + let sep = match false_type.as_ref() { + TypeNode::Conditional { .. } => " :\n", + _ => " : ", + }; + format!( + "{} extends {} ? {}{}{}", + render_type(check_type, ctx), + render_type(extends_type, ctx), + render_type(true_type, ctx), + sep, + render_type(false_type, ctx), + ) + } + + TypeNode::IndexedAccess { + object_type, + index_type, + } => { + format!( + "{}[{}]", + render_type(object_type, ctx), + render_type(index_type, ctx), + ) + } + + TypeNode::Keyof { keyof } => { + format!("keyof {}", render_type(keyof, ctx)) + } + + TypeNode::TypeOperator { operator, value } => { + format!("{operator} {}", render_type(value, ctx)) + } + + TypeNode::Mapped { + type_parameter, + type_annotation, + readonly, + } => { + let prefix = match readonly.as_deref() { + Some("-") => "-readonly", + _ => "", + }; + format!( + "{prefix}{}: {}", + render_type(type_parameter, ctx), + render_type(type_annotation, ctx), + ) + } + + TypeNode::Infer { value } => format!("infer {value}"), + + TypeNode::Template { elements } => { + let parts: Vec = elements + .iter() + .map(|e| match e { + TypeNode::String { value: Some(v) } => v.clone(), + other => format!("${{{}}}", render_type(other, ctx)), 
+ }) + .collect(); + format!("`{}`", parts.join("")) + } + + // ── Named references ──────────────────────────────────────── + TypeNode::Identifier { name } => name.clone(), + + TypeNode::Link { id } => { + if let Some(id) = id { + let name = id + .rfind(':') + .map(|i| &id[i + 1..]) + .unwrap_or(id) + .to_string(); + ctx.add_dependency(&name); + name + } else { + "unknown".into() + } + } + + // ── Declarations (appear when flattening) ─────────────────── + TypeNode::Function { + parameters, + return_type, + .. + } => { + let params = parameters + .iter_ordered() + .iter() + .map(|(_, p)| render_type(p, ctx)) + .collect::>() + .join(", "); + let ret = return_type + .as_ref() + .map(|r| render_type(r, ctx)) + .unwrap_or_else(|| "void".into()); + format!("({params}) => {ret}") + } + + TypeNode::Parameter { value, .. } => render_type(value, ctx), + + TypeNode::Property { .. } | TypeNode::Method { .. } => { + // Properties/methods are rendered via render_property + render_property(node, ctx) + } + + TypeNode::Interface { .. } | TypeNode::Component { .. } | TypeNode::Alias { .. } => { + // These are handled at the interface_builder level + "UNTYPED".into() + } + } +} + +/// Render a property/method node as a single line: `name?: Type = default` +fn render_property(node: &TypeNode, ctx: &mut RenderContext) -> String { + match node { + TypeNode::Property { + name, + index_type, + value, + optional, + default, + .. + } => { + let name_part = if let Some(idx) = index_type { + format!("[{}: {}]", name, render_type(idx, ctx)) + } else { + name.clone() + }; + let opt = if *optional { "?" } else { "" }; + let val = render_type(value, ctx); + let def = format_default(default); + format!("{name_part}{opt}: {val}{def}") + } + TypeNode::Method { + name, + value, + optional, + default, + .. + } => { + let opt = if *optional { "?" 
} else { "" }; + let val = render_type(value, ctx); + let def = format_default(default); + format!("{name}{opt}: {val}{def}") + } + _ => render_type(node, ctx), + } +} + +fn format_default(default: &Option) -> String { + match default { + Some(serde_json::Value::Null) => String::new(), + Some(v) => { + let s = match v { + serde_json::Value::String(s) => s.clone(), + other => other.to_string(), + }; + format!(" = {s}") + } + None => String::new(), + } +} + +/// Follow a chain of dependency names to find which changed dependencies +/// caused this export to change. +pub fn follow_dependencies( + name: &str, + deps: &HashMap>, + all_changed: &HashSet, +) -> Vec { + let mut visited = HashSet::new(); + let mut result = Vec::new(); + fn visit( + name: &str, + deps: &HashMap>, + all_changed: &HashSet, + visited: &mut HashSet, + result: &mut Vec, + ) { + if !visited.insert(name.to_string()) { + return; + } + if let Some(dep_names) = deps.get(name) { + for dep in dep_names { + if all_changed.contains(dep) { + result.push(dep.clone()); + } + visit(dep, deps, all_changed, visited, result); + } + } + } + visit(name, deps, all_changed, &mut visited, &mut result); + result +} + +/// Invert the dependency graph: from `A depends on [B, C]` to `B is depended on by [A]`. +pub fn invert_dependencies( + deps: &HashMap>, +) -> HashMap> { + let mut inverted: HashMap> = HashMap::new(); + for (key, values) in deps { + for val in values { + inverted + .entry(val.clone()) + .or_default() + .push(key.clone()); + } + } + inverted +} + +/// Follow inverted dependencies to find all exports affected by a change. 
+pub fn follow_inverted_dependencies(
+    name: &str,
+    inv_deps: &HashMap<String, Vec<String>>,
+) -> Vec<String> {
+    let mut visited = HashSet::new();
+    let mut result = Vec::new();
+    fn visit(
+        name: &str,
+        inv_deps: &HashMap<String, Vec<String>>,
+        visited: &mut HashSet<String>,
+        result: &mut Vec<String>,
+    ) {
+        if !visited.insert(name.to_string()) {
+            return;
+        }
+        if let Some(affected) = inv_deps.get(name) {
+            for dep in affected {
+                result.push(dep.clone());
+                visit(dep, inv_deps, visited, result);
+            }
+        }
+    }
+    visit(name, inv_deps, &mut visited, &mut result);
+    result
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::api_json::{ParameterMap, TypeNode};
+    use indexmap::IndexMap;
+
+    fn ctx() -> RenderContext {
+        RenderContext::new()
+    }
+
+    fn render(node: &TypeNode) -> String {
+        render_type(node, &mut ctx())
+    }
+
+    // ── Primitives ─────────────────────────────────────────────────────────
+
+    #[test]
+    fn test_primitive_unit_variants() {
+        assert_eq!(render(&TypeNode::Any), "any");
+        assert_eq!(render(&TypeNode::Null), "null");
+        assert_eq!(render(&TypeNode::Undefined), "undefined");
+        assert_eq!(render(&TypeNode::Void), "void");
+        assert_eq!(render(&TypeNode::Unknown), "unknown");
+        assert_eq!(render(&TypeNode::Never), "never");
+        assert_eq!(render(&TypeNode::This), "this");
+        assert_eq!(render(&TypeNode::Symbol), "symbol");
+    }
+
+    #[test]
+    fn test_boolean_always_renders_as_boolean() {
+        assert_eq!(render(&TypeNode::Boolean { value: None }), "boolean");
+        assert_eq!(render(&TypeNode::Boolean { value: Some(true) }), "boolean");
+        assert_eq!(render(&TypeNode::Boolean { value: Some(false) }), "boolean");
+    }
+
+    #[test]
+    fn test_string_with_and_without_value() {
+        assert_eq!(render(&TypeNode::String { value: None }), "string");
+        assert_eq!(render(&TypeNode::String { value: Some("hello".into()) }), "'hello'");
+    }
+
+    #[test]
+    fn test_number_with_and_without_value() {
+        assert_eq!(render(&TypeNode::Number { value: None }), "number");
+        assert_eq!(
+            render(&TypeNode::Number { value:
Some(serde_json::Number::from(42)) }),
+            "42"
+        );
+    }
+
+    // ── Composite ──────────────────────────────────────────────────────────
+
+    #[test]
+    fn test_union() {
+        let node = TypeNode::Union {
+            elements: vec![
+                TypeNode::String { value: None },
+                TypeNode::Null,
+                TypeNode::Undefined,
+            ],
+        };
+        assert_eq!(render(&node), "string | null | undefined");
+    }
+
+    #[test]
+    fn test_intersection() {
+        let node = TypeNode::Intersection {
+            types: vec![
+                TypeNode::Identifier { name: "A".into() },
+                TypeNode::Identifier { name: "B".into() },
+            ],
+        };
+        assert_eq!(render(&node), "(A & B)");
+    }
+
+    #[test]
+    fn test_array() {
+        let node = TypeNode::Array {
+            element_type: Box::new(TypeNode::String { value: None }),
+        };
+        assert_eq!(render(&node), "Array<string>");
+    }
+
+    #[test]
+    fn test_tuple() {
+        let node = TypeNode::Tuple {
+            elements: vec![TypeNode::String { value: None }, TypeNode::Number { value: None }],
+        };
+        assert_eq!(render(&node), "[string, number]");
+    }
+
+    #[test]
+    fn test_empty_object() {
+        assert_eq!(render(&TypeNode::Object { properties: None, exact: false }), "{}");
+        assert_eq!(
+            render(&TypeNode::Object { properties: Some(IndexMap::new()), exact: false }),
+            "{}"
+        );
+    }
+
+    #[test]
+    fn test_object_with_property() {
+        let mut props = IndexMap::new();
+        props.insert(
+            "x".into(),
+            TypeNode::Property {
+                name: "x".into(),
+                index_type: None,
+                value: Box::new(TypeNode::Number { value: None }),
+                optional: false,
+                description: None,
+                access: None,
+                default: None,
+            },
+        );
+        let node = TypeNode::Object { properties: Some(props), exact: false };
+        let result = render(&node);
+        assert_eq!(result, "{\n  x: number\n}");
+    }
+
+    // ── Generics ───────────────────────────────────────────────────────────
+
+    #[test]
+    fn test_application() {
+        let node = TypeNode::Application {
+            base: Box::new(TypeNode::Identifier { name: "Promise".into() }),
+            type_parameters: vec![TypeNode::String { value: None }],
+        };
+        assert_eq!(render(&node), "Promise<string>");
+    }
+
+ #[test] + fn test_type_parameter_simple() { + assert_eq!( + render(&TypeNode::TypeParameter { name: "T".into(), constraint: None, default: None }), + "T" + ); + } + + #[test] + fn test_type_parameter_with_constraint() { + let node = TypeNode::TypeParameter { + name: "T".into(), + constraint: Some(Box::new(TypeNode::String { value: None })), + default: None, + }; + assert_eq!(render(&node), "T extends string"); + } + + #[test] + fn test_type_parameter_with_default() { + let node = TypeNode::TypeParameter { + name: "T".into(), + constraint: None, + default: Some(Box::new(TypeNode::String { value: None })), + }; + assert_eq!(render(&node), "T = string"); + } + + #[test] + fn test_conditional_simple_false_type() { + let node = TypeNode::Conditional { + check_type: Box::new(TypeNode::Any), + extends_type: Box::new(TypeNode::String { value: None }), + true_type: Box::new(TypeNode::String { value: None }), + false_type: Box::new(TypeNode::Never), + }; + assert_eq!(render(&node), "any extends string ? 
string : never"); + } + + #[test] + fn test_indexed_access() { + let node = TypeNode::IndexedAccess { + object_type: Box::new(TypeNode::Identifier { name: "Props".into() }), + index_type: Box::new(TypeNode::String { value: Some("key".into()) }), + }; + assert_eq!(render(&node), "Props['key']"); + } + + #[test] + fn test_keyof() { + let node = TypeNode::Keyof { + keyof: Box::new(TypeNode::Identifier { name: "Props".into() }), + }; + assert_eq!(render(&node), "keyof Props"); + } + + #[test] + fn test_type_operator() { + let node = TypeNode::TypeOperator { + operator: "readonly".into(), + value: Box::new(TypeNode::Array { element_type: Box::new(TypeNode::String { value: None }) }), + }; + assert_eq!(render(&node), "readonly Array"); + } + + #[test] + fn test_mapped() { + let node = TypeNode::Mapped { + type_parameter: Box::new(TypeNode::TypeParameter { + name: "K".into(), + constraint: None, + default: None, + }), + type_annotation: Box::new(TypeNode::String { value: None }), + readonly: None, + }; + assert_eq!(render(&node), "K: string"); + } + + #[test] + fn test_infer() { + assert_eq!(render(&TypeNode::Infer { value: "R".into() }), "infer R"); + } + + #[test] + fn test_template_with_literal_and_interpolation() { + let node = TypeNode::Template { + elements: vec![ + TypeNode::String { value: Some("prefix-".into()) }, + TypeNode::Identifier { name: "T".into() }, + TypeNode::String { value: Some("-suffix".into()) }, + ], + }; + assert_eq!(render(&node), "`prefix-${T}-suffix`"); + } + + // ── Named references ─────────────────────────────────────────────────── + + #[test] + fn test_identifier() { + assert_eq!(render(&TypeNode::Identifier { name: "ReactNode".into() }), "ReactNode"); + } + + #[test] + fn test_link_with_colon_extracts_short_name() { + let node = TypeNode::Link { id: Some("@react-aria/button:ButtonProps".into()) }; + assert_eq!(render(&node), "ButtonProps"); + } + + #[test] + fn test_link_without_colon_uses_full_id() { + let node = TypeNode::Link { id: 
Some("MyType".into()) }; + assert_eq!(render(&node), "MyType"); + } + + #[test] + fn test_link_no_id_renders_unknown() { + assert_eq!(render(&TypeNode::Link { id: None }), "unknown"); + } + + #[test] + fn test_link_records_dependency_on_current_export() { + let node = TypeNode::Link { id: Some("@pkg:ButtonProps".into()) }; + let mut ctx = RenderContext::new(); + ctx.current_export = "ComboBoxProps".into(); + render_type(&node, &mut ctx); + let deps = ctx.dependencies.get("ComboBoxProps").unwrap(); + assert!(deps.contains(&"ButtonProps".to_string())); + } + + #[test] + fn test_link_does_not_record_dependency_without_current_export() { + let node = TypeNode::Link { id: Some("@pkg:ButtonProps".into()) }; + let mut ctx = RenderContext::new(); + render_type(&node, &mut ctx); + assert!(ctx.dependencies.is_empty()); + } + + #[test] + fn test_link_deduplicates_dependencies() { + let node = TypeNode::Link { id: Some("@pkg:ButtonProps".into()) }; + let mut ctx = RenderContext::new(); + ctx.current_export = "A".into(); + render_type(&node, &mut ctx); + render_type(&node, &mut ctx); // same dep twice + let deps = ctx.dependencies.get("A").unwrap(); + assert_eq!(deps.iter().filter(|d| *d == "ButtonProps").count(), 1); + } + + // ── Function / Parameter ─────────────────────────────────────────────── + + #[test] + fn test_function_with_parameters_and_return() { + let mut params = IndexMap::new(); + params.insert( + "x".into(), + TypeNode::Parameter { + name: Some("x".into()), + value: Box::new(TypeNode::String { value: None }), + optional: false, + rest: false, + }, + ); + let node = TypeNode::Function { + id: None, + name: None, + parameters: ParameterMap::Map(params), + return_type: Some(Box::new(TypeNode::Void)), + type_parameters: vec![], + description: None, + access: None, + }; + assert_eq!(render(&node), "(string) => void"); + } + + #[test] + fn test_function_no_params_no_return_type_renders_void() { + let node = TypeNode::Function { + id: None, + name: None, + 
parameters: ParameterMap::Map(IndexMap::new()), + return_type: None, + type_parameters: vec![], + description: None, + access: None, + }; + assert_eq!(render(&node), "() => void"); + } + + #[test] + fn test_parameter_renders_inner_value() { + let node = TypeNode::Parameter { + name: Some("x".into()), + value: Box::new(TypeNode::Number { value: None }), + optional: false, + rest: false, + }; + assert_eq!(render(&node), "number"); + } + + // ── Dependency graph functions ───────────────────────────────────────── + + #[test] + fn test_follow_dependencies_direct_match() { + let mut deps = HashMap::new(); + deps.insert("A".into(), vec!["B".into()]); + + let mut changed = HashSet::new(); + changed.insert("B".into()); + + let result = follow_dependencies("A", &deps, &changed); + assert!(result.contains(&"B".to_string())); + } + + #[test] + fn test_follow_dependencies_no_changed_match() { + let mut deps = HashMap::new(); + deps.insert("A".into(), vec!["B".into()]); + + let changed = HashSet::new(); // nothing changed + + let result = follow_dependencies("A", &deps, &changed); + assert!(result.is_empty()); + } + + #[test] + fn test_follow_dependencies_transitive_chain() { + let mut deps = HashMap::new(); + deps.insert("A".into(), vec!["B".into()]); + deps.insert("B".into(), vec!["C".into()]); + + let mut changed = HashSet::new(); + changed.insert("C".into()); + + let result = follow_dependencies("A", &deps, &changed); + assert!(result.contains(&"C".to_string())); + } + + #[test] + fn test_follow_dependencies_cycle_does_not_loop() { + let mut deps = HashMap::new(); + deps.insert("A".into(), vec!["B".into()]); + deps.insert("B".into(), vec!["A".into()]); + + let mut changed = HashSet::new(); + changed.insert("A".into()); + + // Should terminate without infinite loop + let result = follow_dependencies("A", &deps, &changed); + let _ = result; // just checking it terminates + } + + #[test] + fn test_invert_dependencies() { + let mut deps = HashMap::new(); + 
deps.insert("A".into(), vec!["B".into(), "C".into()]); + deps.insert("X".into(), vec!["B".into()]); + + let inv = invert_dependencies(&deps); + let b_affected = inv.get("B").unwrap(); + assert!(b_affected.contains(&"A".to_string())); + assert!(b_affected.contains(&"X".to_string())); + let c_affected = inv.get("C").unwrap(); + assert!(c_affected.contains(&"A".to_string())); + } + + #[test] + fn test_follow_inverted_dependencies_transitive() { + let mut inv = HashMap::new(); + inv.insert("B".into(), vec!["A".into()]); + inv.insert("A".into(), vec!["Root".into()]); + + let result = follow_inverted_dependencies("B", &inv); + assert!(result.contains(&"A".to_string())); + assert!(result.contains(&"Root".to_string())); + } + + #[test] + fn test_follow_inverted_dependencies_cycle_terminates() { + let mut inv = HashMap::new(); + inv.insert("A".into(), vec!["B".into()]); + inv.insert("B".into(), vec!["A".into()]); + + let result = follow_inverted_dependencies("A", &inv); + assert!(result.contains(&"B".to_string())); + // Should terminate, not panic + } +} diff --git a/rsp-api-checker/src/workspace.rs b/rsp-api-checker/src/workspace.rs new file mode 100644 index 00000000000..1bd1c8c1509 --- /dev/null +++ b/rsp-api-checker/src/workspace.rs @@ -0,0 +1,295 @@ +//! Helpers for setting up temporary workspaces and running subprocesses. + +use std::path::{Path, PathBuf}; +use std::process::Stdio; + +use anyhow::{bail, Context, Result}; +use tokio::process::Command; + +/// Run a command, inheriting stdio. Fails if exit code is non-zero. +pub async fn run(cmd: &str, args: &[&str], cwd: &Path) -> Result<()> { + println!(" $ {} {}", cmd, args.join(" ")); + let status = Command::new(cmd) + .args(args) + .current_dir(cwd) + .stdin(Stdio::null()) + .status() + .await + .context(format!("failed to spawn `{cmd}`"))?; + + if !status.success() { + bail!("`{cmd} {}` exited with {status}", args.join(" ")); + } + Ok(()) +} + +/// Run `npm install …`, capturing stderr. 
On failure, parse the output for +/// 404 / ETARGET entries so we can tell the user *which* package(s) npm could +/// not resolve — not just "the whole thing exploded". +pub async fn run_npm_install(args: &[&str], cwd: &Path) -> Result<()> { + println!(" $ npm {}", args.join(" ")); + let output = Command::new("npm") + .args(args) + .current_dir(cwd) + .stdin(Stdio::null()) + .stdout(Stdio::inherit()) + .stderr(Stdio::piped()) + .output() + .await + .context("failed to spawn `npm`")?; + + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr); + // Forward the raw stderr so the user still sees npm's own message. + eprint!("{stderr}"); + + let unresolved = extract_unresolved_packages(&stderr); + if !unresolved.is_empty() { + eprintln!(); + eprintln!("npm could not resolve the following package{}:", if unresolved.len() == 1 { "" } else { "s" }); + for pkg in &unresolved { + eprintln!(" • {pkg}"); + } + eprintln!(); + eprintln!( + "This usually means the package isn't published under the requested \ + dist-tag. Retry with `--tag next` or check `npm view dist-tags` \ + for the available tags." + ); + } + + bail!("`npm {}` exited with {}", args.join(" "), output.status); +} + +/// Parse npm's stderr for 404 / ETARGET / ENOVERSIONS entries. npm emits +/// messages like `npm error 404 'react-aria@latest' is not in this registry.` +/// or `npm ERR! code ETARGET` followed by a line naming the package. We keep +/// the extraction permissive — missing an entry is better than listing the +/// wrong one. +fn extract_unresolved_packages(stderr: &str) -> Vec { + let mut out: Vec = Vec::new(); + for line in stderr.lines() { + // 404 lines: `npm error 404 '@scope/name@latest' is not in this registry` + // or the older `npm ERR! 404 Not Found - GET https://.../name`. 
+ if line.contains("404") { + if let Some(pkg) = extract_quoted_pkg_spec(line) { + if !out.contains(&pkg) { + out.push(pkg); + } + continue; + } + } + // ETARGET / ENOVERSIONS: npm prints `No matching version found for name@spec.` + if line.contains("No matching version found for ") { + if let Some(idx) = line.find("No matching version found for ") { + let rest = &line[idx + "No matching version found for ".len()..]; + let pkg = rest + .trim_end_matches('.') + .trim_end_matches(' ') + .to_string(); + if !pkg.is_empty() && !out.contains(&pkg) { + out.push(pkg); + } + } + } + } + out +} + +/// Extract the first `'...'` or `"..."` quoted package spec from a line. +fn extract_quoted_pkg_spec(line: &str) -> Option { + for quote in ['\'', '"'] { + if let Some(start) = line.find(quote) { + if let Some(end) = line[start + 1..].find(quote) { + let spec = &line[start + 1..start + 1 + end]; + // Heuristic: must look like a package spec (contains @ or is a bare name) + if !spec.is_empty() && !spec.contains(' ') { + return Some(spec.to_string()); + } + } + } + } + None +} + +/// Run a command and capture stdout. +pub async fn run_capture(cmd: &str, args: &[&str], cwd: &Path) -> Result { + let output = Command::new(cmd) + .args(args) + .current_dir(cwd) + .stdin(Stdio::null()) + .stderr(Stdio::inherit()) + .output() + .await + .context(format!("failed to spawn `{cmd}`"))?; + + if !output.status.success() { + bail!( + "`{cmd} {}` exited with {}", + args.join(" "), + output.status + ); + } + Ok(String::from_utf8_lossy(&output.stdout).to_string()) +} + +/// Resolve the path to the ts-extractor script, looking in several locations. +pub fn find_extractor_script() -> Result { + // 1. 
Next to the binary + let exe = std::env::current_exe()?; + let exe_dir = exe.parent().unwrap(); + let candidates = [ + exe_dir.join("ts-extractor").join("extract-api.ts"), + exe_dir + .join("..") + .join("ts-extractor") + .join("extract-api.ts"), + // When running from the project directory + PathBuf::from("ts-extractor").join("extract-api.ts"), + ]; + + for candidate in &candidates { + if candidate.exists() { + return Ok(candidate.canonicalize()?); + } + } + + bail!( + "Could not find ts-extractor/extract-api.ts. \ + Looked in: {}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + .join(", ") + ); +} + +/// Run the TypeScript API extractor on a packages directory. +/// +/// When `workspaces_file` is `Some`, its path is passed through as +/// `--workspaces-file` so the extractor uses the yarn-supplied list instead +/// of doing its own fs-walk. +pub async fn run_extractor( + packages_dir: &Path, + output_dir: &Path, + check_build_freshness: bool, + workspaces_file: Option<&Path>, +) -> Result<()> { + let script = find_extractor_script()?; + let script_dir = script.parent().unwrap(); + + // Resolve to absolute paths so they work regardless of subprocess cwd + let abs_packages = std::fs::canonicalize(packages_dir) + .context(format!("resolving packages dir: {}", packages_dir.display()))?; + let abs_output = std::env::current_dir()?.join(output_dir); + std::fs::create_dir_all(&abs_output) + .context(format!("creating output dir: {}", abs_output.display()))?; + let abs_output = std::fs::canonicalize(&abs_output)?; + + // Ensure ts-extractor dependencies are installed + let node_modules = script_dir.join("node_modules"); + if !node_modules.exists() { + println!("Installing ts-extractor dependencies..."); + run("npm", &["install", "--no-audit", "--no-fund"], script_dir).await?; + } + + println!("Running API extractor..."); + let abs_packages_str = abs_packages.to_str().unwrap().to_string(); + let abs_output_str = 
abs_output.to_str().unwrap().to_string(); + let workspaces_str = workspaces_file.map(|p| p.to_string_lossy().into_owned()); + + let mut extractor_args: Vec<&str> = vec![ + "tsx", + script.to_str().unwrap(), + "--packages-dir", + &abs_packages_str, + "--output-dir", + &abs_output_str, + ]; + // Only meaningful against the local workspace — published tarballs are + // immutable, so mtimes there don't represent "out of date". + if check_build_freshness { + extractor_args.push("--check-build-freshness"); + } + if let Some(ws) = &workspaces_str { + extractor_args.push("--workspaces-file"); + extractor_args.push(ws); + } + run("npx", &extractor_args, script_dir).await?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn extracts_404_quoted_package_specs() { + let stderr = "\ +npm error code E404 +npm error 404 Not Found - GET https://registry.npmjs.org/@react-aria%2fmissing - Not found +npm error 404 +npm error 404 '@react-aria/missing@latest' is not in this registry. +"; + let pkgs = extract_unresolved_packages(stderr); + assert!( + pkgs.contains(&"@react-aria/missing@latest".to_string()), + "expected @react-aria/missing@latest in {pkgs:?}" + ); + } + + #[test] + fn extracts_etarget_no_matching_version() { + let stderr = "\ +npm error code ETARGET +npm error notarget No matching version found for @scope/pkg@next. +"; + let pkgs = extract_unresolved_packages(stderr); + assert!( + pkgs.contains(&"@scope/pkg@next".to_string()), + "expected @scope/pkg@next in {pkgs:?}" + ); + } + + #[test] + fn deduplicates_repeated_mentions() { + let stderr = "\ +npm error 404 'a@latest' is not in this registry. +npm error 404 'a@latest' is not in this registry. 
+"; + let pkgs = extract_unresolved_packages(stderr); + assert_eq!(pkgs, vec!["a@latest".to_string()]); + } + + #[test] + fn returns_empty_when_no_patterns_match() { + let stderr = "npm warn deprecated foo@1\nsomething went wrong\n"; + assert!(extract_unresolved_packages(stderr).is_empty()); + } +} + +/// Write a minimal package.json for npm install. +pub fn write_package_json(dir: &Path, deps: &[(String, String)]) -> Result<()> { + let mut pkg = serde_json::json!({ + "name": "rsp-api-check-workspace", + "version": "0.0.0", + "private": true, + "dependencies": {} + }); + + let dep_obj = pkg.get_mut("dependencies").unwrap().as_object_mut().unwrap(); + for (name, version) in deps { + dep_obj.insert(name.clone(), serde_json::Value::String(version.clone())); + } + + std::fs::write( + dir.join("package.json"), + serde_json::to_string_pretty(&pkg)?, + )?; + Ok(()) +} diff --git a/rsp-api-checker/src/workspaces.rs b/rsp-api-checker/src/workspaces.rs new file mode 100644 index 00000000000..0b5952b915e --- /dev/null +++ b/rsp-api-checker/src/workspaces.rs @@ -0,0 +1,103 @@ +//! Discover the set of public workspace packages by asking yarn. Preferred +//! over walking the filesystem because yarn's output honors the root +//! `package.json#workspaces` globs and the `private: true` flag — no +//! hard-coded depth limit or skip list required. +//! +//! Falls back gracefully when yarn isn't available (e.g. CI environments +//! without yarn installed, or test fixtures): callers should treat `None` +//! as "use the fs-walk fallback". + +use std::path::{Path, PathBuf}; +use std::process::Stdio; + +use anyhow::Result; +use tokio::process::Command; + +#[derive(Debug, Clone)] +pub struct Workspace { + /// Package name as declared in its `package.json` (e.g. `@react-aria/button`). + pub name: String, + /// Absolute path to the workspace directory. 
+ pub location: PathBuf, +} + +/// Run `yarn workspaces list --json --no-private` in `repo_root` and parse the +/// newline-delimited JSON output. Returns `Ok(None)` when yarn is not +/// installed or the command fails — the caller should then fall back to an +/// fs-walk. +/// +/// We pass `--no-private` so we don't have to filter by `private: true` after +/// the fact. Yarn excludes the monorepo root itself from the list. +pub async fn discover_workspaces(repo_root: &Path) -> Result>> { + let output = Command::new("yarn") + .args(["workspaces", "list", "--json", "--no-private"]) + .current_dir(repo_root) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .output() + .await; + + let output = match output { + Ok(o) => o, + // yarn not on PATH — not an error, just signal fallback. + Err(_) => return Ok(None), + }; + + if !output.status.success() { + return Ok(None); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + let mut workspaces = Vec::new(); + for line in stdout.lines() { + let line = line.trim(); + if line.is_empty() { + continue; + } + let v: serde_json::Value = match serde_json::from_str(line) { + Ok(v) => v, + Err(_) => continue, + }; + let name = match v.get("name").and_then(|n| n.as_str()) { + Some(n) => n.to_string(), + None => continue, + }; + let loc = match v.get("location").and_then(|l| l.as_str()) { + Some(l) => l.to_string(), + None => continue, + }; + // Skip the root workspace itself (yarn reports location "." when the + // root is listed as a member of itself on older setups). + if loc == "." { + continue; + } + workspaces.push(Workspace { + name, + location: repo_root.join(loc), + }); + } + + Ok(Some(workspaces)) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[tokio::test] + async fn test_missing_yarn_returns_none_or_unsuccessful() { + // If yarn is not available in PATH or the dir has no yarn project, + // the function must return Ok(None) — never bail. 
+ let dir = TempDir::new().unwrap(); + // Write a minimal non-yarn package.json so the command can at least cd in. + fs::write(dir.path().join("package.json"), r#"{"name":"test","private":true}"#).unwrap(); + + let result = discover_workspaces(dir.path()).await.unwrap(); + // Either yarn isn't installed (None) or yarn fails on this non-workspace + // project (also None). Both are acceptable — we just must not panic. + assert!(result.is_none() || result.as_ref().is_some_and(|v| v.is_empty())); + } +} diff --git a/rsp-api-checker/tests/compare_integration.rs b/rsp-api-checker/tests/compare_integration.rs new file mode 100644 index 00000000000..7daead10d90 --- /dev/null +++ b/rsp-api-checker/tests/compare_integration.rs @@ -0,0 +1,140 @@ +use rsp_api_check::differ::{diff_package, discover_pairs, format_output}; +use std::fs; +use tempfile::TempDir; + +const BASE_API_JSON: &str = r#"{ + "exports": { + "@test/button:ButtonProps": { + "type": "interface", + "name": "ButtonProps", + "properties": { + "isDisabled": { + "type": "property", + "name": "isDisabled", + "value": {"type": "boolean"}, + "optional": true + } + }, + "typeParameters": [], + "extends": [] + } + }, + "links": {} +}"#; + +const BRANCH_API_JSON: &str = r#"{ + "exports": { + "@test/button:ButtonProps": { + "type": "interface", + "name": "ButtonProps", + "properties": { + "isDisabled": { + "type": "property", + "name": "isDisabled", + "value": {"type": "boolean"}, + "optional": true + }, + "onPress": { + "type": "property", + "name": "onPress", + "value": { + "type": "function", + "parameters": [], + "return": {"type": "void"} + }, + "optional": true + } + }, + "typeParameters": [], + "extends": [] + } + }, + "links": {} +}"#; + +/// Build a temporary fixture directory pair: +/// base//package.json + base//dist/api.json +/// branch//package.json + branch//dist/api.json +/// +/// Returns the TempDir (kept alive for the test's lifetime) plus the base and +/// branch paths. 
+fn make_fixture_dirs( + pkg_name: &str, + base_json: &str, + branch_json: &str, +) -> (TempDir, std::path::PathBuf, std::path::PathBuf) { + let dir = TempDir::new().unwrap(); + + let pkg_slug = pkg_name.replace('/', "-").replace('@', ""); + + for (subdir, api_json) in [("base", base_json), ("branch", branch_json)] { + let pkg_dir = dir.path().join(subdir).join(&pkg_slug); + fs::create_dir_all(pkg_dir.join("dist")).unwrap(); + fs::write(pkg_dir.join("package.json"), format!(r#"{{"name":"{pkg_name}"}}"#)).unwrap(); + fs::write(pkg_dir.join("dist").join("api.json"), api_json).unwrap(); + } + + let base = dir.path().join("base"); + let branch = dir.path().join("branch"); + (dir, base, branch) +} + +#[test] +fn discover_pairs_finds_both_fixture_packages() { + let (_dir, base, branch) = make_fixture_dirs("@test/button", BASE_API_JSON, BRANCH_API_JSON); + let pairs = discover_pairs(&base, &branch).unwrap(); + assert_eq!(pairs.len(), 1, "expected exactly one package pair"); + assert_eq!(pairs[0].package_name, "@test/button"); +} + +#[test] +fn compare_fixtures_detects_added_property() { + let (_dir, base, branch) = make_fixture_dirs("@test/button", BASE_API_JSON, BRANCH_API_JSON); + let pairs = discover_pairs(&base, &branch).unwrap(); + + let pair = &pairs[0]; + let pkg_diff = diff_package(&pair.package_name, &pair.base, &pair.branch, true); + + assert_eq!(pkg_diff.diffs.len(), 1, "expected exactly one changed export"); + let diff = &pkg_diff.diffs[0]; + assert_eq!(diff.qualified_name, "@test/button:ButtonProps"); + assert!(diff.diff_text.contains("onPress?:"), "expected onPress in diff output"); + assert!(diff.diff_text.contains('+'), "expected + marker for new property"); +} + +#[test] +fn compare_fixtures_shows_no_diff_for_unchanged_property() { + let (_dir, base, branch) = make_fixture_dirs("@test/button", BASE_API_JSON, BRANCH_API_JSON); + let pairs = discover_pairs(&base, &branch).unwrap(); + + let pair = &pairs[0]; + let pkg_diff = diff_package(&pair.package_name, 
&pair.base, &pair.branch, true); + + assert_eq!(pkg_diff.diffs.len(), 1); + let diff_text = &pkg_diff.diffs[0].diff_text; + assert!(!diff_text.contains("-isDisabled"), "unchanged property should not be marked deleted"); + assert!(!diff_text.contains("+isDisabled"), "unchanged property should not be marked added"); +} + +#[test] +fn compare_identical_json_produces_no_diffs() { + let (_dir, base, branch) = make_fixture_dirs("@test/button", BASE_API_JSON, BASE_API_JSON); + let pairs = discover_pairs(&base, &branch).unwrap(); + + let pair = &pairs[0]; + let pkg_diff = diff_package(&pair.package_name, &pair.base, &pair.branch, true); + assert!(pkg_diff.diffs.is_empty(), "identical API should yield no diffs"); +} + +#[test] +fn format_output_contains_package_name_in_header() { + let (_dir, base, branch) = make_fixture_dirs("@test/button", BASE_API_JSON, BRANCH_API_JSON); + let pairs = discover_pairs(&base, &branch).unwrap(); + + let pair = &pairs[0]; + let pkg_diff = diff_package(&pair.package_name, &pair.base, &pair.branch, true); + let output = format_output(&[pkg_diff], true); + + assert!(output.contains("### @test/button"), "output should contain the package header"); + assert!(output.contains("#### @test/button:ButtonProps"), "output should contain the export header"); +} diff --git a/rsp-api-checker/ts-extractor/extract-api.ts b/rsp-api-checker/ts-extractor/extract-api.ts new file mode 100644 index 00000000000..f4888b0667f --- /dev/null +++ b/rsp-api-checker/ts-extractor/extract-api.ts @@ -0,0 +1,2298 @@ +/** + * extract-api.ts + * + * Standalone TypeScript API extractor that reads .d.ts entry points + * and produces api.json files compatible with the compareAPIs tool. + * + * Usage: + * npx tsx extract-api.ts --packages-dir --output-dir + * + * For each package found under packages-dir, it reads the `types` field + * from package.json, walks all exported symbols using the TS compiler API, + * and writes a dist/api.json alongside each package. 
+ */ + +import * as ts from "typescript"; +import * as path from "path"; +import * as fs from "fs"; +import { isOurPackage, shouldSkipProperty, resolveTypesField, resolveSourceField, OUR_SCOPES, OUR_PACKAGES } from "./utils.js"; + +// --------------------------------------------------------------------------- +// CLI +// --------------------------------------------------------------------------- + +const args = parseCliArgs(); + +interface CliArgs { + packagesDir: string; + outputDir: string | null; + verbose: boolean; + debug: string | null; // export name to debug in detail + // When true, fail if any package's src/ is newer than its dist/types/ — + // meaningful only for the local workspace (published tarballs are + // immutable by definition, so the check would just noise). + checkBuildFreshness: boolean; + // When true, succeed even when no packages are discovered. Default is to + // exit 1, because finding zero packages almost always means a bad + // --packages-dir or broken filter — and a silent zero-diff from compare + // then hides the real bug. + allowEmpty: boolean; + // Optional path to a JSON file listing workspace package directories: + // [{ "name": "...", "location": "absolute/path" }, ...]. When supplied, + // the extractor skips its fs-walk and uses this list as the authoritative + // set — produced by the Rust wrapper from `yarn workspaces list`, which + // honors the repo's actual workspace globs rather than our hard-coded + // depth-4 walk. 
  workspacesFile: string | null;
}

/**
 * Parse `process.argv` (from index 2 onward) into a {@link CliArgs} record.
 *
 * Flags that take a value (`--packages-dir`, `--output-dir`, `--debug`,
 * `--workspaces-file`) consume the following argv entry via `argv[++i]`;
 * the `&& argv[i + 1]` guard means a trailing value-less flag is silently
 * ignored rather than reading past the end of argv. Unknown arguments are
 * ignored entirely.
 *
 * Prints usage and exits the process with code 1 when the required
 * `--packages-dir` is missing.
 */
function parseCliArgs(): CliArgs {
  const argv = process.argv.slice(2);
  let packagesDir = "";
  let outputDir: string | null = null;
  let verbose = false;
  let debug: string | null = null;
  let checkBuildFreshness = false;
  let allowEmpty = false;
  let workspacesFile: string | null = null;
  for (let i = 0; i < argv.length; i++) {
    if (argv[i] === "--packages-dir" && argv[i + 1]) {
      packagesDir = argv[++i];
    } else if (argv[i] === "--output-dir" && argv[i + 1]) {
      outputDir = argv[++i];
    } else if (argv[i] === "--verbose" || argv[i] === "-v") {
      verbose = true;
    } else if (argv[i] === "--debug" && argv[i + 1]) {
      debug = argv[++i];
      // Debugging a single export is only useful with full logging enabled.
      verbose = true;
    } else if (argv[i] === "--check-build-freshness") {
      checkBuildFreshness = true;
    } else if (argv[i] === "--allow-empty") {
      allowEmpty = true;
    } else if (argv[i] === "--workspaces-file" && argv[i + 1]) {
      workspacesFile = argv[++i];
    }
  }
  if (!packagesDir) {
    // NOTE(review): the <dir>/<name>/<path> placeholders appear to have been
    // stripped from this usage string during extraction — confirm against the
    // committed source before relying on it.
    console.error("Usage: npx tsx extract-api.ts --packages-dir [--output-dir ] [--verbose] [--debug ] [--check-build-freshness] [--allow-empty] [--workspaces-file ]");
    process.exit(1);
  }
  return { packagesDir, outputDir, verbose, debug, checkBuildFreshness, allowEmpty, workspacesFile };
}

// ---------------------------------------------------------------------------
// Diagnostic file logger — writes to {outputDir}/extract-diag.log
// ---------------------------------------------------------------------------

// File descriptor for the diagnostic log; null until initDiagLog() opens it.
let diagLogFd: number | null = null;

function initDiagLog() {
  const dir = args.outputDir ??
'.'; + fs.mkdirSync(dir, { recursive: true }); + const logPath = path.join(dir, 'extract-diag.log'); + diagLogFd = fs.openSync(logPath, 'w'); + diag('INIT', `Diagnostic log started at ${new Date().toISOString()}`); + console.log(`Diagnostics → ${logPath}`); +} + +function diag(tag: string, msg: string) { + if (diagLogFd !== null) { + fs.writeSync(diagLogFd, `[${tag}] ${msg}\n`); + } +} + +function closeDiagLog() { + if (diagLogFd !== null) { + fs.closeSync(diagLogFd); + diagLogFd = null; + } +} + +// --------------------------------------------------------------------------- +// Scope detection — which packages are "ours" vs external +// (isOurPackage, OUR_SCOPES, OUR_PACKAGES imported from utils.ts) +// --------------------------------------------------------------------------- + +function isExternalSymbol(symbol: ts.Symbol, checker: ts.TypeChecker): boolean { + const decls = symbol.getDeclarations(); + if (!decls || decls.length === 0) return true; + return isExternalDeclaration(decls[0]); +} + +/** Check if a property is declared directly on the given type symbol (not inherited). */ +function isOwnProperty(prop: ts.Symbol, ownerSymbol: ts.Symbol): boolean { + const propDecls = prop.getDeclarations(); + if (!propDecls || propDecls.length === 0) return false; + const ownerDecls = ownerSymbol.getDeclarations(); + if (!ownerDecls || ownerDecls.length === 0) return false; + + const ownerFiles = new Set(ownerDecls.map((d) => d.getSourceFile().fileName)); + return propDecls.some((d) => ownerFiles.has(d.getSourceFile().fileName)); +} + +/** Check if a declaration is in an external (non-our-package) file. + * + * Uses both the path as seen by the TS compiler AND the realpath of the + * source file. With yarn/pnpm workspaces, a published package can pull in + * types from a nested `node_modules/` whose symlink target lives under + * `packages/` (or vice versa): comparing only the literal path would + * mis-attribute those files. 
If either path indicates the file lives inside + * a node_modules// tree, treat it as external. + */ +function isExternalDeclaration(decl: ts.Declaration): boolean { + const fileName = decl.getSourceFile().fileName; + const external = externalFromPath(fileName); + if (external === true) return true; + + // Resolve symlinks and re-check. For a symlinked workspace package the + // literal path is under node_modules/ but realpath is under packages/ — + // which correctly reports "not external". The reverse case (a nested + // node_modules/.../real location outside packages) is what this catches. + try { + const real = fs.realpathSync(fileName); + if (real !== fileName) { + const realExternal = externalFromPath(real); + if (realExternal === true) return true; + if (external === null && realExternal === false) return false; + } + } catch { + // realpath may fail on virtual files — fall through + } + // `external` is narrowed to false | null here (true was handled above). + // Neither indicates external, so the declaration is ours. + return false; +} + +/** + * Returns `true` if the path is inside a non-our-package node_modules tree, + * `false` if it's definitely ours (path-based determination), or `null` when + * the path gives no information (no node_modules segment). + */ +function externalFromPath(fileName: string): boolean | null { + const nm = "/node_modules/"; + const idx = fileName.lastIndexOf(nm); + if (idx === -1) return null; + const afterNm = fileName.slice(idx + nm.length); + const pkgName = afterNm.startsWith("@") + ? 
afterNm.split("/").slice(0, 2).join("/") + : afterNm.split("/")[0]; + return !isOurPackage(pkgName); +} + +// (shouldSkipProperty and resolveTypesField imported from utils.ts) + +// --------------------------------------------------------------------------- +// Package discovery +// --------------------------------------------------------------------------- + +interface PackageEntry { + name: string; + dir: string; + typesEntryPoint: string; +} + +interface OutOfDatePackage { + name: string; + dir: string; + sourceEntry: string; + typesEntry: string; +} + +function discoverPackages(rootDir: string): PackageEntry[] { + const result: PackageEntry[] = []; + const outOfDate: OutOfDatePackage[] = []; + const globDirs = findPackageJsonDirs(rootDir); + for (const dir of globDirs) { + const pkgPath = path.join(dir, "package.json"); + if (!fs.existsSync(pkgPath)) continue; + const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8")); + if (!pkg.name) continue; + + // Drop private packages up front. The yarn-workspaces path has already + // excluded them (we pass --no-private), but the fs-walk fallback and any + // manually-assembled node_modules can still surface them. Filtering once + // here means the rest of the pipeline doesn't need to re-check. + if (pkg.private) { + if (args.verbose) console.log(` skip private: ${pkg.name}`); + continue; + } + + // Find the types entry point (.d.ts). This is the default — the extractor + // was designed against declaration files, the full dependency graph + // resolves well in a monorepo, and both the published and local sides + // produce consistent output. 
+ let typesEntry: string | undefined; + if (pkg.exports?.["."]?.types) { + typesEntry = resolveTypesField(pkg.exports["."].types); + } + if (!typesEntry && pkg.exports?.["."]) { + // Some packages put types at the top level of the export condition + typesEntry = resolveTypesField(pkg.exports["."]); + } + if (!typesEntry && pkg.types) { + typesEntry = resolveTypesField(pkg.types); + } + if (!typesEntry && pkg.typings) { + typesEntry = resolveTypesField(pkg.typings); + } + + // Detect an out-of-date build: if a `source` entry (.ts/.tsx) exists on + // disk AND any source file in the package is newer than the types entry, + // the developer added/changed API in src/ without re-running + // `yarn build`. We *could* silently fall back to reading source, but + // that introduces a second extraction path whose output can diverge + // from the .d.ts-based path in subtle ways (e.g. unresolved relative + // imports into un-built sibling packages → TS falls back to `any`). + // Keeping the extractor on a single, consistent path means we collect + // these packages and fail loudly — the user should rebuild. 
+ let sourceEntry: string | undefined; + if (pkg.exports?.["."]) { + sourceEntry = resolveSourceField(pkg.exports["."]); + } + if (!sourceEntry && pkg.source) { + sourceEntry = resolveSourceField(pkg.source); + } + + if (args.checkBuildFreshness && sourceEntry && typesEntry && isOurPackage(pkg.name)) { + const resolvedSource = path.resolve(dir, sourceEntry); + const resolvedTypes = path.resolve(dir, typesEntry); + if (fs.existsSync(resolvedSource) && fs.existsSync(resolvedTypes)) { + const newestSource = newestMtimeInSources(dir); + const typesM = fs.statSync(resolvedTypes).mtimeMs; + if (newestSource !== null && newestSource > typesM) { + outOfDate.push({ + name: pkg.name, + dir, + sourceEntry, + typesEntry, + }); + } + } + } + + const entry = typesEntry; + if (!entry) continue; + + const resolved = path.resolve(dir, entry); + if (!fs.existsSync(resolved)) { + if (args.verbose) console.warn(` skip ${pkg.name}: entry point not found at ${resolved}`); + continue; + } + + result.push({ + name: pkg.name, + dir, + typesEntryPoint: resolved, + }); + } + + if (outOfDate.length > 0) { + const lines = outOfDate.map( + (p) => + ` • ${p.name}: src/ is newer than ${path.relative(p.dir, path.resolve(p.dir, p.typesEntry))}`, + ); + console.error( + `\nError: ${outOfDate.length} package${outOfDate.length === 1 ? "" : "s"} ` + + `${outOfDate.length === 1 ? "has" : "have"} source files newer than the generated ` + + `declaration files:\n${lines.join("\n")}\n\n` + + `Run \`yarn build\` to regenerate dist/types/ before extracting API. The extractor ` + + `reads .d.ts only — stale declarations would silently drop newly-added props from ` + + `the diff.\n\n` + + `Note: this check only compares each package's own src/ against its dist/types/. ` + + `If package A re-exports from package B and you edited B's src/ without rebuilding ` + + `B, A's check passes (A's src/ wasn't touched) and B's re-exported symbols in A's ` + + `.d.ts may still be stale. 
When in doubt, run \`yarn build\` across the whole ` + + `workspace before comparing.`, + ); + process.exit(1); + } + + return result; +} + +/** + * Return the newest mtime (ms) found among .ts/.tsx source files under the + * package directory, skipping `dist/`, `node_modules/`, and `.git/`. + * Returns `null` if no source files are found. + * + * We use this to detect out-of-date `.d.ts` files: if any source file in + * the package is newer than the built types, a `yarn build` is overdue. + * We surface this as a hard error rather than silently swapping to source, + * since that would create a second extraction path whose output can + * diverge from the .d.ts-based path in subtle ways. + * + * Known limitation: this walks only the package's own src/. If package A + * re-exports from B and you touch B's src/ without rebuilding B, A's check + * still passes — but A's dist/types/ re-exports the stale B symbols, so + * the diff is wrong. Following workspace-dep edges was judged more + * complexity than it buys; the error message in discoverPackages warns + * about this case so the user knows to rebuild the whole workspace when + * they're not sure. + */ +function newestMtimeInSources(pkgDir: string): number | null { + let newest: number | null = null; + function walk(dir: string, depth: number): void { + if (depth > 6) return; + let entries: fs.Dirent[]; + try { + entries = fs.readdirSync(dir, { withFileTypes: true }); + } catch { + return; + } + for (const entry of entries) { + if ( + entry.name === "node_modules" || + entry.name === ".git" || + entry.name === "dist" || + entry.name === "stories" || + entry.name === "test" || + entry.name === "tests" || + entry.name === "__tests__" + ) { + continue; + } + const full = path.join(dir, entry.name); + if (entry.isDirectory()) { + walk(full, depth + 1); + } else if (entry.isFile()) { + // Consider only files that contribute to the compiled type surface. 
+ if ( + entry.name.endsWith(".ts") || + entry.name.endsWith(".tsx") || + entry.name.endsWith(".d.ts") + ) { + // Skip the `.d.ts` files that live under dist/ (already excluded + // via the directory filter above); but in some packages like + // @react-types/shared the "source" IS a set of .d.ts files that + // live in src/, and those are what we want to pick up. + try { + const m = fs.statSync(full).mtimeMs; + if (newest === null || m > newest) newest = m; + } catch { + /* ignore */ + } + } + } + } + } + walk(pkgDir, 0); + return newest; +} + +/** + * Recursively count occurrences of `{ type: "any" }` nodes anywhere in the + * api.json tree, plus total type nodes for a ratio. `topLevelAnyExports` is + * the narrower signal matching the historical warning. When CI and local + * runs disagree, comparing these counts quickly reveals whether one + * environment's TS compiler is falling back to `any` far more than the + * other — which is the signature of broken cross-package resolution. + */ +interface HealthReport { + topLevelExports: number; + topLevelAnyExports: number; + totalTypeNodes: number; + anyTypeNodes: number; + anyRatio: number; // anyTypeNodes / totalTypeNodes, 0..1 +} + +function computeHealth(apiJson: { exports: Record; links: Record }): HealthReport { + let total = 0; + let anyCount = 0; + function walk(node: unknown): void { + if (node === null || node === undefined) return; + if (Array.isArray(node)) { + for (const item of node) walk(item); + return; + } + if (typeof node !== "object") return; + const obj = node as Record; + if (typeof obj.type === "string") { + total += 1; + if (obj.type === "any") anyCount += 1; + } + for (const v of Object.values(obj)) walk(v); + } + for (const v of Object.values(apiJson.exports)) walk(v); + for (const v of Object.values(apiJson.links)) walk(v); + + const topLevelExports = Object.keys(apiJson.exports).length; + const topLevelAnyExports = Object.values(apiJson.exports).filter( + (v: any) => v?.type === "any", + 
).length; + + return { + topLevelExports, + topLevelAnyExports, + totalTypeNodes: total, + anyTypeNodes: anyCount, + anyRatio: total === 0 ? 0 : anyCount / total, + }; +} + +/** + * Read the workspace list written by the Rust wrapper (from + * `yarn workspaces list --json`). Format: one JSON object per line, + * `{ name, location }`, with absolute or root-relative paths. + * + * Returns `null` when no file was provided so the caller falls through to + * the fs-walk. + */ +function readWorkspacesFile(filePath: string | null): string[] | null { + if (!filePath) return null; + if (!fs.existsSync(filePath)) { + console.warn(` workspaces-file not found: ${filePath} — falling back to fs walk`); + return null; + } + try { + const raw = fs.readFileSync(filePath, "utf8"); + const parsed = JSON.parse(raw); + if (!Array.isArray(parsed)) return null; + const dirs: string[] = []; + for (const entry of parsed) { + const loc = entry?.location; + if (typeof loc !== "string") continue; + dirs.push(path.resolve(loc)); + } + return dirs; + } catch (err: any) { + console.warn(` failed to read workspaces-file: ${err?.message ?? err} — falling back to fs walk`); + return null; + } +} + +function findPackageJsonDirs(rootDir: string): string[] { + // Prefer the yarn-supplied list when available — it honors the monorepo's + // actual workspace globs and `private: true` filtering without needing the + // extractor to mirror them in code. + const fromWorkspaces = readWorkspacesFile(args.workspacesFile); + if (fromWorkspaces && fromWorkspaces.length > 0) { + return fromWorkspaces.filter((d) => fs.existsSync(path.join(d, "package.json"))); + } + + // Fallback fs-walk. Used only when yarn workspaces isn't available — the + // primary case is the `get-published-api` flow, whose rootDir is a + // tmp node_modules/ (flat: either `name/` or `@scope/name/`). Both layouts + // need depth 2 max to reach the package.json, so we stop there and avoid + // walking into src/, dist/, etc. 
+  const dirs: string[] = [];
+  function walk(dir: string, depth: number) {
+    if (depth > 2) return;
+    const entries = fs.readdirSync(dir, { withFileTypes: true });
+    for (const entry of entries) {
+      if (entry.name === "node_modules" || entry.name === ".git" || entry.name === "dev") continue;
+      // Skip symlinks: with `withFileTypes`, a symlinked directory reports
+      // isSymbolicLink() === true (and isDirectory() === false), so this
+      // guard never follows links into unrelated trees (e.g. a workspace
+      // package symlinked out of the monorepo).
+      if (!entry.isDirectory() || entry.isSymbolicLink()) continue;
+      const full = path.join(dir, entry.name);
+      if (fs.existsSync(path.join(full, "package.json"))) {
+        dirs.push(full);
+        // Packages don't nest — once we've found a package.json, stop
+        // descending so we don't re-enter src/, dist/, etc.
+        continue;
+      }
+      walk(full, depth + 1);
+    }
+  }
+  walk(rootDir, 0);
+  return dirs;
+}
+
+// ---------------------------------------------------------------------------
+// Type serialization context
+// ---------------------------------------------------------------------------
+
+interface SerializeContext {
+  checker: ts.TypeChecker;
+  links: Record<string, any>;
+  /** Track visited types to avoid infinite recursion */
+  visiting: Set<number>;
+  /** The depth of serialization recursion */
+  depth: number;
+}
+
+const MAX_DEPTH = 8;
+const MAX_UNION_LITERALS = 15; // Truncate string literal unions beyond this count
+
+function makeCtx(checker: ts.TypeChecker): SerializeContext {
+  return { checker, links: {}, visiting: new Set(), depth: 0 };
+}
+
+/** Safe wrapper for checker.typeToString that catches stack overflows */
+function safeTypeToString(checker: ts.TypeChecker, type: ts.Type): string {
+  // Try the symbol name first (cheap, no recursion risk)
+  const sym = type.getSymbol() ??
type.aliasSymbol; + if (sym) { + const name = sym.getName(); + if (name && name !== "__type" && name !== "__object") return name; + } + // Fall back to typeToString (can overflow on circular types) + try { + return checker.typeToString(type, undefined, ts.TypeFormatFlags.NoTruncation); + } catch { + return "unknown"; + } +} + +/** Create a child context with incremented depth */ +function childCtx(ctx: SerializeContext): SerializeContext { + return { ...ctx, depth: ctx.depth + 1 }; +} + +/** + * Serialize a type argument for an external generic type. + * - Primitives / literals → serialize normally + * - Types from our packages → serialize normally (we want to show our own generics) + * - External types / complex unions → just use the identifier name (don't expand) + */ +function serializeTypeArgShallow(type: ts.Type, ctx: SerializeContext): any { + const { checker } = ctx; + + // Primitives always serialize cheaply + if (type.flags & (ts.TypeFlags.String | ts.TypeFlags.Number | ts.TypeFlags.Boolean + | ts.TypeFlags.Void | ts.TypeFlags.Undefined | ts.TypeFlags.Null + | ts.TypeFlags.Any | ts.TypeFlags.Unknown | ts.TypeFlags.Never + | ts.TypeFlags.StringLiteral | ts.TypeFlags.NumberLiteral + | ts.TypeFlags.BooleanLiteral | ts.TypeFlags.BigInt)) { + return serializeType(type, ctx); + } + + // If it has a symbol, check if it's ours or external + const sym = type.getSymbol() ?? 
type.aliasSymbol; + if (sym) { + const decls = sym.getDeclarations(); + if (decls && decls.length > 0) { + const isExternal = isExternalDeclaration(decls[0]); + if (!isExternal) { + // Our type — serialize normally + return serializeType(type, ctx); + } + } + // External type — just use the name + const name = sym.getName(); + if (name && name !== "__type" && name !== "__object") { + return { type: "identifier", name }; + } + } + + // For unions/intersections with an alias, use the alias name + if (type.aliasSymbol) { + return { type: "identifier", name: type.aliasSymbol.getName() }; + } + + // Fallback: use safeTypeToString + return { type: "identifier", name: safeTypeToString(checker, type) }; +} + +/** + * Serialize a TypeNode from the declaration AST, substituting type parameters. + * This preserves references like ReactNode as identifiers instead of expanding them. + */ +function serializeTypeNode( + node: ts.TypeNode, + typeParamMap: Map, + ctx: SerializeContext, +): any { + const { checker } = ctx; + const deep = childCtx(ctx); + + if (ctx.depth > MAX_DEPTH) { + const t = checker.getTypeFromTypeNode(node); + return { type: "identifier", name: safeTypeToString(checker, t) }; + } + + // Union + if (ts.isUnionTypeNode(node)) { + // Apply the same string-literal truncation as serializeType + const stringLitNodes = node.types.filter( + (t) => ts.isLiteralTypeNode(t) && ts.isStringLiteral(t.literal) + ); + const otherNodes = node.types.filter( + (t) => !(ts.isLiteralTypeNode(t) && ts.isStringLiteral(t.literal)) + ); + + if (stringLitNodes.length > MAX_UNION_LITERALS) { + const kept = stringLitNodes.slice(0, MAX_UNION_LITERALS); + const elements = [ + ...otherNodes.map((t) => serializeTypeNode(t, typeParamMap, deep)), + ...kept.map((t) => serializeTypeNode(t, typeParamMap, deep)), + { type: "string", value: `... 
${stringLitNodes.length - MAX_UNION_LITERALS} more` }, + ]; + return { type: "union", elements }; + } + + const elements = node.types.map((t) => serializeTypeNode(t, typeParamMap, deep)); + return { type: "union", elements }; + } + + // Intersection + if (ts.isIntersectionTypeNode(node)) { + const types = node.types.map((t) => serializeTypeNode(t, typeParamMap, deep)); + return { type: "intersection", types }; + } + + // Parenthesized + if (ts.isParenthesizedTypeNode(node)) { + return serializeTypeNode(node.type, typeParamMap, deep); + } + + // Type reference (e.g., ReactNode, T, ButtonRenderProps) + if (ts.isTypeReferenceNode(node)) { + const refName = node.typeName.getText(); + + // Substitute type parameters + if (typeParamMap.has(refName)) { + const subst = typeParamMap.get(refName)!; + // Expand our own types (e.g., ToggleButtonRenderProps) so their props are visible. + // External types get identifier-only treatment. + const substSym = subst.getSymbol() ?? subst.aliasSymbol; + if (substSym) { + const substDecls = substSym.getDeclarations(); + if (substDecls?.[0] && isExternalDeclaration(substDecls[0])) { + return { type: "identifier", name: substSym.getName() }; + } + } + // Our type or primitive → serialize normally + return serializeType(subst, deep); + } + + // Check if external → identifier only + const sym = checker.getSymbolAtLocation(node.typeName); + if (sym) { + const decls = sym.getDeclarations(); + if (decls?.[0] && isExternalDeclaration(decls[0])) { + return { type: "identifier", name: refName }; + } + } + + // Internal named type → keep as identifier (expansion happens at top-level props). + // Only anonymous/inline types get expanded here. 
+ if (sym) { + const symName = sym.getName(); + if (symName && symName !== "__type" && symName !== "__object") { + // Serialize type arguments if present + if (node.typeArguments && node.typeArguments.length > 0) { + const typeArgs = node.typeArguments.map((ta) => + serializeTypeNode(ta, typeParamMap, deep) + ); + return { + type: "application", + base: { type: "identifier", name: symName }, + typeParameters: typeArgs, + }; + } + return { type: "identifier", name: symName }; + } + } + + // Unnamed/anonymous → resolve and serialize + const resolved = checker.getTypeFromTypeNode(node); + return serializeType(resolved, deep); + } + + // Function type: (params) => ReturnType + if (ts.isFunctionTypeNode(node)) { + const parameters: Record = {}; + for (const param of node.parameters) { + const paramName = param.name.getText(); + const paramType = param.type + ? serializeTypeNode(param.type, typeParamMap, deep) + : { type: "any" }; + parameters[paramName] = { type: "parameter", name: paramName, value: paramType }; + } + const retType = node.type + ? serializeTypeNode(node.type, typeParamMap, deep) + : { type: "void" }; + return { type: "function", parameters, return: retType }; + } + + // Object literal type: { prop: type, ... } + if (ts.isTypeLiteralNode(node)) { + const properties: Record = {}; + for (const member of node.members) { + if (ts.isPropertySignature(member) && member.name) { + const propName = member.name.getText(); + const propType = member.type + ? serializeTypeNode(member.type, typeParamMap, deep) + : { type: "any" }; + properties[propName] = { + type: "property", + name: propName, + value: propType, + optional: !!member.questionToken, + default: null, + access: "public", + }; + } + } + return { type: "object", properties: Object.keys(properties).length > 0 ? 
properties : null }; + } + + // Array type: T[] + if (ts.isArrayTypeNode(node)) { + return { + type: "array", + elementType: serializeTypeNode(node.elementType, typeParamMap, deep), + }; + } + + // Keyword types (string, number, boolean, void, undefined, etc.) + if (ts.isToken(node)) { + switch (node.kind) { + case ts.SyntaxKind.StringKeyword: return { type: "string" }; + case ts.SyntaxKind.NumberKeyword: return { type: "number" }; + case ts.SyntaxKind.BooleanKeyword: return { type: "boolean" }; + case ts.SyntaxKind.VoidKeyword: return { type: "void" }; + case ts.SyntaxKind.UndefinedKeyword: return { type: "undefined" }; + case ts.SyntaxKind.NullKeyword: return { type: "null" }; + case ts.SyntaxKind.NeverKeyword: return { type: "never" }; + case ts.SyntaxKind.AnyKeyword: return { type: "any" }; + case ts.SyntaxKind.UnknownKeyword: return { type: "unknown" }; + } + } + + // Fallback: resolve via type checker and use serializeType + const resolved = checker.getTypeFromTypeNode(node); + return serializeType(resolved, deep); +} + +// --------------------------------------------------------------------------- +// Main type serializer +// --------------------------------------------------------------------------- + +function serializeSymbol(symbol: ts.Symbol, ctx: SerializeContext): any { + const { checker } = ctx; + const name = symbol.getName(); + const isDebug = args.debug && (args.debug === name || args.debug === '*'); + const dbg = (...msg: any[]) => { if (isDebug) console.log(` [DEBUG ${name}]`, ...msg); }; + + dbg('--- START ---'); + dbg('symbol.flags:', symbolFlagsToString(symbol.flags)); + + // Follow aliased symbols (re-exports like `export { Accordion } from './Accordion'`) + let resolvedSymbol = symbol; + if (symbol.flags & ts.SymbolFlags.Alias) { + try { + resolvedSymbol = checker.getAliasedSymbol(symbol); + dbg('followed alias →', resolvedSymbol.getName(), 'flags:', symbolFlagsToString(resolvedSymbol.flags)); + } catch (e: any) { + dbg('getAliasedSymbol 
threw:', e.message); + } + } + + const decls = resolvedSymbol.getDeclarations(); + const decl = decls?.[0]; + const isPascalCase = name.length > 0 && name[0] === name[0].toUpperCase() && name[0] !== name[0].toLowerCase(); + + dbg('declarations:', decls?.length ?? 0); + if (decl) { + dbg('decl kind:', ts.SyntaxKind[decl.kind]); + dbg('decl file:', decl.getSourceFile().fileName); + } + + // --- Interface declaration --- + if (decl && ts.isInterfaceDeclaration(decl)) { + dbg('→ path: InterfaceDeclaration'); + return serializeInterface(resolvedSymbol, ctx); + } + + // --- Type alias declaration --- + if (decl && ts.isTypeAliasDeclaration(decl)) { + dbg('→ path: TypeAliasDeclaration'); + const result = serializeTypeAlias(resolvedSymbol, decl, ctx); + dbg('→ result type:', result?.type, 'props count:', result?.properties ? Object.keys(result.properties).length : 'N/A'); + if (result?.properties) { + const propKeys = Object.keys(result.properties); + const numericKeys = propKeys.filter(k => /^\d+$/.test(k)); + if (numericKeys.length > 0) { + dbg('⚠ NUMERIC KEYS FOUND:', numericKeys.slice(0, 5)); + } + } + return result; + } + + // --- Class declaration --- + if (decl && ts.isClassDeclaration(decl)) { + dbg('→ path: ClassDeclaration'); + return serializeClass(resolvedSymbol, ctx); + } + + // --- Enum declaration --- + if (decl && ts.isEnumDeclaration(decl)) { + dbg('→ path: EnumDeclaration'); + return serializeEnum(resolvedSymbol, ctx); + } + + // --- Get the type for further analysis --- + const type = checker.getTypeOfSymbol(resolvedSymbol); + const typeStr = safeTypeToString(checker, type); + dbg('type flags:', typeFlagsToString(type.flags)); + dbg('typeToString:', typeStr.slice(0, 200)); + dbg('call signatures:', type.getCallSignatures().length); + dbg('properties count:', type.getProperties().length); + + // --- ForwardRefExoticComponent / MemoExoticComponent --- + if ( + isPascalCase && + (typeStr.includes("ForwardRefExoticComponent") || + 
typeStr.includes("MemoExoticComponent") || + typeStr.includes("ExoticComponent") || + typeStr.includes("NamedExoticComponent")) + ) { + dbg('→ path: ForwardRefExoticComponent'); + return serializeForwardRefComponent(name, type, decl, ctx); + } + + // --- Callable types (functions, components) --- + const callSigs = type.getCallSignatures(); + if (callSigs.length > 0) { + const sig = callSigs[0]; + if (isPascalCase && looksLikeComponent(sig, checker)) { + dbg('→ path: Component (via call sig)'); + return serializeComponent(symbol, sig, ctx); + } + dbg('→ path: Function'); + return serializeFunction(symbol, sig, ctx); + } + + // --- PascalCase with `any` type — try declaration-based extraction --- + if (isPascalCase && (type.flags & ts.TypeFlags.Any) && decl) { + dbg('→ path: fromDeclaration (type is any)'); + return serializeFromDeclaration(name, decl, ctx); + } + + // --- Fallback --- + dbg('→ path: FALLBACK serializeType'); + const result = serializeType(type, ctx); + dbg('→ fallback result:', JSON.stringify(result).slice(0, 200)); + return result; +} + +function symbolFlagsToString(flags: ts.SymbolFlags): string { + const names: string[] = []; + if (flags & ts.SymbolFlags.Alias) names.push('Alias'); + if (flags & ts.SymbolFlags.Interface) names.push('Interface'); + if (flags & ts.SymbolFlags.TypeAlias) names.push('TypeAlias'); + if (flags & ts.SymbolFlags.Class) names.push('Class'); + if (flags & ts.SymbolFlags.Function) names.push('Function'); + if (flags & ts.SymbolFlags.Variable) names.push('Variable'); + if (flags & ts.SymbolFlags.BlockScopedVariable) names.push('BlockScopedVariable'); + if (flags & ts.SymbolFlags.RegularEnum) names.push('RegularEnum'); + if (flags & ts.SymbolFlags.ValueModule) names.push('ValueModule'); + if (flags & ts.SymbolFlags.NamespaceModule) names.push('NamespaceModule'); + if (flags & ts.SymbolFlags.ExportValue) names.push('ExportValue'); + return names.length ? 
names.join('|') : String(flags);
+}
+
+function typeFlagsToString(flags: ts.TypeFlags): string {
+  const names: string[] = [];
+  if (flags & ts.TypeFlags.Any) names.push('Any');
+  if (flags & ts.TypeFlags.String) names.push('String');
+  if (flags & ts.TypeFlags.Number) names.push('Number');
+  if (flags & ts.TypeFlags.Boolean) names.push('Boolean');
+  if (flags & ts.TypeFlags.Object) names.push('Object');
+  if (flags & ts.TypeFlags.Union) names.push('Union');
+  if (flags & ts.TypeFlags.Intersection) names.push('Intersection');
+  if (flags & ts.TypeFlags.TypeParameter) names.push('TypeParameter');
+  if (flags & ts.TypeFlags.Void) names.push('Void');
+  if (flags & ts.TypeFlags.Undefined) names.push('Undefined');
+  if (flags & ts.TypeFlags.Null) names.push('Null');
+  if (flags & ts.TypeFlags.Never) names.push('Never');
+  if (flags & ts.TypeFlags.StringLiteral) names.push('StringLiteral');
+  if (flags & ts.TypeFlags.NumberLiteral) names.push('NumberLiteral');
+  return names.length ? names.join('|') : String(flags);
+}
+
+/**
+ * Serialize a ForwardRefExoticComponent<Props & RefAttributes<T>>.
+ * Tries multiple strategies to extract the Props type.
+ */ +function serializeForwardRefComponent( + name: string, + type: ts.Type, + decl: ts.Declaration | undefined, + ctx: SerializeContext +): any { + const { checker } = ctx; + + // Strategy 1: call signatures (works when React types fully resolve) + const callSigs = type.getCallSignatures(); + if (callSigs.length > 0) { + const sig = callSigs[0]; + const params = sig.getParameters(); + if (params.length > 0) { + const propsType = checker.getTypeOfSymbol(params[0]); + if (!(propsType.flags & ts.TypeFlags.Any)) { + const cleaned = stripRefAttributes(propsType, checker); + return { + type: "component", + name, + props: serializeTypeExpanded(cleaned, ctx), + typeParameters: serializeTypeParams(sig.typeParameters, ctx), + }; + } + } + } + + // Strategy 2: type arguments on the TypeReference + const typeRef = type as ts.TypeReference; + const typeArgs = typeRef.typeArguments ?? (checker as any).getTypeArguments?.(typeRef) ?? []; + if (typeArgs.length > 0) { + const propsArg = typeArgs[0]; + if (!(propsArg.flags & ts.TypeFlags.Any)) { + const cleaned = stripRefAttributes(propsArg, checker); + return { + type: "component", + name, + props: serializeTypeExpanded(cleaned, ctx), + typeParameters: [], + }; + } + } + + // Strategy 3: parse type arguments from the declaration AST + if (decl) { + return serializeFromDeclaration(name, decl, ctx); + } + + return { type: "component", name, props: { type: "object", properties: null }, typeParameters: [] }; +} + +/** Strip React.RefAttributes from an intersection, leaving just the props. 
*/ +function stripRefAttributes(type: ts.Type, checker: ts.TypeChecker): ts.Type { + if (type.isIntersection()) { + const filtered = type.types.filter((t) => { + const sym = t.getSymbol(); + return !sym || (sym.getName() !== "RefAttributes" && sym.getName() !== "ClassAttributes"); + }); + if (filtered.length === 1) return filtered[0]; + if (filtered.length > 0 && filtered.length < type.types.length) return filtered[0]; + } + return type; +} + +/** + * Fallback: extract component props from the variable declaration's AST type annotation. + * Handles cases where the TS checker can't fully resolve the type. + */ +function serializeFromDeclaration( + name: string, + decl: ts.Declaration, + ctx: SerializeContext +): any { + const { checker } = ctx; + + if (ts.isVariableDeclaration(decl) && decl.type && ts.isTypeReferenceNode(decl.type)) { + const typeArgs = decl.type.typeArguments; + if (typeArgs && typeArgs.length > 0) { + const propsTypeNode = typeArgs[0]; + + // Handle intersection: AccordionProps & RefAttributes<...> + if (ts.isIntersectionTypeNode(propsTypeNode)) { + for (const member of propsTypeNode.types) { + if (ts.isTypeReferenceNode(member)) { + const refName = ts.isIdentifier(member.typeName) ? 
member.typeName.text : undefined; + if (refName && refName !== "RefAttributes" && refName !== "ClassAttributes") { + const refType = checker.getTypeAtLocation(member); + if (!(refType.flags & ts.TypeFlags.Any)) { + return { + type: "component", + name, + props: serializeTypeExpanded(refType, ctx), + typeParameters: [], + }; + } + // Type is `any` — use the name as a reference + return { + type: "component", + name, + props: { type: "identifier", name: refName }, + typeParameters: [], + }; + } + } + } + } + + // Single type reference (no intersection) + if (ts.isTypeReferenceNode(propsTypeNode)) { + const refType = checker.getTypeAtLocation(propsTypeNode); + if (!(refType.flags & ts.TypeFlags.Any)) { + return { + type: "component", + name, + props: serializeTypeExpanded(refType, ctx), + typeParameters: [], + }; + } + const refName = ts.isIdentifier(propsTypeNode.typeName) ? propsTypeNode.typeName.text : undefined; + if (refName) { + return { + type: "component", + name, + props: { type: "identifier", name: refName }, + typeParameters: [], + }; + } + } + } + } + + // For re-export specifiers — shouldn't reach here since we follow aliases above + if (ts.isExportSpecifier(decl)) { + const aliasedSymbol = checker.getAliasedSymbol( + checker.getSymbolAtLocation(decl.name)! + ); + if (aliasedSymbol) { + return serializeSymbol(aliasedSymbol, ctx); + } + } + + return { type: "any" }; +} + +/** + * Heuristic: does this call signature look like a React component? 
+ * i.e., (props: SomeType) => ReactNode | JSX.Element | null
+ */
+function looksLikeComponent(sig: ts.Signature, checker: ts.TypeChecker): boolean {
+  const params = sig.getParameters();
+  // Components have 0 or 1 parameter (the props)
+  // Some also have a second `ref` parameter (forwardRef render functions)
+  if (params.length > 2) return false;
+
+  const returnType = checker.getReturnTypeOfSignature(sig);
+  const returnStr = safeTypeToString(checker, returnType);
+
+  return (
+    returnStr.includes("Element") ||
+    returnStr.includes("ReactNode") ||
+    returnStr.includes("ReactElement") ||
+    returnStr === "null" ||
+    // If return type is `any`, still treat as component for PascalCase functions
+    // (this happens when React types can't be fully resolved)
+    returnStr === "any"
+  );
+}
+
+function serializeComponent(
+  symbol: ts.Symbol,
+  sig: ts.Signature,
+  ctx: SerializeContext
+): any {
+  const { checker } = ctx;
+  const params = sig.getParameters();
+  let propsNode: any = { type: "object", properties: null };
+
+  if (params.length > 0) {
+    const propsType = checker.getTypeOfSymbol(params[0]);
+    propsNode = serializeTypeExpanded(propsType, ctx);
+  }
+
+  return {
+    type: "component",
+    name: symbol.getName(),
+    props: propsNode,
+    typeParameters: serializeTypeParams(sig.typeParameters, ctx),
+  };
+}
+
+/**
+ * Handle React.ForwardRefExoticComponent and similar wrapper types.
+ * Extract the Props type argument and serialize as a component.
+ */
+function serializeReactWrapperComponent(
+  symbol: ts.Symbol,
+  type: ts.Type,
+  ctx: SerializeContext
+): any {
+  const { checker } = ctx;
+
+  // Try to get the props from the type argument of the wrapper
+  // e.g., ForwardRefExoticComponent<Props & RefAttributes<T>>
+  let propsNode: any = { type: "object", properties: null };
+
+  const typeRef = type as ts.TypeReference;
+  const typeArgs = typeRef.typeArguments ??
(typeRef as any).resolvedTypeArguments; + + if (typeArgs && typeArgs.length > 0) { + // First type arg is the props type (possibly intersected with RefAttributes) + let propsType = typeArgs[0]; + + // If it's an intersection, strip out RefAttributes + if (propsType.isIntersection()) { + const filtered = propsType.types.filter((t) => { + const sym = t.getSymbol(); + return !sym || !sym.getName().includes("RefAttributes"); + }); + if (filtered.length === 1) { + propsType = filtered[0]; + } + } + + propsNode = serializeTypeExpanded(propsType, ctx); + } else { + // Fallback: try call signatures + const callSigs = type.getCallSignatures(); + if (callSigs.length > 0 && callSigs[0].getParameters().length > 0) { + const propsType = checker.getTypeOfSymbol(callSigs[0].getParameters()[0]); + propsNode = serializeTypeExpanded(propsType, ctx); + } + } + + // Extract type parameters from the symbol's declaration + const decls = symbol.getDeclarations(); + let typeParameters: any[] = []; + if (decls && decls.length > 0) { + const decl = decls[0]; + if (ts.isVariableDeclaration(decl) && decl.type && ts.isTypeReferenceNode(decl.type)) { + // Type params come from the variable's type annotation + typeParameters = (decl.type.typeArguments ?? []) + .filter(ts.isTypeParameterDeclaration as any) + .map((tp: any) => ({ + type: "typeParameter", + name: tp.name?.text ?? safeTypeToString(checker, checker.getTypeAtLocation(tp)), + })); + } + } + + return { + type: "component", + name: symbol.getName(), + props: propsNode, + typeParameters, + }; +} + +function serializeFunction( + symbol: ts.Symbol, + sig: ts.Signature, + ctx: SerializeContext +): any { + const { checker } = ctx; + const parameters: Record = {}; + for (const param of sig.getParameters()) { + const paramType = checker.getTypeOfSymbol(param); + const paramDecl = param.getDeclarations()?.[0]; + const isOptional = paramDecl ? 
checker.isOptionalParameter(paramDecl as ts.ParameterDeclaration) : false; + parameters[param.getName()] = { + type: "parameter", + name: param.getName(), + value: serializeType(paramType, ctx), + optional: isOptional, + }; + } + + const returnType = checker.getReturnTypeOfSignature(sig); + + return { + type: "function", + name: symbol.getName(), + parameters, + return: serializeType(returnType, ctx), + typeParameters: serializeTypeParams(sig.typeParameters, ctx), + }; +} + +/** + * Serialize a property's VALUE by checking the declaration AST first. + * This preserves external type references (ReactNode, ElementType, CSSProperties, etc.) + * as identifiers instead of expanding them through the resolved type. + */ +function serializePropertyValue( + prop: ts.Symbol, + propDecl: ts.Declaration, + ctx: SerializeContext, +): any { + const { checker } = ctx; + + // Try to use the declaration's type node (preserves type references) + if ( + (ts.isPropertySignature(propDecl) || ts.isPropertyDeclaration(propDecl)) + && propDecl.type + ) { + return serializeTypeNode(propDecl.type, new Map(), ctx); + } + + // For method signatures, serialize from the declaration if possible + if (ts.isMethodSignature(propDecl) || ts.isMethodDeclaration(propDecl)) { + const params: Record = {}; + for (const param of propDecl.parameters) { + const paramName = param.name.getText(); + const paramType = param.type + ? serializeTypeNode(param.type, new Map(), ctx) + : { type: "any" }; + params[paramName] = { type: "parameter", name: paramName, value: paramType }; + } + const retType = propDecl.type + ? 
serializeTypeNode(propDecl.type, new Map(), ctx) + : { type: "void" }; + return { type: "function", parameters: params, return: retType }; + } + + // Fallback: use resolved type + const propType = checker.getTypeOfSymbol(prop); + return serializeType(propType, ctx); +} + +function serializeInterface(symbol: ts.Symbol, ctx: SerializeContext): any { + const { checker } = ctx; + const decl = symbol.getDeclarations()![0] as ts.InterfaceDeclaration; + const type = checker.getDeclaredTypeOfSymbol(symbol) as ts.InterfaceType; + const typeId = (type as any).id; + + // Avoid infinite recursion + if (ctx.visiting.has(typeId)) { + return { type: "link", id: `${symbol.getName()}:${typeId}` }; + } + ctx.visiting.add(typeId); + + // Collect extends clauses + const extendsNodes: any[] = []; + const flattenedExternalExtends: string[] = []; + const UTILITY_TYPES = new Set([ + 'Pick', 'Omit', 'Partial', 'Required', 'Readonly', + 'Record', 'Exclude', 'Extract', + ]); + + if (type.getBaseTypes) { + for (const baseType of type.getBaseTypes() ?? []) { + const baseSymbol = baseType.getSymbol(); + const baseName = baseSymbol?.getName(); + + // Detect utility type results (Omit, Pick, etc.) — inline their properties + const isAnonymousBase = !baseSymbol + || baseName === "__type" + || baseName === "__object" + || (baseSymbol!.getDeclarations()?.length ?? 0) === 0; + const isUtilityTypeBase = !!( + (baseName && UTILITY_TYPES.has(baseName)) + || (baseType.aliasSymbol && UTILITY_TYPES.has(baseType.aliasSymbol.getName())) + ); + + diag('BASE-TYPE-IFACE', `of ${symbol.getName()} baseSym=${baseName ?? 'null'}, aliasSymbol=${baseType.aliasSymbol?.getName() ?? 'null'}, isAnonymous=${isAnonymousBase}, isUtility=${isUtilityTypeBase}, typeToString=${safeTypeToString(checker, baseType).slice(0, 80)}`); + + if ((isAnonymousBase || isUtilityTypeBase) && (baseType.getProperties?.()?.length ?? 0) > 0) { + // Utility type (Omit, Pick, etc.) 
— inline its resolved properties + const inlineProps: Record = {}; + for (const bp of baseType.getProperties() ?? []) { + const propName = bp.getName(); + if (shouldSkipProperty(propName)) continue; + const bpDecl = bp.getDeclarations()?.[0]; + const isOptional = !!(bp.flags & ts.SymbolFlags.Optional); + const defaultVal = getJsDocDefault(bp); + // Use declaration AST when available to preserve external type references + const value = bpDecl + ? serializePropertyValue(bp, bpDecl, ctx) + : serializeType(checker.getTypeOfSymbol(bp), ctx); + inlineProps[propName] = { + type: "property", + name: propName, + value, + optional: isOptional, + default: defaultVal, + access: "public", + }; + } + extendsNodes.push({ type: "interface", properties: inlineProps }); + } else if (baseSymbol && isExternalSymbol(baseSymbol, checker)) { + // External type: keep as identifier, don't flatten + flattenedExternalExtends.push(safeTypeToString(checker, baseType)); + } else { + // Our type: serialize and flatten its properties + const baseSerialized = serializeTypeExpanded(baseType, ctx); + if (baseSerialized.type === "interface" && baseSerialized.properties) { + // Merge properties from our base types + // (handled below when we collect properties) + } + extendsNodes.push(baseSerialized); + } + } + } + + // Collect own + inherited-from-our-types properties + const properties: Record = {}; + + // First, flatten properties from our base types + for (const ext of extendsNodes) { + if (ext.type === "interface" && ext.properties) { + for (const [key, prop] of Object.entries(ext.properties)) { + if (!properties[key]) { + properties[key] = prop; + } + } + } + } + + // Then own properties (override inherited) + for (const prop of type.getProperties()) { + const propName = prop.getName(); + if (shouldSkipProperty(propName)) continue; + + const propDecl = prop.getDeclarations()?.[0]; + if (!propDecl) continue; + if (isExternalDeclaration(propDecl)) continue; + // Skip private/protected + const 
modifiers = ts.getCombinedModifierFlags(propDecl as ts.Declaration); + if (modifiers & ts.ModifierFlags.Private || modifiers & ts.ModifierFlags.Protected) continue; + + const isOptional = !!(prop.flags & ts.SymbolFlags.Optional); + const defaultVal = getJsDocDefault(prop); + + // Serialize the property value from the DECLARATION AST when possible. + // This preserves external type references (ReactNode, ElementType, etc.) + // instead of expanding them through the resolved type. + const value = serializePropertyValue(prop, propDecl, ctx); + + properties[prop.getName()] = { + type: "property", + name: prop.getName(), + value, + optional: isOptional, + default: defaultVal, + access: "public", + }; + } + + ctx.visiting.delete(typeId); + + const result: any = { + type: "interface", + name: symbol.getName(), + properties, + typeParameters: serializeTypeParamsFromDecl(decl.typeParameters, ctx), + extends: flattenedExternalExtends.map((name) => ({ + type: "identifier", + name, + })), + }; + + return result; +} + +function serializeTypeAlias( + symbol: ts.Symbol, + decl: ts.TypeAliasDeclaration, + ctx: SerializeContext +): any { + const { checker } = ctx; + const name = symbol.getName(); + + // Get the actual resolved type (not the alias declaration type) + const type = checker.getTypeOfSymbol(symbol); + const declaredType = checker.getDeclaredTypeOfSymbol(symbol); + + // External type alias (e.g., type TextAreaProps = React.TextareaHTMLAttributes<...>) + // → just reference it by name, don't expand + const targetSymbol = type.getSymbol() ?? type.aliasSymbol ?? declaredType.getSymbol(); + if (targetSymbol && isExternalSymbol(targetSymbol, checker)) { + return { + type: "alias", + name, + value: { type: "identifier", name: safeTypeToString(checker, type) }, + typeParameters: serializeTypeParamsFromDecl(decl.typeParameters, ctx), + }; + } + + // Internal type that resolves to an object/interface — expand its properties + const props = type.getProperties?.() ?? 
[]; + if (props.length > 0) { + const expanded = serializeTypeExpanded(type, ctx); + if (expanded.type === "interface" && expanded.properties && Object.keys(expanded.properties).length > 0) { + return { + type: "interface", + name, + properties: expanded.properties, + typeParameters: serializeTypeParamsFromDecl(decl.typeParameters, ctx), + extends: expanded.extends ?? [], + }; + } + } + + // Simple type alias (union, literal, etc.) + const serialized = serializeType(declaredType.flags & ts.TypeFlags.Any ? type : declaredType, ctx); + return { + type: "alias", + name, + value: serialized, + typeParameters: serializeTypeParamsFromDecl(decl.typeParameters, ctx), + }; +} + +function serializeClass(symbol: ts.Symbol, ctx: SerializeContext): any { + const { checker } = ctx; + const type = checker.getDeclaredTypeOfSymbol(symbol); + const decl = symbol.getDeclarations()![0] as ts.ClassDeclaration; + + // Collect extends + const extendsNodes: any[] = []; + if (decl.heritageClauses) { + for (const clause of decl.heritageClauses) { + if (clause.token === ts.SyntaxKind.ExtendsKeyword) { + for (const expr of clause.types) { + const extType = checker.getTypeAtLocation(expr); + const extSymbol = extType.getSymbol(); + if (extSymbol && isExternalSymbol(extSymbol, checker)) { + extendsNodes.push({ + type: "identifier", + name: safeTypeToString(checker, extType), + }); + } else { + extendsNodes.push(serializeType(extType, ctx)); + } + } + } + } + } + + const properties: Record = {}; + for (const prop of type.getProperties()) { + const propName = prop.getName(); + if (shouldSkipProperty(propName)) continue; + + const propDecl = prop.getDeclarations()?.[0]; + if (!propDecl) continue; + if (isExternalDeclaration(propDecl)) continue; + const modifiers = ts.getCombinedModifierFlags(propDecl as ts.Declaration); + if (modifiers & ts.ModifierFlags.Private || modifiers & ts.ModifierFlags.Protected) continue; + + const propType = checker.getTypeOfSymbol(prop); + const isOptional = 
!!(prop.flags & ts.SymbolFlags.Optional); + properties[prop.getName()] = { + type: "property", + name: prop.getName(), + value: serializeType(propType, ctx), + optional: isOptional, + access: "public", + }; + } + + return { + type: "interface", + name: symbol.getName(), + properties, + typeParameters: serializeTypeParamsFromDecl(decl.typeParameters, ctx), + extends: extendsNodes, + }; +} + +function serializeEnum(symbol: ts.Symbol, ctx: SerializeContext): any { + const { checker } = ctx; + const type = checker.getDeclaredTypeOfSymbol(symbol); + if (type.isUnion()) { + return { + type: "alias", + name: symbol.getName(), + value: { + type: "union", + elements: type.types.map((t) => serializeType(t, ctx)), + }, + typeParameters: [], + }; + } + return { type: "identifier", name: symbol.getName() }; +} + +// --------------------------------------------------------------------------- +// Core type serializer (produces the type-node JSON) +// --------------------------------------------------------------------------- + +function serializeType(type: ts.Type, ctx: SerializeContext): any { + const { checker } = ctx; + + // Depth guard: prevent infinite recursion on deeply nested types + if (ctx.depth > MAX_DEPTH) { + return { type: "identifier", name: safeTypeToString(checker, type) }; + } + + // Cycle guard: prevent infinite recursion on circular type references + const typeId = (type as any).id; + if (typeId !== undefined && ctx.visiting.has(typeId)) { + return { type: "identifier", name: safeTypeToString(checker, type) }; + } + + // Track this type and increment depth for child calls + const deep = childCtx(ctx); + if (typeId !== undefined) deep.visiting = new Set(ctx.visiting).add(typeId); + + if (type.flags & ts.TypeFlags.Any) return { type: "any" }; + if (type.flags & ts.TypeFlags.Unknown) return { type: "unknown" }; + if (type.flags & ts.TypeFlags.Void) return { type: "void" }; + if (type.flags & ts.TypeFlags.Undefined) return { type: "undefined" }; + if (type.flags & 
ts.TypeFlags.Null) return { type: "null" }; + if (type.flags & ts.TypeFlags.Never) return { type: "never" }; + if (type.flags & ts.TypeFlags.Boolean || type.flags & ts.TypeFlags.BooleanLiteral) { + if (type.flags & ts.TypeFlags.BooleanLiteral) { + return { type: "boolean", value: safeTypeToString(checker, type) === "true" }; + } + return { type: "boolean" }; + } + if (type.flags & ts.TypeFlags.Number) return { type: "number" }; + if (type.flags & ts.TypeFlags.String) return { type: "string" }; + if (type.flags & ts.TypeFlags.NumberLiteral) { + return { type: "number", value: (type as ts.LiteralType).value }; + } + if (type.flags & ts.TypeFlags.StringLiteral) { + return { type: "string", value: (type as ts.StringLiteralType).value }; + } + if (type.flags & ts.TypeFlags.ESSymbol || type.flags & ts.TypeFlags.UniqueESSymbol) { + return { type: "symbol" }; + } + if (type.flags & ts.TypeFlags.BigInt) { + return { type: "identifier", name: "bigint" }; + } + if (type.flags & ts.TypeFlags.TemplateLiteral) { + return { type: "string" }; + } + + // Aliased types (e.g., ReactNode, CSSProperties, Key) — use the alias name + // This prevents expanding `type ReactNode = string | ReactElement | ...` into a huge union + if (type.aliasSymbol) { + const aliasDecls = type.aliasSymbol.getDeclarations(); + if (aliasDecls?.[0] && isExternalDeclaration(aliasDecls[0])) { + const name = type.aliasSymbol.getName(); + const aliasArgs = type.aliasTypeArguments; + if (aliasArgs && aliasArgs.length > 0) { + return { + type: "application", + base: { type: "identifier", name }, + typeParameters: aliasArgs.map((t) => serializeType(t, deep)), + }; + } + return { type: "identifier", name }; + } + + // Internal type alias that resolves to a union/intersection: serialize from + // the declaration AST so that references like ReactNode are preserved as + // identifiers instead of being flattened into their constituent types. 
+ if (aliasDecls?.[0] && ts.isTypeAliasDeclaration(aliasDecls[0])) { + const decl = aliasDecls[0] as ts.TypeAliasDeclaration; + const declType = decl.type; + if (ts.isUnionTypeNode(declType) || ts.isIntersectionTypeNode(declType)) { + // DIAGNOSTIC + diag('ALIAS-AST', `depth=${ctx.depth} ${type.aliasSymbol!.getName()} (${ts.isUnionTypeNode(declType) ? 'union' : 'intersection'}, ${declType.kind === ts.SyntaxKind.UnionType ? (declType as ts.UnionTypeNode).types.length + ' members' : '?'})`); + const typeParamMap = new Map(); + if (decl.typeParameters && type.aliasTypeArguments) { + for (let i = 0; i < decl.typeParameters.length; i++) { + if (i < type.aliasTypeArguments.length) { + typeParamMap.set(decl.typeParameters[i].name.text, type.aliasTypeArguments[i]); + } + } + } + return serializeTypeNode(declType, typeParamMap, deep); + } else { + diag('ALIAS-FALLTHROUGH', `depth=${ctx.depth} ${type.aliasSymbol!.getName()} declType=${ts.SyntaxKind[declType.kind]} — NOT union/intersection, falling through`); + } + } else { + diag('ALIAS-FALLTHROUGH', `depth=${ctx.depth} ${type.aliasSymbol!.getName()} — NOT a TypeAliasDeclaration (kind=${aliasDecls?.[0] ? ts.SyntaxKind[aliasDecls[0].kind] : 'no decls'}), falling through`); + } + } + + // Union + if (type.isUnion()) { + const elements = type.types; + // Count string literals — if too many, this is likely a design token union (size-0, size-10, ...) 
+ // Truncate to keep output manageable + const stringLiterals = elements.filter(t => t.flags & ts.TypeFlags.StringLiteral); + const others = elements.filter(t => !(t.flags & ts.TypeFlags.StringLiteral)); + + // DIAGNOSTIC: log union sizes + if (stringLiterals.length > 5) { + diag('UNION', `depth=${ctx.depth} total=${elements.length}, stringLits=${stringLiterals.length}, truncate=${stringLiterals.length > MAX_UNION_LITERALS}`); + } + + if (stringLiterals.length > MAX_UNION_LITERALS) { + // Keep some representative string literals + all non-literal members + const kept = stringLiterals.slice(0, MAX_UNION_LITERALS); + const serialized = [ + ...others.map(t => serializeType(t, deep)), + ...kept.map(t => serializeType(t, deep)), + { type: "string", value: `... ${stringLiterals.length - MAX_UNION_LITERALS} more` }, + ]; + return { type: "union", elements: serialized }; + } + + return { + type: "union", + elements: elements.map((t) => serializeType(t, deep)), + }; + } + + // Intersection + if (type.isIntersection()) { + return { + type: "intersection", + types: type.types.map((t) => serializeType(t, deep)), + }; + } + + // TypeParameter + if (type.flags & ts.TypeFlags.TypeParameter) { + const tp = type as ts.TypeParameter; + const constraint = tp.getConstraint(); + const def = tp.getDefault(); + return { + type: "typeParameter", + name: type.getSymbol()?.getName() ?? safeTypeToString(checker, type), + constraint: constraint ? serializeType(constraint, deep) : null, + default: def ? 
serializeType(def, deep) : null, + }; + } + + // Index + if (type.flags & ts.TypeFlags.Index) { + const indexType = type as ts.IndexType; + return { + type: "keyof", + keyof: serializeType((indexType as any).type, deep), + }; + } + + // IndexedAccess + if (type.flags & ts.TypeFlags.IndexedAccess) { + const ia = type as ts.IndexedAccessType; + return { + type: "indexedAccess", + objectType: serializeType(ia.objectType, deep), + indexType: serializeType(ia.indexType, deep), + }; + } + + // Conditional + if (type.flags & ts.TypeFlags.Conditional) { + const ct = type as ts.ConditionalType; + return { + type: "conditional", + checkType: serializeType(ct.checkType, deep), + extendsType: serializeType(ct.extendsType, deep), + trueType: serializeType(ct.resolvedTrueType ?? (ct as any).root?.trueType ?? checker.getAnyType(), deep), + falseType: serializeType(ct.resolvedFalseType ?? (ct as any).root?.falseType ?? checker.getAnyType(), deep), + }; + } + + // Object types (interfaces, classes, functions, arrays, tuples) + if (type.flags & ts.TypeFlags.Object) { + const objType = type as ts.ObjectType; + + // Array + if (checker.isArrayType(type)) { + const typeArgs = (type as ts.TypeReference).typeArguments; + return { + type: "array", + elementType: typeArgs?.[0] ? serializeType(typeArgs[0], deep) : { type: "any" }, + }; + } + + // Tuple + if (checker.isTupleType(type)) { + const typeArgs = (type as ts.TypeReference).typeArguments ?? 
[]; + return { + type: "tuple", + elements: typeArgs.map((t) => serializeType(t, deep)), + }; + } + + // Function/callable + const callSigs = type.getCallSignatures(); + if (callSigs.length > 0 && !type.getProperties().length) { + const sig = callSigs[0]; + return serializeFunctionSig(sig, deep); + } + + // TypeReference with type arguments (generic application) + if (objType.objectFlags & ts.ObjectFlags.Reference) { + const ref = type as ts.TypeReference; + const target = ref.target; + const typeArgs = checker.getTypeArguments(ref); + const targetSymbol = target.getSymbol(); + + if (targetSymbol && typeArgs.length > 0) { + // Check if this is an external type + if (isExternalSymbol(targetSymbol, checker)) { + // For external types, only expand type args that come from our packages. + // External args (Element, Event, etc.) just get their name. + return { + type: "application", + base: { type: "identifier", name: targetSymbol.getName() }, + typeParameters: typeArgs.map((t) => serializeTypeArgShallow(t, deep)), + }; + } + } + + if (targetSymbol && typeArgs.length > 0 && target !== type) { + return { + type: "application", + base: serializeType(target, deep), + typeParameters: typeArgs.map((t) => serializeType(t, deep)), + }; + } + } + + // Named object type (interface/class) + const symbol = type.getSymbol() ?? 
type.aliasSymbol; + if (symbol) { + // External types: just use the name + if (isExternalSymbol(symbol, checker)) { + return { type: "identifier", name: safeTypeToString(checker, type) }; + } + + // Anonymous object literal types: inline the properties + if ( + symbol.getName() === "__type" || + objType.objectFlags & ts.ObjectFlags.Anonymous + ) { + const props = type.getProperties(); + if (props.length > 30) { + return { type: "identifier", name: safeTypeToString(checker, type) }; + } + return serializeObjectLiteral(type, deep); + } + + // Named internal type: use identifier + diag('NAMED-INTERNAL', `depth=${ctx.depth} ${symbol.getName()} → identifier (not expanded)`); + return { type: "identifier", name: symbol.getName() }; + } + + // Anonymous object with no symbol — check size before expanding + const anonProps = type.getProperties(); + if (anonProps.length > 30) { + return { type: "identifier", name: safeTypeToString(checker, type) }; + } + return serializeObjectLiteral(type, deep); + } + + // Fallback + return { type: "identifier", name: safeTypeToString(checker, type) }; +} + +/** + * Like serializeType but expands interfaces (for component props, etc.) + * instead of returning identifiers. + */ +function serializeTypeExpanded(type: ts.Type, ctx: SerializeContext): any { + const { checker } = ctx; + + // Depth guard + if (ctx.depth > MAX_DEPTH) { + const sym = type.getSymbol() ?? type.aliasSymbol; + return { type: "identifier", name: sym?.getName() ?? safeTypeToString(checker, type) }; + } + + const deep = childCtx(ctx); + + // Resolve alias types to their underlying type + let resolved = type; + if (type.aliasSymbol) { + const aliasType = checker.getDeclaredTypeOfSymbol(type.aliasSymbol); + if (aliasType && aliasType !== type) { + resolved = aliasType; + } + } + + const symbol = resolved.getSymbol() ?? resolved.aliasSymbol ?? type.getSymbol() ?? type.aliasSymbol; + + // DIAGNOSTIC + diag('EXPAND', `depth=${ctx.depth} symbol=${symbol?.getName() ?? 
'null'}, alias=${type.aliasSymbol?.getName() ?? 'null'}, isExt=${symbol ? isExternalSymbol(symbol, checker) : 'N/A'}`); + + // Don't expand external or built-in types + if (symbol && isExternalSymbol(symbol, checker)) { + return serializeType(type, deep); + } + + const allProps = resolved.getProperties?.() ?? type.getProperties?.() ?? []; + if (allProps.length > 0 && symbol) { + const typeId = (resolved as any).id ?? (type as any).id ?? 0; + if (typeId && ctx.visiting.has(typeId)) { + return { type: "identifier", name: symbol.getName() }; + } + const visiting = new Set(ctx.visiting); + if (typeId) visiting.add(typeId); + const deepWithVisiting = { ...deep, visiting }; + + // Collect base types and their property names so we can exclude inherited props + const baseTypes = resolved.getBaseTypes?.() ?? type.getBaseTypes?.() ?? []; + const externalExtends: any[] = []; + const internalBaseProps: Record<string, any> = {}; + const inheritedPropNames = new Set<string>(); + + for (const baseType of baseTypes) { + const baseSym = baseType.getSymbol(); + const baseProps = baseType.getProperties?.() ?? []; + for (const bp of baseProps) { + inheritedPropNames.add(bp.getName()); + } + + // Detect computed base types that should be inlined rather than shown + // as opaque "Omit<...>" or "Pick<...>" in extends: + // 1. Truly anonymous (no symbol, __type, __object, no declarations) + // 2. TS utility types — the resolved Omit has baseSym = Pick (from lib.d.ts) + // because Omit<T, K> = Pick<T, Exclude<keyof T, K>> + // 3. Any base with aliasSymbol pointing to an external utility type + const baseName = baseSym?.getName(); + const UTILITY_TYPES = new Set([ + 'Pick', 'Omit', 'Partial', 'Required', 'Readonly', + 'Record', 'Exclude', 'Extract', + ]); + + const isAnonymousBase = !baseSym + || baseName === "__type" + || baseName === "__object" + || (baseSym.getDeclarations()?.length ?? 
0) === 0; + + const isUtilityTypeBase = !!( + (baseName && UTILITY_TYPES.has(baseName)) + || (baseType.aliasSymbol && UTILITY_TYPES.has(baseType.aliasSymbol.getName())) + ); + + // DIAGNOSTIC: log base type detection + const symName = symbol?.getName() ?? '?'; + diag('BASE-TYPE', `of ${symName} baseSym=${baseName ?? 'null'}, aliasSymbol=${baseType.aliasSymbol?.getName() ?? 'null'}, isAnonymous=${isAnonymousBase}, isUtility=${isUtilityTypeBase}, propsCount=${baseProps.length}, typeToString=${safeTypeToString(checker, baseType).slice(0, 80)}`); + + if ((isAnonymousBase || isUtilityTypeBase) && baseProps.length > 0) { + for (const bp of baseProps) { + const propName = bp.getName(); + if (shouldSkipProperty(propName)) continue; + const propType = checker.getTypeOfSymbol(bp); + const isOptional = !!(bp.flags & ts.SymbolFlags.Optional); + const defaultVal = getJsDocDefault(bp); + internalBaseProps[propName] = { + type: "property", + name: propName, + value: serializeType(propType, deepWithVisiting), + optional: isOptional, + default: defaultVal, + access: "public", + }; + } + } else if (baseSym && isExternalSymbol(baseSym, checker)) { + externalExtends.push({ + type: "identifier", + name: safeTypeToString(checker, baseType), + }); + } else { + const expanded = serializeTypeExpanded(baseType, deepWithVisiting); + if (expanded.properties) { + Object.assign(internalBaseProps, expanded.properties); + } + if (expanded.extends) { + externalExtends.push(...expanded.extends); + } + } + } + + // Only include OWN properties — not inherited from any base type + const properties: Record = { ...internalBaseProps }; + for (const prop of allProps) { + const propName = prop.getName(); + + if (shouldSkipProperty(propName)) continue; + if (inheritedPropNames.has(propName) && !isOwnProperty(prop, symbol)) continue; + + const propDecl = prop.getDeclarations()?.[0]; + if (!propDecl) continue; + if (isExternalDeclaration(propDecl)) continue; + + const modifiers = 
ts.getCombinedModifierFlags(propDecl as ts.Declaration); + if (modifiers & ts.ModifierFlags.Private || modifiers & ts.ModifierFlags.Protected) continue; + + const propType = checker.getTypeOfSymbol(prop); + const isOptional = !!(prop.flags & ts.SymbolFlags.Optional); + const defaultVal = getJsDocDefault(prop); + + properties[propName] = { + type: "property", + name: propName, + value: serializeType(propType, deepWithVisiting), + optional: isOptional, + default: defaultVal, + access: "public", + }; + } + + return { + type: "interface", + name: symbol.getName(), + properties, + extends: externalExtends, + typeParameters: [], + }; + } + + // For intersections, try to merge properties from all branches + if (resolved.isIntersection?.() || type.isIntersection?.()) { + const interType = (resolved.isIntersection?.() ? resolved : type) as ts.IntersectionType; + const allMergedProps: Record = {}; + const externalExtends: any[] = []; + + for (const member of interType.types) { + const memberSym = member.getSymbol(); + if (memberSym && isExternalSymbol(memberSym, checker)) { + externalExtends.push({ + type: "identifier", + name: safeTypeToString(checker, member), + }); + continue; + } + const expanded = serializeTypeExpanded(member, deep); + if (expanded.type === "interface" && expanded.properties) { + Object.assign(allMergedProps, expanded.properties); + if (expanded.extends) { + externalExtends.push(...expanded.extends); + } + } else if (expanded.type === "identifier") { + externalExtends.push(expanded); + } + } + + if (Object.keys(allMergedProps).length > 0 || externalExtends.length > 0) { + return { + type: "interface", + name: symbol?.getName() ?? 
"__type", + properties: allMergedProps, + extends: externalExtends, + typeParameters: [], + }; + } + } + + return serializeType(type, deep); +} + +function serializeObjectLiteral(type: ts.Type, ctx: SerializeContext): any { + const { checker } = ctx; + const properties: Record<string, any> = {}; + + for (const prop of type.getProperties()) { + const propName = prop.getName(); + if (shouldSkipProperty(propName)) continue; + + const propType = checker.getTypeOfSymbol(prop); + const isOptional = !!(prop.flags & ts.SymbolFlags.Optional); + const defaultVal = getJsDocDefault(prop); + + properties[prop.getName()] = { + type: "property", + name: prop.getName(), + value: serializeType(propType, ctx), + optional: isOptional, + default: defaultVal, + access: "public", + }; + } + + // Check for call signatures + const callSigs = type.getCallSignatures(); + if (callSigs.length > 0 && Object.keys(properties).length === 0) { + return serializeFunctionSig(callSigs[0], ctx); + } + + if (Object.keys(properties).length === 0) { + return { type: "object", properties: null }; + } + + return { type: "object", properties }; +} + +function serializeFunctionSig(sig: ts.Signature, ctx: SerializeContext): any { + const { checker } = ctx; + const params = sig.getParameters().map((p) => { + const pType = checker.getTypeOfSymbol(p); + const pDecl = p.getDeclarations()?.[0]; + return { + type: "parameter", + name: p.getName(), + value: serializeType(pType, ctx), + optional: pDecl ? 
checker.isOptionalParameter(pDecl as ts.ParameterDeclaration) : false, + }; + }); + const retType = checker.getReturnTypeOfSignature(sig); + return { + type: "function", + parameters: params, + return: serializeType(retType, ctx), + typeParameters: serializeTypeParams(sig.typeParameters, ctx), + }; +} + +function serializeTypeParams( + typeParams: readonly ts.TypeParameter[] | undefined, + ctx: SerializeContext +): any[] { + if (!typeParams) return []; + return typeParams.map((tp) => { + const constraint = tp.getConstraint(); + const def = tp.getDefault(); + return { + type: "typeParameter", + name: tp.getSymbol()?.getName() ?? safeTypeToString(ctx.checker, tp), + constraint: constraint && !(constraint.flags & ts.TypeFlags.Unknown) + ? serializeType(constraint, ctx) + : null, + default: def ? serializeType(def, ctx) : null, + }; + }); +} + +function serializeTypeParamsFromDecl( + nodes: ts.NodeArray<ts.TypeParameterDeclaration> | undefined, + ctx: SerializeContext +): any[] { + if (!nodes) return []; + const { checker } = ctx; + return nodes.map((node) => { + const symbol = checker.getSymbolAtLocation(node.name); + const type = symbol ? checker.getDeclaredTypeOfSymbol(symbol) : undefined; + const tp = type as ts.TypeParameter | undefined; + const constraint = tp?.getConstraint(); + const def = tp?.getDefault(); + return { + type: "typeParameter", + name: node.name.text, + constraint: constraint && !(constraint.flags & ts.TypeFlags.Unknown) + ? serializeType(constraint, ctx) + : null, + default: def ? 
serializeType(def, ctx) : null, + }; + }); +} + +// --------------------------------------------------------------------------- +// JSDoc @default extraction +// --------------------------------------------------------------------------- + +function getJsDocDefault(symbol: ts.Symbol): string | null { + const decls = symbol.getDeclarations(); + if (!decls) return null; + for (const decl of decls) { + const jsdocTags = ts.getJSDocTags(decl); + for (const tag of jsdocTags) { + if (tag.tagName.text === "default") { + const text = ts.getTextOfJSDocComment(tag.comment); + if (text) return text; + } + } + } + return null; +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +/** + * Validate a serialized export for common issues. + */ +function validateExport(pkgName: string, exportName: string, result: any, depth = 0): void { + if (!result || typeof result !== 'object' || depth > 5) return; + + // Check for numeric keys in properties + if (result.properties && typeof result.properties === 'object') { + const keys = Object.keys(result.properties); + const numericKeys = keys.filter(k => /^\d+$/.test(k)); + if (numericKeys.length > 0) { + console.warn(` ⚠ NUMERIC KEYS in ${pkgName}:${exportName} (result.type=${result.type}): [${numericKeys.slice(0, 3).join(', ')}]`); + console.warn(` Full output: ${JSON.stringify(result).slice(0, 500)}`); + } + } + + // Check props for components + if (result.type === 'component' && result.props) { + validateExport(pkgName, exportName + '.props', result.props, depth + 1); + } + + // Check value for aliases + if (result.type === 'alias' && result.value) { + validateExport(pkgName, exportName + '.value', result.value, depth + 1); + } +} + +async function main() { + initDiagLog(); + const allPackages = discoverPackages(args.packagesDir); + // Only process our packages, not transitive dependencies + const packages = 
allPackages.filter((p) => isOurPackage(p.name)); + if (packages.length === 0) { + if (args.allowEmpty) { + console.warn(`No react-spectrum packages found in ${args.packagesDir} (--allow-empty set, exiting 0)`); + closeDiagLog(); + return; + } + if (allPackages.length === 0) { + console.error(`No packages with package.json found under ${args.packagesDir}.`); + console.error(` Check that --packages-dir points at the right directory.`); + console.error(` Pass --allow-empty to override this check.`); + } else { + console.error(`No react-spectrum packages with type entry points found in ${args.packagesDir}`); + console.error(`(found ${allPackages.length} total package${allPackages.length === 1 ? '' : 's'}, but none matched our scopes)`); + console.error(` Pass --allow-empty to override this check.`); + } + process.exit(1); + } + + console.log(`Found ${packages.length} packages to extract (${allPackages.length} total in directory)`); + + // Find node_modules/@types directories for type resolution + // Walk up from packagesDir to find node_modules + const typeRoots: string[] = []; + let searchDir = path.resolve(args.packagesDir); + for (let i = 0; i < 5; i++) { + const candidate = path.join(searchDir, "@types"); + if (fs.existsSync(candidate)) { + typeRoots.push(candidate); + console.log(` Found typeRoots: ${candidate}`); + } + const nmCandidate = path.join(searchDir, "node_modules", "@types"); + if (fs.existsSync(nmCandidate)) { + typeRoots.push(nmCandidate); + console.log(` Found typeRoots: ${nmCandidate}`); + } + const parent = path.dirname(searchDir); + if (parent === searchDir) break; + searchDir = parent; + } + + const entryFiles = packages.map((p) => p.typesEntryPoint); + const compilerOptions: ts.CompilerOptions = { + target: ts.ScriptTarget.ESNext, + module: ts.ModuleKind.ESNext, + moduleResolution: ts.ModuleResolutionKind.Node10, + declaration: true, + strict: false, + skipLibCheck: true, + noEmit: true, + jsx: ts.JsxEmit.ReactJSX, + ...(typeRoots.length > 0 ? 
{ typeRoots } : {}), + }; + const program = ts.createProgram(entryFiles, compilerOptions); + const checker = program.getTypeChecker(); + + // Diagnostic: check if React types are resolved + const reactCheck = ts.resolveModuleName("react", entryFiles[0], compilerOptions, ts.sys); + if (reactCheck.resolvedModule) { + console.log(` React types resolved: ${reactCheck.resolvedModule.resolvedFileName}`); + } else { + console.warn(" WARNING: Could not resolve 'react' module — component types may resolve to 'any'"); + console.warn(" Ensure @types/react is installed in node_modules"); + } + + // Collect writes and dispatch them concurrently at the end. The TS + // checker (and therefore serializeSymbol) must stay on the main thread, + // but disk writes can be issued in parallel so we're not paying + // fs.writeFileSync latency once per package. + const pendingWrites: Array<{ path: string; content: string }> = []; + + // Aggregate health across all packages. If the vast majority of top-level + // exports come out as `any`, cross-package type resolution is broken + // (commonly: @types/react missing from packages-dir). The resulting + // wall-of-diff would be noise, so fail loudly instead. 
+ let totalTopLevelExports = 0; + let totalTopLevelAnyExports = 0; + + for (const pkg of packages) { + const sourceFile = program.getSourceFile(pkg.typesEntryPoint); + if (!sourceFile) { + console.warn(` could not load source file for ${pkg.name}: ${pkg.typesEntryPoint}`); + continue; + } + + const moduleSymbol = checker.getSymbolAtLocation(sourceFile); + if (!moduleSymbol) { + console.warn(` no module symbol for ${pkg.name}`); + continue; + } + + const exports = checker.getExportsOfModule(moduleSymbol); + const ctx = makeCtx(checker); + const apiExports: Record = {}; + + for (const exp of exports) { + try { + diag('EXPORT', `--- ${pkg.name}:${exp.getName()} ---`); + const result = serializeSymbol(exp, ctx); + apiExports[exp.getName()] = result; + + // Validate: check for numeric property keys (TS internal IDs leaking) + validateExport(pkg.name, exp.getName(), result); + } catch (err: any) { + if (args.verbose) { + console.warn(` error serializing ${pkg.name}:${exp.getName()}: ${err.message}`); + console.warn(` stack: ${err.stack?.split('\n').slice(0, 3).join('\n ')}`); + } + apiExports[exp.getName()] = { type: "any" }; + } + } + + const apiJson = { exports: apiExports, links: ctx.links }; + + // Health metrics: count 'any' occurrences at every depth (not just top-level + // exports) so CI and local runs can be compared for resolution health. + // A sudden spike in the `any` ratio between environments is the + // signature of broken cross-package type resolution. + const health = computeHealth(apiJson); + if (health.topLevelAnyExports > 0) { + console.warn( + ` ⚠ ${pkg.name}: ${health.topLevelAnyExports}/${health.topLevelExports} exports resolved to 'any'`, + ); + } + totalTopLevelExports += health.topLevelExports; + totalTopLevelAnyExports += health.topLevelAnyExports; + const outputBase = args.outputDir + ? 
path.join(args.outputDir, pkg.name) + : pkg.dir; + const outputPath = path.join(outputBase, "dist", "api.json"); + // Directory creation is sync — cheap and we need it to exist before + // any write in that directory kicks off. + fs.mkdirSync(path.dirname(outputPath), { recursive: true }); + pendingWrites.push({ path: outputPath, content: JSON.stringify(apiJson, null, 2) }); + + const healthPath = path.join(outputBase, "dist", "health.json"); + pendingWrites.push({ path: healthPath, content: JSON.stringify(health, null, 2) }); + + // Also ensure package.json exists in output dir (for compareAPIs to read the name) + const outPkgJson = path.join(outputBase, "package.json"); + if (!fs.existsSync(outPkgJson)) { + pendingWrites.push({ path: outPkgJson, content: JSON.stringify({ name: pkg.name }, null, 2) }); + } + + if (args.verbose) { + console.log(` ${pkg.name}: ${Object.keys(apiExports).length} exports → ${outputPath}`); + } else { + console.log(` ✓ ${pkg.name}`); + } + } + + // Flush all writes in parallel. Node dispatches these to the libuv + // thread pool, so for ~200 packages we overlap disk I/O instead of + // paying sync-write latency serially. + await Promise.all( + pendingWrites.map((w) => fs.promises.writeFile(w.path, w.content)), + ); + + // Fail loudly when more than half of all top-level exports resolved to + // `any`. That pattern means TS couldn't resolve component prop types + // (usually: `@types/react` missing from the packages-dir's + // node_modules/). Without this check the downstream compare produces a + // sprawling diff that looks like an API rewrite, which is worse than + // failing. + if (totalTopLevelExports > 0) { + const anyRatio = totalTopLevelAnyExports / totalTopLevelExports; + if (anyRatio > 0.5) { + console.error( + `\nError: ${totalTopLevelAnyExports}/${totalTopLevelExports} ` + + `(${(anyRatio * 100).toFixed(1)}%) of top-level exports resolved to 'any'. 
` + + `Type resolution is broken — ensure @types/react (and any other required ` + + `@types/*) is installed in the packages-dir's node_modules/ so TS can ` + + `resolve component prop types.`, + ); + closeDiagLog(); + process.exit(1); + } + } + + closeDiagLog(); +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/rsp-api-checker/ts-extractor/extractor.test.ts b/rsp-api-checker/ts-extractor/extractor.test.ts new file mode 100644 index 00000000000..a07b19cdb4f --- /dev/null +++ b/rsp-api-checker/ts-extractor/extractor.test.ts @@ -0,0 +1,340 @@ +/** + * Integration test: runs the extractor as a subprocess on a minimal fixture + * package and verifies the produced api.json has the expected shape. + */ + +import { describe, it, expect, afterEach } from "vitest"; +import { execSync } from "child_process"; +import { fileURLToPath } from "url"; +import * as path from "path"; +import * as fs from "fs"; +import * as os from "os"; + +const __dirname = fileURLToPath(new URL(".", import.meta.url)); +const EXTRACTOR = path.join(__dirname, "extract-api.ts"); +const FIXTURES_DIR = path.join(__dirname, "tests", "fixtures", "packages"); +const WITH_DEV_PKG_DIR = path.join(__dirname, "tests", "fixtures", "with-dev-pkg", "packages"); +const WITH_PREVIOUS_TYPES_DIR = path.join(__dirname, "tests", "fixtures", "with-previous-types", "packages"); + +// Temporary output directories created during tests — cleaned up in afterEach. +const tmpDirs: string[] = []; + +afterEach(() => { + for (const dir of tmpDirs.splice(0)) { + try { fs.rmSync(dir, { recursive: true, force: true }); } catch { /* ignore */ } + } +}); + +function makeTmpDir(): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rsp-extractor-test-")); + tmpDirs.push(dir); + return dir; +} + +function runExtractor(packagesDir: string, outputDir: string, extraArgs: string[] = []): void { + const extra = extraArgs.length ? 
" " + extraArgs.join(" ") : ""; + execSync( + `npx tsx ${EXTRACTOR} --packages-dir ${packagesDir} --output-dir ${outputDir}${extra}`, + { cwd: __dirname, stdio: "pipe" }, + ); +} + +describe("extract-api.ts (integration)", () => { + it("creates api.json for a minimal .d.ts fixture", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const apiJsonPath = path.join( + outputDir, + "@react-aria", + "test-widget", + "dist", + "api.json", + ); + expect(fs.existsSync(apiJsonPath), `api.json not found at ${apiJsonPath}`).toBe(true); + }); + + it("api.json has top-level exports and links keys", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const apiJson = JSON.parse( + fs.readFileSync( + path.join(outputDir, "@react-aria", "test-widget", "dist", "api.json"), + "utf8", + ), + ); + expect(apiJson).toHaveProperty("exports"); + expect(apiJson).toHaveProperty("links"); + }); + + it("extracts WidgetProps interface with expected properties", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const { exports } = JSON.parse( + fs.readFileSync( + path.join(outputDir, "@react-aria", "test-widget", "dist", "api.json"), + "utf8", + ), + ); + + expect(exports).toHaveProperty("WidgetProps"); + const widgetProps = exports["WidgetProps"]; + expect(widgetProps.type).toBe("interface"); + expect(widgetProps.properties).toHaveProperty("label"); + expect(widgetProps.properties).toHaveProperty("isDisabled"); + expect(widgetProps.properties["isDisabled"].optional).toBe(true); + expect(widgetProps.properties["label"].optional).toBe(false); + }); + + it("extracts WidgetVariant type alias as a union", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const { exports } = JSON.parse( + fs.readFileSync( + path.join(outputDir, "@react-aria", "test-widget", "dist", "api.json"), + "utf8", + ), + ); + + expect(exports).toHaveProperty("WidgetVariant"); + const 
variant = exports["WidgetVariant"]; + expect(variant.type).toBe("alias"); + // The value should be a union of two string literals + expect(variant.value.type).toBe("union"); + const values = variant.value.elements.map((e: { value: string }) => e.value); + expect(values).toContain("primary"); + expect(values).toContain("secondary"); + }); + + it("extracts useWidget as a function export", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const { exports } = JSON.parse( + fs.readFileSync( + path.join(outputDir, "@react-aria", "test-widget", "dist", "api.json"), + "utf8", + ), + ); + + expect(exports).toHaveProperty("useWidget"); + // Functions may be serialized as "function" or as an interface-like type + expect(["function", "interface"]).toContain(exports["useWidget"].type); + }); + + it("writes a package.json alongside the api.json", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const pkgJsonPath = path.join(outputDir, "@react-aria", "test-widget", "package.json"); + expect(fs.existsSync(pkgJsonPath)).toBe(true); + const pkg = JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")); + expect(pkg.name).toBe("@react-aria/test-widget"); + }); + + // health.json is the diagnostic companion to api.json: it reports how many + // type nodes in the extracted API are `any`, which is the signature of + // TS resolution falling back. Comparing these counts between environments + // (local vs CI) pinpoints cross-package resolution failures quickly. 
+ it("writes a health.json with any-count metrics alongside api.json", () => { + const outputDir = makeTmpDir(); + runExtractor(FIXTURES_DIR, outputDir); + + const healthPath = path.join(outputDir, "@react-aria", "test-widget", "dist", "health.json"); + expect(fs.existsSync(healthPath)).toBe(true); + const health = JSON.parse(fs.readFileSync(healthPath, "utf8")); + expect(health).toHaveProperty("topLevelExports"); + expect(health).toHaveProperty("topLevelAnyExports"); + expect(health).toHaveProperty("totalTypeNodes"); + expect(health).toHaveProperty("anyTypeNodes"); + expect(health).toHaveProperty("anyRatio"); + // The minimal fixture has clean types — anyRatio should be low. + expect(health.anyRatio).toBeLessThan(0.5); + }); + + // Packages under a directory named "dev/" are build tools, not public API. + // The Rust walk_for_packages already skips "dev/" when collecting packages + // to install from npm. The TypeScript extractor must apply the same exclusion + // so that local and published extractions are symmetric — otherwise packages + // in packages/dev/ appear as "added" in every diff run. 
+ it("does not extract packages from dev/ subdirectory", () => { + const outputDir = makeTmpDir(); + runExtractor(WITH_DEV_PKG_DIR, outputDir); + + // The normal package should be extracted + const normalApiPath = path.join( + outputDir, + "@react-aria", + "normal-widget", + "dist", + "api.json", + ); + expect( + fs.existsSync(normalApiPath), + `expected @react-aria/normal-widget to be extracted at ${normalApiPath}`, + ).toBe(true); + + // The package inside dev/ must NOT be extracted + const devApiPath = path.join( + outputDir, + "@react-spectrum", + "dev-tool", + "dist", + "api.json", + ); + expect( + fs.existsSync(devApiPath), + "packages inside dev/ must not be extracted (they are build tools, not public API, " + + "and the npm check already excludes the dev/ directory)", + ).toBe(false); + }); + + // When a developer adds a prop to a source .ts file but has not re-run + // `yarn build`, the dist/types/*.d.ts represents the *previous* build — + // the baseline before the current edit. The extractor reads .d.ts only + // (a single, consistent path for both the published and local sides), + // so if source is newer than types we cannot silently fall back to + // reading source — that would introduce a second extraction path whose + // output can diverge subtly from the .d.ts path. Instead, fail loudly + // with a "run yarn build" message so the user fixes the real problem. + // This guards against the "isFoo added to ButtonProps but never appears + // in the diff" class of bug. + it("errors when source is newer than dist/types (build is out of date)", () => { + const outputDir = makeTmpDir(); + + // Both files may have identical mtimes after a fresh checkout. Force + // the .d.ts to be older than the .ts so the mtime comparison kicks in. 
+ const srcPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "src", + "index.ts", + ); + const previousTypesPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "dist", + "types", + "src", + "index.d.ts", + ); + const now = Date.now(); + fs.utimesSync(previousTypesPath, new Date(now - 60_000), new Date(now - 60_000)); + fs.utimesSync(srcPath, new Date(now), new Date(now)); + + let threw = false; + let stderr = ""; + try { + runExtractor(WITH_PREVIOUS_TYPES_DIR, outputDir, ["--check-build-freshness"]); + } catch (err) { + threw = true; + const e = err as { stderr?: Buffer; stdout?: Buffer }; + stderr = (e.stderr?.toString() ?? "") + (e.stdout?.toString() ?? ""); + } + + expect( + threw, + "extractor must fail when src/ is newer than dist/types/ — silently " + + "falling back to source would introduce a second extraction path " + + "whose output can diverge from the .d.ts path.", + ).toBe(true); + expect(stderr).toMatch(/@react-aria\/previous-widget/); + expect(stderr).toMatch(/yarn build/); + }); + + // The freshness check is only meaningful against the live workspace — + // published npm tarballs are immutable, so their src/ vs dist/types/ + // mtime relationship doesn't imply an out-of-date build. Without the + // --check-build-freshness flag, the extractor must accept the package + // as-is even when source is newer than types. 
+ it("does not error when --check-build-freshness is not set", () => { + const outputDir = makeTmpDir(); + + const srcPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "src", + "index.ts", + ); + const previousTypesPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "dist", + "types", + "src", + "index.d.ts", + ); + const now = Date.now(); + fs.utimesSync(previousTypesPath, new Date(now - 60_000), new Date(now - 60_000)); + fs.utimesSync(srcPath, new Date(now), new Date(now)); + + // No extra flag → no freshness check → no error, even though src/ is newer. + runExtractor(WITH_PREVIOUS_TYPES_DIR, outputDir); + + const apiJsonPath = path.join( + outputDir, + "@react-aria", + "previous-widget", + "dist", + "api.json", + ); + expect(fs.existsSync(apiJsonPath)).toBe(true); + }); + + // The inverse: when the .d.ts is *up to date* (same mtime or newer than + // src/), the extractor must keep using types. Reading from source when + // unnecessary changes the resolution behaviour in a monorepo (relative + // `import` paths into un-built sibling packages → TS falls back to `any`), + // which produces spurious "removed" diffs against the published side. 
+ it("keeps using dist/types when it is current (same or newer than source)", () => { + const outputDir = makeTmpDir(); + + // Force .d.ts to be newer than .ts + const srcPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "src", + "index.ts", + ); + const typesPath = path.join( + WITH_PREVIOUS_TYPES_DIR, + "@react-aria", + "previous-widget", + "dist", + "types", + "src", + "index.d.ts", + ); + const now = Date.now(); + fs.utimesSync(srcPath, new Date(now - 60_000), new Date(now - 60_000)); + fs.utimesSync(typesPath, new Date(now), new Date(now)); + + runExtractor(WITH_PREVIOUS_TYPES_DIR, outputDir, ["--check-build-freshness"]); + + const apiJsonPath = path.join( + outputDir, + "@react-aria", + "previous-widget", + "dist", + "api.json", + ); + const { exports } = JSON.parse(fs.readFileSync(apiJsonPath, "utf8")); + const widgetProps = exports["WidgetProps"]; + // The previous .d.ts doesn't have isFresh — since it's current enough + // per mtime, we stick with it and isFresh must NOT appear. 
+ expect( + widgetProps.properties, + "when the .d.ts is newer than src/, the extractor must stick with the " + + "declaration file — switching to source unnecessarily causes spurious " + + "diffs in monorepos where un-built sibling packages can't resolve from .ts.", + ).not.toHaveProperty("isFresh"); + }); +}); diff --git a/rsp-api-checker/ts-extractor/package-lock.json b/rsp-api-checker/ts-extractor/package-lock.json new file mode 100644 index 00000000000..f630fee4b7b --- /dev/null +++ b/rsp-api-checker/ts-extractor/package-lock.json @@ -0,0 +1,1973 @@ +{ + "name": "rsp-api-extractor", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "rsp-api-extractor", + "version": "0.1.0", + "dependencies": { + "tsx": "^4.0.0", + "typescript": "~5.7.0" + }, + "devDependencies": { + "vitest": "^2.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": 
[ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, 
+ "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "license": "MIT", + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "license": 
"MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + 
"x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.1.tgz", + "integrity": "sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.1.tgz", + "integrity": "sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.1.tgz", + "integrity": "sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.1.tgz", + "integrity": "sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.1.tgz", + "integrity": "sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.1.tgz", + "integrity": "sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", 
+ "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.1.tgz", + "integrity": "sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==", + "cpu": [ + "arm" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.1.tgz", + "integrity": "sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==", + "cpu": [ + "arm" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.1.tgz", + "integrity": "sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.1.tgz", + "integrity": "sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==", + "cpu": [ + "arm64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.1.tgz", + "integrity": "sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.1.tgz", + "integrity": "sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==", + "cpu": [ + "loong64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.1.tgz", + "integrity": "sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.1.tgz", + "integrity": "sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.1.tgz", + "integrity": "sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==", + "cpu": [ + 
"riscv64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.1.tgz", + "integrity": "sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.1.tgz", + "integrity": "sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.1.tgz", + "integrity": "sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==", + "cpu": [ + "x64" + ], + "dev": true, + "libc": [ + "glibc" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.1.tgz", + "integrity": "sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==", + "cpu": [ + "x64" + ], + "dev": true, + "libc": [ + "musl" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.1.tgz", + "integrity": "sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.1.tgz", + "integrity": "sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.1.tgz", + "integrity": "sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.1.tgz", + "integrity": "sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.1.tgz", + "integrity": "sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + 
"node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.1.tgz", + "integrity": "sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitest/expect": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.9.tgz", + "integrity": "sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "2.1.9", + "@vitest/utils": "2.1.9", + "chai": "^5.1.2", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.9.tgz", + "integrity": "sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "2.1.9", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.12" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", + "integrity": 
"sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.9.tgz", + "integrity": "sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "2.1.9", + "pathe": "^1.1.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.9.tgz", + "integrity": "sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "2.1.9", + "magic-string": "^0.30.12", + "pathe": "^1.1.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.1.9.tgz", + "integrity": "sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^3.0.2" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", + "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "2.1.9", + "loupe": "^3.1.2", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, 
+ "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/chai": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/check-error": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.3.tgz", + "integrity": "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": 
"sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + "@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": 
"sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-tsconfig": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.14.0.tgz", + "integrity": "sha512-yTb+8DXzDREzgvYmh6s9vHsSVCHeC0G3PI5bEXNBHtmshPnO+S5O7qgLEOn0I5QvMy6kpZN8K1NKGyilLb93wA==", + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/loupe": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/ms": { 
+ "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/pathe": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", + "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.10.tgz", + "integrity": "sha512-pMMHxBOZKFU6HgAZ4eyGnwXF/EvPGGqUr0MnZ5+99485wwW41kW91A4LOGxSHhgugZmSChL5AlElNdwlNgcnLQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": 
"https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/rollup": { + "version": "4.60.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.1.tgz", + "integrity": "sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.1", + "@rollup/rollup-android-arm64": "4.60.1", + "@rollup/rollup-darwin-arm64": "4.60.1", + "@rollup/rollup-darwin-x64": "4.60.1", + "@rollup/rollup-freebsd-arm64": "4.60.1", + "@rollup/rollup-freebsd-x64": "4.60.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.1", + "@rollup/rollup-linux-arm-musleabihf": "4.60.1", + "@rollup/rollup-linux-arm64-gnu": "4.60.1", + "@rollup/rollup-linux-arm64-musl": "4.60.1", + "@rollup/rollup-linux-loong64-gnu": "4.60.1", + "@rollup/rollup-linux-loong64-musl": "4.60.1", + "@rollup/rollup-linux-ppc64-gnu": "4.60.1", + "@rollup/rollup-linux-ppc64-musl": "4.60.1", + "@rollup/rollup-linux-riscv64-gnu": "4.60.1", + "@rollup/rollup-linux-riscv64-musl": "4.60.1", + "@rollup/rollup-linux-s390x-gnu": "4.60.1", + "@rollup/rollup-linux-x64-gnu": "4.60.1", + 
"@rollup/rollup-linux-x64-musl": "4.60.1", + "@rollup/rollup-openbsd-x64": "4.60.1", + "@rollup/rollup-openharmony-arm64": "4.60.1", + "@rollup/rollup-win32-arm64-msvc": "4.60.1", + "@rollup/rollup-win32-ia32-msvc": "4.60.1", + "@rollup/rollup-win32-x64-gnu": "4.60.1", + "@rollup/rollup-win32-x64-msvc": "4.60.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": 
"sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/tinyrainbow": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", + "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", + "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/typescript": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", + "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/vite": { + "version": 
"5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.9.tgz", + "integrity": "sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.3.7", + "es-module-lexer": "^1.5.4", + "pathe": "^1.1.2", + "vite": "^5.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, 
+ "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + 
"integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + 
"hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/vitest": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.9.tgz", + "integrity": "sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "2.1.9", + "@vitest/mocker": "2.1.9", + "@vitest/pretty-format": "^2.1.9", + "@vitest/runner": "2.1.9", + "@vitest/snapshot": "2.1.9", + "@vitest/spy": "2.1.9", + "@vitest/utils": "2.1.9", + "chai": "^5.1.2", + "debug": "^4.3.7", + "expect-type": "^1.1.0", + "magic-string": "^0.30.12", + "pathe": "^1.1.2", + "std-env": "^3.8.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.1", + "tinypool": "^1.0.1", + "tinyrainbow": "^1.2.0", + "vite": "^5.0.0", + "vite-node": "2.1.9", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + 
"@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "2.1.9", + "@vitest/ui": "2.1.9", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + } + } +} diff --git a/rsp-api-checker/ts-extractor/package.json b/rsp-api-checker/ts-extractor/package.json new file mode 100644 index 00000000000..2dd0bda6b54 --- /dev/null +++ b/rsp-api-checker/ts-extractor/package.json @@ -0,0 +1,17 @@ +{ + "name": "rsp-api-extractor", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "test": "vitest run", + "test:watch": "vitest" + }, + "dependencies": { + "typescript": "~5.7.0", + "tsx": "^4.0.0" + }, + "devDependencies": { + "vitest": "^2.0.0" + } +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/package.json b/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/package.json new file mode 100644 index 00000000000..b2f615e057f --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/package.json @@ -0,0 +1,5 @@ +{ + "name": "@react-aria/test-widget", + "version": "1.0.0", + "types": "./types/index.d.ts" +} diff --git 
a/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/types/index.d.ts b/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/types/index.d.ts new file mode 100644 index 00000000000..b93026486fa --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/packages/@react-aria/test-widget/types/index.d.ts @@ -0,0 +1,13 @@ +/** Props for the test widget. */ +export interface WidgetProps { + /** Accessible label. */ + label: string; + /** Whether the widget is disabled. */ + isDisabled?: boolean; +} + +/** Visual variants for the widget. */ +export type WidgetVariant = 'primary' | 'secondary'; + +/** A simple hook. */ +export declare function useWidget(props: WidgetProps): void; diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/package.json b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/package.json new file mode 100644 index 00000000000..6bf3db8201b --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/package.json @@ -0,0 +1,5 @@ +{ + "name": "@react-aria/normal-widget", + "version": "1.0.0", + "types": "./types/index.d.ts" +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/types/index.d.ts b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/types/index.d.ts new file mode 100644 index 00000000000..5b60cd57fe7 --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/@react-aria/normal-widget/types/index.d.ts @@ -0,0 +1,3 @@ +export interface NormalWidgetProps { + label: string; +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/package.json b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/package.json new file mode 100644 index 
00000000000..6a5c5e4a789 --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/package.json @@ -0,0 +1,5 @@ +{ + "name": "@react-spectrum/dev-tool", + "version": "1.0.0", + "types": "./types/index.d.ts" +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/types/index.d.ts b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/types/index.d.ts new file mode 100644 index 00000000000..c194fa4e558 --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-dev-pkg/packages/dev/@react-spectrum/dev-tool/types/index.d.ts @@ -0,0 +1,4 @@ +/** This is an internal build tool, not a public API. */ +export interface DevToolConfig { + mode: string; +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/package.json b/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/package.json new file mode 100644 index 00000000000..18ad7dc088c --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/package.json @@ -0,0 +1,12 @@ +{ + "name": "@react-aria/previous-widget", + "version": "1.0.0", + "exports": { + ".": { + "source": "./src/index.ts", + "types": "./dist/types/src/index.d.ts" + } + }, + "source": "src/index.ts", + "types": "./dist/types/src/index.d.ts" +} diff --git a/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/src/index.ts b/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/src/index.ts new file mode 100644 index 00000000000..c77eab80c15 --- /dev/null +++ b/rsp-api-checker/ts-extractor/tests/fixtures/with-previous-types/packages/@react-aria/previous-widget/src/index.ts @@ -0,0 +1,16 @@ +// Current source — includes the newly added 
prop `isFresh`. +// This represents a package where the developer added a prop to the +// source .ts file but has not re-run `yarn build` to regenerate the +// `.d.ts` in dist/types/. The extractor must still observe `isFresh` +// when reading the local workspace; otherwise added props silently +// disappear from the diff. +export interface WidgetProps { + /** Accessible label. */ + label: string; + /** Whether the widget is disabled. */ + isDisabled?: boolean; + /** Newly added prop — exists in src/ but not in the previous dist/types/ build. */ + isFresh?: boolean; +} + +export declare function useWidget(props: WidgetProps): void; diff --git a/rsp-api-checker/ts-extractor/tsconfig.json b/rsp-api-checker/ts-extractor/tsconfig.json new file mode 100644 index 00000000000..2c159700c55 --- /dev/null +++ b/rsp-api-checker/ts-extractor/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "bundler", + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true + } +} diff --git a/rsp-api-checker/ts-extractor/utils.test.ts b/rsp-api-checker/ts-extractor/utils.test.ts new file mode 100644 index 00000000000..012f91391d0 --- /dev/null +++ b/rsp-api-checker/ts-extractor/utils.test.ts @@ -0,0 +1,244 @@ +import { describe, it, expect } from "vitest"; +import { isOurPackage, shouldSkipProperty, resolveTypesField, resolveSourceField, OUR_SCOPES, OUR_PACKAGES } from "./utils.js"; + +// --------------------------------------------------------------------------- +// isOurPackage +// --------------------------------------------------------------------------- + +describe("isOurPackage", () => { + it("recognises all documented scopes", () => { + expect(isOurPackage("@react-spectrum/button")).toBe(true); + expect(isOurPackage("@react-aria/button")).toBe(true); + expect(isOurPackage("@react-stately/tree")).toBe(true); + expect(isOurPackage("@react-types/shared")).toBe(true); + 
expect(isOurPackage("@internationalized/date")).toBe(true); + expect(isOurPackage("@adobe/react-spectrum")).toBe(true); + }); + + it("recognises the top-level package names in OUR_PACKAGES", () => { + for (const name of OUR_PACKAGES) { + expect(isOurPackage(name)).toBe(true); + } + }); + + it("rejects unrelated packages", () => { + expect(isOurPackage("react")).toBe(false); + expect(isOurPackage("@types/react")).toBe(false); + expect(isOurPackage("lodash")).toBe(false); + expect(isOurPackage("@emotion/react")).toBe(false); + }); + + it("requires the full scope prefix — partial match is not enough", () => { + // '@react-aria' without trailing slash should NOT match + expect(isOurPackage("@react-aria")).toBe(false); + // A package whose name starts the same but doesn't match any scope + expect(isOurPackage("@react-aria-other/something")).toBe(false); + }); + + it("is case-sensitive", () => { + expect(isOurPackage("@React-Aria/button")).toBe(false); + }); + + it("handles empty string", () => { + expect(isOurPackage("")).toBe(false); + }); +}); + +// --------------------------------------------------------------------------- +// shouldSkipProperty +// --------------------------------------------------------------------------- + +describe("shouldSkipProperty", () => { + it("skips properties starting with __@", () => { + expect(shouldSkipProperty("__@iterator")).toBe(true); + expect(shouldSkipProperty("__@Symbol.iterator")).toBe(true); + }); + + it("skips purely numeric property names", () => { + expect(shouldSkipProperty("0")).toBe(true); + expect(shouldSkipProperty("42")).toBe(true); + expect(shouldSkipProperty("12345")).toBe(true); + }); + + it("keeps normal property names", () => { + expect(shouldSkipProperty("isDisabled")).toBe(false); + expect(shouldSkipProperty("onPress")).toBe(false); + expect(shouldSkipProperty("label")).toBe(false); + expect(shouldSkipProperty("__privateField")).toBe(false); // __ but not __@ + }); + + it("keeps alphanumeric property names", () 
=> { + expect(shouldSkipProperty("prop1")).toBe(false); + expect(shouldSkipProperty("1prop")).toBe(false); + }); +}); + +// --------------------------------------------------------------------------- +// resolveTypesField +// --------------------------------------------------------------------------- + +describe("resolveTypesField", () => { + // String forms + it("returns a .d.ts string directly", () => { + expect(resolveTypesField("./dist/index.d.ts")).toBe("./dist/index.d.ts"); + }); + + it("returns a .d.mts string directly", () => { + expect(resolveTypesField("./dist/index.d.mts")).toBe("./dist/index.d.mts"); + }); + + it("returns a .d.cts string directly", () => { + expect(resolveTypesField("./dist/index.d.cts")).toBe("./dist/index.d.cts"); + }); + + it("returns undefined for a non-.d.ts string", () => { + expect(resolveTypesField("./dist/index.js")).toBeUndefined(); + expect(resolveTypesField("./dist/index.ts")).toBeUndefined(); + expect(resolveTypesField("")).toBeUndefined(); + }); + + // Array forms + it("picks the first .d.ts from an array", () => { + expect(resolveTypesField(["./a.js", "./b.d.ts", "./c.d.ts"])).toBe("./b.d.ts"); + }); + + it("returns undefined for an array with no .d.ts entries", () => { + expect(resolveTypesField(["./a.js", "./b.ts"])).toBeUndefined(); + }); + + it("handles an empty array", () => { + expect(resolveTypesField([])).toBeUndefined(); + }); + + // Object / conditional-exports forms + it("prefers the 'types' key over other keys", () => { + const val = { types: "./types.d.ts", import: "./import.d.ts" }; + expect(resolveTypesField(val)).toBe("./types.d.ts"); + }); + + it("falls back to 'import' when 'types' is absent", () => { + const val = { import: "./import.d.ts", default: "./default.d.ts" }; + expect(resolveTypesField(val)).toBe("./import.d.ts"); + }); + + it("falls back to 'default' when 'types' and 'import' are absent", () => { + const val = { default: "./default.d.ts" }; + 
expect(resolveTypesField(val)).toBe("./default.d.ts"); + }); + + it("falls back to any value when preferred keys are absent", () => { + const val = { require: "./require.d.ts" }; + expect(resolveTypesField(val)).toBe("./require.d.ts"); + }); + + it("resolves nested conditional exports objects", () => { + const val = { node: { types: "./node.d.ts" } }; + expect(resolveTypesField(val)).toBe("./node.d.ts"); + }); + + it("returns undefined for an object with no .d.ts values", () => { + expect(resolveTypesField({ import: "./index.js" })).toBeUndefined(); + }); + + // Edge cases + it("returns undefined for non-object / non-string / non-array input", () => { + expect(resolveTypesField(null)).toBeUndefined(); + expect(resolveTypesField(undefined)).toBeUndefined(); + expect(resolveTypesField(42)).toBeUndefined(); + expect(resolveTypesField(true)).toBeUndefined(); + }); + + // Strict preference for `types` over runtime-environment keys + it("prefers `types` over runtime environment keys at the same level", () => { + // Real-world pattern: conditional exports with react-native before types. + // Object-key order should NOT decide — `types` must win. + const val = { + "react-native": "./dist/rn.d.ts", + types: "./dist/types.d.ts", + }; + expect(resolveTypesField(val)).toBe("./dist/types.d.ts"); + }); + + it("prefers `types` in nested condition even when parent has other keys first", () => { + const val = { + node: { "react-native": "./wrong.d.ts", types: "./right.d.ts" }, + }; + expect(resolveTypesField(val)).toBe("./right.d.ts"); + }); + + it("prefers import over require and default", () => { + const val = { + require: "./r.d.ts", + default: "./d.d.ts", + import: "./i.d.ts", + }; + expect(resolveTypesField(val)).toBe("./i.d.ts"); + }); + + it("falls back to environment keys only when nothing else resolves", () => { + // Only a runtime-environment key is present → should still return it + // rather than reporting "no types found". 
+ const val = { "react-native": "./rn.d.ts" }; + expect(resolveTypesField(val)).toBe("./rn.d.ts"); + }); + + it("skips environment keys when a non-environment key is also present", () => { + // `default` is a standard condition and must win over react-native. + const val = { + "react-native": "./rn.d.ts", + default: "./default.d.ts", + }; + expect(resolveTypesField(val)).toBe("./default.d.ts"); + }); +}); + +// --------------------------------------------------------------------------- +// resolveSourceField +// --------------------------------------------------------------------------- + +describe("resolveSourceField", () => { + it("returns a .ts string directly", () => { + expect(resolveSourceField("./src/index.ts")).toBe("./src/index.ts"); + }); + + it("returns a .tsx string directly", () => { + expect(resolveSourceField("./src/Button.tsx")).toBe("./src/Button.tsx"); + }); + + it("returns undefined for a non-source string", () => { + // .d.ts is a *built* type file — resolveSourceField must not pick it up, + // otherwise the previous-types fallback stops working. 
+ expect(resolveSourceField("./dist/index.d.ts")).toBeUndefined(); + expect(resolveSourceField("./dist/index.js")).toBeUndefined(); + expect(resolveSourceField("")).toBeUndefined(); + }); + + it("picks the first .ts from an array", () => { + expect(resolveSourceField(["./a.js", "./b.ts", "./c.tsx"])).toBe("./b.ts"); + }); + + it("prefers the 'source' key in a conditional-exports object", () => { + // Real-world case: exports['.']['.'] = { source, types, import, require } + const val = { + source: "./src/index.ts", + types: "./dist/types/index.d.ts", + import: "./dist/index.mjs", + require: "./dist/index.cjs", + }; + expect(resolveSourceField(val)).toBe("./src/index.ts"); + }); + + it("does not return types field as a source entry", () => { + // The exports subtree has both source and types; we must never pick types.d.ts + // as a "source" file (otherwise we'd silently read the same previous-build + // d.ts we're trying to avoid). + const val = { types: "./dist/index.d.ts" }; + expect(resolveSourceField(val)).toBeUndefined(); + }); + + it("returns undefined for non-object / non-string / non-array input", () => { + expect(resolveSourceField(null)).toBeUndefined(); + expect(resolveSourceField(undefined)).toBeUndefined(); + expect(resolveSourceField(42)).toBeUndefined(); + }); +}); diff --git a/rsp-api-checker/ts-extractor/utils.ts b/rsp-api-checker/ts-extractor/utils.ts new file mode 100644 index 00000000000..286e2477021 --- /dev/null +++ b/rsp-api-checker/ts-extractor/utils.ts @@ -0,0 +1,159 @@ +/** + * Pure utility functions shared by the extractor and its tests. 
+ */ + +// --------------------------------------------------------------------------- +// Scope detection +// --------------------------------------------------------------------------- + +export const OUR_SCOPES = [ + "@react-spectrum/", + "@react-aria/", + "@react-stately/", + "@react-types/", + "@internationalized/", + "@adobe/react-spectrum", +]; + +export const OUR_PACKAGES = [ + "react-aria-components", + "react-aria", + "react-stately", +]; + +/** Returns true if the npm package name belongs to our monorepo. */ +export function isOurPackage(name: string): boolean { + if (OUR_PACKAGES.includes(name)) return true; + return OUR_SCOPES.some((scope) => name.startsWith(scope)); +} + +// --------------------------------------------------------------------------- +// Property filtering +// --------------------------------------------------------------------------- + +/** Returns true for property names that should always be omitted. */ +export function shouldSkipProperty(name: string): boolean { + if (name.startsWith("__@")) return true; + // Purely numeric names are TS-internal type IDs leaking through + if (/^\d+$/.test(name)) return true; + return false; +} + +// --------------------------------------------------------------------------- +// package.json `types` field resolution +// --------------------------------------------------------------------------- + +/** + * Keys we explicitly reject as candidates for type resolution when a better + * option exists. These are runtime-environment conditions that often point at + * non-type files (or wrong-environment types), so picking them would produce + * incorrect or inconsistent output. + */ +const NON_TYPES_ENVIRONMENT_KEYS = new Set([ + "react-native", + "node-addons", + "worker", + "browser", + "deno", + "bun", +]); + +/** + * Recursively resolve a `.d.ts` path from the `types` / `exports` field of a + * `package.json`, which can be a string, array, or conditional-exports object. 
+ * + * Resolution order at each object level: + * 1. `types` (strict preference — per Node's conditional-exports spec the + * types condition should always win when present). + * 2. `import` (modern ESM bias). + * 3. `require` (CJS path). + * 4. `default`. + * 5. Any remaining key, but environment-specific runtime keys + * (`react-native`, `node-addons`, `worker`, `browser`, `deno`, `bun`) + * are only considered if nothing else matches — those branches often + * point at non-type JS or wrong-environment types. + */ +export function resolveTypesField(value: unknown): string | undefined { + if (typeof value === "string") { + return value.endsWith(".d.ts") || value.endsWith(".d.mts") || value.endsWith(".d.cts") + ? value + : undefined; + } + if (Array.isArray(value)) { + for (const item of value) { + const resolved = resolveTypesField(item); + if (resolved) return resolved; + } + return undefined; + } + if (value && typeof value === "object") { + const obj = value as Record; + + // 1. `types` always wins when present anywhere in the tree. + if (obj.types !== undefined) { + const resolved = resolveTypesField(obj.types); + if (resolved) return resolved; + } + // 2-4. ESM / CJS / default, in that order. + for (const key of ["import", "require", "default"]) { + if (obj[key] !== undefined) { + const resolved = resolveTypesField(obj[key]); + if (resolved) return resolved; + } + } + // 5a. Any remaining non-environment key (user-named or custom condition). + for (const [key, v] of Object.entries(obj)) { + if (key === "types" || key === "import" || key === "require" || key === "default") continue; + if (NON_TYPES_ENVIRONMENT_KEYS.has(key)) continue; + const resolved = resolveTypesField(v); + if (resolved) return resolved; + } + // 5b. Environment-specific keys as last resort — better to return a + // wrong-environment `.d.ts` than to silently report "no types found". 
+ for (const [key, v] of Object.entries(obj)) { + if (!NON_TYPES_ENVIRONMENT_KEYS.has(key)) continue; + const resolved = resolveTypesField(v); + if (resolved) return resolved; + } + } + return undefined; +} + +/** + * Recursively resolve a `.ts` / `.tsx` source path from a `source` / + * `exports["."].source` field of a `package.json`. + * + * Used to detect when a package's source is newer than its generated + * `.d.ts` (an out-of-date build): we locate the source entry via this + * helper and compare mtimes. Published npm tarballs usually strip the + * `source` directory, so the returned path is only meaningful when it + * actually exists on disk (see callers). + */ +export function resolveSourceField(value: unknown): string | undefined { + if (typeof value === "string") { + // Exclude generated declaration files — those are handled by resolveTypesField. + if (value.endsWith(".d.ts") || value.endsWith(".d.mts") || value.endsWith(".d.cts")) { + return undefined; + } + return value.endsWith(".ts") || value.endsWith(".tsx") || value.endsWith(".mts") || value.endsWith(".cts") + ? value + : undefined; + } + if (Array.isArray(value)) { + for (const item of value) { + const resolved = resolveSourceField(item); + if (resolved) return resolved; + } + return undefined; + } + if (value && typeof value === "object") { + const obj = value as Record; + for (const key of ["source", "import", "default", "require"]) { + if (obj[key]) { + const resolved = resolveSourceField(obj[key]); + if (resolved) return resolved; + } + } + } + return undefined; +} diff --git a/rsp-api-checker/ts-extractor/vitest.config.ts b/rsp-api-checker/ts-extractor/vitest.config.ts new file mode 100644 index 00000000000..f624398e8de --- /dev/null +++ b/rsp-api-checker/ts-extractor/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + environment: "node", + }, +});