diff --git a/.changeset/fix-stale-consumer-registry.md b/.changeset/fix-stale-consumer-registry.md new file mode 100644 index 0000000000..3fec001cf9 --- /dev/null +++ b/.changeset/fix-stale-consumer-registry.md @@ -0,0 +1,5 @@ +--- +'@core/sync-service': patch +--- + +Fix race condition in `ConsumerRegistry.unregister_name/1` that left stale PIDs in the ETS table. Uses atomic `:ets.match_delete/2` to remove the entry only if it still belongs to the dying process, preventing accidental deletion of a replacement consumer's entry. diff --git a/.changeset/index-any-and-in-where-clauses.md b/.changeset/index-any-and-in-where-clauses.md deleted file mode 100644 index 12d5c2e658..0000000000 --- a/.changeset/index-any-and-in-where-clauses.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@core/sync-service': patch ---- - -Index `= ANY(array_field)` and `IN (const_list)` WHERE clause expressions for O(1) shape filtering. ANY clauses reuse the InclusionIndex (via single-element array containment), and IN clauses reuse the EqualityIndex (registering each value separately). At 1000 concurrent shapes, fan-out latency improves by 6x (ANY) and 15x (IN) compared to the previous linear scan. diff --git a/.changeset/lovely-icons-sit.md b/.changeset/lovely-icons-sit.md deleted file mode 100644 index 443e08e1b3..0000000000 --- a/.changeset/lovely-icons-sit.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -'@core/sync-service': patch ---- - -Remove redundant ShapeDb fetch from Consumer initialization diff --git a/.github/workflows/check-skills.yml b/.github/workflows/check-skills.yml new file mode 100644 index 0000000000..f875461fe9 --- /dev/null +++ b/.github/workflows/check-skills.yml @@ -0,0 +1,142 @@ +# check-skills.yml — Drop this into your library repo's .github/workflows/ +# +# Checks for stale intent skills after a release and opens a review PR +# if any skills need attention. The PR body includes a prompt you can +# paste into Claude Code, Cursor, or any coding agent to update them. 
+# +# Triggers: new release published, or manual workflow_dispatch. +# +# Template variables (replaced by `intent setup`): +# @electric-sql/client + +name: Check Skills + +on: + release: + types: [published] + workflow_dispatch: {} + +permissions: + contents: write + pull-requests: write + +jobs: + check: + name: Check for stale skills + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install intent + run: npm install -g @tanstack/intent + + - name: Check staleness + id: stale + run: | + OUTPUT=$(npx @tanstack/intent stale --json 2>&1) || true + echo "$OUTPUT" + + # Check if any skills need review + NEEDS_REVIEW=$(echo "$OUTPUT" | node -e " + const input = require('fs').readFileSync('/dev/stdin','utf8'); + try { + const reports = JSON.parse(input); + const stale = reports.flatMap(r => + r.skills.filter(s => s.needsReview).map(s => ({ library: r.library, skill: s.name, reasons: s.reasons })) + ); + if (stale.length > 0) { + console.log(JSON.stringify(stale)); + } + } catch {} + ") + + if [ -z "$NEEDS_REVIEW" ]; then + echo "has_stale=false" >> "$GITHUB_OUTPUT" + else + echo "has_stale=true" >> "$GITHUB_OUTPUT" + # Escape for multiline GH output + EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) + echo "stale_json<<$EOF" >> "$GITHUB_OUTPUT" + echo "$NEEDS_REVIEW" >> "$GITHUB_OUTPUT" + echo "$EOF" >> "$GITHUB_OUTPUT" + fi + + - name: Build summary + if: steps.stale.outputs.has_stale == 'true' + id: summary + run: | + node -e " + const stale = JSON.parse(process.env.STALE_JSON); + const lines = stale.map(s => + '- **' + s.skill + '** (' + s.library + '): ' + s.reasons.join(', ') + ); + const summary = lines.join('\n'); + + const prompt = [ + 'Review and update the following stale intent skills for @electric-sql/client:', + '', + ...stale.map(s => '- ' + s.skill + ': ' + s.reasons.join(', ')), + '', + 'For each 
stale skill:', + '1. Read the current SKILL.md file', + '2. Check what changed in the library since the skill was last updated', + '3. Update the skill content to reflect current APIs and behavior', + '4. Run \`npx @tanstack/intent validate\` to verify the updated skill', + ].join('\n'); + + // Write outputs + const fs = require('fs'); + const eof = require('crypto').randomBytes(15).toString('base64'); + fs.appendFileSync(process.env.GITHUB_OUTPUT, + 'summary<<' + eof + '\n' + summary + '\n' + eof + '\n' + + 'prompt<<' + eof + '\n' + prompt + '\n' + eof + '\n' + ); + " + env: + STALE_JSON: ${{ steps.stale.outputs.stale_json }} + + - name: Open review PR + if: steps.stale.outputs.has_stale == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + VERSION="${{ github.event.release.tag_name || 'manual' }}" + BRANCH="skills/review-${VERSION}" + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git checkout -b "$BRANCH" + git commit --allow-empty -m "chore: review stale skills for ${VERSION}" + git push origin "$BRANCH" + + gh pr create \ + --title "Review stale skills (${VERSION})" \ + --body "$(cat <<'PREOF' + ## Stale Skills Detected + + The following skills may need updates after the latest release: + + ${{ steps.summary.outputs.summary }} + + --- + + ### Update Prompt + + Paste this into your coding agent (Claude Code, Cursor, etc.): + + ~~~ + ${{ steps.summary.outputs.prompt }} + ~~~ + + PREOF + )" \ + --head "$BRANCH" \ + --base main diff --git a/.github/workflows/notify-intent.yml b/.github/workflows/notify-intent.yml new file mode 100644 index 0000000000..d783f7a922 --- /dev/null +++ b/.github/workflows/notify-intent.yml @@ -0,0 +1,53 @@ +# notify-intent.yml — Drop this into your library repo's .github/workflows/ +# +# Fires a repository_dispatch event to TanStack/intent whenever docs or +# source files change on merge to main. 
This triggers the skill staleness +# check workflow in the intent repo. +# +# Requirements: +# - A fine-grained PAT with contents:write on TanStack/intent stored +# as the INTENT_NOTIFY_TOKEN repository secret. +# +# Template variables (replaced by `intent setup`): +# @electric-sql/client +# website/docs/** +# packages/typescript-client/src/** + +name: Notify Intent + +on: + push: + branches: [main] + paths: + - 'website/docs/**' + - 'packages/typescript-client/src/**' + - 'packages/y-electric/src/**' + +jobs: + notify: + name: Notify TanStack Intent + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Collect changed files + id: changes + run: | + FILES=$(git diff --name-only HEAD~1 HEAD | jq -R -s -c 'split("\n") | map(select(length > 0))') + echo "files=$FILES" >> "$GITHUB_OUTPUT" + + - name: Dispatch to intent repo + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.INTENT_NOTIFY_TOKEN }} + repository: TanStack/intent + event-type: skill-check + client-payload: | + { + "package": "@electric-sql/client", + "sha": "${{ github.sha }}", + "changed_files": ${{ steps.changes.outputs.files }} + } diff --git a/.github/workflows/validate-skills.yml b/.github/workflows/validate-skills.yml new file mode 100644 index 0000000000..8f39716aa9 --- /dev/null +++ b/.github/workflows/validate-skills.yml @@ -0,0 +1,52 @@ +# validate-skills.yml — Drop this into your library repo's .github/workflows/ +# +# Validates skill files on PRs that touch the skills/ directory. +# Ensures frontmatter is correct, names match paths, and files stay under +# the 500-line limit. 
+ +name: Validate Skills + +on: + pull_request: + paths: + - 'skills/**' + - '**/skills/**' + +jobs: + validate: + name: Validate skill files + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install intent CLI + run: npm install -g @tanstack/intent + + - name: Find and validate skills + run: | + # Find all directories containing SKILL.md files + SKILLS_DIR="" + if [ -d "skills" ]; then + SKILLS_DIR="skills" + elif [ -d "packages" ]; then + # Monorepo — find skills/ under packages + for dir in packages/*/skills; do + if [ -d "$dir" ]; then + echo "Validating $dir..." + intent validate "$dir" + fi + done + exit 0 + fi + + if [ -n "$SKILLS_DIR" ]; then + intent validate "$SKILLS_DIR" + else + echo "No skills/ directory found — skipping validation." + fi diff --git a/.gitignore b/.gitignore index 071cc0e66a..4e1eaf6a3a 100644 --- a/.gitignore +++ b/.gitignore @@ -28,4 +28,5 @@ sst-*.log response.tmp .claude !website/.claude/commands -!website/.claude/skills \ No newline at end of file +!website/.claude/skills +_artifacts \ No newline at end of file diff --git a/examples/tanstack-db-expo-starter/CHANGELOG.md b/examples/tanstack-db-expo-starter/CHANGELOG.md index f2dc29bce7..77dc320a00 100644 --- a/examples/tanstack-db-expo-starter/CHANGELOG.md +++ b/examples/tanstack-db-expo-starter/CHANGELOG.md @@ -1,5 +1,12 @@ # expo-db-electric-starter +## 1.0.13 + +### Patch Changes + +- Updated dependencies [d1e08b8] + - @electric-sql/client@1.5.12 + ## 1.0.12 ### Patch Changes diff --git a/examples/tanstack-db-expo-starter/package.json b/examples/tanstack-db-expo-starter/package.json index a41b2a7ac5..f7e3aafc28 100644 --- a/examples/tanstack-db-expo-starter/package.json +++ b/examples/tanstack-db-expo-starter/package.json @@ -1,6 +1,6 @@ { "name": "expo-db-electric-starter", - "version": "1.0.12", + "version": "1.0.13", "main": "index.ts", "scripts": { 
"start": "docker compose up -d && expo start", @@ -13,7 +13,7 @@ "api": "tsx api/index.ts" }, "dependencies": { - "@electric-sql/client": "1.5.11", + "@electric-sql/client": "1.5.12", "@expo/metro-runtime": "~5.0.4", "@tanstack/electric-db-collection": "^0.0.15", "@tanstack/react-db": "^0.0.27", diff --git a/packages/experimental/CHANGELOG.md b/packages/experimental/CHANGELOG.md index 1fe61b0e6b..70cef956c0 100644 --- a/packages/experimental/CHANGELOG.md +++ b/packages/experimental/CHANGELOG.md @@ -1,5 +1,12 @@ # @electric-sql/experimental +## 6.0.12 + +### Patch Changes + +- Updated dependencies [d1e08b8] + - @electric-sql/client@1.5.12 + ## 6.0.11 ### Patch Changes diff --git a/packages/experimental/package.json b/packages/experimental/package.json index 0ed2f277ad..86bfd299b2 100644 --- a/packages/experimental/package.json +++ b/packages/experimental/package.json @@ -1,7 +1,7 @@ { "name": "@electric-sql/experimental", "description": "Experimental TypeScript features for ElectricSQL.", - "version": "6.0.11", + "version": "6.0.12", "author": "ElectricSQL team and contributors.", "bugs": { "url": "https://github.com/electric-sql/electric/issues" diff --git a/packages/react-hooks/CHANGELOG.md b/packages/react-hooks/CHANGELOG.md index ba79e7ca84..34b5413647 100644 --- a/packages/react-hooks/CHANGELOG.md +++ b/packages/react-hooks/CHANGELOG.md @@ -1,5 +1,12 @@ # @electric-sql/react +## 1.0.41 + +### Patch Changes + +- Updated dependencies [d1e08b8] + - @electric-sql/client@1.5.12 + ## 1.0.40 ### Patch Changes diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json index 44054e1b88..3704a19299 100644 --- a/packages/react-hooks/package.json +++ b/packages/react-hooks/package.json @@ -1,7 +1,7 @@ { "name": "@electric-sql/react", "description": "React hooks for ElectricSQL", - "version": "1.0.40", + "version": "1.0.41", "author": "ElectricSQL team and contributors.", "bugs": { "url": "https://github.com/electric-sql/electric/issues" diff --git 
a/packages/sync-service/CHANGELOG.md b/packages/sync-service/CHANGELOG.md index 7882749a35..67052dcc5e 100644 --- a/packages/sync-service/CHANGELOG.md +++ b/packages/sync-service/CHANGELOG.md @@ -1,5 +1,11 @@ # @core/sync-service +## 1.4.13 + +### Patch Changes + +- 8daa822: Index `= ANY(array_field)` and `IN (const_list)` WHERE clause expressions for O(1) shape filtering. ANY clauses reuse the InclusionIndex (via single-element array containment), and IN clauses reuse the EqualityIndex (registering each value separately). At 1000 concurrent shapes, fan-out latency improves by 6x (ANY) and 15x (IN) compared to the previous linear scan. + ## 1.4.12 ### Patch Changes diff --git a/packages/sync-service/CLAUDE.md b/packages/sync-service/CLAUDE.md new file mode 120000 index 0000000000..47dc3e3d86 --- /dev/null +++ b/packages/sync-service/CLAUDE.md @@ -0,0 +1 @@ +AGENTS.md \ No newline at end of file diff --git a/packages/sync-service/lib/electric/shape_cache.ex b/packages/sync-service/lib/electric/shape_cache.ex index 2e676a4c05..e68738d186 100644 --- a/packages/sync-service/lib/electric/shape_cache.ex +++ b/packages/sync-service/lib/electric/shape_cache.ex @@ -364,7 +364,6 @@ defmodule Electric.ShapeCache do start_opts = opts |> Map.put(:shape_handle, shape_handle) - |> Map.put(:shape, shape) |> Map.put(:subqueries_enabled_for_stack?, "allow_subqueries" in feature_flags) case Shapes.DynamicConsumerSupervisor.start_shape_consumer(stack_id, start_opts) do diff --git a/packages/sync-service/lib/electric/shape_cache/shape_status.ex b/packages/sync-service/lib/electric/shape_cache/shape_status.ex index a57a7182d9..8496badfb7 100644 --- a/packages/sync-service/lib/electric/shape_cache/shape_status.ex +++ b/packages/sync-service/lib/electric/shape_cache/shape_status.ex @@ -186,15 +186,6 @@ defmodule Electric.ShapeCache.ShapeStatus do end) end - @spec fetch_shape_by_handle!(stack_id(), shape_handle()) :: Shape.t() | no_return() - def fetch_shape_by_handle!(stack_id, 
shape_handle) - when is_stack_id(stack_id) and is_shape_handle(shape_handle) do - case fetch_shape_by_handle(stack_id, shape_handle) do - {:ok, shape} -> shape - :error -> raise ArgumentError, message: "No shape found for handle #{inspect(shape_handle)}" - end - end - def has_shape_handle?(stack_id, shape_handle) do :ets.member(shape_meta_table(stack_id), shape_handle) end diff --git a/packages/sync-service/lib/electric/shapes/consumer.ex b/packages/sync-service/lib/electric/shapes/consumer.ex index 8e49d472a6..3a81a4848d 100644 --- a/packages/sync-service/lib/electric/shapes/consumer.ex +++ b/packages/sync-service/lib/electric/shapes/consumer.ex @@ -121,10 +121,7 @@ defmodule Electric.Shapes.Consumer do shape_handle: shape_handle } = state - shape = - Map.get_lazy(config, :shape, fn -> - ShapeCache.ShapeStatus.fetch_shape_by_handle!(stack_id, shape_handle) - end) + {:ok, shape} = ShapeCache.ShapeStatus.fetch_shape_by_handle(stack_id, shape_handle) state = State.initialize_shape(state, shape, config) diff --git a/packages/sync-service/lib/electric/shapes/consumer_registry.ex b/packages/sync-service/lib/electric/shapes/consumer_registry.ex index fad9670ee3..ee5511f5ed 100644 --- a/packages/sync-service/lib/electric/shapes/consumer_registry.ex +++ b/packages/sync-service/lib/electric/shapes/consumer_registry.ex @@ -28,8 +28,12 @@ defmodule Electric.Shapes.ConsumerRegistry do if register_consumer!(pid, shape_handle, ets_name(stack_id)), do: :yes, else: :no end - # don't unregister when the pid exits -- we have mechanisms to ensure that happens cleanly - def unregister_name({_stack_id, _shape_handle}) do + # Atomically remove the ETS entry only if it still belongs to the calling + # process (the dying consumer). If a replacement consumer has already + # registered under the same shape_handle, match_delete is a no-op because + # the pid won't match. 
+ def unregister_name({stack_id, shape_handle}) do + :ets.match_delete(ets_name(stack_id), {shape_handle, self()}) :ok end diff --git a/packages/sync-service/package.json b/packages/sync-service/package.json index 43f6cdab36..e9dcea8f99 100644 --- a/packages/sync-service/package.json +++ b/packages/sync-service/package.json @@ -1,7 +1,7 @@ { "name": "@core/sync-service", "private": true, - "version": "1.4.12", + "version": "1.4.13", "scripts": { "publish:hex": "../../scripts/publish_hex.sh electric", "changeset": "pushd ../..; pnpm changeset; popd" diff --git a/packages/sync-service/test/electric/shapes/consumer_registry_test.exs b/packages/sync-service/test/electric/shapes/consumer_registry_test.exs index b431f0cb8f..cc17327be5 100644 --- a/packages/sync-service/test/electric/shapes/consumer_registry_test.exs +++ b/packages/sync-service/test/electric/shapes/consumer_registry_test.exs @@ -255,6 +255,75 @@ defmodule Electric.Shapes.ConsumerRegistryTest do end end + describe "unregister_name/1" do + test "removes entry when pid matches the calling process", ctx do + handle = "handle-1" + table = ctx.registry_state.table + + # Register the current process + :ets.insert(table, {handle, self()}) + assert [{^handle, pid}] = :ets.lookup(table, handle) + assert pid == self() + + ConsumerRegistry.unregister_name({ctx.stack_id, handle}) + + assert :ets.lookup(table, handle) == [] + end + + test "does not remove entry belonging to a different process", ctx do + handle = "handle-1" + table = ctx.registry_state.table + + other_pid = spawn(fn -> Process.sleep(:infinity) end) + :ets.insert(table, {handle, other_pid}) + + ConsumerRegistry.unregister_name({ctx.stack_id, handle}) + + assert [{^handle, ^other_pid}] = :ets.lookup(table, handle) + end + + test "does not remove a replacement consumer's entry (race scenario)", ctx do + handle = "handle-1" + table = ctx.registry_state.table + parent = self() + + # Simulate: old consumer (a spawned process) calls unregister_name, + # but a 
replacement has already registered under the same handle. + replacement_pid = spawn(fn -> Process.sleep(:infinity) end) + + # First, register the old process + old_task = + Task.async(fn -> + :ets.insert(table, {handle, self()}) + send(parent, :old_registered) + + # Wait for the replacement to overwrite + receive do + :proceed_to_unregister -> :ok + end + + # Now unregister_name should NOT delete the replacement's entry + ConsumerRegistry.unregister_name({ctx.stack_id, handle}) + send(parent, :old_unregistered) + end) + + assert_receive :old_registered + + # Simulate the replacement consumer registering (delete old + insert new) + :ets.delete(table, handle) + :ets.insert(table, {handle, replacement_pid}) + + # Let the old process proceed with unregister + send(old_task.pid, :proceed_to_unregister) + assert_receive :old_unregistered + + # The replacement's entry must still be there + assert [{^handle, ^replacement_pid}] = :ets.lookup(table, handle) + + Task.await(old_task) + end + end + describe "broadcast/1" do test "sends message to all subscribers" do pid = self() diff --git a/packages/typescript-client/CHANGELOG.md b/packages/typescript-client/CHANGELOG.md index 961987dd8d..77d36dc988 100644 --- a/packages/typescript-client/CHANGELOG.md +++ b/packages/typescript-client/CHANGELOG.md @@ -1,5 +1,11 @@ # @electric-sql/client +## 1.5.12 + +### Patch Changes + +- d1e08b8: Add TanStack Intent skills for AI agent guidance. Ships 9 skills covering shapes, proxy auth, schema design, debugging, deployment, new feature setup, ORM integration, Postgres security, and Yjs collaboration. + ## 1.5.11 ### Patch Changes diff --git a/packages/typescript-client/bin/intent.mjs b/packages/typescript-client/bin/intent.mjs new file mode 100644 index 0000000000..399d8a3176 --- /dev/null +++ b/packages/typescript-client/bin/intent.mjs @@ -0,0 +1,6 @@ +#!/usr/bin/env node +// Auto-generated by @tanstack/intent setup +// Exposes the intent end-user CLI for consumers of this library. 
+// Commit this file, then add to your package.json: +// "bin": { "intent": "./bin/intent.mjs" } +await import(`@tanstack/intent/intent-library`) diff --git a/packages/typescript-client/package.json b/packages/typescript-client/package.json index 4888cce530..da157e6fbe 100644 --- a/packages/typescript-client/package.json +++ b/packages/typescript-client/package.json @@ -1,7 +1,7 @@ { "name": "@electric-sql/client", "description": "Postgres everywhere - your data, in sync, wherever you need it.", - "version": "1.5.11", + "version": "1.5.12", "author": "ElectricSQL team and contributors.", "bugs": { "url": "https://github.com/electric-sql/electric/issues" @@ -10,6 +10,7 @@ "@microsoft/fetch-event-source": "^2.0.1" }, "devDependencies": { + "@tanstack/intent": "^0.0.9", "@types/pg": "^8.11.6", "@types/uuid": "^10.0.0", "@typescript-eslint/eslint-plugin": "^7.14.1", @@ -45,9 +46,15 @@ } } }, + "bin": { + "intent": "./bin/intent.mjs" + }, "files": [ "dist", - "src" + "src", + "skills", + "bin", + "!skills/_artifacts" ], "homepage": "https://electric-sql.com", "license": "Apache-2.0", diff --git a/packages/typescript-client/skills/electric-debugging/SKILL.md b/packages/typescript-client/skills/electric-debugging/SKILL.md new file mode 100644 index 0000000000..0e34793ac8 --- /dev/null +++ b/packages/typescript-client/skills/electric-debugging/SKILL.md @@ -0,0 +1,217 @@ +--- +name: electric-debugging +description: > + Troubleshoot Electric sync issues. Covers fast-loop detection from CDN/proxy + cache key misconfiguration, stale cache diagnosis (StaleCacheError), + MissingHeadersError from CORS misconfiguration, 409 shape expired handling, + SSE proxy buffering (nginx proxy_buffering off, Caddy flush_interval -1), + HTTP/1.1 6-connection limit in local dev (Caddy HTTP/2 proxy), WAL growth + from replication slots (max_slot_wal_keep_size), Vercel CDN cache issues, + and onError/backoff behavior. 
Load when shapes are not receiving updates, + sync is slow, or errors appear in the console. +type: lifecycle +library: electric +library_version: '1.5.10' +requires: + - electric-shapes + - electric-proxy-auth +sources: + - 'electric-sql/electric:packages/typescript-client/src/client.ts' + - 'electric-sql/electric:packages/typescript-client/src/fetch.ts' + - 'electric-sql/electric:packages/typescript-client/src/error.ts' + - 'electric-sql/electric:website/docs/guides/troubleshooting.md' +--- + +This skill builds on electric-shapes and electric-proxy-auth. Read those first. + +# Electric — Debugging Sync Issues + +## Setup + +Enable debug logging to see retry and state machine behavior: + +```ts +import { ShapeStream, FetchError } from '@electric-sql/client' + +const stream = new ShapeStream({ + url: '/api/todos', + backoffOptions: { + initialDelay: 1000, + maxDelay: 32000, + multiplier: 2, + debug: true, // Logs retry attempts + }, + onError: (error) => { + if (error instanceof FetchError) { + console.error(`Sync error: ${error.status} at ${error.url}`, error.json) + } + return {} // Always return {} to retry + }, +}) +``` + +## Core Patterns + +### Error retry behavior + +| Error | Auto-retry? 
| Action | +| --------------------- | -------------------------- | ------------------------------------------------------------- | +| 5xx server errors | Yes (exponential backoff) | Wait and retry | +| 429 rate limit | Yes (respects Retry-After) | Wait and retry | +| Network errors | Yes (exponential backoff) | Wait and retry | +| 4xx (non-429) | No | Calls `onError` — return `{}` to retry manually | +| 409 shape expired | Yes (automatic reset) | Client resets and refetches | +| `MissingHeadersError` | Never | Fix CORS/proxy — not retryable even if `onError` returns `{}` | + +### Diagnosing MissingHeadersError + +This error means Electric response headers (`electric-offset`, `electric-handle`) are being stripped, usually by CORS: + +``` +MissingHeadersError: This is often due to a proxy not setting CORS correctly +so that all Electric headers can be read by the client. +``` + +Fix: expose Electric headers in proxy CORS configuration: + +```ts +headers.set( + 'Access-Control-Expose-Headers', + 'electric-offset, electric-handle, electric-schema, electric-cursor' +) +``` + +### Diagnosing fast-loop detection + +Console message: "Detected possible fast loop" with diagnostic info. + +Cause: proxy/CDN cache key doesn't include `handle` and `offset` query params, so the client gets the same stale response repeatedly. + +Fix: ensure your proxy/CDN includes all query parameters in its cache key. 
+ +For Vercel, add to `vercel.json`: + +```json +{ + "headers": [ + { + "source": "/api/(.*)", + "headers": [ + { "key": "CDN-Cache-Control", "value": "no-store" }, + { "key": "Vercel-CDN-Cache-Control", "value": "no-store" } + ] + } + ] +} +``` + +## Common Mistakes + +### HIGH Proxy or CDN not including query params in cache key + +Wrong: + +```nginx +# nginx caching without query params in key +proxy_cache_key $scheme$host$uri; +``` + +Correct: + +```nginx +# Include query params (handle, offset) in cache key +proxy_cache_key $scheme$host$request_uri; +``` + +Fast-loop detection fires after 5 requests in 500ms at the same offset. The client auto-clears caches once, then applies backoff, then throws after 5 consecutive detections. + +Source: `packages/typescript-client/src/client.ts:929-1002` + +### HIGH SSE responses buffered by proxy + +Wrong: + +```nginx +location /v1/shape { + proxy_pass http://electric:3000; + # Default: proxy_buffering on — SSE responses delayed +} +``` + +Correct: + +```nginx +location /v1/shape { + proxy_pass http://electric:3000; + proxy_buffering off; +} +``` + +For Caddy: + +``` +reverse_proxy localhost:3000 { + flush_interval -1 +} +``` + +Nginx and Caddy buffer responses by default, causing long delays for SSE live updates. Disable buffering for Electric endpoints. Do NOT disable caching entirely — Electric uses cache headers for request collapsing. + +Source: `website/docs/guides/troubleshooting.md:69-109` + +### MEDIUM Running 6+ shapes in local dev without HTTP/2 + +Wrong: + +```sh +# Running Electric directly on localhost:3000 +# With 7+ shapes, browser HTTP/1.1 queues all requests (6 connection limit) +``` + +Correct: + +```sh +# Run Caddy as HTTP/2 proxy on host (not in Docker — Docker prevents HTTP/2) +caddy run --config - --adapter caddyfile < + Deploy Electric via Docker, Docker Compose, or Electric Cloud. 
Covers + DATABASE_URL (direct connection, not pooler), ELECTRIC_SECRET (required + since v1.x), ELECTRIC_INSECURE for dev, wal_level=logical, + max_replication_slots, ELECTRIC_STORAGE_DIR persistence, + ELECTRIC_POOLED_DATABASE_URL for pooled queries, IPv6 with + ELECTRIC_DATABASE_USE_IPV6, Kubernetes readiness probes (200 vs 202), + replication slot cleanup, and Postgres v14+ requirements. Load when + deploying Electric or configuring Postgres for logical replication. +type: lifecycle +library: electric +library_version: '1.5.10' +sources: + - 'electric-sql/electric:website/docs/guides/deployment.md' + - 'electric-sql/electric:packages/sync-service/dev/postgres.conf' + - 'electric-sql/electric:packages/sync-service/CHANGELOG.md' +--- + +# Electric — Deployment + +## Setup + +### Postgres configuration + +```conf +# postgresql.conf +wal_level = logical +max_replication_slots = 10 +``` + +### Docker Compose + +```yaml +name: 'electric-backend' +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_DB: electric + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + ports: ['54321:5432'] + volumes: ['./postgres.conf:/etc/postgresql/postgresql.conf:ro'] + tmpfs: ['/var/lib/postgresql/data', '/tmp'] + command: ['postgres', '-c', 'config_file=/etc/postgresql/postgresql.conf'] + + electric: + image: electricsql/electric:latest + environment: + DATABASE_URL: postgresql://postgres:password@postgres:5432/electric?sslmode=disable + ELECTRIC_SECRET: ${ELECTRIC_SECRET} + ports: ['3000:3000'] + volumes: ['electric_data:/var/lib/electric'] + depends_on: ['postgres'] + +volumes: + electric_data: +``` + +### Electric Cloud + +```sh +npx @electric-sql/start my-app +pnpm claim && pnpm deploy +``` + +## Core Patterns + +### Environment variables + +| Variable | Required | Description | +| ------------------------------ | ---------- | --------------------------------------------- | +| `DATABASE_URL` | Yes | Direct Postgres connection (not pooler) | +| 
`ELECTRIC_SECRET` | Yes (prod) | API authentication secret | +| `ELECTRIC_INSECURE` | Dev only | Set `true` to skip secret requirement | +| `ELECTRIC_STORAGE_DIR` | No | Persistent shape cache directory | +| `ELECTRIC_POOLED_DATABASE_URL` | No | Pooled connection for non-replication queries | +| `ELECTRIC_DATABASE_USE_IPV6` | No | Set `true` for IPv6 Postgres connections | + +### Kubernetes health checks + +```yaml +livenessProbe: + httpGet: + path: /v1/health + port: 3000 +readinessProbe: + exec: + command: ['curl', '-sf', 'http://localhost:3000/v1/health'] + # Use exec, not httpGet — 202 means "alive but not ready" + # Only 200 means fully ready for traffic +``` + +### Replication slot cleanup + +```sql +-- When stopping Electric for extended periods: +SELECT pg_drop_replication_slot('electric_slot_default'); + +-- Prevent unbounded WAL growth: +ALTER SYSTEM SET max_slot_wal_keep_size = '10GB'; +SELECT pg_reload_conf(); +``` + +## Common Mistakes + +### CRITICAL Not setting wal_level to logical + +Wrong: + +```conf +# postgresql.conf (default) +wal_level = replica +``` + +Correct: + +```conf +wal_level = logical +max_replication_slots = 10 +``` + +Electric requires logical replication. The default `wal_level = replica` does not support it. Requires Postgres restart after change. + +Source: `packages/sync-service/dev/postgres.conf` + +### CRITICAL Running without ELECTRIC_SECRET in production + +Wrong: + +```sh +docker run electricsql/electric \ + -e DATABASE_URL=postgres://user:pass@host/db +``` + +Correct: + +```sh +docker run electricsql/electric \ + -e DATABASE_URL=postgres://user:pass@host/db \ + -e ELECTRIC_SECRET=my-secret-key +``` + +Since v1.x, `ELECTRIC_SECRET` is required. Without it, Electric refuses to start unless `ELECTRIC_INSECURE=true` is set (dev only). 
+ +Source: `packages/sync-service/CHANGELOG.md:832-834` + +### MEDIUM Using ephemeral storage for ELECTRIC_STORAGE_DIR + +Wrong: + +```yaml +electric: + image: electricsql/electric:latest + # No volume — shape cache lost on restart +``` + +Correct: + +```yaml +electric: + image: electricsql/electric:latest + volumes: ['electric_data:/var/lib/electric'] +``` + +Electric caches shape logs on disk. Ephemeral storage causes full re-sync on every container restart. + +Source: `website/docs/guides/deployment.md:133-157` + +### MEDIUM Using deprecated ELECTRIC_QUERY_DATABASE_URL + +Wrong: + +```sh +ELECTRIC_QUERY_DATABASE_URL=postgres://user:pass@pooler:6432/db +``` + +Correct: + +```sh +ELECTRIC_POOLED_DATABASE_URL=postgres://user:pass@pooler:6432/db +``` + +Renamed from `ELECTRIC_QUERY_DATABASE_URL` to `ELECTRIC_POOLED_DATABASE_URL` in v1.3.x. The old name may stop working in future versions. + +Source: `packages/sync-service/CHANGELOG.md:415` + +See also: electric-proxy-auth/SKILL.md — Production requires proxy with ELECTRIC_SECRET. +See also: electric-postgres-security/SKILL.md — Deployment requires correct Postgres configuration. +See also: electric-debugging/SKILL.md — Many sync issues stem from deployment configuration. + +## Version + +Targets Electric sync service v1.x. diff --git a/packages/typescript-client/skills/electric-new-feature/SKILL.md b/packages/typescript-client/skills/electric-new-feature/SKILL.md new file mode 100644 index 0000000000..59913b5e42 --- /dev/null +++ b/packages/typescript-client/skills/electric-new-feature/SKILL.md @@ -0,0 +1,366 @@ +--- +name: electric-new-feature +description: > + End-to-end guide for adding a new synced feature with Electric and TanStack + DB. 
Covers the full journey: design Postgres schema, set REPLICA IDENTITY + FULL, define shape, create proxy route, set up TanStack DB collection with + electricCollectionOptions, implement optimistic mutations with txid + handshake (pg_current_xact_id, awaitTxId), and build live queries with + useLiveQuery. Also covers migration from old ElectricSQL (electrify/db + pattern does not exist), current API patterns (table as query param not + path, handle not shape_id). Load when building a new feature from scratch. +type: lifecycle +library: electric +library_version: '1.5.10' +requires: + - electric-shapes + - electric-proxy-auth + - electric-schema-shapes +sources: + - 'electric-sql/electric:AGENTS.md' + - 'electric-sql/electric:examples/tanstack-db-web-starter/' +--- + +This skill builds on electric-shapes, electric-proxy-auth, and electric-schema-shapes. Read those first. + +# Electric — New Feature End-to-End + +## Setup + +### 0. Start Electric locally + +```yaml +# docker-compose.yml +services: + postgres: + image: postgres:17-alpine + environment: + POSTGRES_DB: electric + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + ports: + - '54321:5432' + tmpfs: + - /tmp + command: + - -c + - listen_addresses=* + - -c + - wal_level=logical + + electric: + image: electricsql/electric:latest + environment: + DATABASE_URL: postgresql://postgres:password@postgres:5432/electric?sslmode=disable + ELECTRIC_INSECURE: true # Dev only — use ELECTRIC_SECRET in production + ports: + - '3000:3000' + depends_on: + - postgres +``` + +```bash +docker compose up -d +``` + +### 1. Create Postgres table + +```sql +CREATE TABLE todos ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + text TEXT NOT NULL, + completed BOOLEAN DEFAULT false, + created_at TIMESTAMPTZ DEFAULT now() +); + +ALTER TABLE todos REPLICA IDENTITY FULL; +``` + +### 2. Create proxy route + +The proxy forwards Electric protocol params and injects server-side secrets. 
Use your framework's server route pattern (TanStack Start, Next.js API route, Express, etc.).
+
+```ts
+// Example: TanStack Start — src/routes/api/todos.ts
+import { createFileRoute } from '@tanstack/react-router'
+import { ELECTRIC_PROTOCOL_QUERY_PARAMS } from '@electric-sql/client'
+
+const serve = async ({ request }: { request: Request }) => {
+  const url = new URL(request.url)
+  const electricUrl = process.env.ELECTRIC_URL || 'http://localhost:3000'
+  const origin = new URL(`${electricUrl}/v1/shape`)
+
+  url.searchParams.forEach((v, k) => {
+    if (ELECTRIC_PROTOCOL_QUERY_PARAMS.includes(k))
+      origin.searchParams.set(k, v)
+  })
+
+  origin.searchParams.set('table', 'todos')
+
+  // Add auth if using Electric Cloud
+  if (process.env.ELECTRIC_SOURCE_ID && process.env.ELECTRIC_SECRET) {
+    origin.searchParams.set('source_id', process.env.ELECTRIC_SOURCE_ID)
+    origin.searchParams.set('secret', process.env.ELECTRIC_SECRET)
+  }
+
+  const res = await fetch(origin)
+  const headers = new Headers(res.headers)
+  headers.delete('content-encoding')
+  headers.delete('content-length')
+  return new Response(res.body, {
+    status: res.status,
+    statusText: res.statusText,
+    headers,
+  })
+}
+
+export const Route = createFileRoute('/api/todos')({
+  server: {
+    handlers: {
+      GET: serve,
+    },
+  },
+})
+```
+
+### 3. Define schema
+
+```ts
+// db/schema.ts — Zod schema matching your Postgres table
+import { z } from 'zod'
+
+export const todoSchema = z.object({
+  id: z.string().uuid(),
+  user_id: z.string().uuid(),
+  text: z.string(),
+  completed: z.boolean(),
+  created_at: z.date(),
+})
+
+export type Todo = z.infer<typeof todoSchema>
+```
+
+If using Drizzle, generate schemas from your table definitions with `createSelectSchema(todosTable)` from `drizzle-zod`.
+
+### 4. Create mutation endpoint
+
+Implement your write endpoint using your framework's server function or API route. The endpoint must return `{ txid }` from the same transaction as the mutation. 
+ +```ts +// Example: server function that inserts and returns txid +async function createTodo(todo: { text: string; user_id: string }) { + const client = await pool.connect() + try { + await client.query('BEGIN') + const result = await client.query( + 'INSERT INTO todos (text, user_id) VALUES ($1, $2) RETURNING id', + [todo.text, todo.user_id] + ) + const txResult = await client.query( + 'SELECT pg_current_xact_id()::xid::text AS txid' + ) + await client.query('COMMIT') + return { id: result.rows[0].id, txid: Number(txResult.rows[0].txid) } + } finally { + client.release() + } +} +``` + +### 5. Create TanStack DB collection + +```ts +import { createCollection } from '@tanstack/react-db' +import { electricCollectionOptions } from '@tanstack/electric-db-collection' +import { todoSchema } from './db/schema' + +export const todoCollection = createCollection( + electricCollectionOptions({ + id: 'todos', + schema: todoSchema, + getKey: (row) => row.id, + shapeOptions: { + url: new URL( + '/api/todos', + typeof window !== 'undefined' + ? 
window.location.origin
+          : 'http://localhost:5173'
+      ).toString(),
+      // Electric auto-parses: bool, int2, int4, float4, float8, json, jsonb
+      // You only need custom parsers for types like timestamptz, date, numeric
+      // See electric-shapes/references/type-parsers.md for the full list
+      parser: {
+        timestamptz: (date: string) => new Date(date),
+      },
+    },
+    onInsert: async ({ transaction }) => {
+      const { modified: newTodo } = transaction.mutations[0]
+      const result = await createTodo({
+        text: newTodo.text,
+        user_id: newTodo.user_id,
+      })
+      return { txid: result.txid }
+    },
+    onUpdate: async ({ transaction }) => {
+      const { modified: updated } = transaction.mutations[0]
+      const result = await updateTodo(updated.id, {
+        text: updated.text,
+        completed: updated.completed,
+      })
+      return { txid: result.txid }
+    },
+    onDelete: async ({ transaction }) => {
+      const { original: deleted } = transaction.mutations[0]
+      const result = await deleteTodo(deleted.id)
+      return { txid: result.txid }
+    },
+  })
+)
+```
+
+### 6. Build live queries
+
+```tsx
+import { useLiveQuery, eq } from '@tanstack/react-db'
+
+export function TodoList() {
+  const { data: todos } = useLiveQuery((q) =>
+    q
+      .from({ todo: todoCollection })
+      .where(({ todo }) => eq(todo.completed, false))
+      .orderBy(({ todo }) => todo.created_at, 'desc')
+      .limit(50)
+  )
+
+  return (
+    <ul>
+      {todos.map((todo) => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+  )
+}
+```
+
+### 7. 
Optimistic mutations + +```tsx +const handleAdd = () => { + todoCollection.insert({ + id: crypto.randomUUID(), + text: 'New todo', + completed: false, + created_at: new Date(), + }) +} + +const handleToggle = (todo) => { + todoCollection.update(todo.id, (draft) => { + draft.completed = !draft.completed + }) +} + +const handleDelete = (todoId) => todoCollection.delete(todoId) +``` + +## Common Mistakes + +### HIGH Removing parsers because the TanStack DB schema handles types + +Wrong: + +```ts +// "My Zod schema has z.coerce.date() so I don't need a parser" +electricCollectionOptions({ + schema: z.object({ created_at: z.coerce.date() }), + shapeOptions: { url: '/api/todos' }, // No parser! +}) +``` + +Correct: + +```ts +electricCollectionOptions({ + schema: z.object({ created_at: z.coerce.date() }), + shapeOptions: { + url: '/api/todos', + parser: { timestamptz: (date: string) => new Date(date) }, + }, +}) +``` + +Electric's sync path delivers data directly into the collection store, bypassing the TanStack DB schema. The `parser` in `shapeOptions` handles type coercion on the sync path; the schema handles the mutation path. You need both. Without the parser, `timestamptz` arrives as a string and `getTime()` or other Date methods will fail at runtime. + +### CRITICAL Using old electrify() bidirectional sync API + +Wrong: + +```ts +const { db } = await electrify(conn, schema) +await db.todos.create({ text: 'New todo' }) +``` + +Correct: + +```ts +todoCollection.insert({ id: crypto.randomUUID(), text: 'New todo' }) +// Write path: collection.insert() → onInsert → API → Postgres → txid → awaitTxId +``` + +Old ElectricSQL (v0.x) had bidirectional SQLite sync. Current Electric is read-only. Writes go through your API endpoint and are reconciled via txid handshake. 
+ +Source: `AGENTS.md:386-392` + +### HIGH Using path-based table URL pattern + +Wrong: + +```ts +const stream = new ShapeStream({ + url: 'http://localhost:3000/v1/shape/todos?offset=-1', +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: 'http://localhost:3000/v1/shape?table=todos&offset=-1', +}) +``` + +The table-as-path-segment pattern (`/v1/shape/todos`) was removed in v0.8.0. Table is now a query parameter. + +Source: `packages/sync-service/CHANGELOG.md:1124` + +### MEDIUM Using shape_id instead of handle + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { shape_id: '12345' }, +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + handle: '12345', +}) +``` + +Renamed from `shape_id` to `handle` in v0.8.0. + +Source: `packages/sync-service/CHANGELOG.md:1123` + +See also: electric-orm/SKILL.md — Getting txid from ORM transactions. +See also: electric-proxy-auth/SKILL.md — E2E feature journey includes setting up proxy routes. + +## Version + +Targets @electric-sql/client v1.5.10, @tanstack/react-db latest. diff --git a/packages/typescript-client/skills/electric-orm/SKILL.md b/packages/typescript-client/skills/electric-orm/SKILL.md new file mode 100644 index 0000000000..383bf28d78 --- /dev/null +++ b/packages/typescript-client/skills/electric-orm/SKILL.md @@ -0,0 +1,189 @@ +--- +name: electric-orm +description: > + Use Electric with Drizzle ORM or Prisma for the write path. Covers getting + pg_current_xact_id() from ORM transactions using Drizzle tx.execute(sql) + and Prisma $queryRaw, running migrations that preserve REPLICA IDENTITY + FULL, and schema management patterns compatible with Electric shapes. + Load when using Drizzle or Prisma alongside Electric for writes. 
+type: composition +library: electric +library_version: '1.5.10' +requires: + - electric-shapes + - electric-schema-shapes +sources: + - 'electric-sql/electric:AGENTS.md' + - 'electric-sql/electric:website/docs/guides/troubleshooting.md' +--- + +This skill builds on electric-shapes and electric-schema-shapes. Read those first. + +# Electric — ORM Integration + +## Setup + +### Drizzle ORM + +```ts +import { drizzle } from 'drizzle-orm/node-postgres' +import { sql } from 'drizzle-orm' +import { todos } from './schema' + +const db = drizzle(pool) + +// Write with txid for Electric reconciliation +async function createTodo(text: string, userId: string) { + return await db.transaction(async (tx) => { + const [row] = await tx + .insert(todos) + .values({ + id: crypto.randomUUID(), + text, + userId, + }) + .returning() + + const [{ txid }] = await tx.execute<{ txid: string }>( + sql`SELECT pg_current_xact_id()::xid::text AS txid` + ) + + return { id: row.id, txid: parseInt(txid) } + }) +} +``` + +### Prisma + +```ts +import { PrismaClient } from '@prisma/client' + +const prisma = new PrismaClient() + +async function createTodo(text: string, userId: string) { + return await prisma.$transaction(async (tx) => { + const todo = await tx.todo.create({ + data: { id: crypto.randomUUID(), text, userId }, + }) + + const [{ txid }] = await tx.$queryRaw<[{ txid: string }]>` + SELECT pg_current_xact_id()::xid::text AS txid + ` + + return { id: todo.id, txid: parseInt(txid) } + }) +} +``` + +## Core Patterns + +### Drizzle migration with REPLICA IDENTITY + +```ts +// In migration file +import { sql } from 'drizzle-orm' + +export async function up(db) { + await db.execute(sql` + CREATE TABLE todos ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + text TEXT NOT NULL, + completed BOOLEAN DEFAULT false + ) + `) + await db.execute(sql`ALTER TABLE todos REPLICA IDENTITY FULL`) +} +``` + +### Prisma migration with REPLICA IDENTITY + +```sql +-- prisma/migrations/001_init/migration.sql 
+CREATE TABLE "todos" ( + "id" UUID PRIMARY KEY DEFAULT gen_random_uuid(), + "text" TEXT NOT NULL, + "completed" BOOLEAN DEFAULT false +); + +ALTER TABLE "todos" REPLICA IDENTITY FULL; +``` + +### Collection onInsert with ORM + +```ts +import { createCollection } from '@tanstack/react-db' +import { electricCollectionOptions } from '@tanstack/electric-db-collection' + +export const todoCollection = createCollection( + electricCollectionOptions({ + id: 'todos', + schema: todoSchema, + getKey: (row) => row.id, + shapeOptions: { url: '/api/todos' }, + onInsert: async ({ transaction }) => { + const newTodo = transaction.mutations[0].modified + const { txid } = await createTodo(newTodo.text, newTodo.userId) + return { txid } + }, + }) +) +``` + +## Common Mistakes + +### HIGH Not returning txid from ORM write operations + +Wrong: + +```ts +// Drizzle — no txid returned +const [todo] = await db.insert(todos).values({ text: 'New' }).returning() +return { id: todo.id } +``` + +Correct: + +```ts +// Drizzle — txid in same transaction +const result = await db.transaction(async (tx) => { + const [row] = await tx.insert(todos).values({ text: 'New' }).returning() + const [{ txid }] = await tx.execute<{ txid: string }>( + sql`SELECT pg_current_xact_id()::xid::text AS txid` + ) + return { id: row.id, txid: parseInt(txid) } +}) +``` + +ORMs do not return `pg_current_xact_id()` by default. Add a raw SQL query for txid within the same transaction. Without it, optimistic state may drop before the synced version arrives, causing UI flicker. 
+ +Source: `AGENTS.md:116-119` + +### MEDIUM Running migrations that drop replica identity + +Wrong: + +```ts +// ORM migration recreates table without REPLICA IDENTITY +await db.execute(sql`DROP TABLE todos`) +await db.execute(sql`CREATE TABLE todos (...)`) +// Missing: ALTER TABLE todos REPLICA IDENTITY FULL +``` + +Correct: + +```ts +await db.execute(sql`DROP TABLE todos`) +await db.execute(sql`CREATE TABLE todos (...)`) +await db.execute(sql`ALTER TABLE todos REPLICA IDENTITY FULL`) +``` + +Some migration tools reset table properties. Always ensure `REPLICA IDENTITY FULL` is set after table recreation. Without it, Electric cannot stream updates and deletes correctly. + +Source: `website/docs/guides/troubleshooting.md:373` + +See also: electric-new-feature/SKILL.md — Full write-path journey including txid handshake. +See also: electric-schema-shapes/SKILL.md — Schema design affects both shapes and ORM queries. + +## Version + +Targets @electric-sql/client v1.5.10. diff --git a/packages/typescript-client/skills/electric-postgres-security/SKILL.md b/packages/typescript-client/skills/electric-postgres-security/SKILL.md new file mode 100644 index 0000000000..13809fac58 --- /dev/null +++ b/packages/typescript-client/skills/electric-postgres-security/SKILL.md @@ -0,0 +1,196 @@ +--- +name: electric-postgres-security +description: > + Pre-deploy security checklist for Postgres with Electric. Checks REPLICATION + role, SELECT grants, CREATE on database, table ownership, REPLICA IDENTITY + FULL on all synced tables, publication management (auto vs manual with + ELECTRIC_MANUAL_TABLE_PUBLISHING), connection pooler exclusion for + DATABASE_URL (use direct connection), and ELECTRIC_POOLED_DATABASE_URL + for pooled queries. Load before deploying Electric to production or when + diagnosing Postgres permission errors. 
+type: security +library: electric +library_version: '1.5.10' +requires: + - electric-proxy-auth +sources: + - 'electric-sql/electric:website/docs/guides/postgres-permissions.md' + - 'electric-sql/electric:website/docs/guides/troubleshooting.md' + - 'electric-sql/electric:website/docs/guides/deployment.md' +--- + +This skill builds on electric-proxy-auth. Read it first for proxy security patterns. + +# Electric — Postgres Security Checklist + +Run through each section before deploying Electric to production. + +## User Permission Checks + +### Check: Electric user has REPLICATION role + +Expected: + +```sql +SELECT rolreplication FROM pg_roles WHERE rolname = 'electric_user'; +-- Should return: true +``` + +Fail condition: `rolreplication = false` or user does not exist. +Fix: `ALTER ROLE electric_user WITH REPLICATION;` + +### Check: Electric user has SELECT on synced tables + +Expected: + +```sql +SELECT has_table_privilege('electric_user', 'todos', 'SELECT'); +-- Should return: true +``` + +Fail condition: Returns `false`. +Fix: `GRANT SELECT ON todos TO electric_user;` or `GRANT SELECT ON ALL TABLES IN SCHEMA public TO electric_user;` + +### Check: Electric user has CREATE on database + +Expected: + +```sql +SELECT has_database_privilege('electric_user', current_database(), 'CREATE'); +-- Should return: true (unless using manual publishing mode) +``` + +Fail condition: Returns `false` and not using `ELECTRIC_MANUAL_TABLE_PUBLISHING=true`. +Fix: `GRANT CREATE ON DATABASE mydb TO electric_user;` + +## Table Configuration Checks + +### Check: REPLICA IDENTITY FULL on all synced tables + +Expected: + +```sql +SELECT relname, relreplident +FROM pg_class +WHERE relname IN ('todos', 'users') + AND relreplident = 'f'; -- 'f' = FULL +``` + +Fail condition: `relreplident` is `'d'` (default) or `'n'` (nothing). 
+Fix: `ALTER TABLE todos REPLICA IDENTITY FULL;` + +### Check: Tables are in the Electric publication + +Expected: + +```sql +SELECT tablename FROM pg_publication_tables +WHERE pubname = 'electric_publication_default'; +``` + +Fail condition: Synced tables missing from the list. +Fix (manual mode): `ALTER PUBLICATION electric_publication_default ADD TABLE todos;` + +## Connection Checks + +### Check: DATABASE_URL uses direct connection (not pooler) + +Expected: + +``` +DATABASE_URL=postgres://user:pass@db-host:5432/mydb +``` + +Fail condition: URL points to a connection pooler (e.g., PgBouncer on port 6432, Supabase pooler). +Fix: Use direct Postgres connection for `DATABASE_URL`. Set `ELECTRIC_POOLED_DATABASE_URL` separately for pooled queries. + +### Check: wal_level is set to logical + +Expected: + +```sql +SHOW wal_level; +-- Should return: logical +``` + +Fail condition: Returns `replica` or `minimal`. +Fix: Set `wal_level = logical` in `postgresql.conf` and restart Postgres. + +## Common Security Mistakes + +### CRITICAL Using connection pooler for DATABASE_URL + +Wrong: + +```sh +DATABASE_URL=postgres://user:pass@pooler.example.com:6432/mydb +``` + +Correct: + +```sh +DATABASE_URL=postgres://user:pass@db.example.com:5432/mydb +ELECTRIC_POOLED_DATABASE_URL=postgres://user:pass@pooler.example.com:6432/mydb +``` + +Connection poolers (except PgBouncer 1.23+) do not support logical replication. Electric must connect directly to Postgres for its replication slot. + +Source: `website/docs/guides/deployment.md:91` + +### HIGH Missing REPLICA IDENTITY FULL on tables + +Wrong: + +```sql +CREATE TABLE todos (id UUID PRIMARY KEY, text TEXT); +-- Replica identity defaults to 'default' (PK only) +``` + +Correct: + +```sql +CREATE TABLE todos (id UUID PRIMARY KEY, text TEXT); +ALTER TABLE todos REPLICA IDENTITY FULL; +``` + +Without `REPLICA IDENTITY FULL`, Electric cannot stream the full row on updates and deletes. Updates may be missing non-PK columns. 
+ +Source: `website/docs/guides/troubleshooting.md:373` + +### HIGH Electric user without REPLICATION role + +Wrong: + +```sql +CREATE USER electric_user WITH PASSWORD 'secret'; +``` + +Correct: + +```sql +CREATE USER electric_user WITH PASSWORD 'secret' REPLICATION; +GRANT SELECT ON ALL TABLES IN SCHEMA public TO electric_user; +``` + +Electric uses logical replication and requires the `REPLICATION` role on the database user. + +Source: `website/docs/guides/postgres-permissions.md` + +## Pre-Deploy Summary + +- [ ] Electric user has `REPLICATION` role +- [ ] Electric user has `SELECT` on all synced tables +- [ ] Electric user has `CREATE` on database (or manual publishing configured) +- [ ] All synced tables have `REPLICA IDENTITY FULL` +- [ ] All synced tables are in the Electric publication +- [ ] `DATABASE_URL` uses direct Postgres connection (not pooler) +- [ ] `wal_level = logical` in Postgres config +- [ ] `ELECTRIC_SECRET` is set (not using `ELECTRIC_INSECURE=true`) +- [ ] Secrets are injected server-side only (never in client bundle) + +See also: electric-proxy-auth/SKILL.md — Proxy injects secrets that Postgres security enforces. +See also: electric-deployment/SKILL.md — Deployment requires correct Postgres configuration. + +## Version + +Targets Electric sync service v1.x. diff --git a/packages/typescript-client/skills/electric-proxy-auth/SKILL.md b/packages/typescript-client/skills/electric-proxy-auth/SKILL.md new file mode 100644 index 0000000000..174d5b6977 --- /dev/null +++ b/packages/typescript-client/skills/electric-proxy-auth/SKILL.md @@ -0,0 +1,269 @@ +--- +name: electric-proxy-auth +description: > + Set up a server-side proxy to forward Electric shape requests securely. 
+ Covers ELECTRIC_PROTOCOL_QUERY_PARAMS forwarding, server-side shape + definition (table, where, params), content-encoding/content-length header + cleanup, CORS configuration for electric-offset/electric-handle/ + electric-schema/electric-cursor headers, auth token injection, + ELECTRIC_SECRET/SOURCE_SECRET server-side only, tenant isolation via + WHERE positional params, onError 401 token refresh, and subset security + (AND semantics). Load when creating proxy routes, adding auth, or + configuring CORS for Electric. +type: core +library: electric +library_version: '1.5.10' +requires: + - electric-shapes +sources: + - 'electric-sql/electric:packages/typescript-client/src/constants.ts' + - 'electric-sql/electric:examples/proxy-auth/app/shape-proxy/route.ts' + - 'electric-sql/electric:website/docs/guides/auth.md' + - 'electric-sql/electric:website/docs/guides/security.md' +--- + +This skill builds on electric-shapes. Read it first for ShapeStream configuration. + +# Electric — Proxy and Auth + +## Setup + +```ts +import { ELECTRIC_PROTOCOL_QUERY_PARAMS } from '@electric-sql/client' + +// Server route (Next.js App Router example) +export async function GET(request: Request) { + const url = new URL(request.url) + const originUrl = new URL('/v1/shape', process.env.ELECTRIC_URL) + + // Only forward Electric protocol params — never table/where from client + url.searchParams.forEach((value, key) => { + if (ELECTRIC_PROTOCOL_QUERY_PARAMS.includes(key)) { + originUrl.searchParams.set(key, value) + } + }) + + // Server decides shape definition + originUrl.searchParams.set('table', 'todos') + originUrl.searchParams.set('secret', process.env.ELECTRIC_SOURCE_SECRET!) 
+ + const response = await fetch(originUrl) + const headers = new Headers(response.headers) + headers.delete('content-encoding') + headers.delete('content-length') + + return new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers, + }) +} +``` + +Client usage: + +```ts +import { ShapeStream } from '@electric-sql/client' + +const stream = new ShapeStream({ + url: '/api/todos', // Points to your proxy, not Electric directly +}) +``` + +## Core Patterns + +### Tenant isolation with WHERE params + +```ts +// In proxy route — inject user context server-side +const user = await getAuthUser(request) +originUrl.searchParams.set('table', 'todos') +originUrl.searchParams.set('where', 'org_id = $1') +originUrl.searchParams.set('params[1]', user.orgId) +``` + +### Auth token refresh on 401 + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + headers: { + Authorization: async () => `Bearer ${await getToken()}`, + }, + onError: async (error) => { + if (error instanceof FetchError && error.status === 401) { + const newToken = await refreshToken() + return { headers: { Authorization: `Bearer ${newToken}` } } + } + return {} + }, +}) +``` + +### CORS configuration for cross-origin proxies + +```ts +// In proxy response headers +headers.set( + 'Access-Control-Expose-Headers', + 'electric-offset, electric-handle, electric-schema, electric-cursor' +) +``` + +### Subset security (AND semantics) + +Electric combines the main shape WHERE (set in proxy) with subset WHERE (from POST body) using AND. Subsets can only narrow results, never widen them: + +```sql +-- Main shape: WHERE org_id = $1 (set by proxy) +-- Subset: WHERE status = 'active' (from client POST) +-- Effective: WHERE org_id = $1 AND status = 'active' +``` + +Even `WHERE 1=1` in the subset cannot bypass the main shape's WHERE. 
+ +## Common Mistakes + +### CRITICAL Forwarding all client params to Electric + +Wrong: + +```ts +url.searchParams.forEach((value, key) => { + originUrl.searchParams.set(key, value) +}) +``` + +Correct: + +```ts +import { ELECTRIC_PROTOCOL_QUERY_PARAMS } from '@electric-sql/client' + +url.searchParams.forEach((value, key) => { + if (ELECTRIC_PROTOCOL_QUERY_PARAMS.includes(key)) { + originUrl.searchParams.set(key, value) + } +}) +originUrl.searchParams.set('table', 'todos') +``` + +Forwarding all params lets the client control `table`, `where`, and `columns`, accessing any Postgres table. Only forward `ELECTRIC_PROTOCOL_QUERY_PARAMS`. + +Source: `examples/proxy-auth/app/shape-proxy/route.ts` + +### CRITICAL Not deleting content-encoding and content-length headers + +Wrong: + +```ts +return new Response(response.body, { + status: response.status, + headers: response.headers, +}) +``` + +Correct: + +```ts +const headers = new Headers(response.headers) +headers.delete('content-encoding') +headers.delete('content-length') +return new Response(response.body, { status: response.status, headers }) +``` + +`fetch()` decompresses the response body but keeps the original `content-encoding` and `content-length` headers, causing browser decoding failures. + +Source: `examples/proxy-auth/app/shape-proxy/route.ts:49-56` + +### CRITICAL Exposing ELECTRIC_SECRET or SOURCE_SECRET to browser + +Wrong: + +```ts +// Client-side code +const url = `/v1/shape?table=todos&secret=${import.meta.env.VITE_ELECTRIC_SOURCE_SECRET}` +``` + +Correct: + +```ts +// Server proxy only +originUrl.searchParams.set('secret', process.env.ELECTRIC_SOURCE_SECRET!) +``` + +Bundlers like Vite expose `VITE_*` env vars to client code. The secret must only be injected server-side in the proxy. 
+ +Source: `AGENTS.md:17-20` + +### CRITICAL SQL injection in WHERE clause via string interpolation + +Wrong: + +```ts +originUrl.searchParams.set('where', `org_id = '${user.orgId}'`) +``` + +Correct: + +```ts +originUrl.searchParams.set('where', 'org_id = $1') +originUrl.searchParams.set('params[1]', user.orgId) +``` + +String interpolation in WHERE clauses enables SQL injection. Use positional params (`$1`, `$2`). + +Source: `website/docs/guides/auth.md` + +### HIGH Not exposing Electric response headers via CORS + +Wrong: + +```ts +// No CORS header configuration — browser strips custom headers +return new Response(response.body, { headers }) +``` + +Correct: + +```ts +headers.set( + 'Access-Control-Expose-Headers', + 'electric-offset, electric-handle, electric-schema, electric-cursor' +) +return new Response(response.body, { headers }) +``` + +The client throws `MissingHeadersError` if Electric response headers are stripped by CORS. Expose `electric-offset`, `electric-handle`, `electric-schema`, and `electric-cursor`. + +Source: `packages/typescript-client/src/error.ts:109-118` + +### CRITICAL Calling Electric directly from production client + +Wrong: + +```ts +new ShapeStream({ + url: 'https://my-electric.example.com/v1/shape', + params: { table: 'todos' }, +}) +``` + +Correct: + +```ts +new ShapeStream({ + url: '/api/todos', // Your proxy route +}) +``` + +Electric's HTTP API is public by default with no auth. Always proxy through your server so the server controls shape definitions and injects secrets. + +Source: `AGENTS.md:19-20` + +See also: electric-shapes/SKILL.md — Shape URLs must point to proxy routes, not directly to Electric. +See also: electric-deployment/SKILL.md — Production requires ELECTRIC_SECRET and proxy; dev uses ELECTRIC_INSECURE=true. +See also: electric-postgres-security/SKILL.md — Proxy injects secrets that Postgres security enforces. + +## Version + +Targets @electric-sql/client v1.5.10. 
diff --git a/packages/typescript-client/skills/electric-schema-shapes/SKILL.md b/packages/typescript-client/skills/electric-schema-shapes/SKILL.md new file mode 100644 index 0000000000..6e1587391a --- /dev/null +++ b/packages/typescript-client/skills/electric-schema-shapes/SKILL.md @@ -0,0 +1,200 @@ +--- +name: electric-schema-shapes +description: > + Design Postgres schema and Electric shape definitions together for a new + feature. Covers single-table shape constraint, cross-table joins using + multiple shapes, WHERE clause design for tenant isolation, column selection + for bandwidth optimization, replica mode choice (default vs full for + old_value), enum casting in WHERE clauses, and txid handshake setup with + pg_current_xact_id() for optimistic writes. Load when designing database + tables for use with Electric shapes. +type: core +library: electric +library_version: '1.5.10' +requires: + - electric-shapes +sources: + - 'electric-sql/electric:AGENTS.md' + - 'electric-sql/electric:website/docs/guides/shapes.md' +--- + +This skill builds on electric-shapes. Read it first for ShapeStream configuration. + +# Electric — Schema and Shapes + +## Setup + +Design tables knowing each shape syncs one table. For cross-table data, use multiple shapes with client-side joins. 
+ +```sql +-- Schema designed for Electric shapes +CREATE TABLE todos ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + org_id UUID NOT NULL, + text TEXT NOT NULL, + completed BOOLEAN DEFAULT false, + created_at TIMESTAMPTZ DEFAULT now() +); + +ALTER TABLE todos REPLICA IDENTITY FULL; +``` + +```ts +import { ShapeStream } from '@electric-sql/client' + +const todoStream = new ShapeStream({ + url: '/api/todos', // Proxy sets: table=todos, where=org_id=$1 +}) +``` + +## Core Patterns + +### Cross-table data with multiple shapes + +```ts +// Each shape syncs one table — join client-side +const todoStream = new ShapeStream({ url: '/api/todos' }) +const userStream = new ShapeStream({ url: '/api/users' }) + +// With TanStack DB, use .join() in live queries: +// q.from({ todo: todoCollection }) +// .join({ user: userCollection }, ({ todo, user }) => eq(todo.userId, user.id)) +``` + +### Choose replica mode + +```ts +// Default: only changed columns sent on update +const stream = new ShapeStream({ url: '/api/todos' }) + +// Full: all columns + old_value on updates (more bandwidth, needed for diffs) +const stream = new ShapeStream({ + url: '/api/todos', + params: { replica: 'full' }, +}) +``` + +### Backend txid handshake for optimistic writes + +Call `pg_current_xact_id()::xid::text` inside the same transaction as your mutation. If you query it outside the transaction, you get a different txid and the client will never reconcile. 
+ +```ts +// API endpoint — txid MUST be in the same transaction as the INSERT +app.post('/api/todos', async (req, res) => { + const client = await pool.connect() + try { + await client.query('BEGIN') + const result = await client.query( + 'INSERT INTO todos (id, text, org_id) VALUES ($1, $2, $3) RETURNING id', + [crypto.randomUUID(), req.body.text, req.body.orgId] + ) + const txResult = await client.query( + 'SELECT pg_current_xact_id()::xid::text AS txid' + ) + await client.query('COMMIT') + // txid accepts number | bigint | `${bigint}` + res.json({ id: result.rows[0].id, txid: parseInt(txResult.rows[0].txid) }) + } finally { + client.release() + } +}) +``` + +```ts +// Client awaits txid before dropping optimistic state +await todoCollection.utils.awaitTxId(txid) +``` + +## Common Mistakes + +### HIGH Designing shapes that span multiple tables + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/data', + params: { + table: 'todos JOIN users ON todos.user_id = users.id', + }, +}) +``` + +Correct: + +```ts +const todoStream = new ShapeStream({ url: '/api/todos' }) +const userStream = new ShapeStream({ url: '/api/users' }) +``` + +Shapes are single-table only. Cross-table data requires multiple shapes joined client-side via TanStack DB live queries. + +Source: `AGENTS.md:104-105` + +### MEDIUM Using enum columns without casting to text in WHERE + +Wrong: + +```ts +// Proxy route +originUrl.searchParams.set('where', "status IN ('active', 'done')") +``` + +Correct: + +```ts +originUrl.searchParams.set('where', "status::text IN ('active', 'done')") +``` + +Enum types in WHERE clauses require explicit `::text` cast. Without it, the query may fail or return unexpected results. 
+ +Source: `packages/sync-service/lib/electric/replication/eval/env/known_functions.ex` + +### HIGH Not setting up txid handshake for optimistic writes + +Wrong: + +```ts +// Backend: just INSERT, return id +app.post('/api/todos', async (req, res) => { + const result = await db.query( + 'INSERT INTO todos (text) VALUES ($1) RETURNING id', + [req.body.text] + ) + res.json({ id: result.rows[0].id }) +}) +``` + +Correct: + +```ts +// Backend: INSERT and return txid in same transaction +app.post('/api/todos', async (req, res) => { + const client = await pool.connect() + try { + await client.query('BEGIN') + const result = await client.query( + 'INSERT INTO todos (text) VALUES ($1) RETURNING id', + [req.body.text] + ) + const txResult = await client.query( + 'SELECT pg_current_xact_id()::xid::text AS txid' + ) + await client.query('COMMIT') + res.json({ id: result.rows[0].id, txid: parseInt(txResult.rows[0].txid) }) + } finally { + client.release() + } +}) +``` + +Without txid, the UI flickers when optimistic state is dropped before the synced version arrives from Electric. The client uses `awaitTxId(txid)` to hold optimistic state until the real data syncs. + +Source: `AGENTS.md:116-119` + +See also: electric-shapes/SKILL.md — Shapes are immutable; dynamic filters require new ShapeStream instances. +See also: electric-orm/SKILL.md — Schema design affects both shapes (read) and ORM queries (write). + +## Version + +Targets @electric-sql/client v1.5.10. diff --git a/packages/typescript-client/skills/electric-shapes/SKILL.md b/packages/typescript-client/skills/electric-shapes/SKILL.md new file mode 100644 index 0000000000..12d3c0fd03 --- /dev/null +++ b/packages/typescript-client/skills/electric-shapes/SKILL.md @@ -0,0 +1,339 @@ +--- +name: electric-shapes +description: > + Configure ShapeStream and Shape to sync a Postgres table to the client. 
+ Covers ShapeStreamOptions (url, table, where, columns, replica, offset, + handle), custom type parsers (timestamptz, jsonb, int8), column mappers + (snakeCamelMapper, createColumnMapper), onError retry semantics, backoff + options, log modes (full, changes_only), requestSnapshot, fetchSnapshot, + subscribe/unsubscribe, and Shape materialized view. Load when setting up + sync, configuring shapes, parsing types, or handling sync errors. +type: core +library: electric +library_version: '1.5.10' +sources: + - 'electric-sql/electric:packages/typescript-client/src/client.ts' + - 'electric-sql/electric:packages/typescript-client/src/shape.ts' + - 'electric-sql/electric:packages/typescript-client/src/types.ts' + - 'electric-sql/electric:packages/typescript-client/src/parser.ts' + - 'electric-sql/electric:packages/typescript-client/src/column-mapper.ts' + - 'electric-sql/electric:website/docs/guides/shapes.md' +--- + +# Electric — Shape Streaming + +## Setup + +```ts +import { ShapeStream, Shape } from '@electric-sql/client' + +const stream = new ShapeStream({ + url: '/api/todos', // Your proxy route, NOT direct Electric URL + // Built-in parsers auto-handle: bool, int2, int4, float4, float8, json, jsonb + // Add custom parsers for other types (see references/type-parsers.md) + parser: { + timestamptz: (date: string) => new Date(date), + }, +}) + +const shape = new Shape(stream) + +shape.subscribe(({ rows }) => { + console.log('synced rows:', rows) +}) + +// Wait for initial sync +const rows = await shape.rows +``` + +## Core Patterns + +### Filter rows with WHERE clause and positional params + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + where: 'user_id = $1 AND status = $2', + params: { '1': userId, '2': 'active' }, + }, +}) +``` + +### Select specific columns (must include primary key) + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + columns: ['id', 'title', 'status'], // PK 
required + }, +}) +``` + +### Map column names between snake_case and camelCase + +```ts +import { ShapeStream, snakeCamelMapper } from '@electric-sql/client' + +const stream = new ShapeStream({ + url: '/api/todos', + columnMapper: snakeCamelMapper(), +}) +// DB column "created_at" arrives as "createdAt" in client +// WHERE clauses auto-translate: "createdAt" → "created_at" +``` + +### Handle errors with retry + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + onError: (error) => { + console.error('sync error', error) + return {} // Return {} to retry; returning void stops the stream + }, +}) +``` + +For auth token refresh on 401 errors, see electric-proxy-auth/SKILL.md. + +### Resume from stored offset + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + offset: storedOffset, // Both offset AND handle required + handle: storedHandle, +}) +``` + +### Get replica with old values on update + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + replica: 'full', // Sends unchanged columns + old_value on updates + }, +}) +``` + +## Common Mistakes + +### CRITICAL Returning void from onError stops sync permanently + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + onError: (error) => { + console.error('sync error', error) + // Returning nothing = stream stops forever + }, +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + onError: (error) => { + console.error('sync error', error) + return {} // Return {} to retry + }, +}) +``` + +`onError` returning `undefined` signals the stream to permanently stop. Return at least `{}` to retry, or return `{ headers, params }` to retry with updated values. 
+ +Source: `packages/typescript-client/src/client.ts:409-418` + +### HIGH Using columns without including primary key + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + columns: ['title', 'status'], + }, +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + columns: ['id', 'title', 'status'], + }, +}) +``` + +Server returns 400 error. The `columns` list must always include the primary key column(s). + +Source: `website/docs/guides/shapes.md` + +### HIGH Setting offset without handle for resumption + +Wrong: + +```ts +new ShapeStream({ + url: '/api/todos', + offset: storedOffset, +}) +``` + +Correct: + +```ts +new ShapeStream({ + url: '/api/todos', + offset: storedOffset, + handle: storedHandle, +}) +``` + +Throws `MissingShapeHandleError`. Both `offset` AND `handle` are required to resume a stream from a stored position. + +Source: `packages/typescript-client/src/client.ts:1997-2003` + +### HIGH Using non-deterministic functions in WHERE clause + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/events', + params: { + table: 'events', + where: 'start_time > now()', + }, +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/events', + params: { + table: 'events', + where: 'start_time > $1', + params: { '1': new Date().toISOString() }, + }, +}) +``` + +Server rejects WHERE clauses with non-deterministic functions like `now()`, `random()`, `count()`. Use static values or positional params. 
+ +Source: `packages/sync-service/lib/electric/replication/eval/env/known_functions.ex` + +### HIGH Not parsing custom Postgres types + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/events', +}) +// createdAt will be string "2024-01-15T10:30:00.000Z", not a Date +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/events', + parser: { + timestamptz: (date: string) => new Date(date), + timestamp: (date: string) => new Date(date), + }, +}) +``` + +Electric auto-parses `bool`, `int2`, `int4`, `float4`, `float8`, `json`, `jsonb`, and `int8` (→ BigInt). All other types arrive as strings — add custom parsers for `timestamptz`, `date`, `numeric`, etc. See [references/type-parsers.md](references/type-parsers.md) for the full list. + +Source: `AGENTS.md:300-308` + +### MEDIUM Using reserved parameter names in params + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + cursor: 'abc', // Reserved! + offset: '0', // Reserved! + }, +}) +``` + +Correct: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { + table: 'todos', + page_cursor: 'abc', + page_offset: '0', + }, +}) +``` + +Throws `ReservedParamError`. Names `cursor`, `handle`, `live`, `offset`, `cache-buster`, and all `subset__*` prefixed params are reserved by the Electric protocol. + +Source: `packages/typescript-client/src/client.ts:1984-1985` + +### MEDIUM Mutating shape options on a running stream + +Wrong: + +```ts +const stream = new ShapeStream({ + url: '/api/todos', + params: { table: 'todos', where: "status = 'active'" }, +}) +// Later... +stream.options.params.where = "status = 'done'" // No effect! +``` + +Correct: + +```ts +// Create a new stream with different params +const newStream = new ShapeStream({ + url: '/api/todos', + params: { table: 'todos', where: "status = 'done'" }, +}) +``` + +Shapes are immutable per subscription. Changing params on a running stream has no effect. 
Create a new `ShapeStream` instance for different filters. + +Source: `AGENTS.md:106` + +## References + +- [WHERE clause supported types and functions](references/where-clause.md) +- [Built-in type parsers](references/type-parsers.md) + +See also: electric-proxy-auth/SKILL.md — Shape URLs must point to proxy routes, not directly to Electric. +See also: electric-debugging/SKILL.md — onError semantics and backoff are essential for diagnosing sync problems. + +## Version + +Targets @electric-sql/client v1.5.10. diff --git a/packages/typescript-client/skills/electric-shapes/references/type-parsers.md b/packages/typescript-client/skills/electric-shapes/references/type-parsers.md new file mode 100644 index 0000000000..0d72afc044 --- /dev/null +++ b/packages/typescript-client/skills/electric-shapes/references/type-parsers.md @@ -0,0 +1,64 @@ +# Electric Shapes — Type Parser Reference + +## Built-in Parsers + +These parsers are applied automatically. All other types arrive as strings. + +| Postgres Type | Parser | Output Type | Notes | +| ------------- | ------------- | ----------- | -------------------------- | +| `int2` | `parseNumber` | `number` | | +| `int4` | `parseNumber` | `number` | | +| `int8` | `parseBigInt` | `BigInt` | Returns BigInt, not number | +| `float4` | `parseNumber` | `number` | | +| `float8` | `parseNumber` | `number` | | +| `bool` | `parseBool` | `boolean` | | +| `json` | `parseJson` | `object` | | +| `jsonb` | `parseJson` | `object` | | + +## Common Custom Parsers + +```ts +const stream = new ShapeStream({ + url: '/api/items', + parser: { + timestamptz: (date: string) => new Date(date), + timestamp: (date: string) => new Date(date), + date: (date: string) => new Date(date), + numeric: (n: string) => parseFloat(n), + interval: (i: string) => i, // Keep as string or use a library + }, +}) +``` + +## Parser Signature + +```ts +type ParseFunction = ( + value: string, + additionalInfo?: Omit +) => Value +``` + +The `additionalInfo` parameter provides 
column metadata like `precision`, `scale`, `max_length`, `not_null`. + +## NULL Handling + +If a column has `not_null: true` in the schema and a `NULL` value is received, the parser throws `ParserNullValueError`. This indicates a schema mismatch. + +## Transformer vs Parser + +- **Parser**: converts individual column values by Postgres type name +- **Transformer**: transforms the entire row object after parsing + +```ts +const stream = new ShapeStream({ + url: '/api/items', + parser: { + timestamptz: (date: string) => new Date(date), + }, + transformer: (row) => ({ + ...row, + fullName: `${row.firstName} ${row.lastName}`, + }), +}) +``` diff --git a/packages/typescript-client/skills/electric-shapes/references/where-clause.md b/packages/typescript-client/skills/electric-shapes/references/where-clause.md new file mode 100644 index 0000000000..ecf6256877 --- /dev/null +++ b/packages/typescript-client/skills/electric-shapes/references/where-clause.md @@ -0,0 +1,64 @@ +# Electric Shapes — WHERE Clause Reference + +## Supported Column Types + +| Type | Example | Notes | +| -------------------------- | ------------------------------------- | --------------------- | +| `text`, `varchar`, `char` | `name = 'Alice'` | String comparison | +| `int2`, `int4`, `int8` | `age > 21` | Numeric comparison | +| `float4`, `float8` | `price < 9.99` | Float comparison | +| `bool` | `active = true` | Boolean | +| `uuid` | `id = '550e8400-...'` | UUID comparison | +| `date` | `created > '2024-01-01'` | Date comparison | +| `timestamp`, `timestamptz` | `updated_at > '2024-01-01T00:00:00Z'` | Timestamp comparison | +| `interval` | `duration > '1 hour'` | Interval comparison | +| `numeric` | `amount >= 100.50` | Arbitrary precision | +| `arrays` | `tags && ARRAY['urgent']` | Array operations | +| `enum` | `status::text IN ('active', 'done')` | **Must cast to text** | + +## Unsupported + +- `timetz` — not supported in WHERE +- Non-deterministic functions: `now()`, `random()`, `count()`, 
`current_timestamp` +- Aggregate functions +- Subqueries (experimental, requires `ELECTRIC_FEATURE_FLAGS=allow_subqueries`) + +## Positional Parameters + +```ts +// Array format +params: { where: 'org_id = $1 AND role = $2', params: ['org-123', 'admin'] } + +// Object format +params: { where: 'org_id = $1 AND role = $2', params: { '1': 'org-123', '2': 'admin' } } +``` + +## Operators + +| Operator | Example | +| ------------------------ | --------------------------------- | +| `=`, `!=`, `<>` | `status = 'active'` | +| `<`, `>`, `<=`, `>=` | `age >= 18` | +| `IN` | `status IN ('active', 'pending')` | +| `NOT IN` | `status NOT IN ('deleted')` | +| `LIKE`, `ILIKE` | `name ILIKE '%john%'` | +| `IS NULL`, `IS NOT NULL` | `deleted_at IS NULL` | +| `AND`, `OR`, `NOT` | `active = true AND age > 18` | +| `BETWEEN` | `age BETWEEN 18 AND 65` | +| `ANY`, `ALL` | Array comparisons | + +## Enum Gotcha + +Enum columns require explicit `::text` cast: + +```ts +// Wrong — fails silently or errors +params: { + where: "status IN ('active', 'done')" +} + +// Correct +params: { + where: "status::text IN ('active', 'done')" +} +``` diff --git a/packages/y-electric/CHANGELOG.md b/packages/y-electric/CHANGELOG.md index 1d7ff2baa4..7742a06e70 100644 --- a/packages/y-electric/CHANGELOG.md +++ b/packages/y-electric/CHANGELOG.md @@ -1,5 +1,13 @@ # @electric-sql/y-electric +## 0.1.38 + +### Patch Changes + +- d1e08b8: Add TanStack Intent skills for AI agent guidance. Ships 9 skills covering shapes, proxy auth, schema design, debugging, deployment, new feature setup, ORM integration, Postgres security, and Yjs collaboration. 
+- Updated dependencies [d1e08b8] + - @electric-sql/client@1.5.12 + ## 0.1.37 ### Patch Changes diff --git a/packages/y-electric/bin/intent.mjs b/packages/y-electric/bin/intent.mjs new file mode 100644 index 0000000000..399d8a3176 --- /dev/null +++ b/packages/y-electric/bin/intent.mjs @@ -0,0 +1,6 @@ +#!/usr/bin/env node +// Auto-generated by @tanstack/intent setup +// Exposes the intent end-user CLI for consumers of this library. +// Commit this file, then add to your package.json: +// "bin": { "intent": "./bin/intent.mjs" } +await import(`@tanstack/intent/intent-library`) diff --git a/packages/y-electric/package.json b/packages/y-electric/package.json index b9469987c1..b91504a37b 100644 --- a/packages/y-electric/package.json +++ b/packages/y-electric/package.json @@ -1,6 +1,6 @@ { "name": "@electric-sql/y-electric", - "version": "0.1.37", + "version": "0.1.38", "description": "YJS network provider for ElectricSQL", "author": "ElectricSQL team and contributors.", "bugs": { @@ -14,6 +14,7 @@ "yjs": "^13.6.6" }, "devDependencies": { + "@tanstack/intent": "^0.0.9", "@types/node": "^22.0.0", "@typescript-eslint/eslint-plugin": "^7.14.1", "@typescript-eslint/parser": "^7.14.1", @@ -56,9 +57,15 @@ } } }, + "bin": { + "intent": "./bin/intent.mjs" + }, "files": [ "dist", - "src" + "src", + "skills", + "bin", + "!skills/_artifacts" ], "homepage": "https://electric-sql.com", "license": "Apache-2.0", diff --git a/packages/y-electric/skills/electric-yjs/SKILL.md b/packages/y-electric/skills/electric-yjs/SKILL.md new file mode 100644 index 0000000000..8b06abf4d8 --- /dev/null +++ b/packages/y-electric/skills/electric-yjs/SKILL.md @@ -0,0 +1,268 @@ +--- +name: electric-yjs +description: > + Set up ElectricProvider for real-time collaborative editing with Yjs via + Electric shapes. 
Covers ElectricProvider configuration, document updates + shape with BYTEA parser (parseToDecoder), awareness shape at offset='now', + LocalStorageResumeStateProvider for reconnection with stableStateVector + diff, debounceMs for batching writes, sendUrl PUT endpoint, required + Postgres schema (ydoc_update and ydoc_awareness tables), CORS header + exposure, and sendErrorRetryHandler. Load when implementing collaborative + editing with Yjs and Electric. +type: composition +library: electric +library_version: '0.1.36' +requires: + - electric-shapes +sources: + - 'electric-sql/electric:packages/y-electric/src/y-electric.ts' + - 'electric-sql/electric:packages/y-electric/src/types.ts' + - 'electric-sql/electric:packages/y-electric/src/local-storage-resume-state.ts' + - 'electric-sql/electric:packages/y-electric/src/utils.ts' + - 'electric-sql/electric:examples/yjs/' +--- + +This skill builds on electric-shapes. Read it first for ShapeStream configuration. + +# Electric — Yjs Collaboration + +## Setup + +### 1. Create Postgres tables + +```sql +CREATE TABLE ydoc_update ( + id SERIAL PRIMARY KEY, + room TEXT NOT NULL, + update BYTEA NOT NULL +); + +CREATE TABLE ydoc_awareness ( + client_id TEXT, + room TEXT, + update BYTEA NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (client_id, room) +); + +-- Garbage collect stale awareness entries +CREATE OR REPLACE FUNCTION gc_awareness_timeouts() +RETURNS TRIGGER AS $$ +BEGIN + DELETE FROM ydoc_awareness + WHERE updated_at < (CURRENT_TIMESTAMP - INTERVAL '30 seconds') + AND room = NEW.room; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER gc_awareness + AFTER INSERT OR UPDATE ON ydoc_awareness + FOR EACH ROW EXECUTE FUNCTION gc_awareness_timeouts(); +``` + +### 2. 
Create server endpoint for receiving updates
+
+```ts
+// PUT /api/yjs/update — receives binary Yjs update
+app.put('/api/yjs/update', async (req, res) => {
+  const body = Buffer.from(await req.arrayBuffer())
+  await db.query('INSERT INTO ydoc_update (room, update) VALUES ($1, $2)', [
+    req.headers['x-room-id'],
+    body,
+  ])
+  res.status(200).end()
+})
+```
+
+### 3. Configure ElectricProvider
+
+```ts
+import * as Y from 'yjs'
+import { Awareness } from 'y-protocols/awareness'
+import {
+  ElectricProvider,
+  LocalStorageResumeStateProvider,
+  parseToDecoder,
+} from '@electric-sql/y-electric'
+
+const ydoc = new Y.Doc()
+const roomId = 'my-document'
+
+const resumeProvider = new LocalStorageResumeStateProvider(roomId)
+
+// Construct the awareness protocol before the provider — it cannot be
+// referenced as `provider.awareness` inside the constructor options,
+// because the `provider` binding does not exist yet at that point.
+const awareness = new Awareness(ydoc)
+
+const provider = new ElectricProvider({
+  doc: ydoc,
+  documentUpdates: {
+    shape: {
+      url: `/api/yjs/doc-shape?room=${roomId}`,
+      parser: parseToDecoder,
+    },
+    sendUrl: '/api/yjs/update',
+    getUpdateFromRow: (row) => row.update,
+  },
+  awarenessUpdates: {
+    shape: {
+      url: `/api/yjs/awareness-shape?room=${roomId}`,
+      parser: parseToDecoder,
+      offset: 'now', // Only live awareness, no historical backfill
+    },
+    sendUrl: '/api/yjs/awareness',
+    protocol: awareness,
+    getUpdateFromRow: (row) => row.update,
+  },
+  resumeState: resumeProvider.load(),
+  debounceMs: 100, // Batch rapid edits
+})
+
+// Persist resume state for efficient reconnection
+resumeProvider.subscribeToResumeState(provider)
+```
+
+## Core Patterns
+
+### CORS headers for Yjs proxy
+
+```ts
+// Proxy must expose Electric headers
+const corsHeaders = {
+  'Access-Control-Expose-Headers':
+    'electric-offset, electric-handle, electric-schema, electric-cursor',
+}
+```
+
+### Resume state for reconnection
+
+```ts
+// On construction, pass stored resume state
+const provider = new ElectricProvider({
+  doc: ydoc,
+  documentUpdates: { shape: shapeOpts, sendUrl: '/api/yjs/update' },
+  resumeState: resumeProvider.load(),
+})
+
+// Subscribe to persist updates
+const unsub = resumeProvider.subscribeToResumeState(provider)
+
+// 
Clean up +provider.destroy() +unsub() +``` + +When `stableStateVector` is provided in resume state, the provider sends only the diff between the stored vector and current doc state on reconnect. + +### Connection lifecycle + +```ts +provider.on('status', ({ status }) => { + // 'connecting' | 'connected' | 'disconnected' + console.log('Yjs sync status:', status) +}) + +provider.on('sync', (synced: boolean) => { + console.log('Document synced:', synced) +}) + +// Manual disconnect/reconnect +provider.disconnect() +provider.connect() +``` + +## Common Mistakes + +### HIGH Not persisting resume state for reconnection + +Wrong: + +```ts +const provider = new ElectricProvider({ + doc: ydoc, + documentUpdates: { + shape: { url: '/api/yjs/doc-shape', parser: parseToDecoder }, + sendUrl: '/api/yjs/update', + getUpdateFromRow: (row) => row.update, + }, +}) +``` + +Correct: + +```ts +const resumeProvider = new LocalStorageResumeStateProvider('my-doc') +const provider = new ElectricProvider({ + doc: ydoc, + documentUpdates: { + shape: { url: '/api/yjs/doc-shape', parser: parseToDecoder }, + sendUrl: '/api/yjs/update', + getUpdateFromRow: (row) => row.update, + }, + resumeState: resumeProvider.load(), +}) +resumeProvider.subscribeToResumeState(provider) +``` + +Without `resumeState`, the provider fetches the ENTIRE document shape on every reconnect. With `stableStateVector`, only a diff is sent. + +Source: `packages/y-electric/src/types.ts:102-112` + +### HIGH Missing BYTEA parser for shape streams + +Wrong: + +```ts +documentUpdates: { + shape: { url: '/api/yjs/doc-shape' }, + sendUrl: '/api/yjs/update', + getUpdateFromRow: (row) => row.update, +} +``` + +Correct: + +```ts +import { parseToDecoder } from '@electric-sql/y-electric' + +documentUpdates: { + shape: { + url: '/api/yjs/doc-shape', + parser: parseToDecoder, + }, + sendUrl: '/api/yjs/update', + getUpdateFromRow: (row) => row.update, +} +``` + +Yjs updates are stored as BYTEA in Postgres. 
Without `parseToDecoder`, the shape returns raw hex strings instead of lib0 Decoders, and `Y.applyUpdate` fails silently or corrupts the document. + +Source: `packages/y-electric/src/utils.ts` + +### MEDIUM Not setting debounceMs for collaborative editing + +Wrong: + +```ts +const provider = new ElectricProvider({ + doc: ydoc, + documentUpdates: { shape: shapeOpts, sendUrl: '/api/yjs/update' }, + // Default debounceMs = 0: every keystroke sends a PUT +}) +``` + +Correct: + +```ts +const provider = new ElectricProvider({ + doc: ydoc, + documentUpdates: { shape: shapeOpts, sendUrl: '/api/yjs/update' }, + debounceMs: 100, +}) +``` + +Default `debounceMs` is 0, sending a PUT request for every keystroke. Set to 100+ to batch rapid edits and reduce server load. + +Source: `packages/y-electric/src/y-electric.ts` + +See also: electric-shapes/SKILL.md — Shape configuration and parser setup. + +## Version + +Targets @electric-sql/y-electric v0.1.x. diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 008dd3611f..7e78c6ba91 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1011,7 +1011,7 @@ importers: examples/tanstack-db-expo-starter: dependencies: '@electric-sql/client': - specifier: 1.5.11 + specifier: 1.5.12 version: link:../../packages/typescript-client '@expo/metro-runtime': specifier: ~5.0.4 @@ -1714,6 +1714,9 @@ importers: specifier: ^2.0.1 version: 2.0.1(patch_hash=46f4e76dd960e002a542732bb4323817a24fce1673cb71e2f458fe09776fa188) devDependencies: + '@tanstack/intent': + specifier: ^0.0.9 + version: 0.0.9 '@types/pg': specifier: ^8.11.6 version: 8.11.10 @@ -1794,6 +1797,9 @@ importers: specifier: ^13.6.6 version: 13.6.26 devDependencies: + '@tanstack/intent': + specifier: ^0.0.9 + version: 0.0.9 '@types/node': specifier: ^22.0.0 version: 22.19.1 @@ -6595,6 +6601,10 @@ packages: resolution: {integrity: sha512-l6wcxwDBeh/7Dhles23U1O8lp9kNJmAb2yNjekR6olZwCRNAVA8TCXlVCrueELyFlYZqvQkh0ofxnzG62A1Kkg==} engines: {node: '>=12'} + '@tanstack/intent@0.0.9': + resolution: 
{integrity: sha512-Ionw2sYSXlbg9AsA0iXDEo+kO75uzgKeX7B9YV6I2O9+CAYaqxblyM0ASIMGirqfv8NkHvBK4wb5zXjrqDUT+A==} + hasBin: true + '@tanstack/pacer-lite@0.1.0': resolution: {integrity: sha512-a5A0PI0H4npUy7u3VOjOhdynXnRBna+mDvpt8ghDCVzS3Tgn8DlGzHlRqS2rKJP8ZcLuVO2qxlIIblhcoaiv8Q==} engines: {node: '>=18'} @@ -21079,6 +21089,10 @@ snapshots: '@tanstack/history@1.139.0': {} + '@tanstack/intent@0.0.9': + dependencies: + yaml: 2.8.1 + '@tanstack/pacer-lite@0.1.0': {} '@tanstack/query-core@5.59.20': {} @@ -28755,7 +28769,7 @@ snapshots: postcss-load-config@4.0.2(postcss@8.4.47): dependencies: lilconfig: 3.1.2 - yaml: 2.6.0 + yaml: 2.8.1 optionalDependencies: postcss: 8.4.47 diff --git a/website/blog/posts/2026-03-06-agent-skills-now-shipping.md b/website/blog/posts/2026-03-06-agent-skills-now-shipping.md new file mode 100644 index 0000000000..209720a9d9 --- /dev/null +++ b/website/blog/posts/2026-03-06-agent-skills-now-shipping.md @@ -0,0 +1,50 @@ +--- +title: "Agent Skills Now Shipping in Our npm Packages" +description: >- + Electric, TanStack DB, and Durable Streams now ship agent skills — versioned knowledge that travels with your packages so coding agents actually understand how to use them. +excerpt: >- + We've been collaborating with the TanStack maintainers to ship agent skills directly in our npm packages. Update your dependencies and run one command to give your coding agent deep knowledge of Electric, TanStack DB, and Durable Streams. +authors: [kyle] +image: /img/blog/agent-skills-now-shipping/hero.png +imageWidth: 2752 +imageHeight: 1536 +tags: [agentic, AI, development] +outline: [2, 3] +post: true +--- + +We've been collaborating with the [TanStack](https://tanstack.com) maintainers on a new [Intent system](https://tanstack.com/blog/from-docs-to-agents) for shipping agent skills with npm packages. Our packages now ship with skills built in. 
+ +Update to the latest versions of `@tanstack/db`, `@electric-sql/client`, and `@durable-streams/client`, then ask your coding agent to run: + +```bash +npx @tanstack/intent install +``` + +This loads a skill *inside your agent* that installs the relevant skills for your project's dependencies. When it finishes, your agent has structured, versioned knowledge of how to use these libraries correctly. + +## Why This Matters + +If you've used a coding agent with a fast-moving library, you've felt the pain. The agent confidently writes code against APIs renamed two versions ago. For newer libraries like [Durable Streams](/products/durable-streams), agents know nothing — the library falls outside their training data. You paste in docs; the agent half-reads them. You point it at a rules file on GitHub; it's already stale. + +As we wrote on the TanStack blog, the core problem is version fragmentation: "once a breaking change ships, models don't 'catch up.' They develop a permanent split-brain — training data contains both versions forever with no way to disambiguate." + +The workarounds — hunting for community-maintained rules files, copy-pasting knowledge with no versioning or staleness signal — fail to scale. Library maintainers already hold the knowledge agents need: docs, migration guides, type signatures. But none of it reached agents through a channel the maintainer controls. + +## Skills That Travel With Your Packages + +Shipping skills *inside* the package fixes this. When you `npm update`, the skills update too — a single source of truth, maintained by us, versioned and distributed through the same channel as the code. + +This creates a compounding loop: when users report skill issues, the fix ships to everyone on the next release. Each `npm update` distributes the improvement across the entire user base. 
+ +## Try It + +Clone the [Playbook repo](https://github.com/KyleAMathews/kpb) — it's set up with Electric, TanStack DB, and Durable Streams as dependencies, so the intent install pulls in all the relevant skills. Fire up your coding agent and ask it to build something. + +We'd love to hear how the skills work for you. They're new and may have rough edges. We've made a feedback skill that walks you through telling the maintainers what can be improved — run it with: + +```bash +npx @tanstack/intent meta collection-feedback +``` + +Every report makes the skills better for everyone. diff --git a/website/public/img/blog/agent-skills-now-shipping/hero.png b/website/public/img/blog/agent-skills-now-shipping/hero.png new file mode 100644 index 0000000000..dd0bf5bffe Binary files /dev/null and b/website/public/img/blog/agent-skills-now-shipping/hero.png differ