diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..5cfa585 --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,84 @@ +{ + "hooks": { + "PostToolUse": [ + { + "matcher": "Task", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code post-task" + } + ] + }, + { + "matcher": "TodoWrite", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code post-todo" + } + ] + } + ], + "PreToolUse": [ + { + "matcher": "Task", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code pre-task" + } + ] + } + ], + "SessionEnd": [ + { + "matcher": "", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code session-end" + } + ] + } + ], + "SessionStart": [ + { + "matcher": "", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code session-start" + } + ] + } + ], + "Stop": [ + { + "matcher": "", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code stop" + } + ] + } + ], + "UserPromptSubmit": [ + { + "matcher": "", + "hooks": [ + { + "type": "command", + "command": "entire hooks claude-code user-prompt-submit" + } + ] + } + ] + }, + "permissions": { + "deny": [ + "Read(./.entire/metadata/**)" + ] + } +} diff --git a/.claude/skills/release-cli/SKILL.md b/.claude/skills/release-cli/SKILL.md new file mode 100644 index 0000000..f9c05db --- /dev/null +++ b/.claude/skills/release-cli/SKILL.md @@ -0,0 +1,97 @@ +--- +name: release-cli +description: Run the full CLI release workflow — version bump, changelog, build, commit, push, and GitHub release. npm publish happens via trusted publishing in CI. +user-invocable: true +allowed-tools: Bash, Read, Edit, Write, Glob, Grep +argument-hint: +--- + +# Release CLI Workflow + +Execute the full release pipeline for `@1a35e1/sonar-cli`. The user invokes this skill with a semver bump type: `patch`, `minor`, or `major`. 
+
+npm publishing uses **trusted publishing** via GitHub Actions OIDC — no OTP required. The `publish.yml` workflow triggers automatically when a GitHub release is created.
+
+## Argument Validation
+
+The argument MUST be one of: `patch`, `minor`, or `major`. If missing or invalid, print usage and stop:
+
+```
+Usage: /release-cli <patch|minor|major>
+```
+
+## Step 1: Pre-flight Checks
+
+Run these checks sequentially. Abort on the first failure with a clear message.
+
+1. **Clean working tree**: Run `git status --porcelain`. If output is non-empty, abort: "Working tree is not clean. Commit or stash changes first."
+2. **On main branch**: Run `git branch --show-current`. If not `main`, abort: "Must be on the main branch to release."
+3. **Build passes**: Run `pnpm build` (this runs `tsc`). If it fails, abort: "Build failed. Fix type errors before releasing."
+
+## Step 2: Version Bump
+
+1. Run `pnpm version <patch|minor|major> --no-git-tag-version` to bump the version in `package.json`.
+2. Read `package.json` and extract the new `"version"` field. Store it as `NEW_VERSION` for use in later steps.
+3. Print: "Version bumped to NEW_VERSION"
+
+## Step 3: Update CHANGELOG
+
+1. Collect commits since the last release. Run:
+   ```
+   git log --oneline $(git log --all --grep='chore: release' --format='%H' -1)..HEAD
+   ```
+   If no release commit is found, collect all commits with `git log --oneline`.
+
+2. Generate a new changelog section following the existing Keep a Changelog format. Use today's date (YYYY-MM-DD). Categorize commits under `### Added`, `### Fixed`, `### Changed`, etc. based on conventional commit prefixes (`feat:` -> Added, `fix:` -> Fixed, `chore:`/`refactor:` -> Changed). Omit empty categories. Each entry should be a bullet starting with the commit message in bold, with a short description if the message is clear enough.
+
+3. Insert the new section into `CHANGELOG.md` immediately after the header block (the `# Changelog` line and the two description lines).
+   The new section goes BEFORE any existing `## [x.y.z]` sections.
+
+   Example format:
+   ```markdown
+   ## [0.3.0] - 2026-03-04
+
+   ### Added
+
+   - **feat: add foo command** — Description of the change.
+
+   ### Fixed
+
+   - **fix: bar edge case** — Description of the fix.
+   ```
+
+4. Print the generated changelog section for the user to review.
+
+## Step 4: Build + Drift Checks
+
+1. Run `pnpm build` to compile TypeScript to `dist/`. Abort if this fails.
+2. Run `pnpm drift:surface:update` to update the command surface snapshot.
+3. Run `pnpm drift:check` to verify all drift checks pass. Abort if any fail.
+
+## Step 5: Git Commit
+
+1. Stage exactly these files: `package.json`, `pnpm-lock.yaml`, `CHANGELOG.md`, `.drift/command-surface.snapshot.json`
+2. Commit with message: `chore: release NEW_VERSION`
+3. Print: "Committed: chore: release NEW_VERSION"
+
+## Step 6: Push + GitHub Release (Confirmation Gate)
+
+**ASK THE USER FOR CONFIRMATION** before proceeding:
+
+> Ready to push to remote and create GitHub release v{NEW_VERSION}. This will trigger npm publish via trusted publishing in CI. Proceed?
+
+If confirmed:
+1. Run `git push`
+2. Create a GitHub release using the changelog section from Step 3:
+   ```
+   gh release create v<NEW_VERSION> --title "v<NEW_VERSION>" --notes "<changelog-section>"
+   ```
+3. Print the GitHub release URL from the `gh` output.
+4. Print: "npm publish will run automatically via GitHub Actions. Monitor at: https://github.com/1a35e1/sonar-cli/actions"
+
+If denied, remind the user they can push and create the release manually later.
+
+## Error Handling
+
+- If any command fails, print the full error output and abort immediately.
+- Do NOT continue past a failed step — each step depends on the previous one.
+- If push or release creation fails, the commit is already made locally. Inform the user they can retry with `git push` and `gh release create` manually.
diff --git a/.drift/command-surface.snapshot.json b/.drift/command-surface.snapshot.json new file mode 100644 index 0000000..8d03ee0 --- /dev/null +++ b/.drift/command-surface.snapshot.json @@ -0,0 +1,36 @@ +{ + "source": "src/commands/**/*.tsx", + "commandCount": 30, + "commands": [ + "sonar", + "sonar account", + "sonar account add", + "sonar account remove", + "sonar account rename", + "sonar account switch", + "sonar archive", + "sonar config", + "sonar config env", + "sonar config nuke", + "sonar config set", + "sonar config setup", + "sonar config skill", + "sonar data backup", + "sonar data path", + "sonar data pull", + "sonar data restore", + "sonar data sql", + "sonar data verify", + "sonar feed", + "sonar later", + "sonar refresh", + "sonar skip", + "sonar status", + "sonar topics", + "sonar topics add", + "sonar topics delete", + "sonar topics edit", + "sonar topics suggest", + "sonar topics view" + ] +} diff --git a/.entire/.gitignore b/.entire/.gitignore new file mode 100644 index 0000000..2cffdef --- /dev/null +++ b/.entire/.gitignore @@ -0,0 +1,4 @@ +tmp/ +settings.local.json +metadata/ +logs/ diff --git a/.entire/settings.json b/.entire/settings.json new file mode 100644 index 0000000..d2b34c8 --- /dev/null +++ b/.entire/settings.json @@ -0,0 +1,5 @@ +{ + "strategy": "manual-commit", + "enabled": true, + "telemetry": true +} diff --git a/.github/workflows/drift-checks.yml b/.github/workflows/drift-checks.yml new file mode 100644 index 0000000..b926e9e --- /dev/null +++ b/.github/workflows/drift-checks.yml @@ -0,0 +1,40 @@ +name: Drift Checks + +on: + pull_request: + push: + branches: + - main + - feat/** + - phase-** + +jobs: + drift: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + + - name: Install dependencies + run: pnpm install 
--frozen-lockfile + + - name: Build + run: pnpm build + + - name: Run drift checks + env: + CI: "true" + SONAR_API_URL: https://api.sonar.8640p.info/graphql + run: pnpm drift:check diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..dcf735f --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,47 @@ +name: Publish to npm + +on: + release: + types: [published] + +jobs: + publish: + name: Publish + runs-on: ubuntu-latest + + permissions: + contents: read + id-token: write + + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.release.target_commitish }} + + - uses: pnpm/action-setup@v4 + with: + version: 10.19.0 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + registry-url: https://registry.npmjs.org + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build + run: pnpm build + + - name: Drift checks + env: + CI: "true" + SONAR_API_URL: https://api.sonar.8640p.info/graphql + run: pnpm drift:check + + - name: Publish + run: pnpm publish --access public --no-git-checks --provenance + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..3d39748 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,92 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.4.0] - 2026-04-08 + +### Added + +- **`sonar account` — multi-account management** — `add`, `switch`, `remove`, `rename` commands with `~/.sonar/accounts.json`. Random fun names (bouncy-rabbit) for unnamed accounts. +- **`sonar refresh` composable flags** — `--bookmarks`, `--likes`, `--graph`, `--tweets`, `--suggestions` for selective pipeline execution. Any combo works. 
+- **`sonar sync likes`** — sync likes from X (mirrors `sync bookmarks`). +- **`sonar data` namespace** — `pull`, `backup`, `restore`, `verify`, `path`, `sql` moved from `config data`. +- **Help banner** — spaced `S O N A R` header with version on `--help`. +- **Account rename** — `sonar account rename ` with hint on random names. + +### Changed + +- **WASM SQLite** — replaced `better-sqlite3` (native) with `node-sqlite3-wasm`. No more Node version mismatch errors, works on any platform. +- **`SONAR_API_KEY` removed** — auth is now exclusively via `sonar account add `. Existing `config.json` tokens auto-migrate. +- **`config data` → `data`** — flatter namespace. `download`/`sync` merged into `sonar data pull`. +- **`interests` → `topics`** — consistent naming in local SQLite schema. +- **Pulse spinner** — switched to `unicode-animations` pulse spinner. + +### Fixed + +- **Pipeline auth errors** — detect expired X OAuth, show re-auth guidance, surface backend error messages. +- **Pipeline error visibility** — backend now exposes error field in status endpoint, stale errors cleared on new runs. + +## [0.3.1] - 2026-04-08 + +### Added + +- **feat: `sonar feed` command** — Read-only feed view with `--hours`, `--days`, `--limit`, `--kind`, `--render`, `--width`, `--json` flags. No triage, pure pipe-friendly output. +- **feat: `sonar feed --follow`** — Continuous polling mode with NDJSON streaming (`--follow --json`), configurable interval (`--interval`), and xid-based deduplication. +- **feat: `sonar topics suggest`** — AI-powered topic suggestions using OpenAI or Anthropic. Interactive accept/reject UI with `--count`, `--vendor`, and `--json` flags. +- **feat: GraphQL client retry with exponential backoff** — Automatic retries on network errors and 5xx with jittered backoff. Configurable via `SONAR_MAX_RETRIES` env var. +- **feat: trusted publishing via GitHub Actions** — `publish.yml` workflow triggers on GitHub release, publishes with OIDC provenance. 
No OTP required. +- **feat: `sonar topics view`, `topics delete`** — Full topic CRUD from the CLI. +- **feat: `sonar sync bookmarks`** — Sync bookmarks from X. +- **feat: `status --watch` improvements** — Press `r` to refresh, `q` to quit, pipeline step progress, deferred job counts. +- **feat: drift prevention checks** — CI gate for schema, surface, docs, and data compatibility drift. + +### Fixed + +- **fix: config nuke deletes real local database** — Previously left orphaned DB file. +- **fix: align CLI queries with current topics schema** — Renamed `interests` → `topics` throughout. +- **fix: suppress spinner in `--json` mode** — Clean JSON output safe to pipe. +- **fix: unknown command shows error** — Instead of falling through silently. + +### Changed + +- **chore: renamed `release` skill to `release-cli`** — Clearer naming, switched to trusted publishing pipeline. +- **refactor: `interests` → `topics` rename** — Consistent naming across CLI commands, queries, and user-facing strings. + +## [0.2.1] - 2026-03-04 + +### Added + +- **feat: add `sonar quickstart` command** — New quick-start command for first-time setup (#7). + +### Fixed + +- **fix: correct CLI command from 'sonar ingest monitor' to 'sonar monitor'** — Fixed incorrect command reference in documentation/output (#9). + +### Changed + +- **chore: added release skill** — Added automated release workflow skill. + +## [0.2.0] - 2026-02-23 + +### Added + +- **feat: add sonar quickstart command** — New `sonar quickstart` command for first-time setup. Checks authentication, proposes 3 starter interests tailored to the typical Sonar user, creates them on confirmation, triggers tweet indexing, and shows an initial inbox preview — all in one step. + +- **feat(config data): add sqlite backup/restore/verify commands** — New `sonar config data backup`, `config data restore`, and `config data verify` commands for managing the local SQLite database. Useful for safeguarding your data before migrations or upgrades. 
+ +### Fixed + +- **fix: suppress spinner output in --json mode for interests create/update** — Running `interests create` or `interests update` with `--json` no longer leaks spinner/progress text into the JSON output, making it safe to pipe to `jq` and other tools. + +- **fix: ingest hang diagnostics — timeout detection + actionable error output** — `ingest` commands that stall due to upstream API delays now detect the hang, surface a clear timeout error with guidance, and exit cleanly instead of hanging indefinitely. + +- **fix: --from-prompt timeout handling with actionable error output** — When using `--from-prompt` and the AI step times out, sonar-cli now reports the timeout with an actionable message rather than crashing silently. + +- **fix: actionable diagnostics for empty feed/inbox results** — When `feed` or `inbox` returns no results, sonar-cli now explains why (e.g. no interests configured, no items ingested) and suggests next steps instead of printing an unhelpful empty list. + +## [0.1.3] - prior + +See git history for earlier changes. diff --git a/README.md b/README.md index e4afe07..300f8ae 100644 --- a/README.md +++ b/README.md @@ -1,422 +1,331 @@ -# 🔊 Sonar (Preview) +# 🔊 Sonar (Alpha) -Experimental X CLI for OpenClaw 🦞 power users. +Agent optimised [X](https://x.com) CLI for founders who want to stay ahead of the curve. -Sonar matches interests from your X graph using various AI pipelines. We built this to automate our social intelligence. +We got tired of missing important content in our feed and built Sonar to fix it. -This cli has been designed to handover indexing and consumption to agents. - -* Pipe it into scripts, -* automate your morning briefing, -* Or just discover tweets you probably missed out on the web interface. - ---- +Sonar matches your interests from your X network, filtering only relevant content from your graph using a variety of AI pipelines. 
We built this to automate our social intelligence at [@LighthouseGov](https://x.com/LighthouseGov). ## Get started -* Register with `X` to get an API key from `https://sonar.8640p.info/` - * Learn more about which [scopes](#scopes) we request and why. +* Login with your `X` account to obtain a [free API key](https://sonar.8640p.info/). Install the CLI ```sh -pnpm add -g @1a35e1/sonar-cli +pnpm add -g @1a35e1/sonar-cli@latest ``` Register your API key. ```sh -# Make "SONAR_API_KEY" avaliable in your env -export SONAR_API_KEY=snr_xxxxx - -# or, manually register -sonar config setup key= +sonar account add snr_xxxxx ``` -View your account to ensure evrything works. +View your account status: ```sh -sonar account +sonar status ``` -Ingest your first `tweets` and check to `monitor` progress. +Run your first refresh to index tweets and generate suggestions: -> The first time this you run this command it will take some time. +> The first time you run this it will take some time. ```sh -sonar ingest tweets - -sonar ingest monitor -sonar ingest monitor --watch +sonar refresh +sonar status --watch ``` --- ## Scopes -* We currently request `read:*` and `offline:processing` scopes based on <. If there is an appite +* We currently request `read:*` and `offline:processing` scopes +* This allows us to read your feed, bookmarks, followers/following, and other account data to power our signal filtering and topic suggestions. -* So we can stay connected to your account until you revoke access. -* Posts you’ve liked and likes you can view. -* All the posts you can view, including posts from protected accounts. -* Accounts you’ve muted. -* Accounts you’ve blocked. -* People who follow you and people who you follow. -* All your Bookmarks. -* Lists, list members, and list followers of lists you’ve created or are a member of, including private lists. -* Any account you can view, including protected accounts. 
-## Why Sonar exists +## Use cases -Setting up your own social data pipeline is genuinely awful. You're looking at OAuth flows, rate limit math, pagination handling, webhook plumbing, deduplication logic, and a SQLite schema you'll regret in three weeks — before you've seen a single useful result. Most developers who try it abandon it halfway through. - -**Sonar skips all of that. Get actionalable data for OpenClaw in 15 minutes.** +### Morning briefing in one command -We believe your data is yours. So you want to go deeper than our platform allows — build your own models, run custom queries, pipe it into your own tooling — you can download everything we have indexed on your behalf into a local SQLite database and do whatever you want with it: +Pull everything relevant that happened while you slept: ```bash -pnpm run cli -- data download # full snapshot → ~/.sonar/data.db -pnpm run cli -- data sync # incremental updates -pnpm run cli -- data sql # drop into a sqlite3 shell +sonar feed --hours 8 ``` -No lock-in. If you outgrow us, you leave with your data intact. - -## Design philosophy +### Stream your feed in real time -There's a quiet shift happening in how developer tools are built. +Watch for new items as they appear: -In the early web2 era, API-first was a revelation. Stripe, Twilio, Sendgrid — companies that exposed clean REST contracts unlocked entire ecosystems of products built on top of them. The insight was simple: if your service has strong, reliable APIs, anyone can build anything. The interface didn't matter as much as the contract underneath. -We're at a similar inflection point now, but the interface layer has changed dramatically. - -The goal for most workflows today is fire and forget — you define what you want, set it in motion, and let agents handle the execution. That only works if the underlying APIs are strong enough to support complex, long-running ETL pipelines without hand-holding. 
Sonar is built with that assumption: the API is the product, the CLI is just one interface into it. -Which raises an interesting question about CLIs themselves. Traditionally a CLI was developer-first by definition — you were writing for someone comfortable with flags, pipes, and man pages. But if the primary consumer of your CLI is increasingly an agent (OpenClaw, a cron job, an LLM with tool access), the design principles shift: - -Output should be machine-readable by default. Every command has a --json flag. Agents don't parse card renders. -Commands should be composable. Small, single-purpose commands that pipe cleanly into each other are more useful to an agent than monolithic workflows. - -Side effects should be explicit. An agent calling index --force should know exactly what it's triggering. No surprises. -Errors should be structured. A human reads an error message. An agent needs to know whether to retry, skip, or escalate. - -The CLI still needs to work well for humans — interactive mode, card renders, readable output — but those are progressive enhancements on top of a foundation built for automation. Design for the agent, polish for the human. -This is what API-first looks like in the agentic era: strong contracts at the service layer, composable interfaces at the CLI layer, and a clear separation between the two. 
- ---- - -## What you can do with it +```bash +sonar feed --follow # visual cards, polls every 30s +sonar feed --follow --json | jq .score # NDJSON stream for agents +``` -### Morning briefing in one command +### Discover new topics with AI -Pull everything relevant that happened while you slept: +Let Sonar suggest topics based on your interests and feed: ```bash -pnpm run cli -- feed --hours 8 --render card -pnpm run cli -- inbox --status inbox +sonar topics suggest # interactive accept/reject +sonar topics suggest --count 3 # just 3 suggestions ``` -### Track a topic you care about — right now +### Track a topic you care about -Create a new interest from a plain English prompt and get content immediately: +Add a topic, then refresh: ```bash -pnpm run cli -- interests create \ - --from-prompt "I want to follow AI evals and agent infrastructure" - -pnpm run cli -- index suggestions --days 1 -pnpm run cli -- feed --hours 24 +sonar topics add "AI agents" +sonar refresh +sonar feed --hours 24 ``` -Sonar generates keywords and topics from your prompt, kicks off indexing, and your feed updates with relevant posts. +Sonar rebuilds your social graph, indexes recent tweets, and generates suggestions matched against your topics and interest profile. 
### Build a scriptable news digest Combine `--json` output with `jq` to pipe Sonar content wherever you want: ```bash -# Get today's top feed items as JSON -pnpm run cli -- feed --hours 24 --json | jq '.[] | {author, text, url}' - -# Summarize your inbox with an LLM -pnpm run cli -- inbox --json | jq '.[].text' | your-summarizer-script -``` - -### Keep your local data fresh and queryable +# Get today's feed as JSON +sonar feed --hours 24 --json | jq '.[] | {author: .tweet.user.username, text: .tweet.text}' -Download a full SQLite snapshot of your Sonar data and query it directly: +# Summarize with an LLM +sonar feed --json | jq '.[].tweet.text' | your-summarizer-script -```bash -pnpm run cli -- data download -pnpm run cli -- data sql -# Now you have a full sqlite3 shell — write any query you want +# Stream high-score items to a file +sonar feed --follow --json | jq --unbuffered 'select(.score > 0.7)' >> highlights.jsonl ``` -Run incremental syncs on a cron to keep it current: +### Monitor the pipeline + +Watch the queue in real time while refresh runs: ```bash -# crontab: sync every 30 minutes -*/30 * * * * cd /your/project && pnpm run cli -- data sync +sonar refresh +sonar status --watch ``` ### Interactive triage -Work through your inbox without leaving the terminal: +Work through suggestions without leaving the terminal: ```bash -pnpm run cli -- inbox --interactive -pnpm run cli -- feed --interactive +sonar # interactive triage is on by default +sonar --no-interactive # disable for scripting ``` -Mark suggestions as read, skip, archive, or save for later — keyboard-driven. +Mark suggestions as skip, later, or archive — keyboard-driven. -### Monitor indexing jobs +### Build your own filters and dashboards (WIP) -Watch the queue in real time while you trigger a full re-index: +Download your data and build your own tools on top of it. 
```bash -pnpm run cli -- index # trigger all jobs -pnpm run cli -- index status --watch # watch until complete +sonar sync # sync data to ~/.sonar/data.db ``` ---- - -## What Sonar doesn't do +No lock-in. If you outgrow us, you leave with your data intact. -Sonar is **not a global search engine**. It won't crawl the entire internet or index trending posts from people you've never heard of. +--- -Instead, it searches within your social graph — your followers and the people you follow — up to **2 degrees of separation**. That's it. This is an intentional constraint, not a limitation we're working around. +## How Sonar finds relevant content -The reason is practical: API rate limits make broad crawling impossible at any useful refresh frequency. But the reason it works is more interesting — **the people in your network are already a curated signal layer**. The accounts you follow, and the accounts they follow, are a surprisingly high-quality filter for what's relevant to your domain. Sonar's job is to surface what's moving through that graph before it reaches mainstream feeds. +Sonar surfaces relevant content from your immediate network — the people you follow and who follow you. Your network is already a curated signal layer. Sonar's job is to surface what's moving through that graph before it reaches mainstream feeds. What this means in practice: * Results reflect your network's attention, not global virality -* You won't see noise from accounts you have no connection to * The feed gets more useful the more intentional you are about who you follow -* Adding interests with specific keywords and topics sharpens what Sonar surfaces *within* that graph - -If you want global trend monitoring, tools like Brandwatch or Twitter's native search are better fits. Sonar is for developers who want a focused, low-noise signal from a network they've already curated. 
+* Bookmarking and liking content improves your recommendations over time +* Topics sharpen what Sonar surfaces within your graph ---- - -## Pair with OpenClaw +## Setup -[OpenClaw](https://github.com/openclaw/openclaw) is a local-first autonomous AI agent that runs on your machine and talks to you through WhatsApp, Telegram, Discord, Slack, or iMessage. It can execute shell commands, run on a schedule, and be extended with custom skills. +### Prerequisites -Sonar + OpenClaw is a natural stack: **Sonar handles the signal filtering and curation, OpenClaw handles delivery and action.** Together they turn your social feed into an ambient intelligence layer you don't have to babysit. +* Node.js 20+ +* `pnpm` +* A Sonar API key from [sonar.8640p.info](https://sonar.8640p.info/) -### Morning briefing delivered to your phone +### Install and authenticate -Set up a cron job in OpenClaw to run your Sonar digest and pipe it back to you on Telegram every morning: +```bash +pnpm add -g @1a35e1/sonar-cli@latest +sonar account add ``` -# In OpenClaw: schedule a daily 8am briefing -"Every morning at 8am, run `sonar feed --hours 8 --json` and summarize the top 5 posts for me" -``` - -OpenClaw will execute the CLI, pass the JSON output to your LLM, and send a clean summary straight to your phone — no dashboard to open. - -### Ask your feed questions in natural language -Because `--json` makes Sonar output composable, OpenClaw can reason over it conversationally: +Verify it works: -``` -# Example prompts you can send OpenClaw via WhatsApp: -"What's the most discussed topic in my Sonar feed today?" -"Did anyone in my feed mention Uniswap V4 in the last 48 hours?" -"Summarize my unread Sonar inbox" +```bash +sonar status +sonar topics ``` -Wire it up once as an OpenClaw skill and your feed becomes queryable from any messaging app. 
+--- -### Triage your inbox hands-free +## Command Reference -Combine OpenClaw's scheduling with Sonar's inbox API to automatically mark low-signal suggestions: +### Default — triage suggestions ```bash -# Shell script you can hand to OpenClaw as a scheduled skill -sonar inbox --json | \ - jq '[.[] | select(.score < 0.4) | .id]' | \ - xargs -I{} sonar inbox skip {} +sonar # interactive triage (default) +sonar --hours 24 # widen time window +sonar --days 3 # last 3 days +sonar --kind bookmarks # default | bookmarks | followers | following +sonar --render table --limit 50 # table layout +sonar --json # raw JSON output +sonar --no-interactive # disable interactive mode ``` -Run this nightly and your inbox stays clean without manual triage. - -### Get alerted when a topic spikes - -Use OpenClaw's Heartbeat (scheduled wake-up) to watch for signal surges and notify you: +### Feed — read-only view +```bash +sonar feed # read-only feed (last 12h, limit 20) +sonar feed --hours 48 --limit 50 # widen window +sonar feed --kind bookmarks # bookmarks | followers | following +sonar feed --render table # table layout +sonar feed --json | jq . # pipe to jq ``` -# OpenClaw cron: check every 2 hours -"Run `sonar feed --hours 2 --json` — if there are more than 10 posts about -'token launchpad' or 'LVR', send me a Telegram alert with the highlights" -``` - -Effectively a custom Google Alert, but filtered through your actual interest graph. -### Build a Sonar skill for OpenClaw +#### Streaming with --follow -The cleanest integration is wrapping Sonar as a reusable OpenClaw skill. 
Drop a skill file in your OpenClaw workspace: +Poll for new items continuously and stream them to your terminal or another process: -```typescript -// skills/sonar.ts -export async function getFeed(hours = 12) { - const { stdout } = await exec(`sonar feed --hours ${hours} --json`); - return JSON.parse(stdout); -} - -export async function getInbox() { - const { stdout } = await exec(`sonar inbox --json`); - return JSON.parse(stdout); -} +```bash +sonar feed --follow # poll every 30s, visual cards +sonar feed --follow --interval 10 # poll every 10s +sonar feed --follow --json # NDJSON stream (one JSON per line) +sonar feed --follow --json | jq --unbuffered '.score' ``` -Once registered, OpenClaw can call these tools autonomously whenever it decides they're relevant — no manual prompting required. +Press `q` to quit follow mode. ---- - -## Setup - -### Prerequisites - -* Node.js 20+ -* `pnpm` -* A Sonar API key from [sonar.sh/account](https://sonar.sh/account?tab=api-keys) -* Optional: `sqlite3` CLI (only needed for `data sql`) - -### Install and authenticate +### Topics ```bash -pnpm install - -export SONAR_API_KEY="your_api_key_here" -pnpm run cli -- init +sonar topics # list all topics +sonar topics --json # JSON output +sonar topics add "AI agents" # add a topic +sonar topics view # view a topic +sonar topics edit --name "New Name" +sonar topics delete # delete a topic ``` -`init` writes your config to `~/.sonar/config.json`. If `SONAR_API_KEY` is set in your environment, it always takes precedence. 
+#### AI-powered topic suggestions -Verify it works: +Let Sonar suggest new topics based on your existing interests and recent feed: ```bash -pnpm run cli -- account -pnpm run cli -- interests +sonar topics suggest # interactive — y/n/q per suggestion +sonar topics suggest --count 3 # limit to 3 suggestions +sonar topics suggest --vendor anthropic # use Anthropic instead of OpenAI +sonar topics suggest --json # raw suggestions as JSON ``` ---- - -## Command Reference +Requires `OPENAI_API_KEY` or `ANTHROPIC_API_KEY` depending on vendor. -### Account & Config +### Account ```bash -pnpm run cli -- account # plan, usage, suggestion counters -pnpm run cli -- config # show current config -pnpm run cli -- config set vendor anthropic # or openai -pnpm run cli -- config set feed-render card # or table -pnpm run cli -- config set feed-width 100 +sonar account # list accounts, * marks active +sonar account add # add account (random name) +sonar account add --alias work # add with custom name +sonar account switch # switch active account +sonar account rename # rename an account +sonar account remove # remove (--force if active) ``` -### Interests +### Refresh ```bash -pnpm run cli -- interests # list all -pnpm run cli -- interests --json # JSON output - -# Create manually -pnpm run cli -- interests create \ - --name "Rust Systems" \ - --description "Rust, compilers, and systems tooling" \ - --keywords "rust,cargo,wasm" \ - --topics "systems programming,performance" - -# Create from a natural language prompt (requires OPENAI_API_KEY or ANTHROPIC_API_KEY) -pnpm run cli -- interests create \ - --from-prompt "I want to follow AI evals and agent infra" - -# Update -pnpm run cli -- interests update --id --name "New Name" -pnpm run cli -- interests update --id --add-keywords "mcp,langgraph" -pnpm run cli -- interests update --id --remove-topics "old-topic" +sonar refresh # full pipeline (all steps) +sonar refresh --bookmarks # just sync bookmarks from X +sonar refresh --likes # 
just sync likes from X +sonar refresh --graph # just rebuild social graph +sonar refresh --tweets # just index tweets +sonar refresh --suggestions # just regenerate suggestions +sonar refresh --likes --bookmarks # any combo of flags ``` -### Feed +### Status ```bash -pnpm run cli -- feed # last 12h, limit 20, card render -pnpm run cli -- feed --hours 24 -pnpm run cli -- feed --days 3 -pnpm run cli -- feed --kind bookmarks # default | bookmarks | followers | following -pnpm run cli -- feed --render table --limit 50 -pnpm run cli -- feed --interactive -pnpm run cli -- feed --json +sonar status # account status, queue activity +sonar status --watch # poll every 2s ``` -### Inbox +### Triage ```bash -pnpm run cli -- inbox # list inbox suggestions -pnpm run cli -- inbox --all -pnpm run cli -- inbox --status inbox --limit 50 -pnpm run cli -- inbox --interactive -pnpm run cli -- inbox --json - -pnpm run cli -- inbox read --id -pnpm run cli -- inbox skip --id -pnpm run cli -- inbox later --id -pnpm run cli -- inbox archive --id +sonar skip --id # skip a suggestion +sonar later --id # save for later +sonar archive --id # archive a suggestion ``` -### Indexing +### Data ```bash -pnpm run cli -- reindex # run all jobs -pnpm run cli -- reindex tweets -pnpm run cli -- reindex graph -pnpm run cli -- reindex graph --force -pnpm run cli -- reindex suggestions --days 1 -pnpm run cli -- reindex bookmarks -pnpm run cli -- reindex status -pnpm run cli -- reindex status --watch +sonar data pull # download feed/suggestions/topics to local SQLite +sonar data backup # backup local DB +sonar data restore --from # restore from backup +sonar data verify # integrity check +sonar data path # show DB location +sonar data sql # query helper ``` -### Local Data +### Config ```bash -pnpm run cli -- data download # full download → ~/.sonar/data.db -pnpm run cli -- data sync # incremental sync -pnpm run cli -- data path # print DB path -pnpm run cli -- data sql # open sqlite3 shell +sonar config # 
show current config +sonar config setup --key= # legacy setup +sonar config set vendor anthropic # set AI vendor +sonar config skill --install # install OpenClaw skill (--force to overwrite) ``` --- ## Environment Variables -| Variable | Required | Purpose | -|---|---|---| -| `SONAR_API_KEY` | Yes (unless saved by `init`) | Auth token | -| `SONAR_API_URL` | No | GraphQL endpoint (default: `http://localhost:8000/graphql`) | -| `SONAR_AI_VENDOR` | No | AI vendor for prompt generation (`openai` or `anthropic`) | -| `SONAR_FEED_RENDER` | No | Default render style (`card` or `table`) | -| `SONAR_FEED_WIDTH` | No | Default card width | -| `OPENAI_API_KEY` | Sometimes | Required for OpenAI-powered `--from-prompt` | -| `ANTHROPIC_API_KEY` | Sometimes | Required for Anthropic-powered `--from-prompt` | +| Variable | Required | Purpose | +| ------------------- | -------------------- | ------------------------------------------------------------------- | +| `SONAR_API_URL` | No | GraphQL endpoint (default: production API) | +| `SONAR_MAX_RETRIES` | No | Max retry attempts on transient failures (default: 3, 0 to disable) | +| `OPENAI_API_KEY` | For `topics suggest` | Required when using OpenAI vendor for AI suggestions | +| `ANTHROPIC_API_KEY` | For `topics suggest` | Required when using Anthropic vendor for AI suggestions | ## Local Files -| Path | Contents | -|---|---| +| Path | Contents | +| ---------------------- | ---------------------------- | | `~/.sonar/config.json` | Token, API URL, CLI defaults | -| `~/.sonar/data.db` | Local synced SQLite database | +| `~/.sonar/data.db` | Local synced SQLite database | + +--- + +## Drift Prevention Checks + +```bash +# Run all drift checks (surface/docs/data/schema) +pnpm drift:check + +# Refresh committed command snapshot after intentional command changes +pnpm drift:surface:update +``` + +`drift:schema:check` validates GraphQL documents against the live schema. +Locally, it skips when offline; in CI (`CI=true`) it is enforced. 
--- ## Troubleshooting -**`No token found. Set SONAR_API_KEY or run: sonar init`** -Set `SONAR_API_KEY` in your environment, then run `pnpm run cli -- init`. +**`No token found. Run: sonar account add `** +Add an account with `sonar account add `. Get a key at [sonar.8640p.info](https://sonar.8640p.info/). **`Unable to reach server, please try again shortly.`** -Check `SONAR_API_URL`, your network, and API availability. - -**`OPENAI_API_KEY is not set` / `ANTHROPIC_API_KEY is not set`** -Set the key for your chosen vendor before using `--from-prompt` or interactive reply generation. +Check your network connection and API availability. The CLI automatically retries transient failures (network errors, 5xx) up to 3 times with exponential backoff. Use `--debug` to see retry attempts. Set `SONAR_MAX_RETRIES=0` to disable retries. diff --git a/codegen.ts b/codegen.ts index 88379ea..ea0126b 100644 --- a/codegen.ts +++ b/codegen.ts @@ -1,9 +1,15 @@ import type { CodegenConfig } from '@graphql-codegen/cli' +const defaultSchemaUrl = 'https://api.sonar.8640p.info/graphql' +const rawSchemaUrl = process.env.SONAR_API_URL ?? defaultSchemaUrl +const schemaUrl = rawSchemaUrl.endsWith('/graphql') + ? 
rawSchemaUrl + : `${rawSchemaUrl.replace(/\/$/, '')}/graphql` + // https://the-guild.dev/graphql/codegen/plugins/typescript/typescript-graphql-request const config: CodegenConfig = { overwrite: true, - schema: 'https://api.sonar.8640p.info/graphql', + schema: schemaUrl, documents: ['src/**/*'], hooks: { afterAllFileWrite: ['pnpm biome check --write src/types/sonar.ts --linter-enabled=false'], diff --git a/package.json b/package.json index 107e56a..e41b4d9 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@1a35e1/sonar-cli", - "version": "0.1.2", - "description": "X/Twitter social graph CLI for signal filtering and curation", + "version": "0.4.2", + "description": "X social graph CLI for signal filtering and curation", "type": "module", "bin": { "sonar": "dist/cli.js" @@ -13,6 +13,10 @@ "engines": { "node": ">=20" }, + "repository": { + "type": "git", + "url": "https://github.com/1a35e1/sonar-cli" + }, "publishConfig": { "access": "public" }, @@ -21,29 +25,37 @@ "version:minor": "pnpm version minor --no-git-tag-version", "types": "graphql-codegen --config codegen.ts", "sonar": "tsx src/cli.ts", - "build": "tsc", + "generate:skill": "tsx scripts/generate-skill.ts", + "build": "tsx scripts/generate-skill.ts && tsc", "typecheck": "tsc --noEmit", - "prepublishOnly": "tsc" + "drift:schema:check": "node scripts/check-schema-drift.mjs", + "drift:surface:update": "node scripts/update-command-surface-snapshot.mjs", + "drift:surface:check": "node scripts/check-command-surface-snapshot.mjs", + "drift:docs:check": "node scripts/check-doc-command-parity.mjs", + "drift:data:check": "node scripts/check-data-compat.mjs", + "drift:skill:check": "node scripts/check-skill-drift.mjs", + "drift:check": "pnpm drift:surface:check && pnpm drift:skill:check && pnpm drift:docs:check && pnpm drift:data:check && pnpm drift:schema:check", + "prepublishOnly": "tsx scripts/generate-skill.ts && tsc" }, "dependencies": { - "better-sqlite3": "^11", "date-fns": "4.1.0", 
"graphql": "^16.12.0", "graphql-request": "^7.4.0", "ink": "^6", + "ink-link": "^5.0.0", "ink-table": "^3.1.0", + "node-sqlite3-wasm": "^0.8.55", "pastel": "^3.0.0", "react": "^19", + "unicode-animations": "^1.0.3", "zod": "^3.25.76" }, "devDependencies": { "@graphql-codegen/cli": "^5.0.5", "@graphql-codegen/typescript-graphql-request": "^6.4.0", - "@types/better-sqlite3": "^7", "@types/node": "^22", "@types/react": "^19", "biome": "^0.3.3", - "ink-link": "^5.0.0", "tsx": "^4", "typescript": "^5" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cefeb3e..4aef483 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,9 +8,6 @@ importers: .: dependencies: - better-sqlite3: - specifier: ^11 - version: 11.10.0 date-fns: specifier: 4.1.0 version: 4.1.0 @@ -23,15 +20,24 @@ importers: ink: specifier: ^6 version: 6.8.0(@types/react@19.2.14)(react@19.2.4) + ink-link: + specifier: ^5.0.0 + version: 5.0.0(ink@6.8.0(@types/react@19.2.14)(react@19.2.4)) ink-table: specifier: ^3.1.0 version: 3.1.0(ink@6.8.0(@types/react@19.2.14)(react@19.2.4))(react@19.2.4) + node-sqlite3-wasm: + specifier: ^0.8.55 + version: 0.8.55 pastel: specifier: ^3.0.0 version: 3.0.0(ink@6.8.0(@types/react@19.2.14)(react@19.2.4))(react@19.2.4)(zod@3.25.76) react: specifier: ^19 version: 19.2.4 + unicode-animations: + specifier: ^1.0.3 + version: 1.0.3 zod: specifier: ^3.25.76 version: 3.25.76 @@ -42,9 +48,6 @@ importers: '@graphql-codegen/typescript-graphql-request': specifier: ^6.4.0 version: 6.4.0(graphql-request@7.4.0(graphql@16.12.0))(graphql-tag@2.12.6(graphql@16.12.0))(graphql@16.12.0) - '@types/better-sqlite3': - specifier: ^7 - version: 7.6.13 '@types/node': specifier: ^22 version: 22.19.11 @@ -54,9 +57,6 @@ importers: biome: specifier: ^0.3.3 version: 0.3.3 - ink-link: - specifier: ^5.0.0 - version: 5.0.0(ink@6.8.0(@types/react@19.2.14)(react@19.2.4)) tsx: specifier: ^4 version: 4.21.0 @@ -851,9 +851,6 @@ packages: peerDependencies: graphql: ^16.0.0 - '@types/better-sqlite3@7.6.13': - 
resolution: {integrity: sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA==} - '@types/js-yaml@4.0.9': resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} @@ -999,12 +996,6 @@ packages: bcrypt-pbkdf@1.0.2: resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} - better-sqlite3@11.10.0: - resolution: {integrity: sha512-EwhOpyXiOEL/lKzHz9AW1msWFNzGc/z+LzeB3/jnFJpxu+th2yqvzsSWas1v9jgs9+xiXJcD5A8CJxAG2TaghQ==} - - bindings@1.5.0: - resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} - biome@0.3.3: resolution: {integrity: sha512-4LXjrQYbn9iTXu9Y4SKT7ABzTV0WnLDHCVSd2fPUOKsy1gQ+E4xPFmlY1zcWexoi0j7fGHItlL6OWA2CZ/yYAQ==} hasBin: true @@ -1078,9 +1069,6 @@ packages: chardet@2.1.1: resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} - chownr@1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} - clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} @@ -1241,14 +1229,6 @@ packages: resolution: {integrity: sha512-G7Cqgaelq68XHJNGlZ7lrNQyhZGsFqpwtGFexqUv4IQdjKoSYF7ipZ9UuTJZUSQXFj/XaoBLuEVIVqr8EJngEQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - decompress-response@6.0.0: - resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} - engines: {node: '>=10'} - - deep-extend@0.6.0: - resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} - engines: {node: '>=4.0.0'} - deepmerge@4.3.1: resolution: {integrity: 
sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} @@ -1268,10 +1248,6 @@ packages: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} - detect-libc@2.1.2: - resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} - engines: {node: '>=8'} - dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -1305,9 +1281,6 @@ packages: emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - end-of-stream@1.4.5: - resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} - environment@1.1.0: resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==} engines: {node: '>=18'} @@ -1339,10 +1312,6 @@ packages: resolution: {integrity: sha512-MsG3prOVw1WtLXAZbM3KiYtooKR1LvxHh3VHsVtIy0uiUu8usxgB/94DP2HxtD/661lLdB6yzQ09lGJSQr6nkg==} engines: {node: '>=0.10.0'} - expand-template@2.0.3: - resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} - engines: {node: '>=6'} - extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} @@ -1388,9 +1357,6 @@ packages: resolution: {integrity: sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==} engines: {node: '>=18'} - file-uri-to-path@1.0.0: - resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} - fill-range@7.1.1: resolution: {integrity: 
sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} @@ -1414,9 +1380,6 @@ packages: resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} engines: {node: '>=12.20.0'} - fs-constants@1.0.0: - resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} - fs-extra@0.26.7: resolution: {integrity: sha512-waKu+1KumRhYv8D8gMRCKJGAMI9pRnPuEb1mvgYD0f7wBscg+h6bW4FDTmEZhB9VKxvoTtxW+Y7bnIlB7zja6Q==} @@ -1450,9 +1413,6 @@ packages: getpass@0.1.7: resolution: {integrity: sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==} - github-from-package@0.0.0: - resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} - glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} @@ -1596,9 +1556,6 @@ packages: inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - ini@1.3.8: - resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - ink-link@5.0.0: resolution: {integrity: sha512-TFDXc/0mwUW7LMjsr0/LeLxPVV5BnHDuDQff9RCgP4rb3R+V/4dIwGBZbCevcJZtQnVcW+Iz1LUrUbpq+UDwYA==} engines: {node: '>=18'} @@ -1860,10 +1817,6 @@ packages: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} - mimic-response@3.1.0: - resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} - engines: {node: '>=10'} - minimatch@3.1.3: resolution: {integrity: 
sha512-M2GCs7Vk83NxkUyQV1bkABc4yxgz9kILhHImZiBPAZ9ybuvCb0/H7lEl5XvIg3g+9d4eNotkZA5IWwYl0tibaA==} @@ -1871,12 +1824,6 @@ packages: resolution: {integrity: sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==} engines: {node: '>=16 || 14 >=14.17'} - minimist@1.2.8: - resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - - mkdirp-classic@0.5.3: - resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} - ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -1889,16 +1836,9 @@ packages: mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} - napi-build-utils@2.0.0: - resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==} - no-case@3.0.4: resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} - node-abi@3.87.0: - resolution: {integrity: sha512-+CGM1L1CgmtheLcBuleyYOn7NWPVu0s0EJH2C4puxgEZb9h8QpR9G2dBfZJOAUhi7VQxuBPMd0hiISWcTyiYyQ==} - engines: {node: '>=10'} - node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -1923,6 +1863,9 @@ packages: node-releases@2.0.27: resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} + node-sqlite3-wasm@0.8.55: + resolution: {integrity: sha512-C2m7JzZgKiv9XVZ1ts9oPmS56PCvyHeQffTOF2KNO2TVZzq5IW2s+NFeEZn+eP6bnAuD2We/O9cOJSjQVf7Xxw==} + normalize-package-data@6.0.2: resolution: {integrity: sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==} engines: 
{node: ^16.14.0 || >=18.0.0} @@ -2059,21 +2002,12 @@ packages: resolution: {integrity: sha512-VP/72JeXqak2KiOzjgKtQen5y3IZHn+9GOuLDafPv0eXa47xq0At93XahYBs26MsifCQ4enGKwbjBTKgb9QJXg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - prebuild-install@7.1.3: - resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==} - engines: {node: '>=10'} - deprecated: No longer maintained. Please contact the author of the relevant native addon; alternatives are available. - hasBin: true - promise@7.3.1: resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==} psl@1.15.0: resolution: {integrity: sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==} - pump@3.0.3: - resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==} - punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -2085,10 +2019,6 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - rc@1.2.8: - resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} - hasBin: true - react-reconciler@0.33.0: resolution: {integrity: sha512-KetWRytFv1epdpJc3J4G75I4WrplZE5jOL7Yq0p34+OVOKF4Se7WrdIdVC45XsSSmUTlht2FM/fM1FZb1mfQeA==} engines: {node: '>=0.10.0'} @@ -2237,12 +2167,6 @@ packages: signedsource@1.0.0: resolution: {integrity: sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww==} - simple-concat@1.0.1: - resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} - - simple-get@4.0.1: - resolution: {integrity: 
sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==} - slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -2324,10 +2248,6 @@ packages: resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==} engines: {node: '>=12'} - strip-json-comments@2.0.1: - resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} - engines: {node: '>=0.10.0'} - supports-color@10.2.2: resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==} engines: {node: '>=18'} @@ -2359,13 +2279,6 @@ packages: resolution: {integrity: sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==} engines: {node: '>=20'} - tar-fs@2.1.4: - resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} - - tar-stream@2.2.0: - resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} - engines: {node: '>=6'} - terminal-link@5.0.0: resolution: {integrity: sha512-qFAy10MTMwjzjU8U16YS4YoZD+NQLHzLssFMNqgravjbvIPNiqkGFR4yjhJfmY9R5OFU7+yHxc6y+uGHkKwLRA==} engines: {node: '>=20'} @@ -2453,6 +2366,10 @@ packages: undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + unicode-animations@1.0.3: + resolution: {integrity: sha512-+klB2oWwcYZjYWhwP4Pr8UZffWDFVx6jKeIahE6z0QYyM2dwDeDPyn5nevCYbyotxvtT9lh21cVURO1RX0+YMg==} + hasBin: true + unicorn-magic@0.1.0: resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} engines: {node: '>=18'} @@ -3646,10 +3563,6 @@ snapshots: 
transitivePeerDependencies: - supports-color - '@types/better-sqlite3@7.6.13': - dependencies: - '@types/node': 22.19.11 - '@types/js-yaml@4.0.9': {} '@types/node@22.19.11': @@ -3798,15 +3711,6 @@ snapshots: dependencies: tweetnacl: 0.14.5 - better-sqlite3@11.10.0: - dependencies: - bindings: 1.5.0 - prebuild-install: 7.1.3 - - bindings@1.5.0: - dependencies: - file-uri-to-path: 1.0.0 - biome@0.3.3: dependencies: bluebird: 3.7.2 @@ -3921,8 +3825,6 @@ snapshots: chardet@2.1.1: {} - chownr@1.1.4: {} - clean-stack@2.2.0: {} cli-boxes@3.0.0: {} @@ -4052,12 +3954,6 @@ snapshots: decamelize@6.0.1: {} - decompress-response@6.0.0: - dependencies: - mimic-response: 3.1.0 - - deep-extend@0.6.0: {} - deepmerge@4.3.1: {} defaults@1.0.4: @@ -4070,8 +3966,6 @@ snapshots: detect-indent@6.1.0: {} - detect-libc@2.1.2: {} - dir-glob@3.0.1: dependencies: path-type: 4.0.0 @@ -4105,10 +3999,6 @@ snapshots: emoji-regex@8.0.0: {} - end-of-stream@1.4.5: - dependencies: - once: 1.4.0 - environment@1.1.0: {} error-ex@1.3.4: @@ -4154,8 +4044,6 @@ snapshots: exit-hook@1.1.1: {} - expand-template@2.0.3: {} - extend@3.0.2: {} extsprintf@1.3.0: {} @@ -4212,8 +4100,6 @@ snapshots: dependencies: is-unicode-supported: 2.1.0 - file-uri-to-path@1.0.0: {} - fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 @@ -4237,8 +4123,6 @@ snapshots: dependencies: fetch-blob: 3.2.0 - fs-constants@1.0.0: {} - fs-extra@0.26.7: dependencies: graceful-fs: 4.2.11 @@ -4273,8 +4157,6 @@ snapshots: dependencies: assert-plus: 1.0.0 - github-from-package@0.0.0: {} - glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -4422,8 +4304,6 @@ snapshots: inherits@2.0.4: {} - ini@1.3.8: {} - ink-link@5.0.0(ink@6.8.0(@types/react@19.2.14)(react@19.2.4)): dependencies: ink: 6.8.0(@types/react@19.2.14)(react@19.2.4) @@ -4699,8 +4579,6 @@ snapshots: mimic-fn@2.1.0: {} - mimic-response@3.1.0: {} - minimatch@3.1.3: dependencies: brace-expansion: 1.1.12 @@ -4709,10 +4587,6 @@ snapshots: dependencies: brace-expansion: 5.0.3 - 
minimist@1.2.8: {} - - mkdirp-classic@0.5.3: {} - ms@2.1.3: {} mute-stream@0.0.5: {} @@ -4725,17 +4599,11 @@ snapshots: object-assign: 4.1.1 thenify-all: 1.6.0 - napi-build-utils@2.0.0: {} - no-case@3.0.4: dependencies: lower-case: 2.0.2 tslib: 2.8.1 - node-abi@3.87.0: - dependencies: - semver: 7.7.4 - node-domexception@1.0.0: {} node-fetch@2.7.0: @@ -4752,6 +4620,8 @@ snapshots: node-releases@2.0.27: {} + node-sqlite3-wasm@0.8.55: {} + normalize-package-data@6.0.2: dependencies: hosted-git-info: 7.0.2 @@ -4888,21 +4758,6 @@ snapshots: dependencies: irregular-plurals: 3.5.0 - prebuild-install@7.1.3: - dependencies: - detect-libc: 2.1.2 - expand-template: 2.0.3 - github-from-package: 0.0.0 - minimist: 1.2.8 - mkdirp-classic: 0.5.3 - napi-build-utils: 2.0.0 - node-abi: 3.87.0 - pump: 3.0.3 - rc: 1.2.8 - simple-get: 4.0.1 - tar-fs: 2.1.4 - tunnel-agent: 0.6.0 - promise@7.3.1: dependencies: asap: 2.0.6 @@ -4911,24 +4766,12 @@ snapshots: dependencies: punycode: 2.3.1 - pump@3.0.3: - dependencies: - end-of-stream: 1.4.5 - once: 1.4.0 - punycode@2.3.1: {} qs@6.5.5: {} queue-microtask@1.2.3: {} - rc@1.2.8: - dependencies: - deep-extend: 0.6.0 - ini: 1.3.8 - minimist: 1.2.8 - strip-json-comments: 2.0.1 - react-reconciler@0.33.0(react@19.2.4): dependencies: react: 19.2.4 @@ -5084,14 +4927,6 @@ snapshots: signedsource@1.0.0: {} - simple-concat@1.0.1: {} - - simple-get@4.0.1: - dependencies: - decompress-response: 6.0.0 - once: 1.4.0 - simple-concat: 1.0.1 - slash@3.0.0: {} slice-ansi@3.0.0: @@ -5196,8 +5031,6 @@ snapshots: dependencies: ansi-regex: 6.2.2 - strip-json-comments@2.0.1: {} - supports-color@10.2.2: {} supports-color@2.0.0: {} @@ -5229,21 +5062,6 @@ snapshots: tagged-tag@1.0.0: {} - tar-fs@2.1.4: - dependencies: - chownr: 1.1.4 - mkdirp-classic: 0.5.3 - pump: 3.0.3 - tar-stream: 2.2.0 - - tar-stream@2.2.0: - dependencies: - bl: 4.1.0 - end-of-stream: 1.4.5 - fs-constants: 1.0.0 - inherits: 2.0.4 - readable-stream: 3.6.2 - terminal-link@5.0.0: dependencies: 
ansi-escapes: 7.3.0 @@ -5315,6 +5133,8 @@ snapshots: undici-types@6.21.0: {} + unicode-animations@1.0.3: {} + unicorn-magic@0.1.0: {} unixify@1.0.0: diff --git a/scripts/check-command-surface-snapshot.mjs b/scripts/check-command-surface-snapshot.mjs new file mode 100644 index 0000000..4d164db --- /dev/null +++ b/scripts/check-command-surface-snapshot.mjs @@ -0,0 +1,38 @@ +import { existsSync, readFileSync } from 'node:fs' +import { join } from 'node:path' +import { spawnSync } from 'node:child_process' + +const ROOT = process.cwd() +const SNAPSHOT_PATH = join(ROOT, '.drift', 'command-surface.snapshot.json') + +if (!existsSync(SNAPSHOT_PATH)) { + process.stderr.write( + `Missing snapshot at ${SNAPSHOT_PATH}\n` + + 'Run: pnpm drift:surface:update\n', + ) + process.exit(1) +} + +const before = readFileSync(SNAPSHOT_PATH, 'utf8') + +const update = spawnSync( + process.execPath, + [join(ROOT, 'scripts', 'update-command-surface-snapshot.mjs')], + { stdio: 'pipe', encoding: 'utf8' }, +) + +if (update.status !== 0) { + process.stderr.write(update.stderr || update.stdout) + process.exit(update.status ?? 
1) +} + +const after = readFileSync(SNAPSHOT_PATH, 'utf8') +if (before !== after) { + process.stderr.write( + 'Command surface snapshot drift detected.\n' + + 'Run: pnpm drift:surface:update and commit the updated snapshot.\n', + ) + process.exit(1) +} + +process.stdout.write('Command surface snapshot is up to date.\n') diff --git a/scripts/check-data-compat.mjs b/scripts/check-data-compat.mjs new file mode 100644 index 0000000..5d55f3c --- /dev/null +++ b/scripts/check-data-compat.mjs @@ -0,0 +1,43 @@ +import { readFileSync } from 'node:fs' +import { join } from 'node:path' + +const ROOT = process.cwd() +const configSource = readFileSync(join(ROOT, 'src', 'lib', 'config.ts'), 'utf8') +const querySource = readFileSync(join(ROOT, 'src', 'lib', 'data-queries.ts'), 'utf8') +const dbSource = readFileSync(join(ROOT, 'src', 'lib', 'db.ts'), 'utf8') + +const failures = [] + +if (configSource.includes('database.sqlite')) { + failures.push( + 'config deleteDatabase still references legacy database.sqlite path; expected DB_PATH/data.db.', + ) +} + +const interestsQueryMatch = querySource.match( + /export const INTERESTS_QUERY\s*=\s*`([\s\S]*?)`/, +) +if (!interestsQueryMatch) { + failures.push('Unable to locate INTERESTS_QUERY in src/lib/data-queries.ts.') +} else { + const interestsQuery = interestsQueryMatch[1] + if (/\bkeywords\b/.test(interestsQuery) || /\brelatedTopics\b/.test(interestsQuery)) { + failures.push( + 'INTERESTS_QUERY still requests deprecated topics fields (keywords/relatedTopics).', + ) + } +} + +if (!dbSource.includes('export const DB_PATH =')) { + failures.push('DB_PATH constant missing from src/lib/db.ts.') +} + +if (failures.length > 0) { + process.stderr.write('Data compatibility checks failed:\n') + for (const failure of failures) { + process.stderr.write(`- ${failure}\n`) + } + process.exit(1) +} + +process.stdout.write('Data compatibility checks passed.\n') diff --git a/scripts/check-doc-command-parity.mjs b/scripts/check-doc-command-parity.mjs 
new file mode 100644 index 0000000..2c3916e --- /dev/null +++ b/scripts/check-doc-command-parity.mjs @@ -0,0 +1,67 @@ +import { readFileSync } from 'node:fs' +import { join } from 'node:path' + +const ROOT = process.cwd() +const SNAPSHOT_PATH = join(ROOT, '.drift', 'command-surface.snapshot.json') +const SKILL_PATH = join(ROOT, 'src', 'lib', 'skill.ts') +const README_PATH = join(ROOT, 'README.md') + +function loadCommands() { + const snapshot = JSON.parse(readFileSync(SNAPSHOT_PATH, 'utf8')) + return new Set(snapshot.commands) +} + +function extractSonarCommands(text) { + const matches = text.match(/^\s*sonar[^\n`#]*$/gm) ?? [] + return matches + .map((line) => line.trim()) + .filter((line) => line.length > 0) +} + +function normalizeExampleCommand(example, knownCommands) { + // Drop inline comments and flags/args for command matching. + const cleaned = example.split('#')[0].trim() + const tokens = cleaned.split(/\s+/) + + // Try longest command prefix first. + for (let i = Math.min(tokens.length, 4); i >= 1; i -= 1) { + const candidate = tokens.slice(0, i).join(' ') + if (knownCommands.has(candidate)) { + return candidate + } + } + return null +} + +const known = loadCommands() + +const docs = [ + { name: 'src/lib/skill.ts', content: readFileSync(SKILL_PATH, 'utf8') }, + { name: 'README.md', content: readFileSync(README_PATH, 'utf8') }, +] + +const unknownByFile = [] + +for (const doc of docs) { + const raw = extractSonarCommands(doc.content) + const unknown = raw.filter((line) => normalizeExampleCommand(line, known) === null) + if (unknown.length > 0) { + unknownByFile.push({ file: doc.name, unknown }) + } +} + +if (unknownByFile.length > 0) { + process.stderr.write('Found doc commands not present in CLI command surface:\n') + for (const entry of unknownByFile) { + process.stderr.write(`- ${entry.file}\n`) + for (const line of entry.unknown) { + process.stderr.write(` - ${line}\n`) + } + } + process.stderr.write( + '\nUpdate docs/skill examples or command files, 
then refresh snapshot if needed.\n', + ) + process.exit(1) +} + +process.stdout.write('Docs and skill command examples match current CLI surface.\n') diff --git a/scripts/check-schema-drift.mjs b/scripts/check-schema-drift.mjs new file mode 100644 index 0000000..677f6c5 --- /dev/null +++ b/scripts/check-schema-drift.mjs @@ -0,0 +1,122 @@ +import { readdirSync, readFileSync } from 'node:fs' +import { join } from 'node:path' +import { + buildClientSchema, + getIntrospectionQuery, + parse, + validate, +} from 'graphql' + +const ROOT = process.cwd() +const SRC_DIR = join(ROOT, 'src') +const defaultSchemaUrl = 'https://api.sonar.8640p.info/graphql' +const rawSchemaUrl = process.env.SONAR_API_URL ?? defaultSchemaUrl +const schemaUrl = rawSchemaUrl.endsWith('/graphql') + ? rawSchemaUrl + : `${rawSchemaUrl.replace(/\/$/, '')}/graphql` + +function walk(dir) { + const out = [] + for (const entry of readdirSync(dir, { withFileTypes: true })) { + const full = join(dir, entry.name) + if (entry.isDirectory()) out.push(...walk(full)) + else if (/\.(ts|tsx)$/.test(full)) out.push(full) + } + return out +} + +function extractGraphqlDocuments(source) { + const docs = [] + + // gql`...` + const gqlTag = /gql`([\s\S]*?)`/g + for (const match of source.matchAll(gqlTag)) { + docs.push(match[1]) + } + + // Plain template literals often used in CLI commands: const QUERY = `...` + const plainTemplate = /=\s*`([\s\S]*?)`/g + for (const match of source.matchAll(plainTemplate)) { + const body = match[1] + if (/\b(query|mutation|fragment)\b/.test(body)) { + docs.push(body) + } + } + + return docs +} + +async function fetchSchema() { + try { + const res = await fetch(schemaUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ query: getIntrospectionQuery() }), + }) + if (!res.ok) { + throw new Error(`Schema introspection failed: HTTP ${res.status} ${res.statusText}`) + } + const json = await res.json() + if (!json?.data) { + throw new Error('Schema 
introspection returned no data.') + } + return buildClientSchema(json.data) + } catch (error) { + const msg = error instanceof Error ? error.message : String(error) + if (!process.env.CI) { + process.stdout.write( + `Schema validation skipped (network unavailable): ${msg}\n` + + 'Set CI=true to enforce this check.\n', + ) + return null + } + throw error + } +} + +const files = walk(SRC_DIR) +const docsByFile = [] +for (const file of files) { + const source = readFileSync(file, 'utf8') + const docs = extractGraphqlDocuments(source) + if (docs.length > 0) { + docsByFile.push([file, docs]) + } +} + +if (docsByFile.length === 0) { + process.stderr.write('No GraphQL documents found in src.\n') + process.exit(1) +} + +const schema = await fetchSchema() +if (!schema) { + process.exit(0) +} +const failures = [] + +for (const [file, docs] of docsByFile) { + for (const doc of docs) { + try { + const ast = parse(doc) + const errs = validate(schema, ast) + if (errs.length > 0) { + for (const err of errs) { + failures.push(`${file}: ${err.message}`) + } + } + } catch (error) { + failures.push(`${file}: ${(error instanceof Error ? 
error.message : String(error))}`) + } + } +} + +if (failures.length > 0) { + process.stderr.write('Schema drift detected in GraphQL documents:\n') + for (const failure of failures) { + process.stderr.write(`- ${failure}\n`) + } + process.exit(1) +} + +process.stdout.write(`Schema validation passed against ${schemaUrl}.\n`) diff --git a/scripts/check-skill-drift.mjs b/scripts/check-skill-drift.mjs new file mode 100644 index 0000000..97386f1 --- /dev/null +++ b/scripts/check-skill-drift.mjs @@ -0,0 +1,40 @@ +import { existsSync, readFileSync } from 'node:fs' +import { join } from 'node:path' +import { spawnSync } from 'node:child_process' + +const ROOT = process.cwd() +const SKILL_TS_PATH = join(ROOT, 'src', 'lib', 'skill.ts') + +if (!existsSync(SKILL_TS_PATH)) { + process.stderr.write( + `Missing generated file at ${SKILL_TS_PATH}\n` + + 'Run: pnpm generate:skill\n', + ) + process.exit(1) +} + +const before = readFileSync(SKILL_TS_PATH, 'utf8') + +// Run generator in dry-run mode to get what the file *should* look like +const result = spawnSync( + process.execPath, + ['--import', 'tsx/esm', join(ROOT, 'scripts', 'generate-skill.ts'), '--dry-run'], + { stdio: 'pipe', encoding: 'utf8' }, +) + +if (result.status !== 0) { + process.stderr.write(result.stderr || result.stdout) + process.exit(result.status ?? 1) +} + +const generated = result.stdout + +if (before !== generated) { + process.stderr.write( + 'skill.ts is out of sync with command metadata.\n' + + 'Run: pnpm generate:skill and commit the updated src/lib/skill.ts\n', + ) + process.exit(1) +} + +process.stdout.write('skill.ts is up to date.\n') diff --git a/scripts/generate-skill.ts b/scripts/generate-skill.ts new file mode 100644 index 0000000..2fd0dfb --- /dev/null +++ b/scripts/generate-skill.ts @@ -0,0 +1,277 @@ +#!/usr/bin/env tsx +/** + * Generates src/lib/skill.ts from Pastel/Zod command metadata. 
+ * + * Usage: + * tsx scripts/generate-skill.ts # Write to src/lib/skill.ts + * tsx scripts/generate-skill.ts --dry-run # Print generated content to stdout + */ +import { readdirSync, writeFileSync } from 'node:fs' +import { join, relative } from 'node:path' +import { pathToFileURL } from 'node:url' +import zod from 'zod' + +const ROOT = process.cwd() +const COMMANDS_DIR = join(ROOT, 'src', 'commands') +const SKILL_TS_PATH = join(ROOT, 'src', 'lib', 'skill.ts') +const DRY_RUN = process.argv.includes('--dry-run') + +// ── filesystem helpers ─────────────────────────────────────────────────────── + +function walk(dir: string): string[] { + const out: string[] = [] + for (const entry of readdirSync(dir, { withFileTypes: true })) { + const full = join(dir, entry.name) + if (entry.isDirectory()) out.push(...walk(full)) + else if (entry.isFile() && full.endsWith('.tsx')) out.push(full) + } + return out.sort() +} + +function fileToCommand(filePath: string): string { + const rel = relative(COMMANDS_DIR, filePath).replace(/\\/g, '/') + const withoutExt = rel.replace(/\.tsx$/, '') + const parts = withoutExt.split('/') + if (parts[parts.length - 1] === 'index') parts.pop() + return ['sonar', ...parts].filter(Boolean).join(' ') +} + +// ── Zod introspection ──────────────────────────────────────────────────────── + +interface OptionMeta { + flag: string + type: string + optional: boolean + defaultValue: unknown + description: string +} + +function unwrapZod(schema: zod.ZodTypeAny): { + typeName: string + optional: boolean + defaultValue: unknown +} { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let s: any = schema + let optional = false + let defaultValue: unknown = undefined + + // Peel layers (ZodDefault must come before ZodOptional in Zod wrapping order) + for (;;) { + const tn: string = s._def.typeName + if (tn === 'ZodDefault') { + defaultValue = s._def.defaultValue() + s = s._def.innerType + } else if (tn === 'ZodOptional') { + optional = true + s 
= s._def.innerType + } else { + break + } + } + + return { typeName: s._def.typeName as string, optional, defaultValue } +} + +function zodTypeName(typeName: string): string { + const map: Record = { + ZodString: 'string', + ZodNumber: 'number', + ZodBoolean: 'boolean', + ZodArray: 'array', + ZodEnum: 'string', + } + return map[typeName] ?? typeName.replace(/^Zod/, '').toLowerCase() +} + +function extractOptions(schema: zod.ZodTypeAny | undefined): OptionMeta[] { + if (!schema) return [] + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const s = schema as any + if (s._def?.typeName !== 'ZodObject') return [] + + return Object.entries(s.shape as Record).map(([name, fieldSchema]) => { + const { typeName, optional, defaultValue } = unwrapZod(fieldSchema) + return { + flag: `--${name}`, + type: zodTypeName(typeName), + optional, + defaultValue, + description: (fieldSchema as zod.ZodTypeAny).description ?? '', + } + }) +} + +// ── command metadata collection ────────────────────────────────────────────── + +interface CommandMeta { + command: string + options: OptionMeta[] +} + +async function collectCommands(): Promise { + const files = walk(COMMANDS_DIR) + const result: CommandMeta[] = [] + + for (const file of files) { + const commandName = fileToCommand(file) + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const mod: any = await import(pathToFileURL(file).href) + const options = extractOptions(mod.options as zod.ZodTypeAny | undefined) + result.push({ command: commandName, options }) + } + + return result +} + +// ── markdown generation ────────────────────────────────────────────────────── + +function flagUsageLine(opt: OptionMeta): string { + if (opt.type === 'boolean') { + // Default-true booleans: show --no-flag; default-false: show --flag + if (opt.defaultValue === true) return `sonar ... --no-${opt.flag.slice(2)}` + return `sonar ... ${opt.flag}` + } + if (opt.type === 'number') return `sonar ... 
${opt.flag} N` + return `sonar ... ${opt.flag} ` +} + +function renderCommandSection(meta: CommandMeta): string { + const lines: string[] = [`## ${meta.command}`, ''] + + if (meta.options.length === 0) { + lines.push(`\`\`\`bash`, `${meta.command}`, `\`\`\``, '') + return lines.join('\n') + } + + lines.push('```bash') + lines.push(meta.command) + for (const opt of meta.options) { + const suffix = opt.description ? ` # ${opt.description}` : '' + if (opt.type === 'boolean') { + if (opt.defaultValue === true) { + lines.push(`${meta.command} --no-${opt.flag.slice(2)}${suffix}`) + } else { + lines.push(`${meta.command} ${opt.flag}${suffix}`) + } + } else if (opt.type === 'number') { + lines.push(`${meta.command} ${opt.flag} N${suffix}`) + } else { + lines.push(`${meta.command} ${opt.flag} ${suffix}`) + } + } + lines.push('```', '') + + return lines.join('\n') +} + +function generateSkillMarkdown(commands: CommandMeta[]): string { + const header = `--- +name: sonar +description: Sonar CLI — view and triage your feed, manage topics, trigger refresh jobs, and manage local Sonar config/data. +homepage: https://sonar.sh +user-invocable: true +allowed-tools: Bash +argument-hint: [command and options] +metadata: {"openclaw":{"emoji":"📡","requires":{"bins":["sonar"]}}} +--- + +# Sonar CLI + +All commands are invoked as: \`sonar [subcommand] [flags]\`. 
+ +` + + const sections = commands.map(renderCommandSection).join('\n') + + const footer = `## Environment variables + +| Variable | Purpose | +|---|---| +| \`SONAR_API_URL\` | Backend URL (defaults to production GraphQL endpoint) | +| \`SONAR_AI_VENDOR\` | Vendor override for AI-assisted operations (\`openai\` or \`anthropic\`) | +| \`SONAR_FEED_RENDER\` | Default feed renderer override | +| \`SONAR_FEED_WIDTH\` | Default card width override | +| \`OPENAI_API_KEY\` | Required when vendor is \`openai\` | +| \`ANTHROPIC_API_KEY\` | Required when vendor is \`anthropic\` | +` + + return header + sections + '\n' + footer +} + +// ── skill.ts template ──────────────────────────────────────────────────────── + +function generateSkillTs(skillContent: string): string { + // Escape backticks and ${} in the content for embedding in a template literal + const escaped = skillContent + .replace(/\\/g, '\\\\') + .replace(/`/g, '\\`') + .replace(/\$\{/g, '\\${') + + return `// THIS FILE IS AUTO-GENERATED. Do not edit manually. 
+// Run: pnpm generate:skill +// See: scripts/generate-skill.ts +import { writeFileSync, readFileSync, existsSync, mkdirSync } from 'node:fs' +import { createHash } from 'node:crypto' +import { join, dirname } from 'node:path' +import { homedir } from 'node:os' + +const SKILL_CONTENT = \`${escaped}\` + +const DEFAULT_INSTALL_PATH = join(homedir(), '.claude', 'skills', 'sonar', 'SKILL.md') + +function sha256(content: string): string { + return createHash('sha256').update(content).digest('hex') +} + +function safeWrite(target: string, content: string, force: boolean): void { + if (existsSync(target) && !force) { + const existing = readFileSync(target, 'utf8') + if (existing === content) { + process.stdout.write(\`SKILL.md is already up to date: \${target}\\n\`) + process.exit(0) + } + // File exists and differs — user may have customized it + process.stderr.write( + \`SKILL.md has been modified: \${target}\\n\` + + \`Use --force to overwrite, or manually merge.\\n\` + + \`New version hash: \${sha256(content).slice(0, 8)}\\n\` + ) + process.exit(1) + } + mkdirSync(dirname(target), { recursive: true }) + writeFileSync(target, content, 'utf8') + process.stdout.write(\`SKILL.md written to \${target}\\n\`) +} + +export function writeSkillTo(dest?: string, install?: boolean, force?: boolean): void { + if (install || dest === '--install') { + safeWrite(DEFAULT_INSTALL_PATH, SKILL_CONTENT, force ?? false) + process.exit(0) + } + + if (dest) { + safeWrite(dest, SKILL_CONTENT, force ?? 
false) + process.exit(0) + } + + // Default: print to stdout + process.stdout.write(SKILL_CONTENT) + process.exit(0) +} +` +} + +// ── main ───────────────────────────────────────────────────────────────────── + +const commands = await collectCommands() +const skillMarkdown = generateSkillMarkdown(commands) +const skillTs = generateSkillTs(skillMarkdown) + +if (DRY_RUN) { + process.stdout.write(skillTs) +} else { + writeFileSync(SKILL_TS_PATH, skillTs, 'utf8') + process.stdout.write(`Generated: ${SKILL_TS_PATH}\n`) + process.stdout.write(`Commands: ${commands.length}\n`) +} diff --git a/scripts/update-command-surface-snapshot.mjs b/scripts/update-command-surface-snapshot.mjs new file mode 100644 index 0000000..fb9e47b --- /dev/null +++ b/scripts/update-command-surface-snapshot.mjs @@ -0,0 +1,43 @@ +import { readdirSync, writeFileSync, mkdirSync } from 'node:fs' +import { join, relative, dirname } from 'node:path' + +const ROOT = process.cwd() +const COMMANDS_DIR = join(ROOT, 'src', 'commands') +const SNAPSHOT_PATH = join(ROOT, '.drift', 'command-surface.snapshot.json') + +function walk(dir) { + const out = [] + for (const entry of readdirSync(dir, { withFileTypes: true })) { + const full = join(dir, entry.name) + if (entry.isDirectory()) out.push(...walk(full)) + else if (entry.isFile() && full.endsWith('.tsx')) out.push(full) + } + return out +} + +function fileToCommand(filePath) { + const rel = relative(COMMANDS_DIR, filePath).replace(/\\/g, '/') + const withoutExt = rel.replace(/\.tsx$/, '') + const parts = withoutExt.split('/') + if (parts[parts.length - 1] === 'index') { + parts.pop() + } + + const commandParts = ['sonar', ...parts].filter(Boolean) + return commandParts.join(' ') +} + +const commandFiles = walk(COMMANDS_DIR) +const commands = Array.from(new Set(commandFiles.map(fileToCommand))).sort() + +const payload = { + source: 'src/commands/**/*.tsx', + commandCount: commands.length, + commands, +} + +mkdirSync(dirname(SNAPSHOT_PATH), { recursive: true 
}) +writeFileSync(SNAPSHOT_PATH, `${JSON.stringify(payload, null, 2)}\n`, 'utf8') + +process.stdout.write(`Updated snapshot: ${SNAPSHOT_PATH}\n`) +process.stdout.write(`Commands: ${commands.length}\n`) diff --git a/src/cli.ts b/src/cli.ts index 87452cd..56c2ca3 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,5 +1,21 @@ #!/usr/bin/env node +import { readFileSync } from 'node:fs' +import { fileURLToPath } from 'node:url' +import { dirname, join } from 'node:path' import Pastel from 'pastel' -const app = new Pastel({ importMeta: import.meta }) +const __dirname = dirname(fileURLToPath(import.meta.url)) +const pkg = JSON.parse(readFileSync(join(__dirname, '..', 'package.json'), 'utf8')) + +const HEADER = ` + S O N A R + ──────────────────────── + ${pkg.version} +` + +if (process.argv.includes('--help') || process.argv.includes('-h')) { + process.stdout.write(HEADER) +} + +const app = new Pastel({ importMeta: import.meta, name: 'sonar' }) await app.run() diff --git a/src/commands/account.tsx b/src/commands/account.tsx deleted file mode 100644 index ac70513..0000000 --- a/src/commands/account.tsx +++ /dev/null @@ -1,189 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text } from 'ink' -import { formatDistanceToNow } from 'date-fns' -import { gql } from '../lib/client.js' -import { Spinner } from '../components/Spinner.js' -import { AccountCard } from '../components/AccountCard.js' -import type { Account } from '../components/AccountCard.js' - -export const options = zod.object({ - json: zod.boolean().default(false).describe('Raw JSON output'), - debug: zod.boolean().default(false).describe('Debug mode'), -}) - -type Props = { options: zod.infer } - - -interface SuggestionCounts { - inbox: number - later: number - replied: number - read: number - skipped: number - archived: number - total: number -} - -interface DimensionUsage { - used: number - limit: number | null - atLimit: boolean -} - -interface SuggestionRefreshUsage { 
- used: number - limit: number | null - atLimit: boolean - resetsAt: string | null -} - -interface Usage { - plan: string - interests: DimensionUsage - apiKeys: DimensionUsage - bookmarksEnabled: boolean - socialGraphDegrees: number - socialGraphMaxUsers: number | null - suggestionRefreshes: SuggestionRefreshUsage -} - -interface StatusData { - me: Account | null - suggestionCounts: SuggestionCounts - usage: Usage | null -} - -const QUERY = ` - query Status { - me { - accountId - email - xHandle - xid - isPayingCustomer - indexingAccounts - indexedTweets - pendingEmbeddings - twitterIndexedAt - refreshedSuggestionsAt - } - suggestionCounts { - inbox - later - replied - read - skipped - archived - total - } - usage { - plan - interests { used limit atLimit } - apiKeys { used limit atLimit } - bookmarksEnabled - socialGraphDegrees - socialGraphMaxUsers - suggestionRefreshes { used limit atLimit resetsAt } - } - } -` - -export default function Account({ options: flags }: Props) { - const [data, setData] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const result = await gql(QUERY, {}, { debug: flags.debug }) - - if (flags.json) { - process.stdout.write(JSON.stringify(result, null, 2) + '\n') - process.exit(0) - } - - setData(result) - } catch (err) { - if (flags.debug) { - console.error(JSON.stringify(err, null, 2)) - } - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (!data) return - - const { me, suggestionCounts, usage } = data - - return ( - - {me ? : ( - - Account - Not authenticated - - )} - - {usage && ( - - Plan - - plan: - {usage.plan} - - - interests: - - {usage.interests.used}{usage.interests.limit !== null ? `/${usage.interests.limit}` : ''} - - - - api keys: - - {usage.apiKeys.used}{usage.apiKeys.limit !== null ? `/${usage.apiKeys.limit}` : ''} - - - - bookmarks: - {usage.bookmarksEnabled ? 
enabled : upgrade to unlock} - - - social graph: - {usage.socialGraphDegrees} degree{usage.socialGraphDegrees !== 1 ? 's' : ''} - {usage.socialGraphMaxUsers !== null ? `, up to ${usage.socialGraphMaxUsers.toLocaleString()} users` : ', unlimited'} - - - suggestion refreshes: - {usage.suggestionRefreshes.limit !== null ? ( - <> - - {usage.suggestionRefreshes.used}/{usage.suggestionRefreshes.limit} - - {usage.suggestionRefreshes.resetsAt && ( - - {' '}(resets {formatDistanceToNow(new Date(usage.suggestionRefreshes.resetsAt), { addSuffix: true })}) - - )} - - ) : ( - unlimited - )} - - - )} - - - Suggestions - inbox: 0 ? 'green' : undefined}>{suggestionCounts.inbox} - later: {suggestionCounts.later} - replied: {suggestionCounts.replied} - archived: {suggestionCounts.archived} - total: {suggestionCounts.total} - - - ) -} diff --git a/src/commands/account/add.tsx b/src/commands/account/add.tsx new file mode 100644 index 0000000..dd48455 --- /dev/null +++ b/src/commands/account/add.tsx @@ -0,0 +1,72 @@ +import React, { useEffect } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { readAccounts, writeAccounts, migrateToAccounts } from '../../lib/config.js' + +const ADJECTIVES = [ + 'bouncy', 'cosmic', 'dizzy', 'fuzzy', 'gentle', 'happy', 'jazzy', + 'lucky', 'mellow', 'nimble', 'plucky', 'quiet', 'rusty', 'snappy', + 'tiny', 'vivid', 'witty', 'zesty', 'bright', 'clever', +] + +const ANIMALS = [ + 'rabbit', 'falcon', 'panda', 'otter', 'fox', 'wolf', 'eagle', + 'dolphin', 'tiger', 'koala', 'lynx', 'owl', 'raven', 'seal', + 'hawk', 'badger', 'crane', 'finch', 'heron', 'wren', +] + +function randomName(): string { + const adj = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)] + const animal = ANIMALS[Math.floor(Math.random() * ANIMALS.length)] + return `${adj}-${animal}` +} + +export const args = zod.tuple([ + zod.string().describe('API key (snr_...)'), +]) + +export const options = zod.object({ + alias: zod.string().optional().describe('Account 
alias (default: random)'), + 'api-url': zod.string().optional().describe('Custom API URL'), +}) + +type Props = { args: zod.infer; options: zod.infer } + +export default function AccountAdd({ args: [key], options: flags }: Props) { + useEffect(() => { + migrateToAccounts() + + if (!key.startsWith('snr_')) { + process.stderr.write('Invalid API key — must start with "snr_"\n') + process.exit(1) + } + + const data = readAccounts() + let name = flags.alias ?? randomName() + + // Avoid collisions with existing names + while (data.accounts[name]) { + name = randomName() + } + + data.accounts[name] = { + token: key, + apiUrl: flags['api-url'] ?? 'https://api.sonar.8640p.info/graphql', + } + + // If this is the first account, make it active + if (!data.active || !data.accounts[data.active]) { + data.active = name + } + + writeAccounts(data) + const isActive = data.active === name ? ' (active)' : '' + process.stdout.write(`Account "${name}" added${isActive}\n`) + if (!flags.alias) { + process.stdout.write(`tip rename with: sonar account rename ${name} \n`) + } + process.exit(0) + }, []) + + return Adding account... +} diff --git a/src/commands/account/index.tsx b/src/commands/account/index.tsx new file mode 100644 index 0000000..fa319ae --- /dev/null +++ b/src/commands/account/index.tsx @@ -0,0 +1,64 @@ +import React from 'react' +import zod from 'zod' +import { Box, Text } from 'ink' +import { readAccounts, migrateToAccounts } from '../../lib/config.js' + +export const options = zod.object({ + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +function maskToken(token: string): string { + if (token.length <= 8) return '***' + return token.slice(0, 4) + '...' 
+ token.slice(-4) +} + +export default function AccountList({ options: flags }: Props) { + migrateToAccounts() + const { active, accounts } = readAccounts() + const names = Object.keys(accounts) + + if (flags.json) { + process.stdout.write(JSON.stringify({ active, accounts: names }, null, 2) + '\n') + process.exit(0) + return <> + } + + if (names.length === 0) { + return ( + + No accounts configured. + + sonar account add snr_xxxxx + sonar account add snr_yyyyy --alias work + + + ) + } + + return ( + + + Accounts + ({names.length}) + + {names.map(name => { + const isActive = name === active + const entry = accounts[name] + return ( + + + {isActive ? '* ' : ' '}{name} + + {maskToken(entry.token)} + {entry.apiUrl !== 'https://api.sonar.8640p.info/graphql' && ( + {entry.apiUrl} + )} + + ) + })} + switch: sonar account switch <name> + + ) +} diff --git a/src/commands/account/remove.tsx b/src/commands/account/remove.tsx new file mode 100644 index 0000000..c029c9a --- /dev/null +++ b/src/commands/account/remove.tsx @@ -0,0 +1,44 @@ +import React, { useEffect } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { readAccounts, writeAccounts } from '../../lib/config.js' + +export const args = zod.tuple([ + zod.string().describe('Account name to remove'), +]) + +export const options = zod.object({ + force: zod.boolean().default(false).describe('Remove even if active'), +}) + +type Props = { args: zod.infer; options: zod.infer } + +export default function AccountRemove({ args: [name], options: flags }: Props) { + useEffect(() => { + const data = readAccounts() + + if (!data.accounts[name]) { + process.stderr.write(`Account "${name}" not found.\n`) + process.exit(1) + } + + if (data.active === name && !flags.force) { + process.stderr.write(`"${name}" is the active account. 
Switch first, or use --force.\n`) + process.exit(1) + } + + delete data.accounts[name] + + // If we removed the active account, pick the first remaining one + if (data.active === name) { + const remaining = Object.keys(data.accounts) + data.active = remaining.length > 0 ? remaining[0] : '' + } + + writeAccounts(data) + process.stdout.write(`Account "${name}" removed\n`) + process.exit(0) + }, []) + + return Removing account... +} diff --git a/src/commands/account/rename.tsx b/src/commands/account/rename.tsx new file mode 100644 index 0000000..03369f9 --- /dev/null +++ b/src/commands/account/rename.tsx @@ -0,0 +1,43 @@ +import React, { useEffect } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { readAccounts, writeAccounts } from '../../lib/config.js' + +export const args = zod.tuple([ + zod.string().describe('Current account name'), + zod.string().describe('New account name'), +]) + +type Props = { args: zod.infer } + +export default function AccountRename({ args: [oldName, newName] }: Props) { + useEffect(() => { + const data = readAccounts() + + if (!data.accounts[oldName]) { + const names = Object.keys(data.accounts) + process.stderr.write(`Account "${oldName}" not found.`) + if (names.length > 0) process.stderr.write(` Available: ${names.join(', ')}`) + process.stderr.write('\n') + process.exit(1) + } + + if (data.accounts[newName]) { + process.stderr.write(`Account "${newName}" already exists.\n`) + process.exit(1) + } + + data.accounts[newName] = data.accounts[oldName] + delete data.accounts[oldName] + + if (data.active === oldName) { + data.active = newName + } + + writeAccounts(data) + process.stdout.write(`Renamed "${oldName}" → "${newName}"\n`) + process.exit(0) + }, []) + + return Renaming account... 
+} diff --git a/src/commands/account/switch.tsx b/src/commands/account/switch.tsx new file mode 100644 index 0000000..fe893f8 --- /dev/null +++ b/src/commands/account/switch.tsx @@ -0,0 +1,33 @@ +import React, { useEffect } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { readAccounts, writeAccounts } from '../../lib/config.js' + +export const args = zod.tuple([ + zod.string().describe('Account name to switch to'), +]) + +type Props = { args: zod.infer } + +export default function AccountSwitch({ args: [name] }: Props) { + useEffect(() => { + const data = readAccounts() + + if (!data.accounts[name]) { + const names = Object.keys(data.accounts) + process.stderr.write(`Account "${name}" not found.`) + if (names.length > 0) { + process.stderr.write(` Available: ${names.join(', ')}`) + } + process.stderr.write('\n') + process.exit(1) + } + + data.active = name + writeAccounts(data) + process.stdout.write(`Switched to "${name}"\n`) + process.exit(0) + }, []) + + return Switching account... 
+} diff --git a/src/commands/inbox/archive.tsx b/src/commands/archive.tsx similarity index 93% rename from src/commands/inbox/archive.tsx rename to src/commands/archive.tsx index 61cde0f..d0a7310 100644 --- a/src/commands/inbox/archive.tsx +++ b/src/commands/archive.tsx @@ -1,8 +1,8 @@ import React, { useEffect, useState } from 'react' import zod from 'zod' import { Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' +import { gql } from '../lib/client.js' +import { Spinner } from '../components/Spinner.js' export const options = zod.object({ id: zod.string().describe('Suggestion ID to archive'), diff --git a/src/commands/config/data/download.tsx b/src/commands/config/data/download.tsx deleted file mode 100644 index 2cc7b5f..0000000 --- a/src/commands/config/data/download.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import React, { useEffect, useState } from 'react' -import { Box, Text } from 'ink' -import { unlinkSync, existsSync } from 'node:fs' -import { gql } from '../../../lib/client.js' -import { Spinner } from '../../../components/Spinner.js' -import { - DB_PATH, - openDb, - upsertTweet, - upsertFeedItem, - upsertSuggestion, - upsertInterest, - setSyncState, -} from '../../../lib/db.js' -import { FEED_QUERY, SUGGESTIONS_QUERY, INTERESTS_QUERY } from '../../../lib/data-queries.js' -import type { FeedTweet, Suggestion, Interest } from '../../../lib/data-queries.js' - -export default function DataDownload() { - const [result, setResult] = useState<{ feedCount: number; suggestionsCount: number; interestsCount: number } | null>(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - if (existsSync(DB_PATH)) unlinkSync(DB_PATH) - - const db = openDb() - const [feedResult, suggestionsResult, interestsResult] = await Promise.all([ - gql<{ feed: FeedTweet[] }>(FEED_QUERY, { hours: null, days: 7, limit: 500 }), - gql<{ suggestions: Suggestion[] 
}>(SUGGESTIONS_QUERY, { status: null, limit: 500 }), - gql<{ projects: Interest[] }>(INTERESTS_QUERY), - ]) - - for (const item of feedResult.feed) { - upsertTweet(db, item.tweet) - upsertFeedItem(db, { tweetId: item.tweet.id, score: item.score, matchedKeywords: item.matchedKeywords }) - } - for (const s of suggestionsResult.suggestions) { - upsertTweet(db, s.tweet) - upsertSuggestion(db, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched }) - } - for (const i of interestsResult.projects) { - upsertInterest(db, i) - } - - setSyncState(db, 'last_synced_at', new Date().toISOString()) - db.close() - - setResult({ - feedCount: feedResult.feed.length, - suggestionsCount: suggestionsResult.suggestions.length, - interestsCount: interestsResult.projects.length, - }) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (!result) return - - return ( - - - Download complete - {DB_PATH} - - - {result.feedCount} - feed items - {result.suggestionsCount} - suggestions - {result.interestsCount} - interests - - - ) -} diff --git a/src/commands/config/env.tsx b/src/commands/config/env.tsx index 04bb90a..2ebacc7 100644 --- a/src/commands/config/env.tsx +++ b/src/commands/config/env.tsx @@ -1,16 +1,13 @@ import { useEffect } from 'react' import { Text } from 'ink' -const maskSensitive = (value: string) => { - return value.replace(/[^a-zA-Z0-9]/g, '*').slice(0, 4) + '***' + value.slice(-4) -} - export default function Env() { useEffect(() => { - process.stdout.write(`SONAR_API_KEY=${maskSensitive(process.env.SONAR_API_KEY ?? 
'')}\n`) - process.stdout.write(`SONAR_AI_VENDOR=${process.env.SONAR_AI_VENDOR}\n`) - process.stdout.write(`SONAR_FEED_RENDER=${process.env.SONAR_FEED_RENDER}\n`) - process.stdout.write(`SONAR_FEED_WIDTH=${process.env.SONAR_FEED_WIDTH}\n`) + process.stdout.write(`SONAR_API_URL=${process.env.SONAR_API_URL ?? ''}\n`) + process.stdout.write(`SONAR_AI_VENDOR=${process.env.SONAR_AI_VENDOR ?? ''}\n`) + process.stdout.write(`SONAR_FEED_RENDER=${process.env.SONAR_FEED_RENDER ?? ''}\n`) + process.stdout.write(`SONAR_FEED_WIDTH=${process.env.SONAR_FEED_WIDTH ?? ''}\n`) + process.stdout.write(`SONAR_MAX_RETRIES=${process.env.SONAR_MAX_RETRIES ?? ''}\n`) }, []) return Environment variables: diff --git a/src/commands/config/nuke.tsx b/src/commands/config/nuke.tsx index a5c80ff..843c27e 100644 --- a/src/commands/config/nuke.tsx +++ b/src/commands/config/nuke.tsx @@ -2,6 +2,8 @@ import { useEffect } from 'react' import { configExists, deleteConfig, deleteDatabase } from '../../lib/config.js' import { Text } from 'ink' import zod from 'zod' +import { existsSync } from 'node:fs' +import { DB_PATH } from '../../lib/db.js' export const options = zod.object({ confirm: zod.boolean().default(false).describe('Pass to confirm deletion'), @@ -11,13 +13,31 @@ type Props = { options: zod.infer } export default function Nuke({ options: flags }: Props) { useEffect(() => { - if (configExists() && flags.confirm) { + if (!flags.confirm) { + return + } + + const hadConfig = configExists() + const hadDb = existsSync(DB_PATH) + + if (hadConfig) { deleteConfig() + } + if (hadDb) { deleteDatabase() + } - process.stdout.write('Workspace deleted at ~/.sonar/config.json and ~/.sonar/database.sqlite\n') + if (!hadConfig && !hadDb) { + process.stdout.write('Nothing to delete. 
No local Sonar config or data database found.\n') process.exit(0) } + + const deleted: string[] = [] + if (hadConfig) deleted.push('~/.sonar/config.json') + if (hadDb) deleted.push(DB_PATH) + + process.stdout.write(`Deleted: ${deleted.join(', ')}\n`) + process.exit(0) }, []) return Tip. (pass --confirm to nuke) diff --git a/src/commands/config/setup.tsx b/src/commands/config/setup.tsx index d0f5dc9..a3972ab 100644 --- a/src/commands/config/setup.tsx +++ b/src/commands/config/setup.tsx @@ -16,11 +16,11 @@ export default function Setup({ options: flags }: Props) { process.exit(1) } - const apiKey = flags.key || process.env.SONAR_API_KEY + const apiKey = flags.key const apiUrl = process.env.SONAR_API_URL if (!apiKey) { - process.stderr.write('SONAR_API_KEY is not set. Generate a key at https://sonar.8640p.info\n') + process.stderr.write('API key required. Run: sonar config setup --key=\n') process.exit(1) } diff --git a/src/commands/config/skill.tsx b/src/commands/config/skill.tsx index 602a3c4..d6d5c4d 100644 --- a/src/commands/config/skill.tsx +++ b/src/commands/config/skill.tsx @@ -6,13 +6,14 @@ import { writeSkillTo } from '../../lib/skill.js' export const options = zod.object({ install: zod.boolean().default(false).describe('Install to ~/.claude/skills/sonar/SKILL.md'), dest: zod.string().optional().describe('Write to a custom path'), + force: zod.boolean().default(false).describe('Overwrite even if file was modified'), }) type Props = { options: zod.infer } export default function Skill({ options: flags }: Props) { useEffect(() => { - writeSkillTo(flags.dest, flags.install) + writeSkillTo(flags.dest, flags.install, flags.force) }, []) return Generating SKILL.md... 
diff --git a/src/commands/data/backup.tsx b/src/commands/data/backup.tsx new file mode 100644 index 0000000..7c6c88d --- /dev/null +++ b/src/commands/data/backup.tsx @@ -0,0 +1,63 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { existsSync, mkdirSync } from 'node:fs' +import { basename, dirname, join } from 'node:path' +import { DB_PATH } from '../../lib/db.js' +import { integrityCheck, copyDbWithSidecars } from '../../lib/data-utils.js' + +export const options = zod.object({ + out: zod.string().optional().describe('Backup output path (default: ~/.sonar/data-backup-.db)'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +function ts(): string { + const d = new Date() + const p = (n: number) => String(n).padStart(2, '0') + return `${d.getUTCFullYear()}${p(d.getUTCMonth() + 1)}${p(d.getUTCDate())}${p(d.getUTCHours())}${p(d.getUTCMinutes())}${p(d.getUTCSeconds())}` +} + +export default function DataBackup({ options: flags }: Props) { + const [error, setError] = useState(null) + + useEffect(() => { + try { + if (!existsSync(DB_PATH)) throw new Error(`source database not found: ${DB_PATH}`) + + const trimmedOut = flags.out?.trim() + const out = trimmedOut && trimmedOut.length > 0 + ? trimmedOut + : join(dirname(DB_PATH), `${basename(DB_PATH, '.db')}-backup-${ts()}.db`) + + mkdirSync(dirname(out), { recursive: true }) + + copyDbWithSidecars(DB_PATH, out) + + const check = integrityCheck(out) + if (check !== 'ok') throw new Error(`backup integrity check failed: ${check}`) + + const result = { ok: true, source: DB_PATH, backup: out } + if (flags.json) { + process.stdout.write(`${JSON.stringify(result, null, 2)}\n`) + } else { + process.stdout.write(`Backup complete: ${out}\n`) + } + process.exit(0) + } catch (e) { + setError(e instanceof Error ? 
e.message : String(e)) + } + }, []) + + useEffect(() => { + if (!error) return + if (flags.json) { + process.stderr.write(`${error}\n`) + process.exit(1) + } + }, [error, flags.json]) + + if (error) return flags.json ? <> : Error: {error} + return flags.json ? <> : Creating backup... +} diff --git a/src/commands/config/data/path.tsx b/src/commands/data/path.tsx similarity index 83% rename from src/commands/config/data/path.tsx rename to src/commands/data/path.tsx index 35c8189..90f37ed 100644 --- a/src/commands/config/data/path.tsx +++ b/src/commands/data/path.tsx @@ -1,6 +1,6 @@ import React, { useEffect } from 'react' import { Text } from 'ink' -import { DB_PATH } from '../../../lib/db.js' +import { DB_PATH } from '../../lib/db.js' export default function DataPath() { useEffect(() => { diff --git a/src/commands/config/data/sync.tsx b/src/commands/data/pull.tsx similarity index 79% rename from src/commands/config/data/sync.tsx rename to src/commands/data/pull.tsx index 0726ed6..1287665 100644 --- a/src/commands/config/data/sync.tsx +++ b/src/commands/data/pull.tsx @@ -1,25 +1,25 @@ import React, { useEffect, useState } from 'react' import { Box, Text } from 'ink' import { unlinkSync, existsSync } from 'node:fs' -import { gql } from '../../../lib/client.js' -import { Spinner } from '../../../components/Spinner.js' +import { gql } from '../../lib/client.js' +import { Spinner } from '../../components/Spinner.js' import { DB_PATH, openDb, upsertTweet, upsertFeedItem, upsertSuggestion, - upsertInterest, + upsertTopic, getSyncState, setSyncState, -} from '../../../lib/db.js' -import { FEED_QUERY, SUGGESTIONS_QUERY, INTERESTS_QUERY } from '../../../lib/data-queries.js' -import type { FeedTweet, Suggestion, Interest } from '../../../lib/data-queries.js' +} from '../../lib/db.js' +import { FEED_QUERY, SUGGESTIONS_QUERY, INTERESTS_QUERY } from '../../lib/data-queries.js' +import type { FeedTweet, Suggestion, Interest } from '../../lib/data-queries.js' interface SyncResult { 
feedCount: number suggestionsCount: number - interestsCount: number + topicsCount: number isSync?: boolean deltaFeed?: number deltaSuggestions?: number @@ -42,7 +42,7 @@ export default function DataSync() { const [feedResult, suggestionsResult, interestsResult] = await Promise.all([ gql<{ feed: FeedTweet[] }>(FEED_QUERY, { hours: null, days: 7, limit: 500 }), gql<{ suggestions: Suggestion[] }>(SUGGESTIONS_QUERY, { status: null, limit: 500 }), - gql<{ projects: Interest[] }>(INTERESTS_QUERY), + gql<{ topics: Interest[] }>(INTERESTS_QUERY), ]) for (const item of feedResult.feed) { @@ -53,14 +53,14 @@ export default function DataSync() { upsertTweet(freshDb, s.tweet) upsertSuggestion(freshDb, { suggestionId: s.suggestionId, tweetId: s.tweet.id, score: s.score, status: s.status, relevance: null, projectsMatched: s.projectsMatched }) } - for (const i of interestsResult.projects) { - upsertInterest(freshDb, i) + for (const t of interestsResult.topics) { + upsertTopic(freshDb, t) } setSyncState(freshDb, 'last_synced_at', new Date().toISOString()) freshDb.close() - setResult({ feedCount: feedResult.feed.length, suggestionsCount: suggestionsResult.suggestions.length, interestsCount: interestsResult.projects.length }) + setResult({ feedCount: feedResult.feed.length, suggestionsCount: suggestionsResult.suggestions.length, topicsCount: interestsResult.topics.length }) return } @@ -74,8 +74,8 @@ export default function DataSync() { gql<{ suggestions: Suggestion[] }>(SUGGESTIONS_QUERY, { status: null, limit: 500 }), ]) - const prevFeedCount = (db.prepare('SELECT COUNT(*) as n FROM feed_items').get() as { n: number }).n - const prevSuggestionsCount = (db.prepare('SELECT COUNT(*) as n FROM suggestions').get() as { n: number }).n + const prevFeedCount = (db.get('SELECT COUNT(*) as n FROM feed_items') as { n: number }).n + const prevSuggestionsCount = (db.get('SELECT COUNT(*) as n FROM suggestions') as { n: number }).n for (const item of feedResult.feed) { upsertTweet(db, 
item.tweet) @@ -88,14 +88,14 @@ export default function DataSync() { setSyncState(db, 'last_synced_at', new Date().toISOString()) - const newFeedCount = (db.prepare('SELECT COUNT(*) as n FROM feed_items').get() as { n: number }).n - const newSuggestionsCount = (db.prepare('SELECT COUNT(*) as n FROM suggestions').get() as { n: number }).n + const newFeedCount = (db.get('SELECT COUNT(*) as n FROM feed_items') as { n: number }).n + const newSuggestionsCount = (db.get('SELECT COUNT(*) as n FROM suggestions') as { n: number }).n db.close() setResult({ feedCount: newFeedCount, suggestionsCount: newSuggestionsCount, - interestsCount: 0, + topicsCount: 0, isSync: true, deltaFeed: newFeedCount - prevFeedCount, deltaSuggestions: newSuggestionsCount - prevSuggestionsCount, @@ -138,8 +138,8 @@ export default function DataSync() { feed items {result.suggestionsCount} suggestions - {result.interestsCount} - interests + {result.topicsCount} + topics ) diff --git a/src/commands/data/restore.tsx b/src/commands/data/restore.tsx new file mode 100644 index 0000000..cb01e8c --- /dev/null +++ b/src/commands/data/restore.tsx @@ -0,0 +1,98 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { existsSync, mkdirSync, rmSync } from 'node:fs' +import { dirname, resolve } from 'node:path' +import { DB_PATH } from '../../lib/db.js' +import { integrityCheck, copyDbWithSidecars } from '../../lib/data-utils.js' + +export const options = zod.object({ + from: zod.string().describe('Backup database path to restore from'), + to: zod.string().optional().describe('Target database path (default: local sonar DB path)'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +export default function DataRestore({ options: flags }: Props) { + const [error, setError] = useState(null) + + useEffect(() => { + try { + const src = resolve(flags.from) + const dst = resolve(flags.to ?? 
DB_PATH) + + // Guard: prevent copying a file onto itself, which would corrupt the DB. + if (src === dst) { + throw new Error( + `Source and destination resolve to the same path: ${src}\n` + + 'Specify a different --to path.' + ) + } + + if (!existsSync(src)) throw new Error(`backup not found: ${src}`) + + // Verify the backup is healthy before touching anything. + const srcCheck = integrityCheck(src) + if (srcCheck !== 'ok') throw new Error(`backup integrity check failed: ${srcCheck}`) + + mkdirSync(dirname(dst), { recursive: true }) + + // Snapshot the current DB — including WAL/SHM sidecars — so we have + // a complete, self-consistent point-in-time snapshot to roll back to if + // anything goes wrong during the restore. + const preRestore = existsSync(dst) ? `${dst}.pre-restore.${Date.now()}` : null + if (preRestore) { + copyDbWithSidecars(dst, preRestore) + } + + // Copy backup → destination (main DB + any sidecars). + copyDbWithSidecars(src, dst) + + // Verify the restored DB before declaring success. + const dstCheck = integrityCheck(dst) + if (dstCheck !== 'ok') { + // The restored file is corrupt. Roll back to the pre-restore snapshot + // so we don't leave the user with a broken local database. + if (preRestore && existsSync(preRestore)) { + copyDbWithSidecars(preRestore, dst) + for (const ext of ['-wal', '-shm']) { + rmSync(`${preRestore}${ext}`, { force: true }) + } + rmSync(preRestore, { force: true }) + throw new Error( + `Restored database failed integrity check (${dstCheck}). ` + + 'Rolled back to the previous database — your data is intact.' + ) + } + throw new Error(`restored database integrity check failed: ${dstCheck}`) + } + + // Clean up the pre-restore snapshot on success. 
+ if (preRestore) { + for (const ext of ['-wal', '-shm']) { + rmSync(`${preRestore}${ext}`, { force: true }) + } + rmSync(preRestore, { force: true }) + } + + const result = { ok: true, from: src, to: dst } + if (flags.json) process.stdout.write(`${JSON.stringify(result, null, 2)}\n`) + else process.stdout.write(`Restore complete: ${src} -> ${dst}\n`) + process.exit(0) + } catch (e) { + setError(e instanceof Error ? e.message : String(e)) + } + }, []) + + useEffect(() => { + if (!error) return + if (flags.json) { + process.stderr.write(`${error}\n`) + process.exit(1) + } + }, [error, flags.json]) + + if (error) return flags.json ? <> : Error: {error} + return flags.json ? <> : Restoring database... +} diff --git a/src/commands/config/data/sql.tsx b/src/commands/data/sql.tsx similarity index 88% rename from src/commands/config/data/sql.tsx rename to src/commands/data/sql.tsx index a04796e..8bc39d5 100644 --- a/src/commands/config/data/sql.tsx +++ b/src/commands/data/sql.tsx @@ -1,7 +1,7 @@ import React, { useEffect } from 'react' import { Text } from 'ink' import { spawnSync } from 'node:child_process' -import { DB_PATH } from '../../../lib/db.js' +import { DB_PATH } from '../../lib/db.js' export default function DataSql() { useEffect(() => { diff --git a/src/commands/data/verify.tsx b/src/commands/data/verify.tsx new file mode 100644 index 0000000..d59243d --- /dev/null +++ b/src/commands/data/verify.tsx @@ -0,0 +1,45 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Text } from 'ink' +import { existsSync } from 'node:fs' +import { DB_PATH } from '../../lib/db.js' +import { integrityCheck } from '../../lib/data-utils.js' + +export const options = zod.object({ + path: zod.string().optional().describe('Database path (default: local sonar DB path)'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +export default function DataVerify({ options: flags }: Props) { + const 
[error, setError] = useState(null) + + useEffect(() => { + try { + const path = flags.path ?? DB_PATH + if (!existsSync(path)) throw new Error(`database not found: ${path}`) + const result = integrityCheck(path) + const ok = result === 'ok' + if (flags.json) { + process.stdout.write(`${JSON.stringify({ ok, path, integrity: result }, null, 2)}\n`) + } else { + process.stdout.write(ok ? `Integrity check passed: ${path}\n` : `Integrity check failed: ${path} (${result})\n`) + } + process.exit(ok ? 0 : 1) + } catch (e) { + setError(e instanceof Error ? e.message : String(e)) + } + }, []) + + useEffect(() => { + if (!error) return + if (flags.json) { + process.stderr.write(`${error}\n`) + process.exit(1) + } + }, [error, flags.json]) + + if (error) return flags.json ? <> : Error: {error} + return flags.json ? <> : Verifying database... +} diff --git a/src/commands/feed.tsx b/src/commands/feed.tsx index 2d19372..6f83867 100644 --- a/src/commands/feed.tsx +++ b/src/commands/feed.tsx @@ -1,337 +1,225 @@ -import React, { useEffect, useState } from 'react' +import React, { useEffect, useRef, useState } from 'react' import zod from 'zod' -import { Box, Text, useStdout } from 'ink' -import Link from 'ink-link' +import { Box, Text, useApp, useInput, useStdout } from 'ink' +import { Banner } from '../components/Banner.js' import { Spinner } from '../components/Spinner.js' -import { Table } from '../components/Table.js' -import { InteractiveFeedSession } from '../components/InteractiveSession.js' import { gql } from '../lib/client.js' -import { getFeedRender, getFeedWidth, getVendor } from '../lib/config.js' +import { getFeedRender, getFeedWidth } from '../lib/config.js' +import { TweetCard, FeedTable } from '../components/TweetCard.js' +import type { FeedTweet } from '../components/TweetCard.js' export const options = zod.object({ hours: zod.number().optional().describe('Look back N hours (default: 12)'), days: zod.number().optional().describe('Look back N days'), limit: 
zod.number().optional().describe('Result limit (default: 20)'), + offset: zod.number().optional().describe('Skip first N results (default: 0)'), kind: zod.string().optional().describe('Feed source: default|bookmarks|followers|following'), render: zod.string().optional().describe('Output layout: card|table'), width: zod.number().optional().describe('Card width in columns'), json: zod.boolean().default(false).describe('Raw JSON output'), - interactive: zod.boolean().default(false).describe('Interactive session mode'), - vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), + follow: zod.boolean().default(false).describe('Continuously poll for new items'), + interval: zod.number().optional().describe('Poll interval in seconds (default: 30)'), }) type Props = { options: zod.infer } +const FEED_QUERY = ` + query Feed($hours: Int, $days: Int, $limit: Int, $offset: Int, $kind: String) { + feed(hours: $hours, days: $days, limit: $limit, offset: $offset, kind: $kind) { + score + matchedKeywords + tweet { + id xid text createdAt likeCount retweetCount replyCount + user { displayName username followersCount followingCount } + } + } + } +` + +const HAS_INTERESTS_QUERY = `query HasInterests { topics { id: nanoId } }` + export default function Feed({ options: flags }: Props) { - const [data, setData] = useState(null) - const [error, setError] = useState(null) + const { exit } = useApp() const { stdout } = useStdout() const termWidth = stdout.columns ?? 100 - - const render = getFeedRender(flags.render) const cardWidth = getFeedWidth(flags.width) + const render = getFeedRender(flags.render) + const pollInterval = Math.max(5, flags.interval ?? 
30) * 1000 + + const [items, setItems] = useState([]) + const [noInterests, setNoInterests] = useState(false) + const [error, setError] = useState(null) + const [initialLoad, setInitialLoad] = useState(true) + const [pollCount, setPollCount] = useState(0) + const seenRef = useRef(new Set()) + + const feedVars = { + hours: flags.hours ?? null, + days: flags.days ?? null, + limit: flags.limit ?? 20, + offset: flags.offset ?? 0, + kind: flags.kind ?? 'default', + } useEffect(() => { - async function run() { + async function poll() { try { - const result = await gql<{ feed: FeedTweet[] }>(FEED_QUERY, { - hours: flags.hours ?? null, - days: flags.days ?? null, - limit: flags.limit ?? 20, - kind: flags.kind ?? 'default', - }) + if (initialLoad) { + const { topics } = await gql<{ topics: { id: string }[] }>(HAS_INTERESTS_QUERY) + if (topics.length === 0) { + setNoInterests(true) + return + } + } - if (flags.json) { - process.stdout.write(`${JSON.stringify(result.feed, null, 2)}\n`) + const res = await gql<{ feed: FeedTweet[] }>(FEED_QUERY, feedVars) + const newItems = res.feed.filter(f => !seenRef.current.has(f.tweet.xid)) + for (const f of newItems) seenRef.current.add(f.tweet.xid) + + if (flags.json && flags.follow) { + // NDJSON: one line per new item + for (const item of newItems) { + process.stdout.write(JSON.stringify(item) + '\n') + } + } else if (flags.json && initialLoad) { + // Single-shot JSON + process.stdout.write(JSON.stringify(res.feed, null, 2) + '\n') process.exit(0) + } else { + setItems(prev => [...prev, ...newItems]) } - setData(result.feed) + setInitialLoad(false) + setPollCount(c => c + 1) + setError(null) } catch (err) { - setError(err instanceof Error ? err.message : String(err)) + if (flags.follow) { + // In follow mode, log to stderr and keep polling + process.stderr.write(`poll error: ${err instanceof Error ? err.message : String(err)}\n`) + } else { + setError(err instanceof Error ? 
err.message : String(err)) + } + setInitialLoad(false) } } - run() - }, [flags.hours, flags.days, flags.limit, flags.json, flags.kind]) - if (error) { - return Error: {error} - } - - if (!data) { - return - } + poll() + if (!flags.follow) return + const timer = setInterval(poll, pollInterval) + return () => clearInterval(timer) + }, []) - if (data.length === 0) { - return No tweets found in this window. - } - - if (flags.interactive) { - return + // Exit after first render in non-follow mode + useEffect(() => { + if (!flags.follow && !initialLoad && !noInterests && !error) { + // Let React render one frame then exit + } + }, [initialLoad]) + + // Quit with 'q' in follow mode + useInput((input) => { + if (input === 'q') exit() + }, { isActive: flags.follow }) + + if (error) return Error: {error} + + if (noInterests) { + return ( + + + No topics yet. Add one to get started: + + sonar topics add "AI agents" + sonar topics add "Rust and systems programming" + + + ) } - const win = windowLabel(flags.hours, flags.days) + if (initialLoad) return - return ( - - - - - {flags.kind === 'bookmarks' - ? 'Bookmarks Feed' - : flags.kind === 'followers' - ? 'Followers Feed' - : flags.kind === 'following' - ? 'Following Feed' - : 'Network Feed'} - - {flags.kind !== 'bookmarks' && ( - - {' · '}last {win} - - )} - ({data.length}) + if (items.length === 0 && !flags.follow) { + return ( + + Nothing to show yet. + + 1. Refresh pipeline: sonar refresh + 2. Widen window: sonar feed --hours 48 + 3. Check status: sonar status - {'─'.repeat(Math.min(termWidth - 2, 72))} - - {render === 'table' ? 
( - - ) : ( - - {data.map((item, i) => ( - - ))} - - )} - - tip adjust window → - sonar feed --hours 24 - - ) -} - -// ─── Types ──────────────────────────────────────────────────────────────────── - -export interface User { - displayName: string - username: string | null - followersCount: number | null - followingCount: number | null -} - -export interface Tweet { - id: string - xid: string - text: string - createdAt: string - likeCount: number - retweetCount: number - replyCount: number - user: User -} - -export interface FeedTweet { - score: number - matchedKeywords: string[] - tweet: Tweet -} - -// ─── Query ──────────────────────────────────────────────────────────────────── - -const FEED_QUERY = ` - query Feed($hours: Int, $days: Int, $limit: Int, $kind: String) { - feed(hours: $hours, days: $days, limit: $limit, kind: $kind) { - score - matchedKeywords - tweet { - id - xid - text - createdAt - likeCount - retweetCount - replyCount - user { - displayName - username - followersCount - followingCount - } - } - } + ) } -` - -// ─── Helpers ────────────────────────────────────────────────────────────────── - -function windowLabel(hours?: number, days?: number): string { - if (hours) return `${hours}h` - if (days) return `${days}d` - return '12h' -} - -function formatTimestamp(dateStr: string): string { - const d = new Date(dateStr) - const month = d.toLocaleString('en-US', { month: 'short' }) - const day = d.getDate() - const hours = d.getHours() - const mins = d.getMinutes().toString().padStart(2, '0') - const ampm = hours >= 12 ? 
'pm' : 'am' - const h = hours % 12 || 12 - return `${month} ${day} · ${h}:${mins}${ampm}` -} - -function relativeTime(dateStr: string): string { - const diff = Date.now() - new Date(dateStr).getTime() - const mins = Math.floor(diff / 60000) - if (mins < 60) return `${mins}m` - const hours = Math.floor(mins / 60) - if (hours < 24) return `${hours}h` - return `${Math.floor(hours / 24)}d` -} - -function formatCount(n: number | null): string | null { - if (n == null) return null - if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M` - if (n >= 1_000) return `${(n / 1_000).toFixed(1)}k` - return String(n) -} - -function scoreColor(score: number): string { - if (score >= 0.7) return 'green' - if (score >= 0.4) return 'yellow' - return 'white' -} - -function linkifyMentions(text: string): string { - return text.replace(/@(\w+)/g, (match, handle) => { - const url = `https://x.com/${handle}` - return `\x1b]8;;${url}\x07\x1b[94m${match}\x1b[39m\x1b]8;;\x07` - }) -} - -function TweetText({ text }: { text: string }) { - return {linkifyMentions(text)} -} - -interface TweetCardProps { - item: FeedTweet - termWidth: number - cardWidth: number - isLast: boolean -} - -export function TweetCard({ item, termWidth, cardWidth, isLast }: TweetCardProps) { - const { tweet, score } = item - const handle = tweet.user.username ?? 
tweet.user.displayName - const author = `@${handle}` - const bodyBoxWidth = Math.min(cardWidth + 2, termWidth) - const profileUrl = `https://x.com/${handle}` - const tweetUrl = `https://x.com/${handle}/status/${tweet.id}` - return ( - + // Follow mode with JSON handled in useEffect (writes directly to stdout) + if (flags.follow && flags.json) { + return ( - - - {formatTimestamp(tweet.createdAt)} - - - {relativeTime(tweet.createdAt)} - {score > 0 && ( - <> - · - {score.toFixed(2)} - - )} + + ) + } - - {'└'} - - - {author} - - - {formatCount(tweet.user.followersCount) && ( - <> - - {' '} - {formatCount(tweet.user.followersCount)} followers - - {formatCount(tweet.user.followingCount) && ( - - {' '} - · {formatCount(tweet.user.followingCount)} following - - )} - - )} - + const kindLabel = + flags.kind === 'bookmarks' ? 'Bookmarks' + : flags.kind === 'followers' ? 'Followers' + : flags.kind === 'following' ? 'Following' + : 'For you' - - - + const win = flags.days ? `${flags.days}d` : `${flags.hours ?? 12}h` - - ♥ {tweet.likeCount} - - ↺ {tweet.retweetCount} - {tweet.replyCount > 0 && ( - <> - - ↩ {tweet.replyCount} - + if (render === 'table') { + return ( + + + {kindLabel} + · last {win} ({items.length}) + + + {flags.follow && ( + + polling every {pollInterval / 1000}s · {items.length} items · q to quit + )} + ) + } - {item.matchedKeywords.length > 0 && ( - - keywords - {item.matchedKeywords.join(' ')} + return ( + + + + {kindLabel} + · last {win} + ({items.length}) - )} + {'─'.repeat(Math.min(termWidth - 2, 72))} + - - - {profileUrl} - - · - - {tweetUrl} - + + {items.map((item, i) => ( + + ))} - {!isLast && ( + {flags.follow ? 
( + + polling every {pollInterval / 1000}s · {items.length} items · q to quit + + ) : ( - {'─'.repeat(Math.min(termWidth - 2, 72))} + tip refresh → + sonar refresh + · follow → + sonar feed --follow )} ) } - -function osc8Link(url: string, label: string): string { - return `\x1b]8;;${url}\x07${label}\x1b]8;;\x07` -} - -function FeedTable({ data }: { data: FeedTweet[] }) { - const rows = data.map((item) => { - const handle = item.tweet.user.username ?? item.tweet.user.displayName - const tweetUrl = `https://x.com/${handle}/status/${item.tweet.id}` - return { - age: osc8Link(tweetUrl, relativeTime(item.tweet.createdAt)), - score: item.score > 0 ? item.score.toFixed(2) : '—', - author: `@${handle}`, - tweet: item.tweet.text.replace(/\n/g, ' ').slice(0, 80), - } - }) - return -} diff --git a/src/commands/inbox/index.tsx b/src/commands/inbox/index.tsx deleted file mode 100644 index c8f6caa..0000000 --- a/src/commands/inbox/index.tsx +++ /dev/null @@ -1,131 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' -import { Table } from '../../components/Table.js' -import { InteractiveInboxSession } from '../../components/InteractiveSession.js' -import { getVendor } from '../../lib/config.js' - -export const options = zod.object({ - status: zod.string().optional().describe('Filter by status: inbox|later|replied|archived'), - limit: zod.number().default(20).describe('Result limit'), - all: zod.boolean().default(false).describe('Show all statuses'), - json: zod.boolean().default(false).describe('Raw JSON output'), - interactive: zod.boolean().default(false).describe('Interactive session mode'), - vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), -}) - -type Props = { options: zod.infer } - -export interface Suggestion { - suggestionId: string - score: number - projectsMatched: number - status: 
string - relevance: number | null - tweet: { - xid: string - text: string - createdAt: string - likeCount: number - retweetCount: number - user: { - displayName: string - username: string | null - } - } -} - -const LIST_QUERY = ` - query Inbox($status: SuggestionStatus, $limit: Int) { - suggestions(status: $status, limit: $limit) { - suggestionId - score - projectsMatched - status - tweet { - xid - text - createdAt - user { - displayName - username - } - } - } - } -` - -function relativeTime(dateStr: string): string { - const diff = Date.now() - new Date(dateStr).getTime() - const mins = Math.floor(diff / 60000) - if (mins < 60) return `${mins}m` - const hours = Math.floor(mins / 60) - if (hours < 24) return `${hours}h` - return `${Math.floor(hours / 24)}d` -} - -export default function Inbox({ options: flags }: Props) { - const [data, setData] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const status = flags.all ? null : (flags.status?.toUpperCase() ?? 'INBOX') - const result = await gql<{ suggestions: Suggestion[] }>(LIST_QUERY, { - status, - limit: flags.limit, - }) - - if (flags.json) { - process.stdout.write(JSON.stringify(result.suggestions, null, 2) + '\n') - process.exit(0) - } - - setData(result.suggestions) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) { - return Error: {error} - } - - if (!data) { - return - } - - if (data.length === 0) { - return Inbox is empty. - } - - if (flags.interactive) { - return - } - - const rows = data.map((s) => ({ - id: s.suggestionId.slice(0, 8), - score: s.score.toFixed(2), - interests: s.projectsMatched, - age: relativeTime(s.tweet.createdAt), - author: `@${s.tweet.user.username ?? s.tweet.user.displayName}`, - tweet: s.tweet.text.replace(/\n/g, ' ').slice(0, 80), - })) - - const label = flags.all ? 'All' : (flags.status ? 
flags.status.toLowerCase() : 'Inbox') - - return ( - - - {label} - ({data.length}) - -
- - ) -} diff --git a/src/commands/inbox/read.tsx b/src/commands/inbox/read.tsx deleted file mode 100644 index 10d9984..0000000 --- a/src/commands/inbox/read.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' - -export const options = zod.object({ - id: zod.string().describe('Suggestion ID to mark as read'), -}) - -type Props = { options: zod.infer } - -const UPDATE_MUTATION = ` - mutation UpdateSuggestion($suggestionId: ID!, $status: SuggestionStatus!) { - updateSuggestion(input: { suggestionId: $suggestionId, status: $status }) { - suggestionId - status - } - } -` - -export default function InboxRead({ options: flags }: Props) { - const [result, setResult] = useState<{ suggestionId: string; status: string } | null>(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const res = await gql<{ updateSuggestion: { suggestionId: string; status: string } }>(UPDATE_MUTATION, { - suggestionId: flags.id, - status: 'READ', - }) - setResult(res.updateSuggestion) - } catch (err) { - setError(err instanceof Error ? 
err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (!result) return - - return ( - - {result.suggestionId.slice(0, 8)} - {' → '} - {result.status.toLowerCase()} - - ) -} diff --git a/src/commands/index.tsx b/src/commands/index.tsx index aff1225..7eaaf3d 100644 --- a/src/commands/index.tsx +++ b/src/commands/index.tsx @@ -1,25 +1,282 @@ -import { Text, Box } from 'ink' +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text, useStdout } from 'ink' +import { Banner } from '../components/Banner.js' +import { Spinner } from '../components/Spinner.js' +import { TriageSession } from '../components/InteractiveSession.js' +import type { TriageItem } from '../components/InteractiveSession.js' +import { gql } from '../lib/client.js' +import { getFeedRender, getFeedWidth, getVendor } from '../lib/config.js' +import { TweetCard } from '../components/TweetCard.js' +import type { FeedTweet } from '../components/TweetCard.js' + +export const args = zod.tuple([]).rest(zod.string()) + +export const options = zod.object({ + hours: zod.number().optional().describe('Look back N hours (default: 12)'), + days: zod.number().optional().describe('Look back N days'), + limit: zod.number().optional().describe('Result limit (default: 20)'), + kind: zod.string().optional().describe('Feed source: default|bookmarks|followers|following'), + render: zod.string().optional().describe('Output layout: card|table'), + width: zod.number().optional().describe('Card width in columns'), + json: zod.boolean().default(false).describe('Raw JSON output'), + interactive: zod.boolean().default(true).describe('Interactive session mode (default: on, use --no-interactive to disable)'), + vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), +}) + +type Props = { options: zod.infer; args: string[] } + +interface SuggestionItem { + suggestionId: string + score: number + tweet: { + id: string + xid: string + text: 
string + createdAt: string + likeCount: number + retweetCount: number + replyCount: number + user: { displayName: string; username: string | null; followersCount: number | null; followingCount: number | null } + } +} + +interface UnifiedItem extends TriageItem { + source: 'suggestion' | 'feed' +} + +const FEED_QUERY = ` + query Feed($hours: Int, $days: Int, $limit: Int, $kind: String) { + feed(hours: $hours, days: $days, limit: $limit, kind: $kind) { + score + matchedKeywords + tweet { + id xid text createdAt likeCount retweetCount replyCount + user { displayName username followersCount followingCount } + } + } + } +` + +const INBOX_QUERY = ` + query Inbox($status: SuggestionStatus, $limit: Int, $offset: Int) { + suggestions(status: $status, limit: $limit, offset: $offset) { + suggestionId score + tweet { + id xid text createdAt likeCount retweetCount replyCount + user { displayName username followersCount followingCount } + } + } + suggestionCounts { inbox } + } +` + +const HAS_INTERESTS_QUERY = `query HasInterests { topics { id: nanoId } }` + +export default function Sonar({ options: flags, args: positionalArgs }: Props) { + const [items, setItems] = useState(null) + const [total, setTotal] = useState(0) + const [noInterests, setNoInterests] = useState(false) + const [error, setError] = useState(null) + + // Unknown subcommand — show help hint + if (positionalArgs && positionalArgs.length > 0) { + return ( + + Unknown command: {positionalArgs.join(' ')} + Run sonar --help to see available commands. + + ) + } + const { stdout } = useStdout() + const termWidth = stdout.columns ?? 100 + const cardWidth = getFeedWidth(flags.width) + const render = getFeedRender(flags.render) + + useEffect(() => { + async function run() { + try { + const limit = flags.limit ?? 
20 + + const { topics } = await gql<{ topics: { id: string }[] }>(HAS_INTERESTS_QUERY) + if (topics.length === 0) { + setNoInterests(true) + return + } + + const [feedRes, inboxRes] = await Promise.all([ + gql<{ feed: FeedTweet[] }>(FEED_QUERY, { + hours: flags.hours ?? null, + days: flags.days ?? null, + limit, + kind: flags.kind ?? 'default', + }), + gql<{ suggestions: SuggestionItem[]; suggestionCounts: { inbox: number } }>(INBOX_QUERY, { status: 'INBOX', limit, offset: 0 }), + ]) + + const inboxTotal = inboxRes.suggestionCounts.inbox + + // Merge: deduplicate by xid, suggestions take priority, sort by score + const seen = new Set() + const merged: UnifiedItem[] = [] + + for (const s of inboxRes.suggestions) { + if (!seen.has(s.tweet.xid)) { + seen.add(s.tweet.xid) + merged.push({ + key: s.tweet.xid, + score: s.score, + source: 'suggestion', + suggestionId: s.suggestionId, + matchedKeywords: [], + tweet: { + id: s.tweet.id, + xid: s.tweet.xid, + text: s.tweet.text, + createdAt: s.tweet.createdAt, + likeCount: s.tweet.likeCount, + retweetCount: s.tweet.retweetCount, + replyCount: s.tweet.replyCount, + user: s.tweet.user, + }, + }) + } + } + + for (const f of feedRes.feed) { + if (!seen.has(f.tweet.xid)) { + seen.add(f.tweet.xid) + merged.push({ + key: f.tweet.xid, + score: f.score, + source: 'feed', + matchedKeywords: f.matchedKeywords, + tweet: f.tweet, + }) + } + } + + merged.sort((a, b) => b.score - a.score) + + if (flags.json) { + process.stdout.write(JSON.stringify(merged, null, 2) + '\n') + process.exit(0) + } + + setItems(merged) + setTotal(inboxTotal) + } catch (err) { + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, [flags.hours, flags.days, flags.limit, flags.kind, flags.json]) + + if (error) return Error: {error} + + if (noInterests) { + return ( + + + No topics yet. 
Add one to get started: + + sonar topics add "AI agents" + sonar topics add "Rust and systems programming" + + + ) + } + + if (!items) return + + if (items.length === 0) { + return ( + + Nothing to show yet. + + 1. Refresh pipeline: sonar refresh + 2. Widen window: sonar --hours 48 + 3. Check status: sonar status + + + ) + } + + if (flags.interactive) { + const pageSize = flags.limit ?? 20 + const fetchMore = async (offset: number): Promise => { + const res = await gql<{ suggestions: SuggestionItem[] }>(INBOX_QUERY, { + status: 'INBOX', limit: pageSize, offset, + }) + return res.suggestions.map(s => ({ + key: s.tweet.xid, + score: s.score, + source: 'suggestion' as const, + suggestionId: s.suggestionId, + matchedKeywords: [], + tweet: { + id: s.tweet.id, + xid: s.tweet.xid, + text: s.tweet.text, + createdAt: s.tweet.createdAt, + likeCount: s.tweet.likeCount, + retweetCount: s.tweet.retweetCount, + replyCount: s.tweet.replyCount, + user: s.tweet.user, + }, + })) + } + return + } + + const kindLabel = + flags.kind === 'bookmarks' ? 'Bookmarks' + : flags.kind === 'followers' ? 'Followers' + : flags.kind === 'following' ? 'Following' + : 'For you' + + const win = flags.days ? `${flags.days}d` : `${flags.hours ?? 
12}h` -export default function Index() { return ( - - Sonar CLI + + + + {kindLabel} + · last {win} + ({items.length}) + + {'─'.repeat(Math.min(termWidth - 2, 72))} + + - Commands: - feed Scored tweet feed from your network - inbox Suggestions matching your interests - interests Manage interests - └── create Create a new interest - └── update Update an interest - └── match Match interests to ingested tweets - ingest Ingest tweets and bookmarks - └── tweets Ingest recent tweets from social graph - └── bookmarks Ingest X bookmarks - monitor Job queue monitor and account status - config Show or set CLI config - account Account info and plan usage + {items.map((item, i) => ( + + + {item.suggestionId && ( + + + {item.suggestionId.slice(0, 8)}{' · '} + sonar archive --id {item.suggestionId.slice(0, 8)}{' · '} + sonar later --id {item.suggestionId.slice(0, 8)}{' · '} + sonar skip --id {item.suggestionId.slice(0, 8)} + + + )} + + ))} + + + + tip refresh → + sonar refresh + · widen window → + sonar --hours 48 - Run sonar <command> --help for command-specific options. ) } diff --git a/src/commands/ingest/bookmarks.tsx b/src/commands/ingest/bookmarks.tsx deleted file mode 100644 index 573a335..0000000 --- a/src/commands/ingest/bookmarks.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import React, { useEffect, useState } from 'react' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' -import { RefreshTip } from '../../components/RefreshTip.js' - -export default function IndexBookmarks() { - const [queued, setQueued] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const res = await gql<{ indexBookmarks: boolean }>(` - mutation IndexBookmarks { - indexBookmarks - } - `) - setQueued(res.indexBookmarks) - } catch (err) { - setError(err instanceof Error ? 
err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (queued === null) return - - return ( - - - index_bookmarks: - {queued ? '✓ queued' : '✗ failed'} - - - - ) -} diff --git a/src/commands/ingest/index.tsx b/src/commands/ingest/index.tsx deleted file mode 100644 index c955918..0000000 --- a/src/commands/ingest/index.tsx +++ /dev/null @@ -1,18 +0,0 @@ -import { Box, Text } from 'ink' - -export default function Ingest() { - return ( - - sonar ingest - - Subcommands: - tweets Ingest recent tweets from your network - bookmarks Ingest X bookmarks - - - Examples: - sonar ingest tweets - - - ) -} diff --git a/src/commands/ingest/tweets.tsx b/src/commands/ingest/tweets.tsx deleted file mode 100644 index 98e66f4..0000000 --- a/src/commands/ingest/tweets.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import React, { useEffect, useState } from 'react' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' -import { RefreshTip } from '../../components/RefreshTip.js' - -export default function IndexTweets() { - const [queued, setQueued] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const res = await gql<{ indexTweets: boolean }>(` - mutation IndexTweets { - indexTweets - } - `) - setQueued(res.indexTweets) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (queued === null) return - - return ( - - - index_tweets: - {queued ? 
'✓ queued' : '✗ failed'} - - - - ) -} diff --git a/src/commands/interests/create.tsx b/src/commands/interests/create.tsx deleted file mode 100644 index 6055c15..0000000 --- a/src/commands/interests/create.tsx +++ /dev/null @@ -1,126 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { generateInterest } from '../../lib/ai.js' -import { getVendor } from '../../lib/config.js' -import { Spinner } from '../../components/Spinner.js' -import type { Interest } from './index.js' - -export const options = zod.object({ - name: zod.string().optional().describe('Interest name'), - description: zod.string().optional().describe('Interest description'), - keywords: zod.string().optional().describe('Comma-separated keywords'), - topics: zod.string().optional().describe('Comma-separated related topics'), - fromPrompt: zod.string().optional().describe('Generate fields from a natural language prompt'), - vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), - json: zod.boolean().default(false).describe('Raw JSON output'), -}) - -type Props = { options: zod.infer } - -const CREATE_MUTATION = ` - mutation CreateOrUpdateInterest( - $nanoId: String - $name: String! - $description: String - $keywords: [String!] - $relatedTopics: [String!] - ) { - createOrUpdateProject(input: { - nanoId: $nanoId - name: $name - description: $description - keywords: $keywords - relatedTopics: $relatedTopics - }) { - id: nanoId - name - description - keywords - relatedTopics - version - createdAt - updatedAt - } - } -` - -export default function InterestsCreate({ options: flags }: Props) { - const [data, setData] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - let name = flags.name - let description = flags.description ?? null - let keywords = flags.keywords ? 
flags.keywords.split(',').map((k) => k.trim()) : null - let relatedTopics = flags.topics ? flags.topics.split(',').map((t) => t.trim()) : null - - if (flags.fromPrompt) { - const vendor = getVendor(flags.vendor) - const generated = await generateInterest(flags.fromPrompt, vendor) - name = generated.name - description = generated.description - keywords = generated.keywords - relatedTopics = generated.relatedTopics - } - - if (!name) { - setError('--name or --from-prompt is required') - return - } - - const result = await gql<{ createOrUpdateProject: Interest }>(CREATE_MUTATION, { - nanoId: null, - name, - description, - keywords, - relatedTopics, - }) - - if (flags.json) { - process.stdout.write(JSON.stringify(result.createOrUpdateProject, null, 2) + '\n') - process.exit(0) - } - - setData(result.createOrUpdateProject) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - - if (!data) { - const label = flags.fromPrompt - ? `Generating interest via ${getVendor(flags.vendor)}...` - : 'Creating interest...' 
- return - } - - return ( - - - {data.name} - v{data.version} · {data.id} · created - - {data.description && {data.description}} - {data.keywords && data.keywords.length > 0 && ( - - keywords: - {data.keywords.join(', ')} - - )} - {data.relatedTopics && data.relatedTopics.length > 0 && ( - - topics: - {data.relatedTopics.join(', ')} - - )} - - ) -} diff --git a/src/commands/interests/match.tsx b/src/commands/interests/match.tsx deleted file mode 100644 index 2f601f2..0000000 --- a/src/commands/interests/match.tsx +++ /dev/null @@ -1,47 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' -import { RefreshTip } from '../../components/RefreshTip.js' - -export const options = zod.object({ - days: zod.number().optional().describe('Tweet window in days (default: 1, capped by plan)'), -}) - -type Props = { options: zod.infer } - -export default function InterestsMatch({ options: flags }: Props) { - const [queued, setQueued] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const res = await gql<{ regenerateSuggestions: boolean }>( - `mutation RegenerateSuggestions($days: Int) { - regenerateSuggestions(days: $days) - }`, - { days: flags.days ?? 1 }, - ) - setQueued(res.regenerateSuggestions) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - if (queued === null) return - - return ( - - - interests match: - {queued ? 
'✓ queued' : '✗ failed'} - - - - ) -} diff --git a/src/commands/interests/update.tsx b/src/commands/interests/update.tsx deleted file mode 100644 index 232d5d3..0000000 --- a/src/commands/interests/update.tsx +++ /dev/null @@ -1,174 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text } from 'ink' -import { gql } from '../../lib/client.js' -import { generateInterest } from '../../lib/ai.js' -import { getVendor } from '../../lib/config.js' -import { Spinner } from '../../components/Spinner.js' -import type { Interest } from './index.js' - -export const options = zod.object({ - id: zod.string().describe('Interest ID to update'), - name: zod.string().optional().describe('New name'), - description: zod.string().optional().describe('New description'), - keywords: zod.string().optional().describe('Comma-separated keywords (full replace)'), - topics: zod.string().optional().describe('Comma-separated related topics (full replace)'), - addKeywords: zod.string().optional().describe('Comma-separated keywords to add'), - removeKeywords: zod.string().optional().describe('Comma-separated keywords to remove'), - addTopics: zod.string().optional().describe('Comma-separated topics to add'), - removeTopics: zod.string().optional().describe('Comma-separated topics to remove'), - fromPrompt: zod.string().optional().describe('Regenerate all fields from a prompt'), - vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), - json: zod.boolean().default(false).describe('Raw JSON output'), -}) - -type Props = { options: zod.infer } - -const QUERY = ` - query Interests { - projects { - id: nanoId - name - description - keywords - relatedTopics - version - createdAt - updatedAt - } - } -` - -const UPDATE_MUTATION = ` - mutation CreateOrUpdateInterest( - $nanoId: String - $name: String! - $description: String - $keywords: [String!] - $relatedTopics: [String!] 
- ) { - createOrUpdateProject(input: { - nanoId: $nanoId - name: $name - description: $description - keywords: $keywords - relatedTopics: $relatedTopics - }) { - id: nanoId - name - description - keywords - relatedTopics - version - createdAt - updatedAt - } - } -` - -async function fetchById(id: string): Promise { - const result = await gql<{ projects: Interest[] }>(QUERY) - const found = result.projects.find((p) => p.id === id) - if (!found) throw new Error(`Interest with id "${id}" not found`) - return found -} - -export default function InterestsUpdate({ options: flags }: Props) { - const [data, setData] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - async function run() { - try { - const isPatch = !!(flags.addKeywords || flags.removeKeywords || flags.addTopics || flags.removeTopics) - - let name = flags.name - let description = flags.description ?? null - let keywords = flags.keywords ? flags.keywords.split(',').map((k) => k.trim()) : null - let relatedTopics = flags.topics ? flags.topics.split(',').map((t) => t.trim()) : null - - if (isPatch) { - const existing = await fetchById(flags.id) - name = flags.name ?? existing.name - description = flags.description ?? existing.description ?? null - - const addKw = flags.addKeywords ? flags.addKeywords.split(',').map((k) => k.trim()).filter(Boolean) : [] - const removeKw = flags.removeKeywords ? new Set(flags.removeKeywords.split(',').map((k) => k.trim())) : new Set() - const existingKw = existing.keywords ?? [] - keywords = [...new Set([...existingKw.filter((k: string) => !removeKw.has(k)), ...addKw])] - - const addT = flags.addTopics ? flags.addTopics.split(',').map((t) => t.trim()).filter(Boolean) : [] - const removeT = flags.removeTopics ? new Set(flags.removeTopics.split(',').map((t) => t.trim())) : new Set() - const existingT = existing.relatedTopics ?? 
[] - relatedTopics = [...new Set([...existingT.filter((t: string) => !removeT.has(t)), ...addT])] - } else if (flags.fromPrompt) { - const vendor = getVendor(flags.vendor) - const generated = await generateInterest(flags.fromPrompt, vendor) - name = generated.name - description = generated.description - keywords = generated.keywords - relatedTopics = generated.relatedTopics - } - - if (!name) { - const existing = await fetchById(flags.id) - name = existing.name - if (!description) description = existing.description ?? null - if (!keywords) keywords = existing.keywords ?? null - if (!relatedTopics) relatedTopics = existing.relatedTopics ?? null - } - - const result = await gql<{ createOrUpdateProject: Interest }>(UPDATE_MUTATION, { - nanoId: flags.id, - name, - description, - keywords, - relatedTopics, - }) - - if (flags.json) { - process.stdout.write(JSON.stringify(result.createOrUpdateProject, null, 2) + '\n') - process.exit(0) - } - - setData(result.createOrUpdateProject) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - run() - }, []) - - if (error) return Error: {error} - - if (!data) { - const label = flags.fromPrompt - ? `Generating interest via ${getVendor(flags.vendor)}...` - : (flags.addKeywords || flags.removeKeywords || flags.addTopics || flags.removeTopics) - ? 'Updating interest...' - : 'Updating interest...' 
- return - } - - return ( - - - {data.name} - v{data.version} · {data.id} · updated - - {data.description && {data.description}} - {data.keywords && data.keywords.length > 0 && ( - - keywords: - {data.keywords.join(', ')} - - )} - {data.relatedTopics && data.relatedTopics.length > 0 && ( - - topics: - {data.relatedTopics.join(', ')} - - )} - - ) -} diff --git a/src/commands/inbox/later.tsx b/src/commands/later.tsx similarity index 93% rename from src/commands/inbox/later.tsx rename to src/commands/later.tsx index ee70981..2e214fb 100644 --- a/src/commands/inbox/later.tsx +++ b/src/commands/later.tsx @@ -1,8 +1,8 @@ import React, { useEffect, useState } from 'react' import zod from 'zod' import { Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' +import { gql } from '../lib/client.js' +import { Spinner } from '../components/Spinner.js' export const options = zod.object({ id: zod.string().describe('Suggestion ID to save for later'), diff --git a/src/commands/monitor.tsx b/src/commands/monitor.tsx deleted file mode 100644 index bba3038..0000000 --- a/src/commands/monitor.tsx +++ /dev/null @@ -1,130 +0,0 @@ -import React, { useEffect, useState } from 'react' -import zod from 'zod' -import { Box, Text, useApp } from 'ink' -import { getToken, getApiUrl } from '../lib/config.js' -import { gql } from '../lib/client.js' -import { Spinner } from '../components/Spinner.js' -import { AccountCard } from '../components/AccountCard.js' -import type { Account } from '../components/AccountCard.js' - -export const options = zod.object({ - watch: zod.boolean().default(false).describe('Poll and refresh every 2 seconds'), -}) - -type Props = { options: zod.infer } - -interface QueueCounts { - queued: number - running: number -} - -interface MonitorData { - me: Account - queues: Record -} - -const POLL_INTERVAL = 2000 - -const QUEUE_LABELS: Record = { - tweets: 'Tweets', - bookmarks: 'Bookmarks', - social_graph: 'Social 
graph', - suggestions: 'Suggestions', -} - -export default function Monitor({ options: flags }: Props) { - const { exit } = useApp() - const [data, setData] = useState(null) - const [error, setError] = useState(null) - - useEffect(() => { - const token = getToken() - const baseUrl = getApiUrl().replace(/\/graphql$/, '') - - async function fetchStatus() { - try { - const [statusRes, meRes] = await Promise.all([ - fetch(`${baseUrl}/indexing/status`, { - headers: { Authorization: `Bearer ${token}` }, - }), - gql<{ me: Account }>(` - query MonitorStatus { - me { - accountId - email - xHandle - xid - isPayingCustomer - indexingAccounts - indexedTweets - pendingEmbeddings - twitterIndexedAt - refreshedSuggestionsAt - } - } - `), - ]) - if (!statusRes.ok) throw new Error(`HTTP ${statusRes.status} from ${baseUrl}`) - const status = await statusRes.json() - setData({ me: meRes.me, queues: status.queues }) - setError(null) - } catch (err) { - setError(err instanceof Error ? err.message : String(err)) - } - } - - fetchStatus() - if (!flags.watch) return - const timer = setInterval(fetchStatus, POLL_INTERVAL) - return () => clearInterval(timer) - }, []) - - useEffect(() => { - if (!flags.watch && data !== null) exit() - }, [data]) - - useEffect(() => { - if (!flags.watch && error !== null) exit(new Error(error)) - }, [error]) - - if (error) return Error: {error} - if (!data) return - - const { me, queues } = data - const entries = Object.entries(queues) - const hasActivity = entries.length > 0 || me.pendingEmbeddings > 0 - - return ( - - - - Job Queues - {!hasActivity ? ( - <> - No active ingest jobs. - Run sonar interests match to start surface relevant tweets. - - ) : ( - - - {('Queue').padEnd(16)} - {'Running'.padEnd(10)} - Queued - - {entries.map(([name, counts]) => ( - - {(QUEUE_LABELS[name] ?? name).padEnd(16)} - 0 ? 'green' : 'white'}> - {String(counts.running).padEnd(10)} - - 0 ? 
'yellow' : 'white'}> - {counts.queued} - - - ))} - - )} - - - ) -} diff --git a/src/commands/refresh.tsx b/src/commands/refresh.tsx new file mode 100644 index 0000000..d8c8a15 --- /dev/null +++ b/src/commands/refresh.tsx @@ -0,0 +1,133 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text, useApp } from 'ink' +import { gql } from '../lib/client.js' +import { getToken, getApiUrl } from '../lib/config.js' +import { Spinner } from '../components/Spinner.js' + +export const options = zod.object({ + bookmarks: zod.boolean().default(false).describe('Sync bookmarks from X'), + likes: zod.boolean().default(false).describe('Sync likes from X'), + graph: zod.boolean().default(false).describe('Rebuild social graph'), + tweets: zod.boolean().default(false).describe('Index tweets across network'), + suggestions: zod.boolean().default(false).describe('Regenerate suggestions'), +}) + +type Props = { options: zod.infer } + +type Status = 'pending' | 'running' | 'ok' | 'failed' | 'auth-failed' + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} + +const REFRESH_MUTATION = ` + mutation Refresh($days: Int!, $steps: [String!]) { + refresh(days: $days, steps: $steps) + } +` + +export default function Refresh({ options: flags }: Props) { + const { exit } = useApp() + const [status, setStatus] = useState('pending') + const [error, setError] = useState(null) + const [batchId, setBatchId] = useState(null) + + // Build steps array from flags — null means run all + const selectedSteps: string[] = [] + if (flags.bookmarks) selectedSteps.push('bookmarks') + if (flags.likes) selectedSteps.push('likes') + if (flags.graph) selectedSteps.push('graph') + if (flags.tweets) selectedSteps.push('tweets') + if (flags.suggestions) selectedSteps.push('suggestions') + const steps = selectedSteps.length > 0 ? 
selectedSteps : null + + useEffect(() => { + async function run() { + setStatus('running') + try { + const result = await gql<{ refresh: string }>(REFRESH_MUTATION, { + days: 1, + steps, + }) + setBatchId(result.refresh) + + // Brief poll to catch instant pipeline failures (e.g. expired X auth) + await sleep(3000) + try { + const token = getToken() + const baseUrl = getApiUrl().replace(/\/graphql$/, '') + const res = await fetch(`${baseUrl}/indexing/status`, { + headers: { Authorization: `Bearer ${token}` }, + }) + if (res.ok) { + const data = await res.json() + if (data.pipeline?.status === 'failed') { + const pipelineError = data.pipeline?.error ?? '' + setError(pipelineError) + setStatus('auth-failed') + return + } + } + } catch { + // Poll failed — not critical, proceed normally + } + + setStatus('ok') + } catch (err) { + setStatus('failed') + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, []) + + useEffect(() => { + if (status === 'ok' || status === 'failed' || status === 'auth-failed') exit() + }, [status]) + + const label = steps ? steps.join(', ') : 'full pipeline' + + if (status === 'running') { + return + } + + if (status === 'auth-failed') { + return ( + + Pipeline failed{error ? `: ${error}` : ''} + + Re-connect your X account at https://sonar.8640p.info/account + + + Then run sonar refresh to retry. + + + ) + } + + if (status === 'failed') { + const isAuthError = error?.includes('Re-authorize') || error?.includes('not connected') + if (isAuthError) { + return ( + + X authorization required + + Connect your X account at https://sonar.8640p.info/account + + + ) + } + return Error: {error} + } + + return ( + + ✓ Refresh queued ({label}) + {batchId && batch: {batchId}} + + Run sonar status --watch to monitor progress. 
+ + + ) +} diff --git a/src/commands/inbox/skip.tsx b/src/commands/skip.tsx similarity index 93% rename from src/commands/inbox/skip.tsx rename to src/commands/skip.tsx index 033774f..df95af4 100644 --- a/src/commands/inbox/skip.tsx +++ b/src/commands/skip.tsx @@ -1,8 +1,8 @@ import React, { useEffect, useState } from 'react' import zod from 'zod' import { Text } from 'ink' -import { gql } from '../../lib/client.js' -import { Spinner } from '../../components/Spinner.js' +import { gql } from '../lib/client.js' +import { Spinner } from '../components/Spinner.js' export const options = zod.object({ id: zod.string().describe('Suggestion ID to skip'), diff --git a/src/commands/status.tsx b/src/commands/status.tsx new file mode 100644 index 0000000..57d53b9 --- /dev/null +++ b/src/commands/status.tsx @@ -0,0 +1,321 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text, useApp, useInput } from 'ink' +import { formatDistanceToNow } from 'date-fns' +import { getToken, getApiUrl } from '../lib/config.js' +import { gql } from '../lib/client.js' +import { Spinner } from '../components/Spinner.js' +import type { Account } from '../components/AccountCard.js' + +export const options = zod.object({ + watch: zod.boolean().default(false).describe('Poll and refresh every 2 seconds'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +interface QueueCounts { queued: number; running: number; deferred?: number } +interface DimensionUsage { used: number; limit: number | null; atLimit: boolean } +interface SuggestionRefreshUsage { used: number; limit: number | null; atLimit: boolean; resetsAt: string | null } +interface Usage { + plan: string + interests: DimensionUsage + bookmarksEnabled: boolean + suggestionRefreshes: SuggestionRefreshUsage +} + +interface SuggestionCounts { + inbox: number; later: number; archived: number; total: number +} + +interface PipelineStep { label: string; 
duration: number } +interface PipelineProgress { + batch_id: string + username: string + status: 'running' | 'complete' | 'failed' + current: string + steps: PipelineStep[] + total_duration: number + error?: string +} + +interface StatusData { + me: Account + queues: Record + pipeline: PipelineProgress | null + usage: Usage | null + suggestionCounts: SuggestionCounts +} + +const POLL_INTERVAL = 2000 + +const QUEUE_LABELS: Record = { + tweets: 'Tweets', + bookmarks: 'Bookmarks', + social_graph: 'Social graph', + suggestions: 'Suggestions', + default: 'Pipeline', + topics: 'Topics', +} + +const GQL_QUERY = ` + query Status { + me { + accountId email xHandle xid isPayingCustomer + indexingAccounts indexedTweets pendingEmbeddings + twitterIndexedAt refreshedSuggestionsAt + } + suggestionCounts { + inbox later archived total + } + usage { + plan + interests { used limit atLimit } + bookmarksEnabled + suggestionRefreshes { used limit atLimit resetsAt } + } + } +` + +function timeAgo(iso: string | null): string { + if (!iso) return 'never' + return formatDistanceToNow(new Date(iso), { addSuffix: true }) +} + +export default function Status({ options: flags }: Props) { + const { exit } = useApp() + const [data, setData] = useState(null) + const [error, setError] = useState(null) + + useEffect(() => { + const token = getToken() + const baseUrl = getApiUrl().replace(/\/graphql$/, '') + + async function fetchStatus() { + const controller = new AbortController() + const timer = setTimeout(() => controller.abort(), 10_000) + try { + const [statusRes, gqlRes] = await Promise.all([ + fetch(`${baseUrl}/indexing/status`, { + signal: controller.signal, + headers: { Authorization: `Bearer ${token}` }, + }), + gql<{ me: Account; usage: Usage | null; suggestionCounts: SuggestionCounts }>(GQL_QUERY), + ]) + clearTimeout(timer) + if (!statusRes.ok) throw new Error(`HTTP ${statusRes.status}`) + const status = await statusRes.json() + + if (flags.json) { + 
process.stdout.write(JSON.stringify({ ...gqlRes, queues: status.queues }, null, 2) + '\n') + process.exit(0) + } + + setData({ me: gqlRes.me, queues: status.queues, pipeline: status.pipeline ?? null, usage: gqlRes.usage, suggestionCounts: gqlRes.suggestionCounts }) + setError(null) + } catch (err) { + clearTimeout(timer) + if (err instanceof DOMException && err.name === 'AbortError') { + setError('Request timed out (10s)') + } else { + setError(err instanceof Error ? err.message : String(err)) + } + } + } + + fetchStatus() + if (!flags.watch) return + const timer = setInterval(fetchStatus, POLL_INTERVAL) + return () => clearInterval(timer) + }, []) + + useEffect(() => { if (!flags.watch && data !== null) exit() }, [data]) + useEffect(() => { if (!flags.watch && error !== null) exit(new Error(error)) }, [error]) + + const [refreshing, setRefreshing] = useState(false) + const [refreshMsg, setRefreshMsg] = useState(null) + + useInput((input, key) => { + if (!flags.watch) return + if (input === 'r' && !refreshing) { + setRefreshing(true) + setRefreshMsg(null) + gql<{ refresh: boolean }>('mutation Refresh { refresh(days: 1) }') + .then(() => { + setRefreshMsg('pipeline queued') + setRefreshing(false) + }) + .catch((err) => { + setRefreshMsg(err instanceof Error ? err.message : String(err)) + setRefreshing(false) + }) + } + if (input === 'q') exit() + }, { isActive: flags.watch }) + + if (error) return Error: {error} + if (!data) return + + const { me, queues, pipeline, usage, suggestionCounts } = data + const entries = Object.entries(queues) + const hasActivity = entries.length > 0 || me.pendingEmbeddings > 0 || (pipeline !== null && pipeline.status === 'running') + + const embedded = me.indexedTweets - me.pendingEmbeddings + const embedPct = me.indexedTweets > 0 ? 
Math.round((embedded / me.indexedTweets) * 100) : 100 + + const BAR_WIDTH = 20 + const filledCount = Math.round((embedPct / 100) * BAR_WIDTH) + const progressBar = '█'.repeat(filledCount) + '░'.repeat(BAR_WIDTH - filledCount) + + return ( + + {/* Header */} + + @{me.xHandle} + + {me.indexedTweets.toLocaleString()} tweets + {' · '}indexed {timeAgo(me.twitterIndexedAt)} + {' · '}refreshed {timeAgo(me.refreshedSuggestionsAt)} + + + + {/* Embeddings progress bar */} + {me.pendingEmbeddings > 0 && ( + + + embeddings + {progressBar} + {embedPct}% + ({embedded.toLocaleString()}/{me.indexedTweets.toLocaleString()}) + + + )} + + {/* Usage & Inbox combined */} + + {usage && ( + + plan {usage.plan} + + + topics + + {usage.interests.used}{usage.interests.limit !== null ? `/${usage.interests.limit}` : ''} + + + + + refreshes + {usage.suggestionRefreshes.limit !== null ? ( + + {usage.suggestionRefreshes.used}/{usage.suggestionRefreshes.limit} + + ) : ( + unlimited + )} + + + )} + + inbox 0 ? 'green' : undefined}>{suggestionCounts.inbox} + + later {suggestionCounts.later} + + archived {suggestionCounts.archived} + + + + {/* Pipeline progress */} + {pipeline && pipeline.status === 'running' && ( + + PIPELINE + {pipeline.steps.map((step, i) => ( + + + {step.label} + ({step.duration}s) + + ))} + {pipeline.current !== '' && ( + + + + + )} + + )} + + {pipeline && pipeline.status === 'complete' && ( + + PIPELINE + {pipeline.steps.map((step, i) => ( + + + {step.label} + ({step.duration}s) + + ))} + ✓ Complete ({pipeline.total_duration}s) + + )} + + {pipeline && pipeline.status === 'failed' && ( + + PIPELINE + {pipeline.steps.map((step, i) => ( + + + {step.label} + ({step.duration}s) + + ))} + ✗ Failed + {pipeline.error && ( + {pipeline.error} + )} + {(pipeline.error?.toLowerCase().includes('oauth') || pipeline.error?.toLowerCase().includes('authorization') || pipeline.error?.toLowerCase().includes('401') || pipeline.steps.length === 0) && ( + + Re-connect your X account at 
https://sonar.8640p.info/account + Then run sonar refresh to retry. + + )} + + )} + + {/* Embeddings */} + {me.pendingEmbeddings > 0 && !(pipeline && pipeline.status === 'running') && ( + + {'Embeddings'.padEnd(16)} + ● {me.pendingEmbeddings.toLocaleString()} pending + + )} + + {/* Queues (non-pipeline) */} + {entries.filter(([name]) => name !== 'default').length > 0 && !(pipeline && pipeline.status === 'running') && ( + + QUEUES + {entries.filter(([name]) => name !== 'default').map(([name, counts]) => ( + + {(QUEUE_LABELS[name] ?? name).padEnd(16)} + {counts.running > 0 && ▶ {counts.running} running } + {counts.queued > 0 && ● {counts.queued} queued } + {(counts.deferred ?? 0) > 0 && ◆ {counts.deferred} pending } + + ))} + + )} + + {!hasActivity && ( + idle — run sonar refresh to trigger pipeline + )} + + {flags.watch && ( + + {refreshing && refreshing...} + {refreshMsg && {refreshMsg}} + press r to refresh · q to quit + + )} + + ) +} diff --git a/src/commands/topics/add.tsx b/src/commands/topics/add.tsx new file mode 100644 index 0000000..7c36a9d --- /dev/null +++ b/src/commands/topics/add.tsx @@ -0,0 +1,93 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text } from 'ink' +import { gql } from '../../lib/client.js' +import { Spinner } from '../../components/Spinner.js' +import type { Topic } from './index.js' + +export const args = zod.tuple([ + zod.string().describe('Topic name or phrase'), +]) + +export const options = zod.object({ + description: zod.string().optional().describe('Optional description (auto-generated if omitted)'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { args: zod.infer; options: zod.infer } + +const CREATE_MUTATION = ` + mutation CreateOrUpdateTopic( + $name: String! 
+ $description: String + ) { + createOrUpdateTopic(input: { + name: $name + description: $description + }) { + id: nanoId + name + description + version + createdAt + updatedAt + } + } +` + +export default function TopicsAdd({ args: [name], options: flags }: Props) { + const [data, setData] = useState(null) + const [error, setError] = useState(null) + + useEffect(() => { + if (!error || !flags.json) return + process.stderr.write(`${error}\n`) + process.exit(1) + }, [error, flags.json]) + + useEffect(() => { + async function run() { + try { + const result = await gql<{ createOrUpdateTopic: Topic }>(CREATE_MUTATION, { + name, + description: flags.description ?? null, + }) + + if (flags.json) { + process.stdout.write(JSON.stringify(result.createOrUpdateTopic, null, 2) + '\n') + process.exit(0) + } + + setData(result.createOrUpdateTopic) + } catch (err) { + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, []) + + if (error) { + if (flags.json) return <> + return Error: {error} + } + + if (!data) { + if (flags.json) return <> + return + } + + return ( + + + {data.name} + v{data.version} · {data.id} · created + + {data.description && {data.description}} + + tip run + sonar refresh + to match this topic against recent tweets + + + ) +} diff --git a/src/commands/topics/delete.tsx b/src/commands/topics/delete.tsx new file mode 100644 index 0000000..16e020f --- /dev/null +++ b/src/commands/topics/delete.tsx @@ -0,0 +1,54 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text } from 'ink' +import { gql } from '../../lib/client.js' +import { Spinner } from '../../components/Spinner.js' + +export const args = zod.tuple([ + zod.string().describe('Topic ID'), +]) + +export const options = zod.object({ + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { args: zod.infer; options: zod.infer } + +const DELETE_MUTATION = ` + mutation DeleteTopic($nanoId: String!) 
{ + deleteTopic(nanoId: $nanoId) + } +` + +export default function TopicDelete({ args: [id], options: flags }: Props) { + const [done, setDone] = useState(false) + const [error, setError] = useState(null) + + useEffect(() => { + async function run() { + try { + await gql<{ deleteTopic: boolean }>(DELETE_MUTATION, { nanoId: id }) + + if (flags.json) { + process.stdout.write(JSON.stringify({ deleted: id }) + '\n') + process.exit(0) + } + + setDone(true) + } catch (err) { + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, []) + + if (error) return Error: {error} + if (!done) return + + return ( + + Deleted + {id} + + ) +} diff --git a/src/commands/topics/edit.tsx b/src/commands/topics/edit.tsx new file mode 100644 index 0000000..51837fe --- /dev/null +++ b/src/commands/topics/edit.tsx @@ -0,0 +1,116 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text } from 'ink' +import { gql } from '../../lib/client.js' +import { Spinner } from '../../components/Spinner.js' +import type { Topic } from './index.js' + +export const args = zod.tuple([ + zod.string().describe('Topic ID'), +]) + +export const options = zod.object({ + name: zod.string().optional().describe('New name'), + description: zod.string().optional().describe('New description'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { args: zod.infer; options: zod.infer } + +const QUERY = ` + query Topics { + topics { + id: nanoId + name + description + version + createdAt + updatedAt + } + } +` + +const UPDATE_MUTATION = ` + mutation CreateOrUpdateTopic( + $nanoId: String + $name: String! 
+ $description: String + ) { + createOrUpdateTopic(input: { + nanoId: $nanoId + name: $name + description: $description + }) { + id: nanoId + name + description + version + createdAt + updatedAt + } + } +` + +async function fetchById(id: string): Promise { + const result = await gql<{ topics: Topic[] }>(QUERY) + const found = result.topics.find((p) => p.id === id) + if (!found) throw new Error(`Topic "${id}" not found. Run: sonar topics`) + return found +} + +export default function TopicEdit({ args: [id], options: flags }: Props) { + const [data, setData] = useState(null) + const [error, setError] = useState(null) + + useEffect(() => { + if (!error || !flags.json) return + process.stderr.write(`${error}\n`) + process.exit(1) + }, [error, flags.json]) + + useEffect(() => { + async function run() { + try { + const existing = await fetchById(id) + const name = flags.name ?? existing.name + const description = flags.description ?? existing.description ?? null + + const result = await gql<{ createOrUpdateTopic: Topic }>(UPDATE_MUTATION, { + nanoId: id, + name, + description, + }) + + if (flags.json) { + process.stdout.write(JSON.stringify(result.createOrUpdateTopic, null, 2) + '\n') + process.exit(0) + } + + setData(result.createOrUpdateTopic) + } catch (err) { + setError(err instanceof Error ? 
err.message : String(err)) + } + } + run() + }, []) + + if (error) { + if (flags.json) return <> + return Error: {error} + } + + if (!data) { + if (flags.json) return <> + return + } + + return ( + + + {data.name} + v{data.version} · {data.id} · updated + + {data.description && {data.description.slice(0, 160)}...} + + ) +} diff --git a/src/commands/interests/index.tsx b/src/commands/topics/index.tsx similarity index 51% rename from src/commands/interests/index.tsx rename to src/commands/topics/index.tsx index 28fa1bc..4b27d77 100644 --- a/src/commands/interests/index.tsx +++ b/src/commands/topics/index.tsx @@ -3,7 +3,7 @@ import zod from 'zod' import { Box, Text, useStdout } from 'ink' import { gql } from '../../lib/client.js' import { Spinner } from '../../components/Spinner.js' -import { InterestCard } from '../../components/InterestCard.js' +import { TopicCard } from '../../components/TopicCard.js' export const options = zod.object({ json: zod.boolean().default(false).describe('Raw JSON output'), @@ -11,25 +11,21 @@ export const options = zod.object({ type Props = { options: zod.infer } -export interface Interest { +export interface Topic { id: string name: string description: string | null - keywords: string[] | null - relatedTopics: string[] | null version: number createdAt: string updatedAt: string } const QUERY = ` - query Interests { - projects { + query Topics { + topics { id: nanoId name description - keywords - relatedTopics version createdAt updatedAt @@ -37,8 +33,8 @@ const QUERY = ` } ` -export default function Interests({ options: flags }: Props) { - const [data, setData] = useState(null) +export default function Topics({ options: flags }: Props) { + const [data, setData] = useState(null) const [error, setError] = useState(null) const { stdout } = useStdout() const termWidth = stdout.columns ?? 
100 @@ -46,14 +42,14 @@ export default function Interests({ options: flags }: Props) { useEffect(() => { async function run() { try { - const result = await gql<{ projects: Interest[] }>(QUERY) + const result = await gql<{ topics: Topic[] }>(QUERY) if (flags.json) { - process.stdout.write(JSON.stringify(result.projects, null, 2) + '\n') + process.stdout.write(JSON.stringify(result.topics, null, 2) + '\n') process.exit(0) } - setData(result.projects) + setData(result.topics) } catch (err) { setError(err instanceof Error ? err.message : String(err)) } @@ -66,20 +62,18 @@ export default function Interests({ options: flags }: Props) { } if (!data) { - return + return } if (data.length === 0) { return ( - No interests found. Create one from a prompt: + No topics found. Add one: - sonar interests create --from-prompt "I want to follow the AI agents ecosystem" - sonar interests create --from-prompt "Rust and systems programming" --vendor anthropic - sonar interests create --from-prompt "DeFi protocols and on-chain finance" - sonar interests create --from-prompt "Climate tech and carbon markets" + sonar topics add "AI agents" + sonar topics add "Rust and systems programming" + sonar topics add "DeFi protocols" - Or manually: sonar interests create --name "My Interest" --keywords "kw1,kw2" --topics "topic1" ) } @@ -87,20 +81,20 @@ export default function Interests({ options: flags }: Props) { return ( - Interests + Topics ({data.length}) {data.map((p, i) => ( - ))} - tip: --json for raw output · match: sonar interests match --days 3 · update: sonar interests update --id <id> --from-prompt "..." 
+ view: sonar topics view <id> · edit: sonar topics edit <id> --name "new name" ) } diff --git a/src/commands/topics/suggest.tsx b/src/commands/topics/suggest.tsx new file mode 100644 index 0000000..08cd5d9 --- /dev/null +++ b/src/commands/topics/suggest.tsx @@ -0,0 +1,190 @@ +import React, { useCallback, useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text, useInput } from 'ink' +import { gql } from '../../lib/client.js' +import { getVendor } from '../../lib/config.js' +import { generateTopicSuggestions } from '../../lib/ai.js' +import type { GeneratedInterest } from '../../lib/ai.js' +import { Spinner } from '../../components/Spinner.js' +import type { Topic } from './index.js' + +export const options = zod.object({ + vendor: zod.string().optional().describe('AI vendor: openai|anthropic'), + count: zod.number().optional().describe('Number of suggestions (default: 5)'), + json: zod.boolean().default(false).describe('Raw JSON output'), +}) + +type Props = { options: zod.infer } + +const TOPICS_QUERY = ` + query Topics { + topics { id: nanoId name description } + } +` + +const FEED_QUERY = ` + query Feed($hours: Int, $limit: Int) { + feed(hours: $hours, limit: $limit) { + tweet { text } + } + } +` + +const CREATE_MUTATION = ` + mutation CreateOrUpdateTopic($name: String!, $description: String) { + createOrUpdateTopic(input: { name: $name, description: $description }) { + id: nanoId name description version createdAt updatedAt + } + } +` + +type Phase = 'loading' | 'suggesting' | 'reviewing' | 'done' + +export default function TopicsSuggest({ options: flags }: Props) { + const vendor = getVendor(flags.vendor) + const count = flags.count ?? 
5 + + const [phase, setPhase] = useState('loading') + const [suggestions, setSuggestions] = useState([]) + const [index, setIndex] = useState(0) + const [accepted, setAccepted] = useState([]) + const [error, setError] = useState(null) + const [saving, setSaving] = useState(false) + + // Phase 1: Fetch context, Phase 2: Generate suggestions + useEffect(() => { + async function run() { + try { + const [topicsRes, feedRes] = await Promise.all([ + gql<{ topics: Topic[] }>(TOPICS_QUERY), + gql<{ feed: { tweet: { text: string } }[] }>(FEED_QUERY, { hours: 24, limit: 15 }), + ]) + + const existingNames = topicsRes.topics.map(t => t.name) + const tweetTexts = feedRes.feed.map(f => f.tweet.text) + + setPhase('suggesting') + + const results = await generateTopicSuggestions(existingNames, tweetTexts, count, vendor) + + if (flags.json) { + process.stdout.write(JSON.stringify(results, null, 2) + '\n') + process.exit(0) + } + + setSuggestions(results) + setPhase('reviewing') + } catch (err) { + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, []) + + const current = suggestions[index] + + const acceptCurrent = useCallback(async () => { + if (!current || saving) return + setSaving(true) + try { + await gql<{ createOrUpdateTopic: Topic }>(CREATE_MUTATION, { + name: current.name, + description: current.description, + }) + setAccepted(prev => [...prev, current.name]) + } catch (err) { + process.stderr.write(`Failed to save "${current.name}": ${err instanceof Error ? 
err.message : String(err)}\n`) + } + setSaving(false) + if (index + 1 >= suggestions.length) { + setPhase('done') + } else { + setIndex(i => i + 1) + } + }, [current, index, suggestions.length, saving]) + + const skipCurrent = useCallback(() => { + if (saving) return + if (index + 1 >= suggestions.length) { + setPhase('done') + } else { + setIndex(i => i + 1) + } + }, [index, suggestions.length, saving]) + + useInput((input) => { + if (phase !== 'reviewing') return + if (input === 'y') acceptCurrent() + else if (input === 'n') skipCurrent() + else if (input === 'q') setPhase('done') + }, { isActive: phase === 'reviewing' && !saving }) + + if (error) return Error: {error} + + if (phase === 'loading') { + return + } + + if (phase === 'suggesting') { + return + } + + if (phase === 'done') { + return ( + + + {accepted.length > 0 + ? `Added ${accepted.length} topic${accepted.length === 1 ? '' : 's'}` + : 'No topics added'} + + {accepted.map(name => ( + + {name} + ))} + {accepted.length > 0 && ( + + tip run + sonar refresh + to match new topics against recent tweets + + )} + + ) + } + + // Phase: reviewing + return ( + + + [{index + 1}/{suggestions.length}] + {accepted.length > 0 && {accepted.length} accepted} + + + + {current.name} + {current.description && ( + + {current.description.slice(0, 300)}{current.description.length > 300 ? '...' : ''} + + )} + {current.keywords.length > 0 && ( + + keywords + {current.keywords.slice(0, 10).join(' ')} + + )} + + + + {saving ? 
( + + ) : ( + <> + y accept + n skip + q quit + + )} + + + ) +} diff --git a/src/commands/topics/view.tsx b/src/commands/topics/view.tsx new file mode 100644 index 0000000..b4e1e49 --- /dev/null +++ b/src/commands/topics/view.tsx @@ -0,0 +1,71 @@ +import React, { useEffect, useState } from 'react' +import zod from 'zod' +import { Box, Text } from 'ink' +import { gql } from '../../lib/client.js' +import { Spinner } from '../../components/Spinner.js' +import type { Topic } from './index.js' + +export const args = zod.tuple([ + zod.string().describe('Topic ID'), +]) + +type Props = { args: zod.infer } + +const QUERY = ` + query Topics { + topics { + id: nanoId + name + description + version + createdAt + updatedAt + } + } +` + +export default function TopicView({ args: [id] }: Props) { + const [data, setData] = useState(null) + const [error, setError] = useState(null) + + useEffect(() => { + async function run() { + try { + const result = await gql<{ topics: Topic[] }>(QUERY) + const found = result.topics.find((p) => p.id === id) + if (!found) throw new Error(`Topic "${id}" not found. Run: sonar topics`) + setData(found) + } catch (err) { + setError(err instanceof Error ? err.message : String(err)) + } + } + run() + }, []) + + if (error) return Error: {error} + if (!data) return + + const updatedAt = new Date(data.updatedAt).toLocaleDateString('en-US', { + month: 'short', day: 'numeric', year: 'numeric', + }) + + return ( + + + {data.name} + v{data.version} · {data.id} · {updatedAt} + + + {data.description && ( + + {data.description} + + )} + + + edit: sonar topics edit {data.id} --name "new name" + delete: sonar topics delete {data.id} + + + ) +} diff --git a/src/components/AccountCard.tsx b/src/components/AccountCard.tsx index 3d7d491..a1c5615 100644 --- a/src/components/AccountCard.tsx +++ b/src/components/AccountCard.tsx @@ -28,10 +28,6 @@ export function AccountCard({ me }: Props) { plan: {me.isPayingCustomer ? 
'Pro' : 'Free'} indexing accounts: {me.indexingAccounts} indexed tweets: {me.indexedTweets.toLocaleString()} - - embeddings queue: - 0 ? 'yellow' : 'green'}>{me.pendingEmbeddings} - {me.twitterIndexedAt && ( last indexed: diff --git a/src/components/Banner.tsx b/src/components/Banner.tsx new file mode 100644 index 0000000..0219a92 --- /dev/null +++ b/src/components/Banner.tsx @@ -0,0 +1,13 @@ +import React from 'react' +import { Text } from 'ink' + +const LOGO = ` + _|_|_| _|_| _| _| _|_| _|_|_| +_| _| _| _|_| _| _| _| _| _| + _|_| _| _| _| _| _| _|_|_|_| _|_|_| + _| _| _| _| _|_| _| _| _| _| +_|_|_| _|_| _| _| _| _| _| _|`.trimStart() + +export function Banner() { + return {LOGO} +} diff --git a/src/components/InteractiveSession.tsx b/src/components/InteractiveSession.tsx index be1f9a6..e509c0e 100644 --- a/src/components/InteractiveSession.tsx +++ b/src/components/InteractiveSession.tsx @@ -1,62 +1,35 @@ -import React, { useState, useCallback } from 'react' +import React, { useState, useCallback, useEffect } from 'react' import { Box, Text, useInput, useStdout } from 'ink' -import { Spinner } from './Spinner.js' -import { TweetCard } from '../commands/feed.js' import { gql } from '../lib/client.js' -import { generateReply } from '../lib/ai.js' +import { relativeTime, TweetCard } from './TweetCard.js' import { getFeedWidth } from '../lib/config.js' -import type { Vendor } from '../lib/config.js' +import { execSync } from 'child_process' // ─── Types ──────────────────────────────────────────────────────────────────── -type Mode = 'view' | 'reply-input' | 'reply-loading' | 'reply-draft' - -interface FeedUser { - displayName: string - username: string | null - followersCount: number | null - followingCount: number | null -} - -interface FeedTweet { - id: string - xid: string - text: string - createdAt: string - likeCount: number - retweetCount: number - replyCount: number - user: FeedUser -} - -export interface FeedItem { +export interface TriageItem { + key: string 
score: number + suggestionId?: string matchedKeywords: string[] - tweet: FeedTweet -} - -interface InboxUser { - displayName: string - username: string | null -} - -interface InboxTweet { - xid: string - text: string - createdAt: string - user: InboxUser -} - -export interface Suggestion { - suggestionId: string - score: number - projectsMatched: number - status: string - relevance: number | null - tweet: InboxTweet + tweet: { + id: string + xid: string + text: string + createdAt: string + likeCount: number + retweetCount: number + replyCount: number + user: { + displayName: string + username: string | null + followersCount: number | null + followingCount: number | null + } + } } -const UPDATE_SUGGESTION_MUTATION = ` +const UPDATE_MUTATION = ` mutation UpdateSuggestion($suggestionId: ID!, $status: SuggestionStatus!) { updateSuggestion(input: { suggestionId: $suggestionId, status: $status }) { suggestionId @@ -65,429 +38,189 @@ const UPDATE_SUGGESTION_MUTATION = ` } ` -// ─── Helpers ────────────────────────────────────────────────────────────────── - -function relativeTime(dateStr: string): string { - const diff = Date.now() - new Date(dateStr).getTime() - const mins = Math.floor(diff / 60000) - if (mins < 60) return `${mins}m` - const hours = Math.floor(mins / 60) - if (hours < 24) return `${hours}h` - return `${Math.floor(hours / 24)}d` -} - function Divider({ width }: { width: number }) { return {'─'.repeat(Math.min(width - 2, 72))} } -// ─── Shared hook ────────────────────────────────────────────────────────────── - -function useInteractiveState(total: number, vendor: Vendor) { - const [currentIndex, setCurrentIndex] = useState(0) - const [mode, setMode] = useState('view') - const [replyInput, setReplyInput] = useState('') - const [replyDraft, setReplyDraft] = useState('') - const [statusMessage, setStatusMessage] = useState('') - - const goNext = useCallback(() => { - setCurrentIndex((i) => Math.min(i + 1, total - 1)) - setMode('view') - setReplyDraft('') - 
setStatusMessage('') - }, [total]) - - const goPrev = useCallback(() => { - setCurrentIndex((i) => Math.max(i - 1, 0)) - setMode('view') - setReplyDraft('') - setStatusMessage('') - }, []) - - const startReply = useCallback(() => { - setReplyInput('') - setMode('reply-input') - }, []) - - const dismissDraft = useCallback(() => { - setReplyDraft('') - setMode('view') - }, []) - - const handleReply = useCallback( - async (tweetText: string, angle: string) => { - setMode('reply-loading') - try { - const result = await generateReply(tweetText, angle, vendor) - setReplyDraft(result.reply) - setMode('reply-draft') - } catch (err) { - setStatusMessage(`Error: ${err instanceof Error ? err.message : String(err)}`) - setMode('view') - } - }, - [vendor], - ) +// ─── Triage Session ─────────────────────────────────────────────────────────── - return { - currentIndex, - mode, - replyInput, - replyDraft, - statusMessage, - setReplyInput, - setMode, - setStatusMessage, - goNext, - goPrev, - startReply, - dismissDraft, - handleReply, - } +interface TriageSessionProps { + items: TriageItem[] + total?: number + fetchMore?: (offset: number) => Promise } -// ─── Interactive Feed Session ───────────────────────────────────────────────── +type ActionLabel = 'dismissed' | 'saved' | null + +const UNDO_WINDOW_MS = 10_000 -interface InteractiveFeedSessionProps { - items: FeedItem[] - vendor: Vendor +interface PendingAction { + timer: ReturnType + suggestionId: string + status: string + index: number } -export function InteractiveFeedSession({ items, vendor }: InteractiveFeedSessionProps) { +export function TriageSession({ items: initialItems, total: initialTotal, fetchMore }: TriageSessionProps) { const { stdout } = useStdout() const termWidth = stdout.columns ?? 
100 const cardWidth = getFeedWidth() - const { - currentIndex, - mode, - replyInput, - replyDraft, - statusMessage, - setReplyInput, - setMode, - setStatusMessage, - goNext, - goPrev, - startReply, - dismissDraft, - handleReply, - } = useInteractiveState(items.length, vendor) - - const current = items[currentIndex] - - useInput( - (input, key) => { - if (mode === 'reply-loading') return - - if (mode === 'reply-input') { - if (key.return) { - handleReply(current.tweet.text, replyInput) - } else if (key.escape) { - setMode('view') - setReplyInput('') - } else if (key.backspace || key.delete) { - setReplyInput((s) => s.slice(0, -1)) - } else if (input && !key.ctrl && !key.meta) { - setReplyInput((s) => s + input) - } - return - } - - if (mode === 'reply-draft') { - if (input === 'r') { - handleReply(current.tweet.text, '') - } else if (key.escape) { - dismissDraft() - } - return - } - - // view mode - if (input === 'n' || key.rightArrow || input === ' ') { - goNext() - } else if (input === 'p' || key.leftArrow) { - goPrev() - } else if (input === 'r') { - startReply() - } else if (input === 's') { - setStatusMessage('star — coming soon') - } else if (input === 'a') { - setStatusMessage('analyze — coming soon') - } else if (input === 'q') { - process.exit(0) - } - }, - { isActive: mode !== 'reply-loading' }, - ) - - return ( - - - - {' '} - {currentIndex + 1} / {items.length}{' · '}feed --interactive - - - - - - {mode === 'reply-draft' && ( - - - - Draft reply: - - {replyDraft} - - - - )} - - {statusMessage && ( - - {statusMessage} - - )} - - - - {mode === 'reply-input' ? ( - - Angle (Enter to auto-generate, Esc to cancel): - {replyInput} - - - ) : mode === 'reply-loading' ? ( - - - - ) : mode === 'reply-draft' ? ( - - [r] new draft [Esc] dismiss - - ) : ( - - [n]ext [p]rev [s]tar [r]eply [a]nalyze [q]uit - - )} - - - ) -} + const [items, setItems] = useState(initialItems) + const [total, setTotal] = useState(initialTotal ?? 
initialItems.length) + const [index, setIndex] = useState(0) + const [lastAction, setLastAction] = useState(null) + const [acting, setActing] = useState(false) + const [loading, setLoading] = useState(false) + const [pending, setPending] = useState(null) + + // Fetch next page when 3 items from the end + useEffect(() => { + if (!fetchMore || loading) return + if (index >= items.length - 3 && items.length < total) { + setLoading(true) + fetchMore(items.length) + .then(more => { + if (more.length > 0) { + setItems(prev => [...prev, ...more]) + } + }) + .catch(() => {}) + .finally(() => setLoading(false)) + } + }, [index, items.length, total, loading]) -// ─── Suggestion Card ────────────────────────────────────────────────────────── + // Flush pending on unmount + useEffect(() => { + return () => { if (pending) { clearTimeout(pending.timer); commitAction(pending) } } + }, [pending]) -function scoreColor(score: number): string { - if (score >= 0.7) return 'green' - if (score >= 0.4) return 'yellow' - return 'white' -} + const done = index >= items.length && items.length >= total + const current = items[index] -function statusColor(status: string): string { - switch (status.toLowerCase()) { - case 'inbox': return 'cyan' - case 'read': return 'green' - case 'skipped': return 'gray' - case 'later': return 'yellow' - case 'archived': return 'magenta' - default: return 'white' + function commitAction(action: PendingAction) { + gql(UPDATE_MUTATION, { suggestionId: action.suggestionId, status: action.status }) + .catch(() => {}) } -} -function SuggestionCard({ item, termWidth }: { item: Suggestion; termWidth: number }) { - const handle = item.tweet.user.username ?? 
item.tweet.user.displayName - const author = `@${handle}` - const profileUrl = `https://x.com/${handle}` - const tweetUrl = `https://x.com/${handle}/status/${item.tweet.xid}` + const act = useCallback( + (status: 'ARCHIVED' | 'SKIPPED', label: ActionLabel) => { + const item = items[index] - return ( - - - - {relativeTime(item.tweet.createdAt)} - - · - {item.score.toFixed(2)} - · - {item.status.toLowerCase()} - {item.projectsMatched > 0 && ( - · {item.projectsMatched} interest{item.projectsMatched !== 1 ? 's' : ''} - )} - + // Flush any previous pending action immediately + if (pending) { + clearTimeout(pending.timer) + commitAction(pending) + } - - {'└'} - - {author} - - + if (item.suggestionId) { + // Defer the mutation — can be undone within the window + const timer = setTimeout(() => { + commitAction({ timer: 0 as any, suggestionId: item.suggestionId!, status, index }) + setPending(null) + }, UNDO_WINDOW_MS) - - {item.tweet.text} - + setPending({ timer, suggestionId: item.suggestionId, status, index }) + } - - {profileUrl} · {tweetUrl} - - + setLastAction(label) + setIndex((i) => i + 1) + }, + [index, items, pending], ) -} - -// ─── Interactive Inbox Session ──────────────────────────────────────────────── - -interface InteractiveInboxSessionProps { - items: Suggestion[] - vendor: Vendor -} -const INBOX_STATUS_KEYS: Record = { - R: 'READ', - S: 'SKIPPED', - L: 'LATER', - A: 'ARCHIVED', -} + const undo = useCallback(() => { + if (!pending) return -export function InteractiveInboxSession({ items, vendor }: InteractiveInboxSessionProps) { - const { stdout } = useStdout() - const termWidth = stdout.columns ?? 
100 - const [isActing, setIsActing] = useState(false) - - const { - currentIndex, - mode, - replyInput, - replyDraft, - statusMessage, - setReplyInput, - setMode, - setStatusMessage, - goNext, - goPrev, - startReply, - dismissDraft, - handleReply, - } = useInteractiveState(items.length, vendor) - - const current = items[currentIndex] - - const handleStatusUpdate = useCallback( - async (status: string) => { - setIsActing(true) - try { - await gql<{ updateSuggestion: { suggestionId: string; status: string } }>( - UPDATE_SUGGESTION_MUTATION, - { suggestionId: current.suggestionId, status }, - ) - setStatusMessage(`✓ marked as ${status.toLowerCase()}`) - } catch (err) { - setStatusMessage(`Error: ${err instanceof Error ? err.message : String(err)}`) - } finally { - setIsActing(false) - } - }, - [current.suggestionId, setStatusMessage], - ) + clearTimeout(pending.timer) + setPending(null) + setIndex(pending.index) + setLastAction(null) + }, [pending]) useInput( (input, key) => { - if (isActing || mode === 'reply-loading') return - - if (mode === 'reply-input') { - if (key.return) { - handleReply(current.tweet.text, replyInput) - } else if (key.escape) { - setMode('view') - setReplyInput('') - } else if (key.backspace || key.delete) { - setReplyInput((s) => s.slice(0, -1)) - } else if (input && !key.ctrl && !key.meta) { - setReplyInput((s) => s + input) - } + if (done) { + if (input === 'q') process.exit(0) + if (input === 'u') undo() return } - if (mode === 'reply-draft') { - if (input === 'r') { - handleReply(current.tweet.text, '') - } else if (key.escape) { - dismissDraft() - } - return - } - - // view mode - if (input === 'n' || key.rightArrow || input === ' ') { - goNext() - } else if (input === 'p' || key.leftArrow) { - goPrev() - } else if (input === 'r') { - startReply() - } else if (input === 'a') { - setStatusMessage('analyze — coming soon') + if (key.return || input === ' ' || input === 'd' || input === 'n') { + act('SKIPPED', 'dismissed') + } else if (input 
=== 's') { + act('ARCHIVED', 'saved') + } else if (input === 'u') { + undo() + } else if (input === 'o') { + const handle = current.tweet.user.username ?? current.tweet.user.displayName + const url = `https://x.com/${handle}/status/${current.tweet.id}` + try { execSync(`open "${url}"`) } catch {} } else if (input === 'q') { process.exit(0) - } else if (INBOX_STATUS_KEYS[input]) { - handleStatusUpdate(INBOX_STATUS_KEYS[input]) } }, - { isActive: !isActing && mode !== 'reply-loading' }, + { isActive: !acting }, ) + if (done) { + return ( + + ✓ Inbox zero + {lastAction && last: {lastAction}} + q to quit + + ) + } + + const canTriage = !!current.suggestionId + return ( - - - {' '} - {currentIndex + 1} / {items.length}{' · '}inbox --interactive - + + {index + 1} / {total} + {lastAction && ✓ {lastAction}} - - - {mode === 'reply-draft' && ( - - - - Draft reply: - - {replyDraft} - - - - )} - - {statusMessage && ( - - {statusMessage} - - )} + - {isActing ? ( - - - - ) : mode === 'reply-input' ? ( - - Angle (Enter to auto-generate, Esc to cancel): - {replyInput} - - - ) : mode === 'reply-loading' ? ( - - - - ) : mode === 'reply-draft' ? ( - - [r] new draft [Esc] dismiss - - ) : ( - - [n]ext [p]rev [r]eply [a]nalyze [R]ead [S]kip [L]ater [A]rchive [q]uit - - )} + + {canTriage ? 
( + <> + n next + s save + o open + q quit + + ) : ( + <> + n next + o open + q quit + + )} + ) } + +// ─── Legacy aliases ─────────────────────────────────────────────────────────── +// Kept for any remaining references — both now delegate to TriageSession + +export type { TriageItem as FeedItem } + +export function InteractiveFeedSession({ items }: { items: TriageItem[]; vendor?: string }) { + return +} + +export function InteractiveInboxSession({ items }: { items: any[]; vendor?: string }) { + return +} diff --git a/src/components/InterestCard.tsx b/src/components/InterestCard.tsx deleted file mode 100644 index adbc716..0000000 --- a/src/components/InterestCard.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import React from 'react' -import { Box, Text } from 'ink' -import type { Interest } from '../commands/interests/index.js' - -interface InterestCardProps { - interest: Interest - termWidth: number - isLast: boolean -} - -export function InterestCard({ interest, termWidth, isLast }: InterestCardProps) { - const updatedAt = new Date(interest.updatedAt).toLocaleDateString('en-US', { - month: 'short', - day: 'numeric', - year: 'numeric', - }) - - return ( - - - {interest.name} - v{interest.version} · {interest.id} · {updatedAt} - - - {interest.description && ( - - {'└'} - {interest.description} - - )} - - {interest.keywords && interest.keywords.length > 0 && ( - - keywords - {interest.keywords.join(' ')} - - )} - - {interest.relatedTopics && interest.relatedTopics.length > 0 && ( - - topics - {interest.relatedTopics.join(' ')} - - )} - - {!isLast && ( - - {'─'.repeat(Math.min(termWidth - 2, 72))} - - )} - - ) -} diff --git a/src/components/Spinner.tsx b/src/components/Spinner.tsx index 191ef2f..d70ed89 100644 --- a/src/components/Spinner.tsx +++ b/src/components/Spinner.tsx @@ -1,7 +1,8 @@ import { useState, useEffect } from 'react' import { Text } from 'ink' +import spinners from 'unicode-animations' -const FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'] +const 
SPINNER = spinners.pulse interface SpinnerProps { label?: string @@ -12,15 +13,15 @@ export function Spinner({ label }: SpinnerProps) { useEffect(() => { const timer = setInterval(() => { - setFrame((f) => (f + 1) % FRAMES.length) - }, 80) + setFrame((f) => (f + 1) % SPINNER.frames.length) + }, SPINNER.interval) return () => clearInterval(timer) }, []) return ( - {FRAMES[frame]} - {label ?? 'Loading...'} + {SPINNER.frames[frame]} + {label ? {label} : null} ) } diff --git a/src/components/TopicCard.tsx b/src/components/TopicCard.tsx new file mode 100644 index 0000000..53327b8 --- /dev/null +++ b/src/components/TopicCard.tsx @@ -0,0 +1,42 @@ +import React from 'react' +import { Box, Text } from 'ink' +import type { Topic } from '../commands/topics/index.js' + +interface TopicCardProps { + topic: Topic + termWidth: number + isLast: boolean +} + +export function TopicCard({ topic, termWidth, isLast }: TopicCardProps) { + const updatedAt = new Date(topic.updatedAt).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + }) + + const desc = topic.description + ? topic.description.length > 160 + ? topic.description.slice(0, 160).trimEnd() + '...' 
+ : topic.description + : null + + return ( + + + {topic.name} + v{topic.version} + · + {topic.id} + · + {updatedAt} + + + {desc && ( + + {desc} + + )} + + ) +} diff --git a/src/components/TweetCard.tsx b/src/components/TweetCard.tsx new file mode 100644 index 0000000..5f59b6b --- /dev/null +++ b/src/components/TweetCard.tsx @@ -0,0 +1,185 @@ +import React from 'react' +import { Box, Text } from 'ink' +import Link from 'ink-link' +import { Table } from './Table.js' + +// ─── Types ──────────────────────────────────────────────────────────────────── + +export interface User { + displayName: string + username: string | null + followersCount: number | null + followingCount: number | null +} + +export interface Tweet { + id: string + xid: string + text: string + createdAt: string + likeCount: number + retweetCount: number + replyCount: number + user: User +} + +export interface FeedTweet { + score: number + matchedKeywords: string[] + tweet: Tweet +} + +// ─── Helpers ────────────────────────────────────────────────────────────────── + +export function formatTimestamp(dateStr: string): string { + const d = new Date(dateStr) + const month = d.toLocaleString('en-US', { month: 'short' }) + const day = d.getDate() + const hours = d.getHours() + const mins = d.getMinutes().toString().padStart(2, '0') + const ampm = hours >= 12 ? 
'pm' : 'am' + const h = hours % 12 || 12 + return `${month} ${day} · ${h}:${mins}${ampm}` +} + +export function relativeTime(dateStr: string): string { + const diff = Date.now() - new Date(dateStr).getTime() + const mins = Math.floor(diff / 60000) + if (mins < 60) return `${mins}m` + const hours = Math.floor(mins / 60) + if (hours < 24) return `${hours}h` + return `${Math.floor(hours / 24)}d` +} + +function formatCount(n: number | null): string | null { + if (n == null) return null + if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M` + if (n >= 1_000) return `${(n / 1_000).toFixed(1)}k` + return String(n) +} + +function scoreColor(score: number): string { + if (score >= 0.7) return 'green' + if (score >= 0.4) return 'yellow' + return 'white' +} + +function linkifyMentions(text: string): string { + return text.replace(/@(\w+)/g, (match, handle) => { + const url = `https://x.com/${handle}` + return `\x1b]8;;${url}\x07\x1b[94m${match}\x1b[39m\x1b]8;;\x07` + }) +} + +function TweetText({ text }: { text: string }) { + return {linkifyMentions(text)} +} + +// ─── TweetCard ──────────────────────────────────────────────────────────────── + +interface TweetCardProps { + item: FeedTweet + termWidth: number + cardWidth: number + isLast: boolean +} + +export function TweetCard({ item, termWidth, cardWidth, isLast }: TweetCardProps) { + const { tweet, score } = item + const handle = tweet.user.username ?? 
tweet.user.displayName + const author = `@${handle}` + const bodyBoxWidth = Math.min(cardWidth + 2, termWidth) + const profileUrl = `https://x.com/${handle}` + const tweetUrl = `https://x.com/${handle}/status/${tweet.id}` + + return ( + + + + {formatTimestamp(tweet.createdAt)} + + {relativeTime(tweet.createdAt)} + {score > 0 && ( + <> + · + {score.toFixed(2)} + + )} + + + + {'└'} + + {author} + + {formatCount(tweet.user.followersCount) && ( + <> + {formatCount(tweet.user.followersCount)} followers + {formatCount(tweet.user.followingCount) && ( + · {formatCount(tweet.user.followingCount)} following + )} + + )} + + + + + + + + ♥ {tweet.likeCount} + + ↺ {tweet.retweetCount} + {tweet.replyCount > 0 && ( + <> + + ↩ {tweet.replyCount} + + )} + + + {item.matchedKeywords.length > 0 && ( + + keywords + {item.matchedKeywords.join(' ')} + + )} + + + + {profileUrl} + + · + + {tweetUrl} + + + + {!isLast && ( + + {'─'.repeat(Math.min(termWidth - 2, 72))} + + )} + + ) +} + +// ─── FeedTable ──────────────────────────────────────────────────────────────── + +function osc8Link(url: string, label: string): string { + return `\x1b]8;;${url}\x07${label}\x1b]8;;\x07` +} + +export function FeedTable({ data }: { data: FeedTweet[] }) { + const rows = data.map((item) => { + const handle = item.tweet.user.username ?? item.tweet.user.displayName + const tweetUrl = `https://x.com/${handle}/status/${item.tweet.id}` + return { + age: osc8Link(tweetUrl, relativeTime(item.tweet.createdAt)), + score: item.score > 0 ? item.score.toFixed(2) : '—', + author: `@${handle}`, + tweet: item.tweet.text.replace(/\n/g, ' ').slice(0, 80), + } + }) + return
+} diff --git a/src/lib/ai.ts b/src/lib/ai.ts index ccff3cf..16d39ba 100644 --- a/src/lib/ai.ts +++ b/src/lib/ai.ts @@ -10,6 +10,14 @@ function extractJSON(text: string): string { return stripped.slice(start, end + 1) } +function extractJSONArray(text: string): string { + const stripped = text.replace(/^```(?:json)?\s*/i, '').replace(/\s*```$/i, '').trim() + const start = stripped.indexOf('[') + const end = stripped.lastIndexOf(']') + if (start === -1 || end === -1) throw new Error('No JSON array found in response') + return stripped.slice(start, end + 1) +} + export interface GeneratedInterest { name: string description: string @@ -37,60 +45,113 @@ Optimise every field for semantic density and current relevance, not readability Respond ONLY with valid JSON, no markdown, no explanation.` -async function callOpenAI(prompt: string, apiKey: string): Promise { - const res = await fetch('https://api.openai.com/v1/responses', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${apiKey}`, - }, - body: JSON.stringify({ - model: 'gpt-4o', - tools: [{ type: 'web_search_preview' }], - instructions: SYSTEM_PROMPT, - input: prompt, - }), - }) - - if (!res.ok) { - const err = await res.json().catch(() => ({})) - throw new Error(`OpenAI error: ${(err as any)?.error?.message ?? res.status}`) - } +// OpenAI uses web_search_preview which can legitimately take 30-60 s. +export const OPENAI_TIMEOUT_MS = 90_000 +// Anthropic calls are simpler — 60 s is generous. +export const ANTHROPIC_TIMEOUT_MS = 60_000 - const data = await res.json() - const text = data.output - ?.filter((b: any) => b.type === 'message') - .flatMap((b: any) => b.content) - .filter((c: any) => c.type === 'output_text') - .map((c: any) => c.text) - .join('') ?? '' +/** + * Wraps fetch() with an AbortController timeout that covers the full response + * cycle — headers AND body. 
The processResponse callback receives the Response + * and is responsible for consuming the body (e.g. calling res.json()). The + * timer is kept alive until processResponse resolves or rejects, ensuring a + * stalled body download is caught just like a stalled connection. + */ +async function fetchWithTimeout( + url: string, + init: RequestInit, + timeoutMs: number, + vendorLabel: string, + processResponse: (res: Response) => Promise, +): Promise { + const controller = new AbortController() + const timer = setTimeout(() => controller.abort(), timeoutMs) + try { + const res = await fetch(url, { ...init, signal: controller.signal }) + return await processResponse(res) + } catch (err) { + if (err instanceof DOMException && err.name === 'AbortError') { + const lines: string[] = [ + `${vendorLabel} request timed out after ${timeoutMs / 1000}s.`, + 'Possible causes:', + ' • The AI provider is overloaded or rate-limiting you', + ' • Your network connection is slow or unstable', + ] + if (vendorLabel.toLowerCase().includes('openai')) { + lines.push(' • The web_search tool (OpenAI) took longer than usual') + } + lines.push('Try again in a moment, or use --vendor to switch providers.') + throw new Error(lines.join('\n')) + } + throw err + } finally { + clearTimeout(timer) + } +} - return JSON.parse(extractJSON(text)) as GeneratedInterest +async function callOpenAI(prompt: string, apiKey: string): Promise { + return fetchWithTimeout( + 'https://api.openai.com/v1/responses', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model: 'gpt-4o', + tools: [{ type: 'web_search_preview' }], + instructions: SYSTEM_PROMPT, + input: prompt, + }), + }, + OPENAI_TIMEOUT_MS, + 'OpenAI', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`OpenAI error: ${(err as any)?.error?.message ?? 
res.status}`) + } + const data = await res.json() + const text = data.output + ?.filter((b: any) => b.type === 'message') + .flatMap((b: any) => b.content) + .filter((c: any) => c.type === 'output_text') + .map((c: any) => c.text) + .join('') ?? '' + return JSON.parse(extractJSON(text)) as GeneratedInterest + }, + ) } async function callAnthropic(prompt: string, apiKey: string): Promise { - const res = await fetch('https://api.anthropic.com/v1/messages', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': apiKey, - 'anthropic-version': '2023-06-01', + return fetchWithTimeout( + 'https://api.anthropic.com/v1/messages', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-haiku-4-5-20251001', + max_tokens: 1024, + system: SYSTEM_PROMPT, + messages: [{ role: 'user', content: prompt }], + }), }, - body: JSON.stringify({ - model: 'claude-haiku-4-5-20251001', - max_tokens: 1024, - system: SYSTEM_PROMPT, - messages: [{ role: 'user', content: prompt }], - }), - }) - - if (!res.ok) { - const err = await res.json().catch(() => ({})) - throw new Error(`Anthropic error: ${(err as any)?.error?.message ?? res.status}`) - } - - const data = await res.json() - return JSON.parse(extractJSON(data.content[0].text)) as GeneratedInterest + ANTHROPIC_TIMEOUT_MS, + 'Anthropic', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`Anthropic error: ${(err as any)?.error?.message ?? res.status}`) + } + const data = await res.json() + return JSON.parse(extractJSON(data.content[0].text)) as GeneratedInterest + }, + ) } export interface GeneratedReply { @@ -104,29 +165,34 @@ async function callOpenAIReply(tweetText: string, userPrompt: string, apiKey: st ? 
`Original tweet: "${tweetText}"\n\nAngle for reply: ${userPrompt}` : `Original tweet: "${tweetText}"\n\nWrite a thoughtful reply.` - const res = await fetch('https://api.openai.com/v1/chat/completions', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${apiKey}`, + return fetchWithTimeout( + 'https://api.openai.com/v1/chat/completions', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model: 'gpt-4o', + messages: [ + { role: 'system', content: REPLY_SYSTEM_PROMPT }, + { role: 'user', content: userContent }, + ], + }), }, - body: JSON.stringify({ - model: 'gpt-4o', - messages: [ - { role: 'system', content: REPLY_SYSTEM_PROMPT }, - { role: 'user', content: userContent }, - ], - }), - }) - - if (!res.ok) { - const err = await res.json().catch(() => ({})) - throw new Error(`OpenAI error: ${(err as any)?.error?.message ?? res.status}`) - } - - const data = await res.json() - const text = data.choices?.[0]?.message?.content ?? '' - return JSON.parse(extractJSON(text)) as GeneratedReply + OPENAI_TIMEOUT_MS, + 'OpenAI', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`OpenAI error: ${(err as any)?.error?.message ?? res.status}`) + } + const data = await res.json() + const text = data.choices?.[0]?.message?.content ?? '' + return JSON.parse(extractJSON(text)) as GeneratedReply + }, + ) } async function callAnthropicReply(tweetText: string, userPrompt: string, apiKey: string): Promise { @@ -134,28 +200,33 @@ async function callAnthropicReply(tweetText: string, userPrompt: string, apiKey: ? 
`Original tweet: "${tweetText}"\n\nAngle for reply: ${userPrompt}` : `Original tweet: "${tweetText}"\n\nWrite a thoughtful reply.` - const res = await fetch('https://api.anthropic.com/v1/messages', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': apiKey, - 'anthropic-version': '2023-06-01', + return fetchWithTimeout( + 'https://api.anthropic.com/v1/messages', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-haiku-4-5-20251001', + max_tokens: 512, + system: REPLY_SYSTEM_PROMPT, + messages: [{ role: 'user', content: userContent }], + }), }, - body: JSON.stringify({ - model: 'claude-haiku-4-5-20251001', - max_tokens: 512, - system: REPLY_SYSTEM_PROMPT, - messages: [{ role: 'user', content: userContent }], - }), - }) - - if (!res.ok) { - const err = await res.json().catch(() => ({})) - throw new Error(`Anthropic error: ${(err as any)?.error?.message ?? res.status}`) - } - - const data = await res.json() - return JSON.parse(extractJSON(data.content[0].text)) as GeneratedReply + ANTHROPIC_TIMEOUT_MS, + 'Anthropic', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`Anthropic error: ${(err as any)?.error?.message ?? res.status}`) + } + const data = await res.json() + return JSON.parse(extractJSON(data.content[0].text)) as GeneratedReply + }, + ) } export async function generateReply( @@ -193,3 +264,103 @@ export async function generateInterest(prompt: string, vendor: Vendor): Promise< throw new Error(`Unknown vendor: ${vendor}. Supported: openai, anthropic`) } + +// ─── Topic Suggestions ─────────────────────────────────────────────────────── + +const SUGGEST_SYSTEM_PROMPT = `You suggest new topics for a social intelligence tool that tracks interests on X (Twitter). 
Given the user's existing topics and a sample of recent tweets from their feed, suggest new topics that are adjacent to but distinct from what they already track. + +For each suggestion, return a JSON object with: +- name: short, specific interest name (3-6 words, title case) +- description: a dense, jargon-rich passage written in the voice of a practitioner deeply embedded in this space. Pack it with domain-specific terminology, key concepts, tools, notable figures, and current developments. +- keywords: 12-20 specific, high-signal terms used by practitioners +- relatedTopics: 6-10 adjacent topic areas + +Respond ONLY with a valid JSON array of objects. No markdown, no explanation.` + +export async function generateTopicSuggestions( + existingTopics: string[], + recentTweets: string[], + count: number, + vendor: Vendor, +): Promise { + const topicList = existingTopics.length > 0 + ? `My current topics:\n${existingTopics.map(t => `- ${t}`).join('\n')}` + : 'I have no topics yet.' + + const tweetSample = recentTweets.length > 0 + ? `\n\nRecent tweets from my feed:\n${recentTweets.slice(0, 15).map(t => `- ${t.slice(0, 200)}`).join('\n')}` + : '' + + const prompt = `${topicList}${tweetSample}\n\nSuggest exactly ${count} new topics I should track. Return a JSON array.` + + if (vendor === 'openai') { + const apiKey = process.env.OPENAI_API_KEY + if (!apiKey) throw new Error('OPENAI_API_KEY is not set') + return fetchWithTimeout( + 'https://api.openai.com/v1/responses', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model: 'gpt-4o', + tools: [{ type: 'web_search_preview' }], + instructions: SUGGEST_SYSTEM_PROMPT, + input: prompt, + }), + }, + OPENAI_TIMEOUT_MS, + 'OpenAI', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`OpenAI error: ${(err as any)?.error?.message ?? 
res.status}`) + } + const data = await res.json() + const text = data.output + ?.filter((b: any) => b.type === 'message') + .flatMap((b: any) => b.content) + .filter((c: any) => c.type === 'output_text') + .map((c: any) => c.text) + .join('') ?? '' + return JSON.parse(extractJSONArray(text)) as GeneratedInterest[] + }, + ) + } + + if (vendor === 'anthropic') { + const apiKey = process.env.ANTHROPIC_API_KEY + if (!apiKey) throw new Error('ANTHROPIC_API_KEY is not set') + return fetchWithTimeout( + 'https://api.anthropic.com/v1/messages', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-api-key': apiKey, + 'anthropic-version': '2023-06-01', + }, + body: JSON.stringify({ + model: 'claude-haiku-4-5-20251001', + max_tokens: 4096, + system: SUGGEST_SYSTEM_PROMPT, + messages: [{ role: 'user', content: prompt }], + }), + }, + ANTHROPIC_TIMEOUT_MS, + 'Anthropic', + async (res) => { + if (!res.ok) { + const err = await res.json().catch(() => ({})) + throw new Error(`Anthropic error: ${(err as any)?.error?.message ?? res.status}`) + } + const data = await res.json() + return JSON.parse(extractJSONArray(data.content[0].text)) as GeneratedInterest[] + }, + ) + } + + throw new Error(`Unknown vendor: ${vendor}. Supported: openai, anthropic`) +} diff --git a/src/lib/client.ts b/src/lib/client.ts index b9ec36f..955350c 100644 --- a/src/lib/client.ts +++ b/src/lib/client.ts @@ -2,8 +2,31 @@ import { getApiUrl, getToken } from './config.js' interface Flags { debug?: boolean + /** Request timeout in milliseconds. Defaults to 20 000 ms. */ + timeoutMs?: number } +const MAX_RETRIES = Math.max(0, Number(process.env.SONAR_MAX_RETRIES) || 3) + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} + +function retryDelay(attempt: number): number { + const base = Math.min(1000 * 2 ** attempt, 10_000) + return base + Math.random() * 500 +} + +/** + * Execute a GraphQL request against the Sonar API. 
+ * + * Retries transient failures (network errors, 5xx) with jittered exponential + * backoff. Deterministic failures (4xx, GraphQL errors) throw immediately. + * Control retries via SONAR_MAX_RETRIES env var (default 3, 0 to disable). + * + * A hard timeout (default 20 s) is applied via AbortController so that the + * process never hangs silently when the server is unresponsive. + */ export async function gql( query: string, variables: Record = {}, @@ -11,40 +34,71 @@ export async function gql( ): Promise { const token = getToken() const url = getApiUrl() + const timeoutMs = flags.timeoutMs ?? 20_000 - let res: Response - try { + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + const controller = new AbortController() + const timer = setTimeout(() => controller.abort(), timeoutMs) - if (flags.debug) { - console.error(url, query, variables) + let res: Response + try { + if (flags.debug) { + console.error(url, query, variables) + } + res = await fetch(url, { + method: 'POST', + signal: controller.signal, + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ query, variables }), + }) + } catch (err: unknown) { + clearTimeout(timer) + if (attempt < MAX_RETRIES) { + if (flags.debug) console.error(`Retry ${attempt + 1}/${MAX_RETRIES} after network error`) + await sleep(retryDelay(attempt)) + continue + } + if (err instanceof DOMException && err.name === 'AbortError') { + throw new Error( + `Request timed out after ${timeoutMs / 1000}s. ` + + 'The server may be overloaded or unreachable. ' + + 'Check SONAR_API_URL, your network connection, and retry.' 
+ ) + } + throw new Error('Unable to reach server, please try again shortly.') + } finally { + clearTimeout(timer) } - res = await fetch(url, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${token}`, - }, - body: JSON.stringify({ query, variables }), - }) - } catch { - throw new Error('Unable to reach server, please try again shortly.') - } - if (!res.ok) { - if (flags.debug) { - console.error(JSON.stringify(await res.json(), null, 2)) + // 5xx — transient, retry + if (res.status >= 500 && attempt < MAX_RETRIES) { + if (flags.debug) console.error(`Retry ${attempt + 1}/${MAX_RETRIES} after HTTP ${res.status}`) + await sleep(retryDelay(attempt)) + continue } - throw new Error(`HTTP ${res.status}: ${res.statusText}`) - } - const json = (await res.json()) as { - data?: T - errors?: Array<{ message: string }> - } + // 4xx — deterministic, throw immediately + if (!res.ok) { + if (flags.debug) { + console.error(JSON.stringify(await res.json(), null, 2)) + } + throw new Error(`HTTP ${res.status}: ${res.statusText}`) + } + + const json = (await res.json()) as { + data?: T + errors?: Array<{ message: string }> + } + + if (json.errors && json.errors.length > 0) { + throw new Error(json.errors[0].message) + } - if (json.errors && json.errors.length > 0) { - throw new Error(json.errors[0].message) + return json.data as T } - return json.data as T + throw new Error('Unexpected retry exhaustion') } diff --git a/src/lib/config.ts b/src/lib/config.ts index 8f50890..836218d 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -1,9 +1,11 @@ import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs' import { homedir } from 'node:os' import { join } from 'node:path' +import { DB_PATH } from './db.js' const CONFIG_DIR = join(homedir(), '.sonar') const CONFIG_FILE = join(CONFIG_DIR, 'config.json') +const ACCOUNTS_FILE = join(CONFIG_DIR, 'accounts.json') export type Vendor = 'openai' | 'anthropic' @@ -15,6 
+17,16 @@ export interface Config { feedWidth?: number } +export interface AccountEntry { + token: string + apiUrl: string +} + +export interface AccountsFile { + active: string + accounts: Record +} + export function readConfig(): Config { try { const raw = readFileSync(CONFIG_FILE, 'utf8') @@ -38,8 +50,8 @@ export function deleteConfig(): void { } export function deleteDatabase(): void { - if (existsSync(join(CONFIG_DIR, 'database.sqlite'))) { - unlinkSync(join(CONFIG_DIR, 'database.sqlite')) + if (existsSync(DB_PATH)) { + unlinkSync(DB_PATH) } } @@ -50,26 +62,69 @@ export function writeConfig(config: Partial): void { writeFileSync(CONFIG_FILE, JSON.stringify(updated, null, 2), 'utf8') } +// ─── Accounts ──────────────────────────────────────────────────────────────── + +const DEFAULT_API_URL = 'https://api.sonar.8640p.info/graphql' + +export function readAccounts(): AccountsFile { + try { + const raw = readFileSync(ACCOUNTS_FILE, 'utf8') + return JSON.parse(raw) as AccountsFile + } catch { + return { active: '', accounts: {} } + } +} + +export function writeAccounts(data: AccountsFile): void { + mkdirSync(CONFIG_DIR, { recursive: true }) + writeFileSync(ACCOUNTS_FILE, JSON.stringify(data, null, 2), 'utf8') +} + +export function accountsExist(): boolean { + return existsSync(ACCOUNTS_FILE) +} + +/** Migrate legacy config.json token into accounts.json as "default". 
*/ +export function migrateToAccounts(): void { + if (accountsExist()) return + const config = readConfig() + if (!config.token) return + writeAccounts({ + active: 'default', + accounts: { + default: { token: config.token, apiUrl: config.apiUrl || DEFAULT_API_URL }, + }, + }) +} + +export function getActiveAccount(): { name: string; token: string; apiUrl: string } | null { + migrateToAccounts() + const { active, accounts } = readAccounts() + if (!active || !accounts[active]) return null + return { name: active, ...accounts[active] } +} + export function getToken(): string { - // SONAR_API_KEY env var takes highest priority - const apiKey = process.env.SONAR_API_KEY - if (apiKey) return apiKey + // Check accounts.json + const account = getActiveAccount() + if (account?.token) return account.token // Fall back to config file token const config = readConfig() if (config.token) return config.token - process.stderr.write('No token found. Set SONAR_API_KEY or run: sonar config setup\n') + process.stderr.write('No token found. Run: sonar account add \n') process.exit(1) } export function getApiUrl(): string { + if (process.env.SONAR_API_URL) return process.env.SONAR_API_URL + + const account = getActiveAccount() + if (account?.apiUrl) return account.apiUrl + const config = readConfig() - return ( - process.env.SONAR_API_URL ?? - config.apiUrl ?? - 'https://api.sonar.8640p.info/graphql' - ) + return config.apiUrl ?? DEFAULT_API_URL } export function getFeedRender(override?: string): string { diff --git a/src/lib/data-queries.ts b/src/lib/data-queries.ts index e7b9ee1..7b09db8 100644 --- a/src/lib/data-queries.ts +++ b/src/lib/data-queries.ts @@ -34,8 +34,9 @@ export interface Interest { id: string name: string description: string | null - keywords: string[] | null - relatedTopics: string[] | null + // These fields were removed from backend topics; keep optional for backward compatibility. 
+ keywords?: string[] | null + relatedTopics?: string[] | null createdAt: string updatedAt: string } @@ -92,12 +93,10 @@ export const SUGGESTIONS_QUERY = ` export const INTERESTS_QUERY = ` query DataInterests { - projects { + topics { id: nanoId name description - keywords - relatedTopics createdAt updatedAt } diff --git a/src/lib/data-utils.ts b/src/lib/data-utils.ts new file mode 100644 index 0000000..cee4786 --- /dev/null +++ b/src/lib/data-utils.ts @@ -0,0 +1,34 @@ +/** + * Shared utilities for the data backup/restore/verify commands. + */ +import { copyFileSync, existsSync, rmSync } from 'node:fs' +import pkg from 'node-sqlite3-wasm' +const { Database } = pkg + +/** + * Run SQLite's built-in integrity_check pragma on the given database file. + * Returns `'ok'` when the database is healthy. + */ +export function integrityCheck(path: string): string { + const db = new Database(path, { readOnly: true }) + try { + const row = db.get('PRAGMA integrity_check') as { integrity_check: string } | undefined + return row?.integrity_check ?? 'unknown' + } finally { + db.close() + } +} + +/** + * Copy a SQLite DB file together with any WAL / SHM sidecars that exist. 
+ */ +export function copyDbWithSidecars(src: string, dst: string): void { + copyFileSync(src, dst) + for (const ext of ['-wal', '-shm']) { + if (existsSync(`${src}${ext}`)) { + copyFileSync(`${src}${ext}`, `${dst}${ext}`) + } else { + rmSync(`${dst}${ext}`, { force: true }) + } + } +} diff --git a/src/lib/db.ts b/src/lib/db.ts index e48cb2a..6a8698e 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -1,11 +1,13 @@ -import Database from 'better-sqlite3' +import pkg from 'node-sqlite3-wasm' +const { Database } = pkg +type Db = InstanceType import { mkdirSync } from 'node:fs' import { homedir } from 'node:os' import { join, dirname } from 'node:path' export const DB_PATH = join(homedir(), '.sonar', 'data.db') -export function openDb(): Database.Database { +export function openDb(): Db { mkdirSync(dirname(DB_PATH), { recursive: true }) const db = new Database(DB_PATH) db.exec(` @@ -28,9 +30,9 @@ export function openDb(): Database.Database { metadata TEXT, synced_at TEXT ); - CREATE TABLE IF NOT EXISTS interests ( + CREATE TABLE IF NOT EXISTS topics ( id TEXT PRIMARY KEY, name TEXT, description TEXT, - keywords TEXT, topics TEXT, + keywords TEXT, related_topics TEXT, created_at TEXT, updated_at TEXT, synced_at TEXT ); CREATE TABLE IF NOT EXISTS sync_state ( @@ -40,7 +42,7 @@ export function openDb(): Database.Database { return db } -export function upsertTweet(db: Database.Database, tweet: { +export function upsertTweet(db: Db, tweet: { id: string xid: string text: string @@ -55,31 +57,31 @@ export function upsertTweet(db: Database.Database, tweet: { followingCount: number | null } }): void { - db.prepare(` + db.run(` INSERT OR REPLACE INTO tweets (id, xid, text, created_at, like_count, retweet_count, reply_count, author_username, author_display_name, author_followers_count, author_following_count) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- `).run( - tweet.id, tweet.xid, tweet.text, tweet.createdAt, + `, + [tweet.id, tweet.xid, tweet.text, tweet.createdAt, tweet.likeCount, tweet.retweetCount, tweet.replyCount, tweet.user.username, tweet.user.displayName, - tweet.user.followersCount, tweet.user.followingCount, + tweet.user.followersCount, tweet.user.followingCount], ) } -export function upsertFeedItem(db: Database.Database, item: { +export function upsertFeedItem(db: Db, item: { tweetId: string score: number matchedKeywords: string[] }): void { - db.prepare(` + db.run(` INSERT OR REPLACE INTO feed_items (tweet_id, score, matched_keywords, synced_at) VALUES (?, ?, ?, ?) - `).run(item.tweetId, item.score, JSON.stringify(item.matchedKeywords), new Date().toISOString()) + `, [item.tweetId, item.score, JSON.stringify(item.matchedKeywords), new Date().toISOString()]) } -export function upsertSuggestion(db: Database.Database, s: { +export function upsertSuggestion(db: Db, s: { suggestionId: string tweetId: string score: number @@ -88,44 +90,44 @@ export function upsertSuggestion(db: Database.Database, s: { projectsMatched: number metadata?: Record | null }): void { - db.prepare(` + db.run(` INSERT OR REPLACE INTO suggestions (suggestion_id, tweet_id, score, status, relevance, projects_matched, metadata, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?) - `).run( - s.suggestionId, s.tweetId, s.score, s.status, s.relevance, + `, + [s.suggestionId, s.tweetId, s.score, s.status, s.relevance, JSON.stringify(s.projectsMatched), s.metadata != null ? 
JSON.stringify(s.metadata) : null, - new Date().toISOString(), + new Date().toISOString()], ) } -export function upsertInterest(db: Database.Database, interest: { +export function upsertTopic(db: Db, topic: { id: string name: string description: string | null - keywords: string[] | null - relatedTopics: string[] | null + keywords?: string[] | null + relatedTopics?: string[] | null createdAt: string updatedAt: string }): void { - db.prepare(` - INSERT OR REPLACE INTO interests (id, name, description, keywords, topics, created_at, updated_at, synced_at) + db.run(` + INSERT OR REPLACE INTO topics (id, name, description, keywords, related_topics, created_at, updated_at, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?) - `).run( - interest.id, interest.name, interest.description, - JSON.stringify(interest.keywords ?? []), - JSON.stringify(interest.relatedTopics ?? []), - interest.createdAt, interest.updatedAt, - new Date().toISOString(), + `, + [topic.id, topic.name, topic.description, + JSON.stringify(topic.keywords ?? []), + JSON.stringify(topic.relatedTopics ?? []), + topic.createdAt, topic.updatedAt, + new Date().toISOString()], ) } -export function getSyncState(db: Database.Database, key: string): string | null { - const row = db.prepare('SELECT value FROM sync_state WHERE key = ?').get(key) as { value: string } | undefined +export function getSyncState(db: Db, key: string): string | null { + const row = db.get('SELECT value FROM sync_state WHERE key = ?', [key]) as { value: string } | undefined return row?.value ?? 
null } -export function setSyncState(db: Database.Database, key: string, value: string): void { - db.prepare('INSERT OR REPLACE INTO sync_state (key, value) VALUES (?, ?)').run(key, value) +export function setSyncState(db: Db, key: string, value: string): void { + db.run('INSERT OR REPLACE INTO sync_state (key, value) VALUES (?, ?)', [key, value]) } diff --git a/src/lib/skill.ts b/src/lib/skill.ts index 73b2139..2033566 100644 --- a/src/lib/skill.ts +++ b/src/lib/skill.ts @@ -1,291 +1,308 @@ -import { writeFileSync, mkdirSync } from 'node:fs' +// THIS FILE IS AUTO-GENERATED. Do not edit manually. +// Run: pnpm generate:skill +// See: scripts/generate-skill.ts +import { writeFileSync, readFileSync, existsSync, mkdirSync } from 'node:fs' +import { createHash } from 'node:crypto' import { join, dirname } from 'node:path' import { homedir } from 'node:os' const SKILL_CONTENT = `--- name: sonar -description: Sonar CLI — manage interests, suggestions, indexing jobs, and account config for the Sonar social intelligence platform. Use when the user asks about their Sonar account, wants to create/list interests, check suggestions, trigger indexing, or configure the CLI. +description: Sonar CLI — view and triage your feed, manage topics, trigger refresh jobs, and manage local Sonar config/data. homepage: https://sonar.sh user-invocable: true allowed-tools: Bash argument-hint: [command and options] -metadata: {"openclaw":{"emoji":"📡","primaryEnv":"SONAR_API_KEY","requires":{"bins":["sonar"],"env":["SONAR_API_KEY"]}}} +metadata: {"openclaw":{"emoji":"📡","requires":{"bins":["sonar"]}}} --- # Sonar CLI -Sonar is a social intelligence platform. Use the \`sonar\` CLI to manage the user's account. +All commands are invoked as: \`sonar [subcommand] [flags]\`. 
-All commands are invoked as: \`sonar [subcommand] [flags]\` +## sonar account add ---- +\`\`\`bash +sonar account add +sonar account add --alias # Account alias (default: random) +sonar account add --api-url # Custom API URL +\`\`\` -## Account & Config +## sonar account \`\`\`bash -# Show account info, plan usage, and suggestion counts sonar account +sonar account --json # Raw JSON output +\`\`\` -# Show current CLI config (API URL, vendor, token presence) -sonar config +## sonar account remove -# Set AI vendor preference for --from-prompt (saved to ~/.sonar/config.json) -sonar config set vendor openai # or: anthropic +\`\`\`bash +sonar account remove +sonar account remove --force # Remove even if active +\`\`\` -# Initialise workspace from environment variables -# Requires: SONAR_API_KEY -sonar config setup +## sonar account rename + +\`\`\`bash +sonar account rename \`\`\` ---- +## sonar account switch -## Interests +\`\`\`bash +sonar account switch +\`\`\` -Interests are named topic areas with keywords and related topics that drive suggestion matching. 
+## sonar archive \`\`\`bash -# List all interests -sonar interests +sonar archive +sonar archive --id # Suggestion ID to archive +\`\`\` + +## sonar config env -# Create manually -sonar interests create --name "AI Agents" --description "LLM-based agents and tooling" \\ - --keywords "agents,llm,tools,mcp" --topics "machine learning,AI safety" +\`\`\`bash +sonar config env +\`\`\` -# Generate fields from a natural language prompt (uses OPENAI_API_KEY or ANTHROPIC_API_KEY) -sonar interests create --from-prompt "I want to follow the Rust ecosystem and systems programming" +## sonar config -# Generate with a specific vendor (overrides config preference) -sonar interests create --from-prompt "DeFi and crypto protocols" --vendor anthropic +\`\`\`bash +sonar config +\`\`\` -# Update an existing interest (full replace) -sonar interests update --id --name "New Name" --keywords "kw1,kw2" +## sonar config nuke -# Add keywords to an existing interest (fetches current, merges, sends full list) -sonar interests update --id --add-keywords "mcp,a2a,langgraph" +\`\`\`bash +sonar config nuke +sonar config nuke --confirm # Pass to confirm deletion +\`\`\` -# Remove keywords from an existing interest -sonar interests update --id --remove-keywords "old-term,deprecated-kw" +## sonar config set -# Add and remove keywords in one shot -sonar interests update --id --add-keywords "vibe-coding" --remove-keywords "cursor" +\`\`\`bash +sonar config set +sonar config set --key # Config key: vendor, feed-render, feed-width +sonar config set --value # Value to set +\`\`\` -# Same flags work for related topics -sonar interests update --id --add-topics "AI safety" --remove-topics "machine learning" +## sonar config setup -# Combine keyword/topic patching with a name change -sonar interests update --id --name "New Name" --add-keywords "new-kw" +\`\`\`bash +sonar config setup +sonar config setup --key # API key to use +\`\`\` -# Regenerate all fields from a new prompt (replaces everything) -sonar 
interests update --id --from-prompt "Rust and WebAssembly tooling" +## sonar config skill -# Output raw JSON (agent-friendly) -sonar interests --json +\`\`\`bash +sonar config skill +sonar config skill --install # Install to ~/.claude/skills/sonar/SKILL.md +sonar config skill --dest # Write to a custom path +sonar config skill --force # Overwrite even if file was modified \`\`\` -**AI vendor resolution order:** -1. \`--vendor\` flag -2. \`SONAR_AI_VENDOR\` environment variable -3. \`vendor\` in \`~/.sonar/config.json\` (set via \`sonar config set vendor\`) -4. Defaults to \`openai\` +## sonar data backup -Required env vars: \`OPENAI_API_KEY\` (OpenAI) or \`ANTHROPIC_API_KEY\` (Anthropic) +\`\`\`bash +sonar data backup +sonar data backup --out # Backup output path (default: ~/.sonar/data-backup-.db) +sonar data backup --json # Raw JSON output +\`\`\` ---- +## sonar data path -## Feed +\`\`\`bash +sonar data path +\`\`\` -Scored tweet feed from your social network, filtered by interests. +## sonar data pull \`\`\`bash -# Show feed (default: last 12h, limit 20, card layout) -sonar feed +sonar data pull +\`\`\` -# Time window -sonar feed --hours 24 -sonar feed --days 3 +## sonar data restore -# Limit results -sonar feed --limit 50 +\`\`\`bash +sonar data restore +sonar data restore --from # Backup database path to restore from +sonar data restore --to # Target database path (default: local sonar DB path) +sonar data restore --json # Raw JSON output +\`\`\` -# Output layout -sonar feed --render card # default — rich card view -sonar feed --render table # compact table view -sonar feed --width 100 # card body width in columns +## sonar data sql -# Raw JSON output (agent-friendly) -sonar feed --json +\`\`\`bash +sonar data sql \`\`\` ---- +## sonar data verify -## Suggestions (inbox) +\`\`\`bash +sonar data verify +sonar data verify --path # Database path (default: local sonar DB path) +sonar data verify --json # Raw JSON output +\`\`\` + +## sonar feed \`\`\`bash -# List 
suggestions (default: inbox, limit 20) -sonar inbox - -# Filter by status -sonar inbox --status inbox -sonar inbox --status later -sonar inbox --status replied -sonar inbox --status archived - -# Change limit -sonar inbox --limit 50 - -# Update a suggestion's status (positional id replaced with --id flag) -sonar inbox read --id -sonar inbox skip --id -sonar inbox later --id -sonar inbox archive --id - -# Raw JSON output -sonar inbox --json +sonar feed +sonar feed --hours N # Look back N hours (default: 12) +sonar feed --days N # Look back N days +sonar feed --limit N # Result limit (default: 20) +sonar feed --offset N # Skip first N results (default: 0) +sonar feed --kind # Feed source: default|bookmarks|followers|following +sonar feed --render # Output layout: card|table +sonar feed --width N # Card width in columns +sonar feed --json # Raw JSON output +sonar feed --follow # Continuously poll for new items +sonar feed --interval N # Poll interval in seconds (default: 30) \`\`\` ---- +## sonar -## Ingest +\`\`\`bash +sonar +sonar --hours N # Look back N hours (default: 12) +sonar --days N # Look back N days +sonar --limit N # Result limit (default: 20) +sonar --kind # Feed source: default|bookmarks|followers|following +sonar --render # Output layout: card|table +sonar --width N # Card width in columns +sonar --json # Raw JSON output +sonar --no-interactive # Interactive session mode (default: on, use --no-interactive to disable) +sonar --vendor # AI vendor: openai|anthropic +\`\`\` -Trigger background jobs to ingest data. 
+## sonar later \`\`\`bash -# Trigger specific jobs -sonar ingest tweets # Ingest recent tweets from social graph -sonar ingest bookmarks # Ingest X bookmarks (requires OAuth token) -sonar interests match # Match interests against ingested tweets (default: last 24h) +sonar later +sonar later --id # Suggestion ID to save for later +\`\`\` + +## sonar refresh -# Match tweet window (capped by plan: free=3d, pro=7d, enterprise=14d) -sonar interests match --days 1 # default -sonar interests match --days 3 # broader window (free plan max) -sonar interests match --days 7 # pro plan max +\`\`\`bash +sonar refresh +sonar refresh --bookmarks # Sync bookmarks from X +sonar refresh --likes # Sync likes from X +sonar refresh --graph # Rebuild social graph +sonar refresh --tweets # Index tweets across network +sonar refresh --suggestions # Regenerate suggestions +\`\`\` -# Show current job queue counts (one-shot) -sonar monitor +## sonar skip -# Live polling view of job queues -sonar monitor --watch +\`\`\`bash +sonar skip +sonar skip --id # Suggestion ID to skip \`\`\` ---- +## sonar status -## Local Data +\`\`\`bash +sonar status +sonar status --watch # Poll and refresh every 2 seconds +sonar status --json # Raw JSON output +\`\`\` -Sync feed, suggestions, and interests to a local SQLite DB (\`~/.sonar/data.db\`) for offline querying. 
+## sonar topics add \`\`\`bash -# Full download — wipes and repopulates ~/.sonar/data.db -sonar config data download +sonar topics add +sonar topics add --description # Optional description (auto-generated if omitted) +sonar topics add --json # Raw JSON output +\`\`\` -# Incremental sync — upserts records newer than last sync -sonar config data sync +## sonar topics delete -# Open an interactive sqlite3 REPL -sonar config data sql +\`\`\`bash +sonar topics delete +sonar topics delete --json # Raw JSON output +\`\`\` + +## sonar topics edit -# Print path to the local DB file -sonar config data path +\`\`\`bash +sonar topics edit +sonar topics edit --name # New name +sonar topics edit --description # New description +sonar topics edit --json # Raw JSON output \`\`\` -### Schema - -\`\`\`sql --- Core tweet content (shared by feed and suggestions) -tweets ( - id TEXT PRIMARY KEY, -- Sonar tweet UUID - xid TEXT, -- Twitter/X tweet ID - text TEXT, - created_at TEXT, - like_count INTEGER, - retweet_count INTEGER, - reply_count INTEGER, - author_username TEXT, - author_display_name TEXT, - author_followers_count INTEGER, - author_following_count INTEGER -) - --- Feed items (scored, keyword-matched tweets) -feed_items ( - tweet_id TEXT PRIMARY KEY, -- FK → tweets.id - score REAL, - matched_keywords TEXT, -- JSON array of strings - synced_at TEXT -) - --- Inbox suggestions -suggestions ( - suggestion_id TEXT PRIMARY KEY, - tweet_id TEXT, -- FK → tweets.id - score REAL, - status TEXT, -- INBOX | READ | SKIPPED | LATER | ARCHIVED - relevance TEXT, - projects_matched TEXT, -- JSON (count of matched interests) - metadata TEXT, -- JSON - synced_at TEXT -) - --- Interests (topics/keywords that drive matching) -interests ( - id TEXT PRIMARY KEY, -- nanoId - name TEXT, - description TEXT, - keywords TEXT, -- JSON array - topics TEXT, -- JSON array - created_at TEXT, - updated_at TEXT, - synced_at TEXT -) - --- Internal sync state -sync_state ( - key TEXT PRIMARY KEY, -- e.g. 
"last_synced_at" - value TEXT -) +## sonar topics + +\`\`\`bash +sonar topics +sonar topics --json # Raw JSON output \`\`\` ---- +## sonar topics suggest + +\`\`\`bash +sonar topics suggest +sonar topics suggest --vendor # AI vendor: openai|anthropic +sonar topics suggest --count N # Number of suggestions (default: 5) +sonar topics suggest --json # Raw JSON output +\`\`\` -## Environment Variables +## sonar topics view + +\`\`\`bash +sonar topics view +\`\`\` + +## Environment variables | Variable | Purpose | |---|---| -| \`SONAR_API_KEY\` | API key for authentication (overrides config file) | -| \`SONAR_API_URL\` | Backend URL (default: \`http://localhost:8000/graphql\`) | -| \`SONAR_AI_VENDOR\` | AI vendor for \`--from-prompt\` (overrides config file) | +| \`SONAR_API_URL\` | Backend URL (defaults to production GraphQL endpoint) | +| \`SONAR_AI_VENDOR\` | Vendor override for AI-assisted operations (\`openai\` or \`anthropic\`) | +| \`SONAR_FEED_RENDER\` | Default feed renderer override | +| \`SONAR_FEED_WIDTH\` | Default card width override | | \`OPENAI_API_KEY\` | Required when vendor is \`openai\` | | \`ANTHROPIC_API_KEY\` | Required when vendor is \`anthropic\` | +` ---- - -## Config file - -Stored at \`~/.sonar/config.json\`: +const DEFAULT_INSTALL_PATH = join(homedir(), '.claude', 'skills', 'sonar', 'SKILL.md') -\`\`\`json -{ - "token": "snr_...", - "apiUrl": "https://api.sonar.sh/graphql", - "vendor": "openai" +function sha256(content: string): string { + return createHash('sha256').update(content).digest('hex') } -\`\`\` -` -const DEFAULT_INSTALL_PATH = join(homedir(), '.claude', 'skills', 'sonar', 'SKILL.md') +function safeWrite(target: string, content: string, force: boolean): void { + if (existsSync(target) && !force) { + const existing = readFileSync(target, 'utf8') + if (existing === content) { + process.stdout.write(`SKILL.md is already up to date: ${target}\n`) + process.exit(0) + } + // File exists and differs — user may have customized it + 
process.stderr.write( + `SKILL.md has been modified: ${target}\n` + + `Use --force to overwrite, or manually merge.\n` + + `New version hash: ${sha256(content).slice(0, 8)}\n` + ) + process.exit(1) + } + mkdirSync(dirname(target), { recursive: true }) + writeFileSync(target, content, 'utf8') + process.stdout.write(`SKILL.md written to ${target}\n`) +} -export function writeSkillTo(dest?: string, install?: boolean): void { +export function writeSkillTo(dest?: string, install?: boolean, force?: boolean): void { if (install || dest === '--install') { - const target = DEFAULT_INSTALL_PATH - mkdirSync(dirname(target), { recursive: true }) - writeFileSync(target, SKILL_CONTENT, 'utf8') - process.stdout.write(`SKILL.md written to ${target}\n`) + safeWrite(DEFAULT_INSTALL_PATH, SKILL_CONTENT, force ?? false) process.exit(0) } if (dest) { - mkdirSync(dirname(dest), { recursive: true }) - writeFileSync(dest, SKILL_CONTENT, 'utf8') - process.stdout.write(`SKILL.md written to ${dest}\n`) + safeWrite(dest, SKILL_CONTENT, force ?? false) process.exit(0) } diff --git a/src/types/sonar.ts b/src/types/sonar.ts index 0213988..c5c6aaf 100644 --- a/src/types/sonar.ts +++ b/src/types/sonar.ts @@ -148,7 +148,7 @@ export type Query = { apiKeys: Array; feed: Array; me?: Maybe; - projects: Array; + topics: Array; suggestionCounts: SuggestionCounts; suggestions: Array; tweet?: Maybe; diff --git a/tasks/todo.md b/tasks/todo.md new file mode 100644 index 0000000..7c969cd --- /dev/null +++ b/tasks/todo.md @@ -0,0 +1,98 @@ +# CLI Redesign — tasks/todo.md + +## Goal + +Redesign the Sonar CLI surface from a pipeline-centric model to an interest-monitoring model. Users should be able to surface their interests and understand status without thinking about ingestion, matching, or inbox vs feed distinctions. + +## Branch + +`feat/cli-redesign` + +## Stack + +No framework change — keeping Pastel + Ink. The redesign is purely command surface and UX. 
+ +## Legacy + +Old commands preserved in `src/commands-legacy/` for reference during migration. New commands go in `src/commands/`. + +--- + +## New command surface + +| Command | Replaces | Notes | +|---|---|---| +| `sonar` | `feed` + `inbox` | Combined default view, sorted by relevance | +| `sonar interests` | `interests` (list) | List tracked interests | +| `sonar interests add "..."` | `interests create --from-prompt` | Natural language add | +| `sonar interests edit ` | `interests update` | Edit an interest | +| `sonar refresh` | `ingest tweets` + `ingest bookmarks` + `interests match` | Force pipeline refresh, single escape hatch | +| `sonar status [--watch]` | `monitor` + `account` | Pipeline health + account in one view | +| `sonar archive ` | `inbox archive` | Triage action | +| `sonar later ` | `inbox later` | Triage action | +| `sonar skip ` | `inbox skip` | Triage action | +| `sonar config` | `config` | Unchanged | + +**Retired (no replacement needed):** +- `quickstart` → auto-trigger on first run if no interests exist +- `ingest` → internal, surfaced via `sonar refresh` +- `interests match` → internal, part of `sonar refresh` +- `feed --kind` → bookmarks become a filter on `sonar --kind bookmarks` + +--- + +## Tasks + +### Phase 1 — Scaffold + +- [ ] Create `src/commands/index.tsx` — new default view (combined feed + inbox) +- [ ] Create `src/commands/interests/index.tsx` — list interests +- [ ] Create `src/commands/interests/add.tsx` — add interest from natural language +- [ ] Create `src/commands/interests/edit.tsx` — edit interest +- [ ] Create `src/commands/refresh.tsx` — trigger ingest + match +- [ ] Create `src/commands/status.tsx` — combined monitor + account +- [ ] Create `src/commands/archive.tsx` — triage: archive +- [ ] Create `src/commands/later.tsx` — triage: later +- [ ] Create `src/commands/skip.tsx` — triage: skip +- [ ] Keep `src/commands/config/` — copy from legacy unchanged + +### Phase 2 — Default view (`sonar`) + +- [ ] Merge feed 
+ inbox into single ranked list +- [ ] Support `--kind` filter (default|bookmarks|followers|following) +- [ ] Support `--hours` / `--days` window +- [ ] Support `--limit` +- [ ] Support `--interactive` (AI exploration) +- [ ] Support `--json` +- [ ] First-run: if no interests exist, prompt to run `sonar interests add` or scaffold defaults + +### Phase 3 — Interests + +- [ ] `sonar interests` — table of interests with keyword/topic summary +- [ ] `sonar interests add "..."` — natural language → create interest (wrap existing AI flow) +- [ ] `sonar interests edit ` — interactive or flag-based edit (wrap existing update flow) + +### Phase 4 — Status + Refresh + +- [ ] `sonar status` — account card + queue table in one view (port from monitor + account) +- [ ] `sonar status --watch` — live polling +- [ ] `sonar refresh` — trigger ingest tweets + bookmarks + interests match sequentially, show progress + +### Phase 5 — Triage actions + +- [ ] `sonar archive ` +- [ ] `sonar later ` +- [ ] `sonar skip ` + +### Phase 6 — Cleanup + +- [ ] Remove `src/commands-legacy/` once all commands are migrated and verified +- [ ] Update README command reference +- [ ] Update `config skill` generated skill file to reflect new surface +- [ ] Run `/sonar fix-cli sonar` on each new command to verify clean + +--- + +## Review + +_To be filled in after implementation._