diff --git a/.changeset/sour-dolls-sip.md b/.changeset/sour-dolls-sip.md new file mode 100644 index 00000000..586928ee --- /dev/null +++ b/.changeset/sour-dolls-sip.md @@ -0,0 +1,8 @@ +--- +"nostream": major +--- + +Add a brand-new unified `nostream` CLI/TUI that replaces the legacy `scripts/*` shell wrappers for lifecycle, setup, info, config, data, and development workflows. + +**Fixes** + - Fixed some consistency issues after the migration from `npm` to `pnpm` diff --git a/.env.example b/.env.example index 7f335c89..49dfb185 100644 --- a/.env.example +++ b/.env.example @@ -69,6 +69,6 @@ WORKER_COUNT=2 # Defaults to CPU count. Use 1 or 2 for local testing. # HIDDEN_SERVICE_PORT=80 # --- I2P (Optional) --- -# To enable I2P, use: ./scripts/start_with_i2p +# To enable I2P, use: nostream start --i2p # I2P tunnel configuration lives in i2p/tunnels.conf and i2p/i2pd.conf. # No application-level env vars are needed; the i2pd sidecar handles everything. diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 2e8d1709..14534607 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -61,6 +61,10 @@ jobs: run: pnpm install --frozen-lockfile - name: Run build check run: pnpm run build:check + - name: Build artifacts + run: pnpm run build + - name: Verify built CLI entrypoint + run: pnpm run verify:cli:build test-units-and-cover: name: Unit Tests And Coverage runs-on: ubuntu-latest @@ -80,6 +84,8 @@ jobs: run: pnpm install --frozen-lockfile - name: Run unit tests run: pnpm run test:unit + - name: Run CLI test suite + run: pnpm run test:cli - name: Run coverage for unit tests run: pnpm run cover:unit if: ${{ always() }} diff --git a/.knip.json b/.knip.json index 193d47bc..eb3256db 100644 --- a/.knip.json +++ b/.knip.json @@ -1,17 +1,21 @@ { "$schema": "https://unpkg.com/knip@2/schema.json", "entry": [ - "src/index.ts", - "src/import-events.ts", - "knexfile.js" + "src/index.ts!", + "src/import-events.ts!", + "src/cli/index.ts!", + 
"src/scripts/benchmark-queries.ts!", + "knexfile.js!" ], "project": [ - "src/**/*.ts" + "src/**/*.ts!" ], "ignoreDependencies": [ "lzma-native" ], - "ignoreFiles": [], + "ignore": [ + ".nostr/**" + ], "commitlint": false, "eslint": false, "github-actions": false, @@ -19,4 +23,4 @@ "mocha": false, "nyc": false, "semantic-release": false -} \ No newline at end of file +} diff --git a/CLI.md b/CLI.md new file mode 100644 index 00000000..d62cfda0 --- /dev/null +++ b/CLI.md @@ -0,0 +1,138 @@ +# Nostream CLI + +Nostream ships a unified command-line interface: + +```bash +nostream --help +pnpm run cli -- --help +``` + +When run with no arguments in an interactive terminal, `nostream` launches an interactive TUI. +In non-interactive environments, it prints help and exits successfully. + +## Exit Codes + +- `0`: success +- `1`: runtime/validation error +- `2`: usage error (invalid command/options) + +## Core Commands + +```bash +nostream start [--tor] [--i2p] [--nginx] [--debug] [--port 8008] +nostream stop [--all|--tor|--i2p|--nginx|--local] +nostream info [--tor-hostname] [--i2p-hostname] [--json] +nostream update +nostream clean +nostream setup [--yes] [--start] +nostream seed [--count 100] +nostream import [file.jsonl|file.json] [--file file.jsonl|file.json] [--batch-size 1000] +nostream export [output] [--output output] [--format jsonl|json] +``` + +## Removed Legacy Wrappers + +The old shell wrapper scripts are no longer shipped in `scripts/`. 
+Use the unified `nostream` CLI directly instead: + +```bash +scripts/start -> nostream start +scripts/start_with_tor -> nostream start --tor +scripts/start_with_i2p -> nostream start --i2p +scripts/start_with_nginx -> nostream start --nginx +scripts/stop -> nostream stop +scripts/print_tor_hostname -> nostream info --tor-hostname +scripts/print_i2p_hostname -> nostream info --i2p-hostname +scripts/update -> nostream update +scripts/clean -> nostream clean +``` + +## Configuration Commands + +```bash +nostream config list +nostream config list --json +nostream config get +nostream config get --json +nostream config set [--type inferred|json] [--validate|--no-validate] [--restart] +nostream config validate + +nostream config env list [--show-secrets] +nostream config env get [--show-secrets] +nostream config env set +nostream config env validate +``` + +Path syntax supports dot keys and array indexes: + +```bash +nostream config get limits.event.content[0].maxLength +nostream config set limits.event.content[0].maxLength 2048 +nostream config set nip05.domainWhitelist '["example.com","relay.io"]' --type json +``` + +## Development Commands + +```bash +nostream dev db:clean [--all|--older-than=30|--kinds=1,7,4] [--dry-run] [--force] +nostream dev db:reset [--yes] +nostream dev seed:relay +nostream dev docker:clean [--yes] +nostream dev test:unit +nostream dev test:cli +nostream dev test:integration +``` + +## TUI Navigation + +Run: + +```bash +nostream +``` + +Main menu includes: +- Start relay +- Stop relay +- Configure settings +- Manage data (export/import) +- Development tools +- View relay info +- Exit + +TUI behavior highlights: +- Each submenu includes an explicit `Back` option, so you can return without using signal keys. +- Start menu prompts for Tor/I2P/Debug, optional custom port, and final confirmation. +- Configure menu offers guided editing for common categories such as payments, network, and limits. 
+- Advanced dot-path get/set remains available for full settings access. +- Manage menu asks for import/export format and file paths. +- Dev menu displays explicit destructive warnings before DB reset/clean and Docker clean. + +## Common Workflows + +```bash +# Start relay with Tor + I2P +nostream start --tor --i2p + +# Print Tor hostname +nostream info --tor-hostname + +# Machine-readable output for automation +nostream info --json +nostream config list --json +nostream config get payments.enabled --json + +# Import and export events +nostream import --file ./events.jsonl --batch-size 500 +nostream import --file ./events.json --batch-size 500 +nostream export --output backup.jsonl --format jsonl +nostream export --output backup.json --format json + +# Update YAML settings and restart relay +nostream config set payments.enabled true --restart + +# Update env settings +nostream config env set RELAY_PORT 8008 +nostream config env get SECRET --show-secrets +nostream config env validate +``` diff --git a/CONFIGURATION.md b/CONFIGURATION.md index b92cab8e..8fc75b0b 100644 --- a/CONFIGURATION.md +++ b/CONFIGURATION.md @@ -73,8 +73,7 @@ Tunnel keys are persisted at `.nostr/i2p/data/` so the `.b32.i2p` address surviv The i2pd web console (tunnel status, `.b32.i2p` destinations) is published to the host on **`127.0.0.1:7070`** only. Remove the `ports:` mapping in `docker-compose.i2p.yml` to disable host-side access. -- Start with I2P: `./scripts/start_with_i2p` -- Print hostname hints: `./scripts/print_i2p_hostname` +- Start with I2P: `nostream start --i2p` If you've set READ_REPLICAS to 4, you should configure RR0_ through RR3_. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d0fcfd58..f787cf55 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -47,6 +47,13 @@ corepack enable pnpm install ``` +Use the unified CLI for relay lifecycle and supported development operations from this source +checkout: + +``` +pnpm run cli -- --help +``` + > **Important:** Pre-commit hooks installed by Husky run linting and formatting checks on every > commit. Do **not** bypass them with `git commit --no-verify`. If a hook fails, fix the reported > issues before committing. @@ -56,7 +63,7 @@ pnpm install Start the relay (runs in the foreground until stopped with Ctrl+C): ``` -./scripts/start +pnpm run cli -- start ``` ### Development Quick Start (Standalone) @@ -149,7 +156,7 @@ cd /path/to/nostream Run unit tests: ``` -pnpm test:unit +pnpm run cli -- dev test:unit ``` Run unit tests in watch mode: @@ -223,7 +230,7 @@ DB_MAX_POOL_SIZE=2 Run the integration tests: ``` -pnpm test:integration +pnpm run cli -- dev test:integration ``` Open the integration test report: diff --git a/README.md b/README.md index 783d897e..8705b869 100644 --- a/README.md +++ b/README.md @@ -112,7 +112,7 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal - On `.nostr/settings.yaml` file make the following changes: - `payments.processor` to `zebedee` - `paymentsProcessors.zebedee.callbackBaseURL` to match your Nostream URL (e.g. `https://{YOUR_DOMAIN_HERE}/callbacks/zebedee`) - - Restart Nostream (`./scripts/stop` followed by `./scripts/start`) + - Restart Nostream (`nostream stop` followed by `nostream start`) - Read the in-depth guide for more information: [Set Up a Paid Nostr Relay with ZEBEDEE API](https://docs.zebedee.io/docs/guides/nostr-relay) 3. 
[Nodeless](https://nodeless.io/?ref=587f477f-ba1c-4bd3-8986-8302c98f6731) @@ -130,7 +130,7 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal - On your `.nostr/settings.yaml` file make the following changes: - Set `payments.processor` to `nodeless` - Set `paymentsProcessors.nodeless.storeId` to your store ID - - Restart Nostream (`./scripts/stop` followed by `./scripts/start`) + - Restart Nostream (`nostream stop` followed by `nostream start`) 4. [OpenNode](https://www.opennode.com/) - Complete the step "Before you begin" @@ -145,7 +145,7 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal - On your `.nostr/settings.yaml` file make the following changes: - Set `payments.processor` to `opennode` - - Restart Nostream (`./scripts/stop` followed by `./scripts/start`) + - Restart Nostream (`nostream stop` followed by `nostream start`) 5. [LNBITS](https://lnbits.com/) - Complete the step "Before you begin" @@ -162,7 +162,7 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal - Set `payments.processor` to `lnbits` - set `lnbits.baseURL` to your LNbits instance URL (e.g. `https://{YOUR_LNBITS_DOMAIN_HERE}/`) - Set `paymentsProcessors.lnbits.callbackBaseURL` to match your Nostream URL (e.g. `https://{YOUR_DOMAIN_HERE}/callbacks/lnbits`) - - Restart Nostream (`./scripts/stop` followed by `./scripts/start`) + - Restart Nostream (`nostream stop` followed by `nostream start`) 6. [Alby](https://getalby.com/) or any LNURL Provider with [LNURL-verify](https://github.com/lnurl/luds/issues/182) support - Complete the step "Before you begin" @@ -170,7 +170,7 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal - On your `.nostr/settings.yaml` file make the following changes: - Set `payments.processor` to `lnurl` - Set `lnurl.invoiceURL` to your LNURL (e.g. 
`https://getalby.com/lnurlp/your-username`) - - Restart Nostream (`./scripts/stop` followed by `./scripts/start`) + - Restart Nostream (`nostream stop` followed by `nostream start`) 7. Ensure payments are required for your public key - Visit https://{YOUR-DOMAIN}/ @@ -186,6 +186,18 @@ Install Docker from their [official guide](https://docs.docker.com/engine/instal ## Quick Start (Docker Compose) +For full command reference and interactive mode documentation, see [CLI.md](CLI.md). +Non-interactive CLI usage conventions: +- exit `0` on success +- exit `1` on runtime/validation errors +- exit `2` on usage errors (invalid command/options) + +Optional global installation from a source checkout: + ``` + pnpm add -g . + nostream --help + ``` + Install Docker following the [official guide](https://docs.docker.com/engine/install/). You may have to uninstall Docker if you installed it using a different guide. @@ -205,74 +217,90 @@ Copy the output and paste it into an `.env` file: Start: ``` - ./scripts/start + nostream start ``` or ``` - ./scripts/start_with_tor - ``` - or, with Nginx reverse proxy and Let's Encrypt SSL: - ``` - RELAY_DOMAIN=relay.example.com CERTBOT_EMAIL=you@example.com ./scripts/start_with_nginx + nostream start --tor ``` - -**Windows / WSL2 users:** Docker bind-mounts can cause PostgreSQL permission errors on Windows. Use the dedicated override file instead: + or ``` - docker compose -f docker-compose.yml -f docker-compose.windows.yml up --build + nostream start --i2p ``` - Or add this to your `.env` file so you don't have to type it every time: + or ``` - COMPOSE_FILE=docker-compose.yml:docker-compose.windows.yml + RELAY_DOMAIN=relay.example.com CERTBOT_EMAIL=you@example.com nostream start --nginx ``` - > **Note:** If you previously ran Nostream on Linux/Mac and are switching to Windows, your existing data lives at `.nostr/data/` on the host. You'll need to copy it into the Docker named volume manually or it won't be visible to the new setup. 
Stop the server with: ``` - ./scripts/stop + nostream stop ``` Print the Tor hostname: ``` - ./scripts/print_tor_hostname + nostream info --tor-hostname ``` -Start with I2P: +Print I2P hostname(s): ``` - ./scripts/start_with_i2p + nostream info --i2p-hostname ``` -Print the I2P hostname: - ``` - ./scripts/print_i2p_hostname - ``` +The old shell wrapper scripts are no longer shipped in `scripts/`. +Use the unified `nostream` CLI directly instead: -### Importing events from JSON Lines +``` +scripts/start -> nostream start +scripts/start_with_tor -> nostream start --tor +scripts/start_with_i2p -> nostream start --i2p +scripts/start_with_nginx -> nostream start --nginx +scripts/stop -> nostream stop +scripts/print_tor_hostname -> nostream info --tor-hostname +scripts/print_i2p_hostname -> nostream info --i2p-hostname +scripts/update -> nostream update +scripts/clean -> nostream clean +``` + +### Importing events from JSON Lines or JSON Arrays + +You can import NIP-01 events from `.jsonl` (JSON Lines) or `.json` (JSON array) files directly into the relay database. -You can import NIP-01 events from `.jsonl` files directly into the relay database. 
-Compressed files are also supported and decompressed on-the-fly: +Compressed `.jsonl` files are also supported and decompressed on-the-fly: - `.jsonl.gz` (Gzip) - `.jsonl.xz` (XZ) Basic import: ``` - pnpm import ./events.jsonl + nostream import ./events.jsonl + ``` + +Equivalent alias form: + ``` + nostream import --file ./events.jsonl + ``` + +Import from a JSON array file (compatible with `nostream export --format json`): + ``` + nostream import --file ./events.json ``` Import a compressed backup: ``` - pnpm import ./events.jsonl.gz - pnpm import ./events.jsonl.xz + nostream import ./events.jsonl.gz + nostream import ./events.jsonl.xz ``` Set a custom batch size (default: `1000`): ``` - pnpm import ./events.jsonl --batch-size 500 + nostream import ./events.jsonl --batch-size 500 ``` The importer: - Processes the file line-by-line to keep memory usage bounded. +- Streams JSON array items one by one to keep memory usage bounded. - Validates NIP-01 schema, event id hash, and Schnorr signature before insertion. - Inserts in database transactions per batch. - Skips duplicates without failing the whole import. @@ -302,8 +330,8 @@ You can [install as a systemd service](https://www.swissrouting.com/nostr.html#i RestartSec=5 User=nostr WorkingDirectory=/home/nostr/nostream - ExecStart=/home/nostr/nostream/scripts/start - ExecStop=/home/nostr/nostream/scripts/stop + ExecStart=/usr/bin/env bash -lc 'cd /home/nostr/nostream && nostream start' + ExecStop=/usr/bin/env bash -lc 'cd /home/nostr/nostream && nostream stop' [Install] WantedBy=multi-user.target @@ -370,7 +398,7 @@ To fix this, configure Docker daemon DNS in `/etc/docker/daemon.json`. 4. 
Retry starting nostream: ``` - ./scripts/start + nostream start ``` Note: avoid `127.0.0.53` in Docker DNS settings because it points to the host's @@ -472,7 +500,6 @@ To clean up the build, coverage and test reports run: ``` pnpm clean ``` - ## Development & Contributing For development environment setup, testing, linting, load testing, and contribution guidelines @@ -481,7 +508,7 @@ For development environment setup, testing, linting, load testing, and contribut ## Export Events -Export all stored events to a [JSON Lines](https://jsonlines.org/) (`.jsonl`) file. Each line is a valid NIP-01 Nostr event JSON object. The export streams rows from the database using cursors, so it works safely on relays with millions of events without loading them into memory. +Export all stored events to either [JSON Lines](https://jsonlines.org/) (`.jsonl`) or JSON array (`.json`) format. The export streams rows from the database using cursors, so it works safely on relays with millions of events without loading them into memory. 
Optional compression is supported for lower storage and transfer costs: @@ -489,10 +516,12 @@ Optional compression is supported for lower storage and transfer costs: - XZ via `lzma-native` ``` -pnpm export # writes to events.jsonl -pnpm export backup-2024-01-01.jsonl # custom filename -pnpm export backup.jsonl.gz --compress --format=gzip -pnpm export backup.jsonl.xz --compress --format=xz +nostream export # writes to events.jsonl +nostream export --output backup-2024-01-01.jsonl # custom filename +nostream export --output backup.jsonl.gz --compress --format=gzip +nostream export --output backup.jsonl.xz --compress --format=xz +nostream export --output backup-2024-01-01.jsonl # alias form +nostream export --output backup-2024-01-01.json --format json # JSON array output ``` Flags: @@ -535,43 +564,42 @@ pnpm db:verify-index-impact ``` It seeds ~200k synthetic events, drops the hot-path indexes, runs EXPLAIN (ANALYZE, BUFFERS) for each hot query, recreates the indexes, and prints a BEFORE/AFTER table. See the *Database indexes and benchmarking* section of [CONFIGURATION.md](CONFIGURATION.md). - ## Relay Maintenance -Use `clean-db` to wipe or prune `events` table data. This also removes +Use `nostream dev db:clean` to wipe or prune `events` table data. This also removes corresponding data from the derived `event_tags` table when present. 
Dry run (no deletion): ``` - pnpm clean-db --all --dry-run + nostream dev db:clean --all --dry-run ``` Full wipe: ``` - pnpm clean-db --all --force + nostream dev db:clean --all --force ``` Delete events older than N days: ``` - pnpm clean-db --older-than=30 --force + nostream dev db:clean --older-than=30 --force ``` Delete only selected kinds: ``` - pnpm clean-db --kinds=1,7,4 --force + nostream dev db:clean --kinds=1,7,4 --force ``` Delete only selected kinds older than N days: ``` - pnpm clean-db --older-than=30 --kinds=1,7,4 --force + nostream dev db:clean --older-than=30 --kinds=1,7,4 --force ``` -By default, the script asks for explicit confirmation (`Type 'DELETE' to confirm`). +By default, the command asks for explicit confirmation (`Type 'DELETE' to confirm`). Use `--force` to skip the prompt. @@ -579,7 +607,7 @@ Use `--force` to skip the prompt. You can change the default folder by setting the `NOSTR_CONFIG_DIR` environment variable to a different path. -Run nostream using one of the quick-start guides at least once and `nostream/.nostr/settings.json` will be created. +Run nostream using one of the quick-start guides at least once and `nostream/.nostr/settings.yaml` will be created. Any changes made to the settings file will be read on the next start. Default settings can be found under `resources/default-settings.yaml`. Feel free to copy it to `nostream/.nostr/settings.yaml` if you would like to have a settings file before running the relay first. 
diff --git a/docker-compose.yml b/docker-compose.yml index 2a7567f3..df7c8d48 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -77,7 +77,7 @@ services: default: ipv4_address: 10.10.10.2 - nostream-db: + nostream-db: image: postgres:15 container_name: nostream-db environment: @@ -88,8 +88,9 @@ services: - ${PWD}/.nostr/data:/var/lib/postgresql/data - ${PWD}/.nostr/db-logs:/var/log/postgresql - ${PWD}/postgresql.conf:/postgresql.conf - networks: - default: + networks: + default: + ipv4_address: 10.10.10.3 command: postgres -c 'config_file=/postgresql.conf' restart: always healthcheck: @@ -99,14 +100,15 @@ services: retries: 5 start_period: 360s - nostream-cache: + nostream-cache: image: redis:7.0.5-alpine3.16 container_name: nostream-cache volumes: - cache:/data command: redis-server --loglevel warning --requirepass nostr_ts_relay - networks: - default: + networks: + default: + ipv4_address: 10.10.10.4 restart: always healthcheck: test: [ "CMD", "redis-cli", "ping", "|", "grep", "PONG" ] diff --git a/package.json b/package.json index c657f6ab..25793dd1 100644 --- a/package.json +++ b/package.json @@ -26,11 +26,28 @@ ], "supportedNipExtensions": [], "main": "src/index.ts", + "bin": { + "nostream": "./dist/src/cli/index.js" + }, + "files": [ + "dist", + "resources", + "nginx", + "i2p", + "docker-compose*.yml", + "postgresql.conf", + ".env.example", + "README.md", + "CLI.md", + "CONFIGURATION.md" + ], "scripts": { + "cli": "node --env-file-if-exists=.env -r ts-node/register src/cli/index.ts", "dev": "node --env-file-if-exists=.env -r ts-node/register src/index.ts", "clean-db": "node --env-file-if-exists=.env -r ts-node/register src/clean-db.ts", "clean": "rimraf ./{dist,.nyc_output,.test-reports,.coverage}", "build": "tsc --project tsconfig.build.json", + "verify:cli:build": "node scripts/verify-cli-build.js", "prestart": "pnpm run build", "start": "cd dist && node --env-file-if-exists=../.env src/index.js", "build:check": "pnpm run build --noEmit", @@ -47,8 
+64,10 @@ "db:seed": "knex seed:run", "db:benchmark": "node --env-file-if-exists=.env -r ts-node/register src/scripts/benchmark-queries.ts", "db:verify-index-impact": "node --env-file-if-exists=.env -r ts-node/register scripts/verify-index-impact.ts", - "pretest:unit": "node -e \"require('fs').mkdirSync('.test-reports/unit', {recursive: true})\"", + "pretest:unit": "pnpm run build && node -e \"require('fs').mkdirSync('.test-reports/unit', {recursive: true})\"", "test:unit": "mocha 'test/**/*.spec.ts'", + "pretest:cli": "pnpm run build", + "test:cli": "mocha 'test/unit/cli/**/*.spec.ts'", "test:unit:watch": "pnpm run test:unit --min --watch --watch-files src/**/*,test/**/*", "cover:unit": "nyc --report-dir .coverage/unit pnpm run test:unit", "docker:build": "docker build -t nostream .", @@ -58,19 +77,21 @@ "test:integration": "cucumber-js", "cover:integration": "nyc --report-dir .coverage/integration pnpm run test:integration -p cover", "export": "node --env-file-if-exists=.env -r ts-node/register src/scripts/export-events.ts", - "docker:compose:start": "./scripts/start", - "docker:compose:stop": "./scripts/stop", - "docker:compose:clean": "./scripts/clean", - "tor:docker:compose:start": "./scripts/start_with_tor", - "tor:hostname": "./scripts/print_tor_hostname", - "tor:docker:compose:stop": "./scripts/stop", - "i2p:docker:compose:start": "./scripts/start_with_i2p", - "i2p:hostname": "./scripts/print_i2p_hostname", - "i2p:docker:compose:stop": "./scripts/stop", + "docker:compose:start": "pnpm run cli -- start", + "docker:compose:stop": "pnpm run cli -- stop", + "docker:compose:clean": "pnpm run cli -- clean", + "tor:docker:compose:start": "pnpm run cli -- start --tor", + "tor:hostname": "pnpm run cli -- info --tor-hostname", + "tor:docker:compose:stop": "pnpm run cli -- stop", + "i2p:docker:compose:start": "pnpm run cli -- start --i2p", + "i2p:hostname": "pnpm run cli -- info --i2p-hostname", + "i2p:docker:compose:stop": "pnpm run cli -- stop", 
"docker:integration:run": "docker compose -f ./test/integration/docker-compose.yml run --rm tests", + "test:cli:docker-smoke": "pnpm run cli -- stop --all && pnpm run cli -- info", "docker:test:integration": "pnpm run docker:integration:run pnpm run test:integration", - "docker:cover:integration": "pnpm run docker:integration:run pnpm run cover:integration", + "docker:cover:integration": "pnpm run docker:integration:run pnpm exec nyc --report-dir .coverage/integration pnpm run test:integration -- -p cover", "postdocker:integration:run": "docker compose -f ./test/integration/docker-compose.yml down", + "prepack": "pnpm run build", "prepare": "husky install || exit 0", "changeset:version": "changeset version && pnpm install --lockfile-only", "changeset:publish": "changeset publish" @@ -131,17 +152,22 @@ "node": ">=24.14.1" }, "dependencies": { + "@clack/prompts": "^1.2.0", "@noble/secp256k1": "1.7.1", "accepts": "^1.3.8", "axios": "^1.15.0", + "cac": "^7.0.0", + "colorette": "^2.0.20", "express": "4.22.1", "js-yaml": "4.1.1", "knex": "2.4.2", + "ora": "^9.3.0", "pg": "8.9.0", "pg-query-stream": "4.3.0", "pino": "^8.21.0", "ramda": "0.28.0", "redis": "4.5.1", + "stream-json": "^2.1.0", "ws": "^8.18.0", "zod": "^3.22.4" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0ba2b532..27574e7f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + '@clack/prompts': + specifier: ^1.2.0 + version: 1.2.0 '@noble/secp256k1': specifier: 1.7.1 version: 1.7.1 @@ -17,6 +20,12 @@ importers: axios: specifier: ^1.15.0 version: 1.15.1 + cac: + specifier: ^7.0.0 + version: 7.0.0 + colorette: + specifier: ^2.0.20 + version: 2.0.20 express: specifier: 4.22.1 version: 4.22.1 @@ -26,6 +35,9 @@ importers: knex: specifier: 2.4.2 version: 2.4.2(pg@8.9.0) + ora: + specifier: ^9.3.0 + version: 9.4.0 pg: specifier: 8.9.0 version: 8.9.0 @@ -41,6 +53,9 @@ importers: redis: specifier: 4.5.1 version: 4.5.1 + stream-json: + specifier: ^2.1.0 + version: 2.1.0 
ws: specifier: ^8.18.0 version: 8.20.0 @@ -349,6 +364,12 @@ packages: '@changesets/write@0.4.0': resolution: {integrity: sha512-CdTLvIOPiCNuH71pyDu3rA+Q0n65cmAbXnwWH84rKGiFumFzkmHNT8KHTMEchcxN+Kl8I54xGUhJ7l3E7X396Q==} + '@clack/core@1.2.0': + resolution: {integrity: sha512-qfxof/3T3t9DPU/Rj3OmcFyZInceqj/NVtO9rwIuJqCUgh32gwPjpFQQp/ben07qKlhpwq7GzfWpST4qdJ5Drg==} + + '@clack/prompts@1.2.0': + resolution: {integrity: sha512-4jmztR9fMqPMjz6H/UZXj0zEmE43ha1euENwkckKKel4XpSfokExPo5AiVStdHSAlHekz4d0CA/r45Ok1E4D3w==} + '@colors/colors@1.5.0': resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} @@ -957,6 +978,10 @@ packages: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} + cac@7.0.0: + resolution: {integrity: sha512-tixWYgm5ZoOD+3g6UTea91eow5z6AAHaho3g0V9CNSNb45gM8SmflpAc+GRd1InC4AqN/07Unrgp56Y94N9hJQ==} + engines: {node: '>=20.19.0'} + caching-transform@4.0.0: resolution: {integrity: sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==} engines: {node: '>=8'} @@ -1029,6 +1054,14 @@ packages: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} + cli-cursor@5.0.0: + resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} + engines: {node: '>=18'} + + cli-spinners@3.4.0: + resolution: {integrity: sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw==} + engines: {node: '>=18.20'} + cli-table3@0.6.3: resolution: {integrity: sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==} engines: {node: 10.* || >= 12.*} @@ -1396,9 +1429,18 @@ packages: fast-safe-stringify@2.1.1: resolution: {integrity: 
sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + fast-string-truncated-width@1.2.1: + resolution: {integrity: sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==} + + fast-string-width@1.1.0: + resolution: {integrity: sha512-O3fwIVIH5gKB38QNbdg+3760ZmGz0SZMgvwJbA1b2TGXceKE6A2cOlfogh1iw8lr049zPyd7YADHy+B7U4W9bQ==} + fast-uri@3.1.0: resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + fast-wrap-ansi@0.1.6: + resolution: {integrity: sha512-HlUwET7a5gqjURj70D5jl7aC3Zmy4weA1SHUfM0JFI0Ptq987NH2TwbBFLoERhfwk+E+eaq4EK3jXoT+R3yp3w==} + fastq@1.20.1: resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} @@ -1517,6 +1559,10 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} + get-east-asian-width@1.5.0: + resolution: {integrity: sha512-CQ+bEO+Tva/qlmw24dCejulK5pMzVnUOFOijVogd3KQs07HnRIgp8TGipvCCRT06xeYEbpbgwaCxglFyiuIcmA==} + engines: {node: '>=18'} + get-func-name@2.0.2: resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} @@ -1728,6 +1774,10 @@ packages: resolution: {integrity: sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==} engines: {node: '>=10'} + is-interactive@2.0.0: + resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} + engines: {node: '>=12'} + is-iterable@1.1.1: resolution: {integrity: sha512-EdOZCr0NsGE00Pot+x1ZFx9MJK3C6wy91geZpXwvwexDLJvA4nzYyZf7r+EIwSeVsOLDdBz7ATg9NqKTzuNYuQ==} engines: {node: '>= 4'} @@ -1775,6 +1825,10 @@ packages: resolution: {integrity: 
sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} engines: {node: '>=10'} + is-unicode-supported@2.1.0: + resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} + engines: {node: '>=18'} + is-windows@1.0.2: resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} engines: {node: '>=0.10.0'} @@ -1983,6 +2037,10 @@ packages: resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} engines: {node: '>=10'} + log-symbols@7.0.1: + resolution: {integrity: sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg==} + engines: {node: '>=18'} + loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true @@ -2101,6 +2159,10 @@ packages: resolution: {integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==} engines: {node: '>=8'} + mimic-function@5.0.1: + resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} + engines: {node: '>=18'} + min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} @@ -2259,10 +2321,18 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + onetime@7.0.0: + resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} + engines: {node: '>=18'} + opener@1.5.2: resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} hasBin: true + ora@9.4.0: + 
resolution: {integrity: sha512-84cglkRILFxdtA8hAvLNdMrtBpPNBTrQ9/ulg0FA7xLMnD6mifv+enAIeRmvtv+WgdCE+LPGOfQmtJRrVaIVhQ==} + engines: {node: '>=20'} + outdent@0.5.0: resolution: {integrity: sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q==} @@ -2663,6 +2733,10 @@ packages: engines: {node: '>= 0.4'} hasBin: true + restore-cursor@5.1.0: + resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} + engines: {node: '>=18'} + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -2786,6 +2860,9 @@ packages: resolution: {integrity: sha512-PZXKc08f/wcA/BMRGBze2Wmw50CWPiAH3E21EOi4B49vJ616vW4DQh4fQrqsYox2aNR/N3kCqLuB0PwwOucQrg==} deprecated: 16.1.1 + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -2847,6 +2924,16 @@ packages: resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} engines: {node: '>= 0.8'} + stdin-discarder@0.3.2: + resolution: {integrity: sha512-eCPu1qRxPVkl5605OTWF8Wz40b4Mf45NY5LQmVPQ599knfs5QhASUm9GbJ5BDMDOXgrnh0wyEdvzmL//YMlw0A==} + engines: {node: '>=18'} + + stream-chain@3.6.1: + resolution: {integrity: sha512-M4BQpNPI71uumkVXjl4y+mIormQXdo4R0pSR23mcLbn6D+kpvu7Kx2g1hf0jRB76Zb1IT1M06OIGghMTAtZdyQ==} + + stream-json@2.1.0: + resolution: {integrity: sha512-9gV/ywtebMn3DdKnNKYCb9iESvgR1dHbucNV+bRGvdvy+jV4c9FFgYKmENhpKv58jSwvs90Wk80RhfKk1KxHPg==} + string-argv@0.3.1: resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==} engines: {node: 
'>=0.6.19'} @@ -2859,6 +2946,10 @@ packages: resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} engines: {node: '>=12'} + string-width@8.2.0: + resolution: {integrity: sha512-6hJPQ8N0V0P3SNmP6h2J99RLuzrWz2gvT7VnK5tKvrNqJoyS9W4/Fb8mo31UiPvy00z7DQXkP2hnKBVav76thw==} + engines: {node: '>=20'} + string.fromcodepoint@0.2.1: resolution: {integrity: sha512-n69H31OnxSGSZyZbgBlvYIXlrMhJQ0dQAX1js1QDhpaUH6zmU3QYlj07bCwCNlPOu3oRXIubGPl2gDGnHsiCqg==} @@ -3263,6 +3354,10 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + yoctocolors@2.1.2: + resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==} + engines: {node: '>=18'} + yup@1.2.0: resolution: {integrity: sha512-PPqYKSAXjpRCgLgLKVGPA33v5c/WgEx3wi6NFjIiegz90zSwyMpvTFp/uGcVnnbx6to28pgnzp/q8ih3QRjLMQ==} @@ -3575,6 +3670,18 @@ snapshots: human-id: 4.1.3 prettier: 2.8.8 + '@clack/core@1.2.0': + dependencies: + fast-wrap-ansi: 0.1.6 + sisteransi: 1.0.5 + + '@clack/prompts@1.2.0': + dependencies: + '@clack/core': 1.2.0 + fast-string-width: 1.1.0 + fast-wrap-ansi: 0.1.6 + sisteransi: 1.0.5 + '@colors/colors@1.5.0': optional: true @@ -4340,6 +4447,8 @@ snapshots: bytes@3.1.2: {} + cac@7.0.0: {} + caching-transform@4.0.0: dependencies: hasha: 5.2.2 @@ -4425,6 +4534,12 @@ snapshots: clean-stack@2.2.0: {} + cli-cursor@5.0.0: + dependencies: + restore-cursor: 5.1.0 + + cli-spinners@3.4.0: {} + cli-table3@0.6.3: dependencies: string-width: 4.2.3 @@ -4772,8 +4887,18 @@ snapshots: fast-safe-stringify@2.1.1: {} + fast-string-truncated-width@1.2.1: {} + + fast-string-width@1.1.0: + dependencies: + fast-string-truncated-width: 1.2.1 + fast-uri@3.1.0: {} + fast-wrap-ansi@0.1.6: + dependencies: + fast-string-width: 1.1.0 + fastq@1.20.1: dependencies: reusify: 1.1.0 @@ -4885,6 +5010,8 @@ snapshots: 
get-caller-file@2.0.5: {} + get-east-asian-width@1.5.0: {} + get-func-name@2.0.2: {} get-intrinsic@1.3.0: @@ -5084,6 +5211,8 @@ snapshots: global-dirs: 3.0.1 is-path-inside: 3.0.3 + is-interactive@2.0.0: {} + is-iterable@1.1.1: {} is-number@4.0.0: {} @@ -5112,6 +5241,8 @@ snapshots: is-unicode-supported@0.1.0: {} + is-unicode-supported@2.1.0: {} + is-windows@1.0.2: {} isexe@2.0.0: {} @@ -5316,6 +5447,11 @@ snapshots: chalk: 4.1.2 is-unicode-supported: 0.1.0 + log-symbols@7.0.1: + dependencies: + is-unicode-supported: 2.1.0 + yoctocolors: 2.1.2 + loose-envify@1.4.0: dependencies: js-tokens: 4.0.0 @@ -5424,6 +5560,8 @@ snapshots: mimic-fn@3.1.0: {} + mimic-function@5.0.1: {} + min-indent@1.0.1: {} minimatch@3.1.5: @@ -5636,8 +5774,23 @@ snapshots: dependencies: mimic-fn: 2.1.0 + onetime@7.0.0: + dependencies: + mimic-function: 5.0.1 + opener@1.5.2: {} + ora@9.4.0: + dependencies: + chalk: 5.6.2 + cli-cursor: 5.0.0 + cli-spinners: 3.4.0 + is-interactive: 2.0.0 + is-unicode-supported: 2.1.0 + log-symbols: 7.0.1 + stdin-discarder: 0.3.2 + string-width: 8.2.0 + outdent@0.5.0: {} p-defer@1.0.0: {} @@ -6024,6 +6177,11 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + restore-cursor@5.1.0: + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + reusify@1.1.0: {} reverse-arguments@1.0.0: {} @@ -6157,6 +6315,8 @@ snapshots: nise: 5.1.9 supports-color: 7.2.0 + sisteransi@1.0.5: {} + slash@3.0.0: {} slash@4.0.0: {} @@ -6220,6 +6380,14 @@ snapshots: statuses@2.0.2: {} + stdin-discarder@0.3.2: {} + + stream-chain@3.6.1: {} + + stream-json@2.1.0: + dependencies: + stream-chain: 3.6.1 + string-argv@0.3.1: {} string-width@4.2.3: @@ -6234,6 +6402,11 @@ snapshots: emoji-regex: 9.2.2 strip-ansi: 7.2.0 + string-width@8.2.0: + dependencies: + get-east-asian-width: 1.5.0 + strip-ansi: 7.2.0 + string.fromcodepoint@0.2.1: {} string_decoder@1.3.0: @@ -6604,6 +6777,8 @@ snapshots: yocto-queue@0.1.0: {} + yoctocolors@2.1.2: {} + yup@1.2.0: dependencies: property-expr: 
2.0.6 diff --git a/scripts/clean b/scripts/clean deleted file mode 100755 index 3e8db098..00000000 --- a/scripts/clean +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." - -$PROJECT_ROOT/scripts/stop_docker - -docker system prune -a - -docker volume prune diff --git a/scripts/print_i2p_hostname b/scripts/print_i2p_hostname deleted file mode 100644 index 4a9b02ed..00000000 --- a/scripts/print_i2p_hostname +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -set -euo pipefail - -PROJECT_ROOT="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/.." -KEYS_FILE="${PROJECT_ROOT}/.nostr/i2p/data/nostream.dat" - -if [ ! -f "${KEYS_FILE}" ]; then - echo "I2P destination keys not found. Is the i2pd container running?" - echo "Expected: ${KEYS_FILE}" - exit 1 -fi - -# The .b32.i2p address is derived from a SHA-256 hash of the Destination -# inside nostream.dat, so we cannot compute it portably from the host. -# Query the running i2pd container instead. -echo "I2P destination keys exist at: ${KEYS_FILE}" -echo "" -echo "To find your nostream .b32.i2p address, use one of these methods:" -echo " 1. Open the i2pd web console: http://127.0.0.1:7070/?page=i2p_tunnels" -echo " (published by docker-compose.i2p.yml, bound to 127.0.0.1 only)" -echo " 2. Query the console from inside the container:" -echo " docker exec i2pd wget -qO- 'http://127.0.0.1:7070/?page=i2p_tunnels' \\" -echo " | grep -oE '[a-z2-7]{52}\\.b32\\.i2p' | sort -u" diff --git a/scripts/print_tor_hostname b/scripts/print_tor_hostname deleted file mode 100755 index 09bccb5d..00000000 --- a/scripts/print_tor_hostname +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." 
-cat $PROJECT_ROOT/.nostr/tor/data/nostream/hostname diff --git a/scripts/start b/scripts/start deleted file mode 100755 index 296477e0..00000000 --- a/scripts/start +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." -DOCKER_COMPOSE_FILE="${PROJECT_ROOT}/docker-compose.yml" -NOSTR_CONFIG_DIR="${PROJECT_ROOT}/.nostr" -SETTINGS_FILE="${NOSTR_CONFIG_DIR}/settings.yaml" -DEFAULT_SETTINGS_FILE="${PROJECT_ROOT}/resources/default-settings.yaml" -CURRENT_DIR=$(pwd) - -if [[ ${CURRENT_DIR} =~ /scripts$ ]]; then - echo "Please run this script from the Nostream root folder, not the scripts directory." - echo "To do this, change up one directory, and then run the following command:" - echo "./scripts/start" - exit 1 -fi - - -if [ "$EUID" -eq 0 ] - then echo "Error: Nostream should not be run as root." - exit 1 -fi - -# ── DNS Pre-flight Check ───────────────────────────────────────────── -YELLOW=$'\033[0;33m' -BOLD_YELLOW=$'\033[1;33m' -NC=$'\033[0m' -DNS_TEST_URL="https://dl-cdn.alpinelinux.org" -DNS_MAX_RETRIES=3 -DNS_OK=false -BACKOFF=2 - -echo "Checking Docker DNS connectivity..." -for i in $(seq 1 $DNS_MAX_RETRIES); do - printf " [Attempt $i/$DNS_MAX_RETRIES] Testing resolution... " - if docker run --rm alpine wget --spider --timeout=5 "$DNS_TEST_URL" > /dev/null 2>&1; then - echo "Success" - DNS_OK=true - break - else - echo "Failed" - fi - [ "$i" -lt "$DNS_MAX_RETRIES" ] && sleep $BACKOFF && BACKOFF=$((BACKOFF * 2)) -done - -if [ "$DNS_OK" = false ]; then - cat <&2 - -${BOLD_YELLOW} WARNING: Docker DNS resolution failed after $DNS_MAX_RETRIES attempts.${NC} -${YELLOW} Containers cannot resolve external domains (e.g. dl-cdn.alpinelinux.org). - This is commonly caused by a DNS bridge conflict with systemd-resolved. - - Suggested fixes: - 1. Add DNS to /etc/docker/daemon.json: - { "dns": ["8.8.8.8", "8.8.4.4"] } - 2. 
Then run sudo systemctl restart docker - - The build will continue, but may fail during package installation.${NC} - -EOF -fi - -if [[ ! -d "${NOSTR_CONFIG_DIR}" ]]; then - echo "Creating folder ${NOSTR_CONFIG_DIR}" - mkdir -p "${NOSTR_CONFIG_DIR}" -fi - -if [[ ! -f "${SETTINGS_FILE}" ]]; then - echo "Copying ${DEFAULT_SETTINGS_FILE} to ${SETTINGS_FILE}" - cp "${DEFAULT_SETTINGS_FILE}" "${SETTINGS_FILE}" -fi - -docker compose \ - -f $DOCKER_COMPOSE_FILE \ - up --build --remove-orphans $@ diff --git a/scripts/start_with_i2p b/scripts/start_with_i2p deleted file mode 100644 index cc458a3d..00000000 --- a/scripts/start_with_i2p +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -set -euo pipefail - -PROJECT_ROOT="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/.." -DOCKER_COMPOSE_FILE="${PROJECT_ROOT}/docker-compose.yml" -DOCKER_COMPOSE_I2P_FILE="${PROJECT_ROOT}/docker-compose.i2p.yml" -I2P_DATA_DIR="${PROJECT_ROOT}/.nostr/i2p/data" -NOSTR_CONFIG_DIR="${PROJECT_ROOT}/.nostr" -SETTINGS_FILE="${NOSTR_CONFIG_DIR}/settings.yaml" -DEFAULT_SETTINGS_FILE="${PROJECT_ROOT}/resources/default-settings.yaml" -CURRENT_DIR="$(pwd)" - -if [[ ${CURRENT_DIR} =~ /scripts$ ]]; then - echo "Please run this script from the Nostream root folder, not the scripts directory." - echo "To do this, change up one directory, and then run the following command:" - echo "./scripts/start_with_i2p" - exit 1 -fi - -if [ "$EUID" -eq 0 ]; then - echo "Error: Nostream should not be run as root." - exit 1 -fi - -if [[ ! -d "${NOSTR_CONFIG_DIR}" ]]; then - echo "Creating folder ${NOSTR_CONFIG_DIR}" - mkdir -p "${NOSTR_CONFIG_DIR}" -fi - -if [[ ! 
-f "${SETTINGS_FILE}" ]]; then - echo "Copying ${DEFAULT_SETTINGS_FILE} to ${SETTINGS_FILE}" - cp "${DEFAULT_SETTINGS_FILE}" "${SETTINGS_FILE}" -fi - -mkdir -p "${I2P_DATA_DIR}" - -docker compose \ - -f "${DOCKER_COMPOSE_FILE}" \ - -f "${DOCKER_COMPOSE_I2P_FILE}" \ - up --build --remove-orphans "$@" diff --git a/scripts/start_with_nginx b/scripts/start_with_nginx deleted file mode 100755 index 45a2bed9..00000000 --- a/scripts/start_with_nginx +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." -DOCKER_COMPOSE_FILE="${PROJECT_ROOT}/docker-compose.yml" -DOCKER_COMPOSE_NGINX_FILE="${PROJECT_ROOT}/docker-compose.nginx.yml" -NGINX_CONF_DIR="${PROJECT_ROOT}/nginx/conf.d" -NGINX_TEMPLATE="${NGINX_CONF_DIR}/nostream.conf.template" -NGINX_CONF="${NGINX_CONF_DIR}/nostream.conf" -NOSTR_CONFIG_DIR="${PROJECT_ROOT}/.nostr" -SETTINGS_FILE="${NOSTR_CONFIG_DIR}/settings.yaml" -DEFAULT_SETTINGS_FILE="${PROJECT_ROOT}/resources/default-settings.yaml" -CURRENT_DIR=$(pwd) - -if [[ ${CURRENT_DIR} =~ /scripts$ ]]; then - echo "Please run this script from the Nostream root folder, not the scripts directory." - echo "To do this, change up one directory, and then run the following command:" - echo "./scripts/start_with_nginx" - exit 1 -fi - -if [ "$EUID" -eq 0 ] - then echo "Error: Nostream should not be run as root." - exit 1 -fi - -if [[ -z "${RELAY_DOMAIN}" ]]; then - echo "Error: RELAY_DOMAIN environment variable is not set." - echo "Usage: RELAY_DOMAIN=relay.example.com CERTBOT_EMAIL=you@example.com ./scripts/start_with_nginx" - exit 1 -fi - -FQDN_REGEX='^([A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?\.)+[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?$' -if [[ ! "${RELAY_DOMAIN}" =~ ${FQDN_REGEX} ]]; then - echo "Error: RELAY_DOMAIN must be a valid fully-qualified domain name." 
- echo "Usage: RELAY_DOMAIN=relay.example.com CERTBOT_EMAIL=you@example.com ./scripts/start_with_nginx" - exit 1 -fi - -if [[ -z "${CERTBOT_EMAIL}" ]]; then - echo "Error: CERTBOT_EMAIL environment variable is not set." - echo "Usage: RELAY_DOMAIN=relay.example.com CERTBOT_EMAIL=you@example.com ./scripts/start_with_nginx" - exit 1 -fi - -if [[ ! -d "${NOSTR_CONFIG_DIR}" ]]; then - echo "Creating folder ${NOSTR_CONFIG_DIR}" - mkdir -p "${NOSTR_CONFIG_DIR}" -fi - -if [[ ! -f "${SETTINGS_FILE}" ]]; then - echo "Copying ${DEFAULT_SETTINGS_FILE} to ${SETTINGS_FILE}" - cp "${DEFAULT_SETTINGS_FILE}" "${SETTINGS_FILE}" -fi - -# Generate nginx config from template -echo "Generating nginx config for domain: ${RELAY_DOMAIN}" -sed "s/\${RELAY_DOMAIN}/${RELAY_DOMAIN}/g" "${NGINX_TEMPLATE}" > "${NGINX_CONF}" - -# Generate a temporary self-signed cert if no real cert exists yet. -# This lets nginx boot so it can serve the ACME challenge for certbot -# to obtain the real Let's Encrypt certificate. -SSL_CERT_DIR="${PROJECT_ROOT}/nginx/ssl/live/${RELAY_DOMAIN}" -if [[ ! -f "${SSL_CERT_DIR}/fullchain.pem" ]]; then - echo "No SSL certificate found. Generating a temporary self-signed certificate..." - mkdir -p "${SSL_CERT_DIR}" - if ! openssl req -x509 -nodes -newkey rsa:2048 \ - -days 1 \ - -keyout "${SSL_CERT_DIR}/privkey.pem" \ - -out "${SSL_CERT_DIR}/fullchain.pem" \ - -subj "/CN=${RELAY_DOMAIN}" 2>/dev/null; then - echo "Error: Failed to generate self-signed certificate. Is openssl installed?" - exit 1 - fi -fi - -# Ensure compose uses the project root for volume mounts -cd "${PROJECT_ROOT}" - -docker compose \ - -f "${DOCKER_COMPOSE_FILE}" \ - -f "${DOCKER_COMPOSE_NGINX_FILE}" \ - up --build --remove-orphans "$@" diff --git a/scripts/start_with_tor b/scripts/start_with_tor deleted file mode 100755 index cb7f52b2..00000000 --- a/scripts/start_with_tor +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." 
-DOCKER_COMPOSE_FILE="${PROJECT_ROOT}/docker-compose.yml" -DOCKER_COMPOSE_TOR_FILE="${PROJECT_ROOT}/docker-compose.tor.yml" -TOR_DATA_DIR="$PROJECT_ROOT/.nostr/tor/data" -NOSTR_CONFIG_DIR="${PROJECT_ROOT}/.nostr" -SETTINGS_FILE="${NOSTR_CONFIG_DIR}/settings.yaml" -DEFAULT_SETTINGS_FILE="${PROJECT_ROOT}/resources/default-settings.yaml" -CURRENT_DIR=$(pwd) - -if [[ ${CURRENT_DIR} =~ /scripts$ ]]; then - echo "Please run this script from the Nostream root folder, not the scripts directory." - echo "To do this, change up one directory, and then run the following command:" - echo "./scripts/start" - exit 1 -fi - -if [ "$EUID" -eq 0 ] - then echo "Error: Nostream should not be run as root." - exit 1 -fi - -if [[ ! -d "${NOSTR_CONFIG_DIR}" ]]; then - echo "Creating folder ${NOSTR_CONFIG_DIR}" - mkdir -p "${NOSTR_CONFIG_DIR}" -fi - -if [[ ! -f "${SETTINGS_FILE}" ]]; then - echo "Copying ${DEFAULT_SETTINGS_FILE} to ${SETTINGS_FILE}" - cp "${DEFAULT_SETTINGS_FILE}" "${SETTINGS_FILE}" -fi - -mkdir -p $TOR_DATA_DIR - -docker compose \ - -f $DOCKER_COMPOSE_FILE \ - -f $DOCKER_COMPOSE_TOR_FILE \ - up --build --remove-orphans $@ diff --git a/scripts/stop b/scripts/stop deleted file mode 100755 index 788cf020..00000000 --- a/scripts/stop +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -euo pipefail - -PROJECT_ROOT="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/.." 
-DOCKER_COMPOSE_FILE="${PROJECT_ROOT}/docker-compose.yml" -DOCKER_COMPOSE_TOR_FILE="${PROJECT_ROOT}/docker-compose.tor.yml" -DOCKER_COMPOSE_I2P_FILE="${PROJECT_ROOT}/docker-compose.i2p.yml" -DOCKER_COMPOSE_LOCAL_FILE="${PROJECT_ROOT}/docker-compose.local.yml" - -docker compose \ - -f "${DOCKER_COMPOSE_FILE}" \ - -f "${DOCKER_COMPOSE_TOR_FILE}" \ - -f "${DOCKER_COMPOSE_I2P_FILE}" \ - -f "${DOCKER_COMPOSE_LOCAL_FILE}" \ - down "$@" diff --git a/scripts/update b/scripts/update deleted file mode 100755 index 54fe76fd..00000000 --- a/scripts/update +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -PROJECT_ROOT="$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/.." -SCRIPTS_DIR="${PROJECT_ROOT}/scripts" - -$SCRIPTS_DIR/stop - -git stash -u - -git pull - -git stash pop - -$SCRIPTS_DIR/start diff --git a/scripts/verify-cli-build.js b/scripts/verify-cli-build.js new file mode 100644 index 00000000..c9964ab3 --- /dev/null +++ b/scripts/verify-cli-build.js @@ -0,0 +1,92 @@ +#!/usr/bin/env node +const fs = require('fs') +const path = require('path') +const { spawnSync } = require('child_process') + +const pkgPath = path.resolve(__dirname, '..', 'package.json') +const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8')) +const relBin = typeof pkg.bin === 'string' ? 
pkg.bin : pkg.bin?.nostream + +if (!relBin) { + console.error('package.json is missing bin.nostream') + process.exit(1) +} + +const binPath = path.resolve(__dirname, '..', relBin) +if (!fs.existsSync(binPath)) { + console.error(`Built CLI entrypoint not found: ${binPath}`) + process.exit(1) +} + +const requiredPackedFiles = [ + 'package.json', + relBin.replace(/^\.\//, ''), + 'resources/default-settings.yaml', + 'docker-compose.yml', +] + +const parsePackJsonOutput = (output) => { + const start = output.search(/^\s*[\[{]/m) + if (start === -1) { + throw new Error('No JSON payload found in pack output') + } + return JSON.parse(output.slice(start).trim()) +} + +const result = spawnSync('node', [binPath, '--help'], { + cwd: path.resolve(__dirname, '..'), + env: process.env, + encoding: 'utf-8', +}) + +if (result.status !== 0) { + console.error(`Built CLI help check failed (exit ${result.status ?? 1})`) + if (result.stdout) { + process.stderr.write(result.stdout) + } + if (result.stderr) { + process.stderr.write(result.stderr) + } + process.exit(result.status ?? 1) +} + +if (!result.stdout.includes('Usage:')) { + console.error('Built CLI help output did not contain Usage:') + process.exit(1) +} + +const packResult = spawnSync('pnpm', ['pack', '--dry-run', '--json'], { + cwd: path.resolve(__dirname, '..'), + env: process.env, + encoding: 'utf-8', +}) + +if (packResult.status !== 0) { + console.error(`pnpm pack dry-run failed (exit ${packResult.status ?? 1})`) + if (packResult.stdout) { + process.stderr.write(packResult.stdout) + } + if (packResult.stderr) { + process.stderr.write(packResult.stderr) + } + process.exit(packResult.status ?? 1) +} + +let packed +try { + packed = parsePackJsonOutput(packResult.stdout) +} catch (error) { + console.error('Failed to parse pnpm pack --json output') + process.stderr.write(String(error)) + process.exit(1) +} + +const files = new Set((packed?.files ?? 
[]).map((file) => file.path)) +for (const requiredFile of requiredPackedFiles) { + if (!files.has(requiredFile)) { + console.error(`Packed artifact is missing required file: ${requiredFile}`) + process.exit(1) + } +} + +console.log(`Verified CLI build entrypoint and package contents: ${relBin}`) diff --git a/scripts/verify-index-impact.ts b/scripts/verify-index-impact.ts index 29655159..40d00352 100644 --- a/scripts/verify-index-impact.ts +++ b/scripts/verify-index-impact.ts @@ -12,7 +12,7 @@ * * Usage: * node -r ts-node/register scripts/verify-index-impact.ts [--events N] [--pubkeys N] [--runs N] - * npm run db:verify-index-impact + * pnpm run db:verify-index-impact */ import { randomBytes } from 'node:crypto' diff --git a/seeds/0000-events.js b/seeds/0000-events.js index 4544dbed..1e94490e 100644 --- a/seeds/0000-events.js +++ b/seeds/0000-events.js @@ -1,12 +1,12 @@ /* eslint-disable @typescript-eslint/no-var-requires */ +const secp256k1 = require('@noble/secp256k1') + const NAMESPACE = 'c646b451-db73-47fb-9a70-ea24ce8a225a' +const SYNTHETIC_SEED_PRIVATE_KEY = '1'.repeat(64) function isReplaceableEvent(kind) { - return kind === 0 - || kind === 3 - || kind === 41 - || (kind >= 10000 && kind < 20000) + return kind === 0 || kind === 3 || kind === 41 || (kind >= 10000 && kind < 20000) } function isParameterizedReplaceableEvent(kind) { @@ -27,12 +27,85 @@ function getEventDeduplication(event) { return null } +function getRequestedSeedCount() { + const rawValue = process.env.NOSTREAM_SEED_COUNT + + if (typeof rawValue !== 'string' || rawValue.trim() === '') { + return undefined + } + + if (!/^\d+$/.test(rawValue.trim())) { + throw new Error(`Invalid NOSTREAM_SEED_COUNT: ${rawValue}. Expected a positive integer.`) + } + + const parsed = Number(rawValue) + if (!Number.isSafeInteger(parsed) || parsed <= 0) { + throw new Error(`Invalid NOSTREAM_SEED_COUNT: ${rawValue}. 
Expected a positive integer.`) + } + + return parsed +} + +function serializeEvent(event) { + return [0, event.pubkey, event.created_at, event.kind, event.tags, event.content] +} + +async function identifyEvent(event) { + const idBytes = await secp256k1.utils.sha256(Buffer.from(JSON.stringify(serializeEvent(event)))) + return { + ...event, + id: Buffer.from(idBytes).toString('hex'), + } +} + +async function signEvent(event, privateKey) { + const signature = await secp256k1.schnorr.sign(event.id, privateKey) + + return { + ...event, + sig: Buffer.from(signature).toString('hex'), + } +} + +const syntheticSeedPubkey = secp256k1.utils.bytesToHex(secp256k1.getPublicKey(SYNTHETIC_SEED_PRIVATE_KEY, true).subarray(1)) + +async function createSyntheticEvent(baseEvent, index) { + const unsignedEvent = { + pubkey: syntheticSeedPubkey, + created_at: baseEvent.created_at + index, + kind: baseEvent.kind, + tags: baseEvent.tags, + content: `${baseEvent.content} [seed:${index + 1}]`, + } + + const identifiedEvent = await identifyEvent(unsignedEvent) + return signEvent(identifiedEvent, SYNTHETIC_SEED_PRIVATE_KEY) +} + +async function expandSeedEvents(events, count) { + if (!count) { + return events + } + + const expanded = [] + for (let index = 0; index < count; index += 1) { + const baseEvent = events[index % events.length] + expanded.push(await createSyntheticEvent(baseEvent, index)) + } + + return expanded +} + exports.seed = async function (knex) { await knex('events').del() const { v5: uuidv5 } = require('uuid') - const eventRows = require('./events.json').reduce((result, event) => { + const sourceEvents = require('./events.json') + const requestedCount = getRequestedSeedCount() + const events = await expandSeedEvents(sourceEvents, requestedCount) + + const eventRows = events.reduce((result, event) => { result.push({ id: uuidv5(event.id, NAMESPACE), event_id: Buffer.from(event.id, 'hex'), diff --git a/src/clean-db.ts b/src/clean-db.ts index deea592c..ababf38c 100644 --- 
a/src/clean-db.ts +++ b/src/clean-db.ts @@ -13,7 +13,7 @@ type CleanDbOptions = { } const HELP_TEXT = [ - 'Usage: pnpm run clean-db [options]', + 'Usage: nostream dev db:clean [options]', '', 'Options:', ' --all Delete all events.', @@ -24,10 +24,10 @@ const HELP_TEXT = [ ' --help Show this help message.', '', 'Examples:', - ' pnpm run clean-db --all --dry-run', - ' pnpm run clean-db --all --force', - ' pnpm run clean-db --older-than=30 --force', - ' pnpm run clean-db --older-than=30 --kinds=1,7,4 --dry-run', + ' nostream dev db:clean --all --dry-run', + ' nostream dev db:clean --all --force', + ' nostream dev db:clean --older-than=30 --force', + ' nostream dev db:clean --older-than=30 --kinds=1,7,4 --dry-run', ].join('\n') const getOptionValue = (arg: string, args: string[], index: number): [string, number] => { diff --git a/src/cli/commands/config.ts b/src/cli/commands/config.ts new file mode 100644 index 00000000..0a7812fd --- /dev/null +++ b/src/cli/commands/config.ts @@ -0,0 +1,262 @@ +import ora from 'ora' +import yaml from 'js-yaml' + +import { + getByPath, + loadDefaults, + loadMergedSettings, + loadUserSettings, + parseTypedValue, + saveSettings, + setByPath, + validatePathAgainstDefaults, + validateSettings, +} from '../utils/config' +import { + isSecretEnvKey, + isSupportedEnvKey, + maskSecretValue, + readEnvValues, + upsertEnvValue, + validateEnvPair, + validateEnvValues, +} from '../utils/env-config' +import { logError, logInfo } from '../utils/output' +import { runStart } from './start' +import { runStop } from './stop' + +type ValueType = 'inferred' | 'json' + +const toJson = (value: unknown): string => { + return JSON.stringify( + value, + (_key, entry) => { + if (typeof entry === 'bigint') { + return entry.toString() + } + + return entry + }, + 2, + ) +} + +const serialize = (value: unknown): string => { + if (typeof value === 'bigint') { + return value.toString() + } + + if (typeof value === 'string') { + return value + } + + if (value === 
undefined) { + return 'undefined' + } + + return yaml.dump(value, { lineWidth: 120 }).trimEnd() +} + +const formatLabel = (key: string): string => { + return key + .split(/[_\-.]/) + .filter(Boolean) + .map((part) => part.charAt(0).toUpperCase() + part.slice(1)) + .join(' ') +} + +const restartRelay = async (): Promise => { + const spinner = ora('Restarting relay...').start() + + const stopCode = await runStop({ all: true }, []) + if (stopCode !== 0) { + spinner.fail('Restart failed while stopping relay') + return stopCode + } + + const startCode = await runStart({}, []) + if (startCode !== 0) { + spinner.fail('Restart failed while starting relay') + return startCode + } + + spinner.succeed('Relay restarted') + return 0 +} + +export const runConfigList = async (options: { json?: boolean } = {}): Promise => { + const settings = loadMergedSettings() + + if (options.json) { + logInfo(toJson(settings)) + return 0 + } + + logInfo(yaml.dump(settings, { lineWidth: 120 })) + return 0 +} + +export const runConfigGet = async (path: string, options: { json?: boolean } = {}): Promise => { + const settings = loadMergedSettings() as unknown as Record + const value = getByPath(settings, path) + + if (value === undefined) { + if (options.json) { + process.stderr.write(`${JSON.stringify({ error: { message: `Path not found: ${path}`, code: 1 } })}\n`) + return 1 + } + + logError(`Path not found: ${path}`) + return 1 + } + + if (options.json) { + logInfo(toJson(value)) + return 0 + } + + logInfo(serialize(value)) + return 0 +} + +export const runConfigSet = async ( + path: string, + rawValue: string, + options: { + restart?: boolean + validate?: boolean + valueType?: ValueType + } = {}, +): Promise => { + const valueType = options.valueType ?? 
'inferred' + + const pathIssues = validatePathAgainstDefaults(path) + if (pathIssues.length > 0) { + logError(pathIssues[0].message) + return 1 + } + + const settings = loadUserSettings() as unknown as Record + const next = setByPath(settings, path, parseTypedValue(rawValue, valueType)) + + if (options.validate !== false) { + const merged = loadMergedSettings() as unknown as Record + const mergedNext = setByPath(merged, path, getByPath(next, path)) + const validationIssues = validateSettings(mergedNext as any) + + if (validationIssues.length > 0) { + logError('Config update rejected by validation:') + for (const issue of validationIssues) { + logError(`- ${issue.path}: ${issue.message}`) + } + + return 1 + } + } + + saveSettings(next as any) + + logInfo(`Updated ${path}`) + + if (options.restart) { + return restartRelay() + } + + return 0 +} + +export const runConfigValidate = async (): Promise => { + const settings = loadMergedSettings() + const issues = validateSettings(settings) + + if (issues.length === 0) { + logInfo('Settings are valid') + return 0 + } + + logError('Settings validation failed:') + for (const issue of issues) { + logError(`- ${issue.path}: ${issue.message}`) + } + + return 1 +} + +export const runConfigEnvList = async (options: { showSecrets?: boolean } = {}): Promise => { + const values = readEnvValues() + const entries = Object.entries(values).sort(([a], [b]) => a.localeCompare(b)) + + if (entries.length === 0) { + logInfo('No .env entries found') + return 0 + } + + for (const [key, value] of entries) { + const displayValue = options.showSecrets || !isSecretEnvKey(key) ? 
value : maskSecretValue(value) + logInfo(`${key}=${displayValue}`) + } + + return 0 +} + +export const runConfigEnvGet = async (key: string, options: { showSecrets?: boolean } = {}): Promise => { + const normalizedKey = key.trim() + + if (!isSupportedEnvKey(normalizedKey)) { + logError(`Unsupported env key: ${normalizedKey}`) + return 1 + } + + const values = readEnvValues() + const value = values[normalizedKey] + + if (value === undefined) { + logError(`Env key not set: ${normalizedKey}`) + return 1 + } + + const displayValue = options.showSecrets || !isSecretEnvKey(normalizedKey) ? value : maskSecretValue(value) + logInfo(displayValue) + return 0 +} + +export const runConfigEnvSet = async (key: string, value: string): Promise => { + const normalizedKey = key.trim() + + if (!isSupportedEnvKey(normalizedKey)) { + logError(`Unsupported env key: ${normalizedKey}`) + return 1 + } + + const issue = validateEnvPair(normalizedKey, value) + if (issue) { + logError(issue) + return 1 + } + + upsertEnvValue(normalizedKey, value) + logInfo(`Updated ${normalizedKey}`) + return 0 +} + +export const runConfigEnvValidate = async (): Promise => { + const values = readEnvValues() + const issues = validateEnvValues(values) + + if (issues.length === 0) { + logInfo('Environment settings are valid') + return 0 + } + + logError('Environment validation failed:') + for (const issue of issues) { + logError(`- ${formatLabel(issue.path)} (${issue.path}): ${issue.message}`) + } + + return 1 +} + +export const getConfigTopLevelCategories = (): string[] => { + const defaults = loadDefaults() as unknown as Record + return Object.keys(defaults) +} diff --git a/src/cli/commands/dev.ts b/src/cli/commands/dev.ts new file mode 100644 index 00000000..7b4fa3d0 --- /dev/null +++ b/src/cli/commands/dev.ts @@ -0,0 +1,139 @@ +import { confirm, isCancel, cancel } from '@clack/prompts' +import ora from 'ora' + +import { runCleanDb } from '../../clean-db' +import { runCommand } from '../utils/process' +import 
{ runStop } from './stop' + +type DevOptions = { + yes?: boolean +} + +const ensureConfirmed = async (message: string, yes?: boolean): Promise => { + if (yes) { + return true + } + + if (!process.stdin.isTTY) { + throw new Error('Interactive confirmation is unavailable. Re-run with --yes.') + } + + const answer = await confirm({ message, initialValue: false }) + if (isCancel(answer)) { + cancel('Operation cancelled') + return false + } + + return answer +} + +const runWithSpinner = async ( + loadingText: string, + successText: string, + failureText: string, + action: () => Promise, +): Promise => { + const spinner = ora(loadingText).start() + + try { + const code = await action() + if (code === 0) { + spinner.succeed(successText) + } else { + spinner.fail(failureText) + } + + return code + } catch (error) { + spinner.fail(failureText) + throw error + } +} + +export const runDevDbClean = async (rawArgs: string[], options: DevOptions = {}): Promise => { + if (rawArgs.length === 0) { + const confirmed = await ensureConfirmed('Delete all events from the database?', options.yes) + if (!confirmed) { + return 1 + } + + return runWithSpinner('Cleaning database...', 'Database clean completed', 'Database clean failed', () => + runCleanDb(['--all', '--force']), + ) + } + + return runWithSpinner('Cleaning database...', 'Database clean completed', 'Database clean failed', () => + runCleanDb(rawArgs), + ) +} + +export const runDevDbReset = async (options: DevOptions): Promise => { + const confirmed = await ensureConfirmed('Reset database and rerun migrations?', options.yes) + if (!confirmed) { + return 1 + } + + const spinner = ora('Resetting database (rollback)...').start() + + let code = await runCommand('pnpm', ['run', 'db:migrate:rollback', '--', '--all']) + if (code !== 0) { + spinner.fail('Database reset failed during rollback') + return code + } + + spinner.text = 'Resetting database (migrate)...' 
+ code = await runCommand('pnpm', ['run', 'db:migrate']) + if (code === 0) { + spinner.succeed('Database reset completed') + } else { + spinner.fail('Database reset failed during migrate') + } + + return code +} + +export const runDevSeedRelay = async (): Promise => { + return runWithSpinner('Seeding relay data...', 'Relay seed completed', 'Relay seed failed', () => + runCommand('pnpm', ['run', 'db:seed']), + ) +} + +export const runDevDockerClean = async (options: DevOptions): Promise => { + const confirmed = await ensureConfirmed('Run docker system prune and docker volume prune?', options.yes) + if (!confirmed) { + return 1 + } + + let code = await runStop({ all: true }, []) + if (code !== 0) { + return code + } + + code = await runCommand('docker', ['system', 'prune', '-a', '-f']) + if (code !== 0) { + return code + } + + return runCommand('docker', ['volume', 'prune', '-f']) +} + +export const runDevTestUnit = async (): Promise => { + return runWithSpinner('Running unit tests...', 'Unit tests completed', 'Unit tests failed', () => + runCommand('pnpm', ['run', 'test:unit']), + ) +} + +export const runDevTestCli = async (): Promise => { + return runWithSpinner('Running CLI tests...', 'CLI tests completed', 'CLI tests failed', () => + runCommand('pnpm', ['run', 'test:cli']), + ) +} + +export const runDevTestIntegration = async (): Promise => { + return runWithSpinner( + 'Running integration tests...', + 'Integration tests completed', + 'Integration tests failed', + () => runCommand('pnpm', ['run', 'test:integration']), + ) +} diff --git a/src/cli/commands/export.ts b/src/cli/commands/export.ts new file mode 100644 index 00000000..46b73c53 --- /dev/null +++ b/src/cli/commands/export.ts @@ -0,0 +1,54 @@ +import { runExportEvents } from '../../scripts/export-events' + +type ExportFormat = 'jsonl' | 'json' +type CompressionFormat = 'gzip' | 'gz' | 'xz' + +type ExportOptions = { + output?: string + format?: ExportFormat + compress?: boolean + compressionFormat?: 
CompressionFormat +} + +export const runExport = async (options: ExportOptions, rawArgs: string[]): Promise => { + const args: string[] = [] + + if (options.output) { + args.push(options.output) + } + + if (options.compress) { + args.push('--compress') + } + + if (options.compressionFormat) { + args.push('--format', options.compressionFormat) + } + + let skipNext = false + for (const arg of rawArgs) { + if (skipNext) { + skipNext = false + continue + } + + if (arg === '--format') { + skipNext = true + continue + } + + if (arg.startsWith('--format=')) { + continue + } + + if (arg === '--compress' || arg === '-z') { + continue + } + + args.push(arg) + } + + return runExportEvents(args, { + format: options.format, + }) +} diff --git a/src/cli/commands/import.ts b/src/cli/commands/import.ts new file mode 100644 index 00000000..53eca086 --- /dev/null +++ b/src/cli/commands/import.ts @@ -0,0 +1,20 @@ +import { runImportEvents } from '../../import-events' + +type ImportOptions = { + file?: string + batchSize?: number +} + +export const runImport = async (options: ImportOptions, rawArgs: string[]): Promise => { + const args: string[] = [] + + if (options.file) { + args.push(options.file) + } + + if (typeof options.batchSize === 'number') { + args.push('--batch-size', String(options.batchSize)) + } + + return runImportEvents([...args, ...rawArgs]) +} diff --git a/src/cli/commands/info.ts b/src/cli/commands/info.ts new file mode 100644 index 00000000..513e3e4d --- /dev/null +++ b/src/cli/commands/info.ts @@ -0,0 +1,246 @@ +import fs from 'fs' +import knex from 'knex' + +import packageJson from '../../../package.json' +import { loadMergedSettings } from '../utils/config' +import { logError, logInfo } from '../utils/output' +import { getOnionKeyPath, getTorHostnamePath } from '../utils/bootstrap' +import { getProjectPath } from '../utils/paths' +import { runCommandWithOutput } from '../utils/process' + +type InfoOptions = { + torHostname?: boolean + i2pHostname?: boolean + 
json?: boolean +} + +type I2PGuidancePayload = { + i2pHostnames: string[] + keysFile: string + guidance?: { + webConsoleUrl: string + consoleQueryCommand: string + } +} + +const getEventCount = async (): Promise => { + const db = knex({ + client: 'pg', + connection: process.env.DB_URI + ? process.env.DB_URI + : { + host: process.env.DB_HOST, + port: Number(process.env.DB_PORT), + user: process.env.DB_USER, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + }, + pool: { + min: 0, + max: 1, + idleTimeoutMillis: 1000, + acquireTimeoutMillis: 1000, + propagateCreateError: false, + }, + acquireConnectionTimeout: 1000, + } as any) + + try { + const result = await db('events').whereNull('deleted_at').count<{ count: string | number }>('* as count').first() + return Number(result?.count ?? 0) + } catch { + return null + } finally { + await db.destroy() + } +} + +const getRelayUptimeSeconds = async (): Promise => { + const idResult = await runCommandWithOutput('docker', ['compose', 'ps', '-q', 'nostream'], { timeoutMs: 1000 }) + if (idResult.code !== 0) { + return null + } + + const containerId = idResult.stdout.trim() + if (!containerId) { + return null + } + + const startedAtResult = await runCommandWithOutput('docker', ['inspect', '--format', '{{.State.StartedAt}}', containerId], { + timeoutMs: 1000, + }) + if (startedAtResult.code !== 0) { + return null + } + + const startedAtRaw = startedAtResult.stdout.trim() + const startedAtMs = Date.parse(startedAtRaw) + if (!Number.isFinite(startedAtMs)) { + return null + } + + const seconds = Math.max(0, Math.floor((Date.now() - startedAtMs) / 1000)) + return seconds +} + +const formatUptime = (uptimeSeconds: number | null): string => { + if (uptimeSeconds === null) { + return 'unavailable' + } + + const days = Math.floor(uptimeSeconds / 86400) + const hours = Math.floor((uptimeSeconds % 86400) / 3600) + const minutes = Math.floor((uptimeSeconds % 3600) / 60) + const seconds = uptimeSeconds % 60 + + const 
segments = [] + if (days > 0) { + segments.push(`${days}d`) + } + if (hours > 0 || days > 0) { + segments.push(`${hours}h`) + } + if (minutes > 0 || hours > 0 || days > 0) { + segments.push(`${minutes}m`) + } + segments.push(`${seconds}s`) + return segments.join(' ') +} + +const writeJson = (value: unknown): void => { + process.stdout.write(`${JSON.stringify(value, null, 2)}\n`) +} + +const writeJsonError = (message: string, code = 1): void => { + process.stderr.write(`${JSON.stringify({ error: { message, code } })}\n`) +} + +export const getInfoPayload = async () => { + const settings = loadMergedSettings() + const torHostnamePath = getTorHostnamePath() + const torHostname = fs.existsSync(torHostnamePath) ? fs.readFileSync(torHostnamePath, 'utf-8').trim() : null + const [eventCount, uptimeSeconds] = await Promise.all([getEventCount(), getRelayUptimeSeconds()]) + + return { + version: packageJson.version, + relay: { + name: settings.info?.name, + url: settings.info?.relay_url, + pubkey: settings.info?.pubkey, + paymentsEnabled: settings.payments?.enabled ?? false, + paymentProcessor: settings.payments?.processor ?? null, + }, + tor: { + hostname: torHostname, + onionPrivateKeyPath: getOnionKeyPath(), + }, + runtime: { + eventCount, + uptimeSeconds, + }, + } +} + +export const runInfo = async (options: InfoOptions): Promise => { + const payload = await getInfoPayload() + + if (options.torHostname) { + if (payload.tor.hostname) { + if (options.json) { + writeJson({ torHostname: payload.tor.hostname }) + return 0 + } + + logInfo(payload.tor.hostname) + return 0 + } + + if (options.json) { + process.stderr.write( + `${JSON.stringify({ error: { message: 'Tor hostname not found. Start with `nostream start --tor` first.', code: 1 } })}\n`, + ) + return 1 + } + + logError('Tor hostname not found. 
Start with `nostream start --tor` first.') + return 1 + } + + if (options.i2pHostname) { + const keysFile = getProjectPath('.nostr', 'i2p', 'data', 'nostream.dat') + const i2pGuidance: I2PGuidancePayload = { + i2pHostnames: [], + keysFile, + guidance: { + webConsoleUrl: 'http://127.0.0.1:7070/?page=i2p_tunnels', + consoleQueryCommand: + "docker exec i2pd wget -qO- 'http://127.0.0.1:7070/?page=i2p_tunnels' | grep -oE '[a-z2-7]{52}\\\\.b32\\\\.i2p' | sort -u", + }, + } + + if (!fs.existsSync(keysFile)) { + if (options.json) { + writeJsonError(`I2P destination keys not found. Is the i2pd container running? Expected: ${keysFile}`) + return 1 + } + + logError('I2P destination keys not found. Is the i2pd container running?') + logError(`Expected: ${keysFile}`) + return 1 + } + + const result = await runCommandWithOutput('docker', [ + 'exec', + 'i2pd', + 'wget', + '-qO-', + 'http://127.0.0.1:7070/?page=i2p_tunnels', + ]) + + const matches = new Set((`${result.stdout}\n${result.stderr}`).match(/[a-z2-7]{52}\.b32\.i2p/g) ?? []) + if (matches.size > 0) { + if (options.json) { + writeJson({ + i2pHostnames: [...matches], + }) + return 0 + } + + for (const hostname of matches) { + logInfo(hostname) + } + return 0 + } + + if (options.json) { + writeJson(i2pGuidance) + return 0 + } + + logInfo(`I2P destination keys exist at: ${keysFile}`) + logInfo('') + logInfo('To find your nostream .b32.i2p address, use one of these methods:') + logInfo(' 1. Open the i2pd web console: http://127.0.0.1:7070/?page=i2p_tunnels') + logInfo(' (published by docker-compose.i2p.yml, bound to 127.0.0.1 only)') + logInfo(' 2. Query the console from inside the container:') + logInfo(" docker exec i2pd wget -qO- 'http://127.0.0.1:7070/?page=i2p_tunnels' \\") + logInfo(" | grep -oE '[a-z2-7]{52}\\\\.b32\\\\.i2p' | sort -u") + return 0 + } + + if (options.json) { + writeJson(payload) + return 0 + } + + logInfo(`Nostream v${payload.version}`) + logInfo(`Relay: ${payload.relay.name ?? 
'n/a'} (${payload.relay.url ?? 'n/a'})`) + logInfo(`Pubkey: ${payload.relay.pubkey ?? 'n/a'}`) + logInfo(`Payments: ${payload.relay.paymentsEnabled ? `enabled (${payload.relay.paymentProcessor})` : 'disabled'}`) + logInfo(`Tor hostname: ${payload.tor.hostname ?? 'not found'}`) + logInfo(`Onion key path: ${payload.tor.onionPrivateKeyPath}`) + logInfo(`Events: ${payload.runtime.eventCount ?? 'unavailable'}`) + logInfo(`Uptime: ${formatUptime(payload.runtime.uptimeSeconds)}`) + + return 0 +} diff --git a/src/cli/commands/seed.ts b/src/cli/commands/seed.ts new file mode 100644 index 00000000..3586476e --- /dev/null +++ b/src/cli/commands/seed.ts @@ -0,0 +1,33 @@ +import ora from 'ora' + +import { runCommand } from '../utils/process' + +type SeedOptions = { + count?: number +} + +export const runSeed = async (options: SeedOptions): Promise => { + if (options.count !== undefined) { + if (!Number.isSafeInteger(options.count) || options.count <= 0) { + throw new Error('--count must be a positive integer') + } + } + + const spinner = ora('Seeding relay data...').start() + + const code = await runCommand('pnpm', ['run', 'db:seed'], { + env: options.count ? 
{ NOSTREAM_SEED_COUNT: String(options.count) } : undefined, + }) + + if (code === 0) { + if (options.count) { + spinner.succeed(`Seed completed with ${options.count} events requested`) + } else { + spinner.succeed('Seed completed') + } + } else { + spinner.fail('Seed failed') + } + + return code +} diff --git a/src/cli/commands/setup.ts b/src/cli/commands/setup.ts new file mode 100644 index 00000000..7de3fa21 --- /dev/null +++ b/src/cli/commands/setup.ts @@ -0,0 +1,169 @@ +import fs from 'fs' +import { randomBytes } from 'crypto' +import { intro, outro, confirm, text, isCancel, cancel } from '@clack/prompts' + +import { ensureConfigBootstrap } from '../utils/bootstrap' +import { getProjectPath } from '../utils/paths' +import { runStart } from './start' + +type SetupOptions = { + yes?: boolean + start?: boolean +} + +const SECRET_PLACEHOLDER = 'change_me_to_something_long_and_random' + +export const setupPrompts = { + intro, + outro, + confirm, + text, + isCancel, + cancel, +} + +class SetupCancelledError extends Error { + constructor() { + super('Setup cancelled') + this.name = 'SetupCancelledError' + } +} + +const readEnvSecret = (content: string): string | undefined => { + for (const line of content.split(/\r?\n/)) { + const trimmed = line.trim() + if (!trimmed || trimmed.startsWith('#') || !trimmed.startsWith('SECRET=')) { + continue + } + + const [rawValue] = trimmed.slice('SECRET='.length).split('#', 1) + return rawValue.trim() + } + + return undefined +} + +const needsSecretReplacement = (secret: string | undefined): boolean => { + return !secret || secret === SECRET_PLACEHOLDER +} + +const resolveSecret = async (assumeYes: boolean): Promise => { + if (process.env.SECRET?.trim()) { + return process.env.SECRET.trim() + } + + if (!assumeYes && process.stdin.isTTY) { + const value = await setupPrompts.text({ + message: 'SECRET env var value (hex recommended)', + placeholder: 'openssl rand -hex 128', + validate: (input) => (input.trim() ? 
undefined : 'SECRET is required'), + }) + + if (setupPrompts.isCancel(value)) { + setupPrompts.cancel('Setup cancelled') + throw new SetupCancelledError() + } + + return value.trim() + } + + return randomBytes(64).toString('hex') +} + +const upsertSecret = (content: string, secret: string): string => { + const normalized = content.length > 0 ? content : '' + const lines = normalized.split(/\r?\n/) + let replaced = false + + const nextLines = lines.map((line) => { + if (replaced) { + return line + } + + const trimmed = line.trim() + if (!trimmed.startsWith('SECRET=') || trimmed.startsWith('#')) { + return line + } + + replaced = true + const commentIndex = line.indexOf('#') + const commentSuffix = commentIndex >= 0 ? line.slice(commentIndex).trimEnd() : '' + return commentSuffix ? `SECRET=${secret} ${commentSuffix}` : `SECRET=${secret}` + }) + + if (!replaced) { + if (nextLines.length > 0 && nextLines[nextLines.length - 1] !== '') { + nextLines.push(`SECRET=${secret}`) + } else if (nextLines.length === 0) { + nextLines.push(`SECRET=${secret}`) + } else { + nextLines[nextLines.length - 1] = `SECRET=${secret}` + nextLines.push('') + } + } + + return nextLines.join('\n') +} + +const ensureEnvFile = async (assumeYes: boolean): Promise => { + const envPath = getProjectPath('.env') + const envExamplePath = getProjectPath('.env.example') + + if (!fs.existsSync(envPath)) { + if (fs.existsSync(envExamplePath)) { + fs.copyFileSync(envExamplePath, envPath) + } else { + fs.writeFileSync(envPath, '', 'utf-8') + } + } + + const current = fs.readFileSync(envPath, 'utf-8') + + if (!needsSecretReplacement(readEnvSecret(current))) { + return true + } + + let secret: string + try { + secret = await resolveSecret(assumeYes) + } catch (error) { + if (error instanceof SetupCancelledError) { + return false + } + throw error + } + + fs.writeFileSync(envPath, upsertSecret(current, secret), 'utf-8') + return true +} + +export const runSetup = async (options: SetupOptions): Promise => { + 
setupPrompts.intro('Nostream setup') + + ensureConfigBootstrap() + const shouldContinue = await ensureEnvFile(Boolean(options.yes)) + if (!shouldContinue) { + return 1 + } + + let shouldStart = Boolean(options.start) + + if (!options.yes && !options.start && process.stdin.isTTY) { + const answer = await setupPrompts.confirm({ message: 'Start relay now?', initialValue: true }) + if (setupPrompts.isCancel(answer)) { + setupPrompts.cancel('Setup cancelled') + return 1 + } + + shouldStart = answer + } + + if (shouldStart) { + const code = await runStart({}, []) + setupPrompts.outro(code === 0 ? 'Setup complete' : 'Setup finished with errors') + return code + } + + setupPrompts.outro('Setup complete') + return 0 +} diff --git a/src/cli/commands/start.ts b/src/cli/commands/start.ts new file mode 100644 index 00000000..c10df3a4 --- /dev/null +++ b/src/cli/commands/start.ts @@ -0,0 +1,141 @@ +import fs from 'fs' +import { join } from 'path' +import ora from 'ora' + +import { StartOptions } from '../types' +import { ensureConfigBootstrap, ensureI2PDataDir, ensureNotRoot, ensureTorDataDir } from '../utils/bootstrap' +import { createPortOverrideComposeFile, runDockerCompose } from '../utils/docker' +import { logStep } from '../utils/output' +import { getProjectPath } from '../utils/paths' +import { runCommand } from '../utils/process' + +const FQDN_REGEX = + /^([A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?\.)+[A-Za-z0-9]([A-Za-z0-9-]{0,61}[A-Za-z0-9])?$/ + +const ensureNginxBootstrap = async (): Promise => { + const relayDomain = process.env.RELAY_DOMAIN?.trim() + if (!relayDomain) { + throw new Error( + 'RELAY_DOMAIN environment variable is required when using --nginx (example: RELAY_DOMAIN=relay.example.com).', + ) + } + + if (!FQDN_REGEX.test(relayDomain)) { + throw new Error('RELAY_DOMAIN must be a valid fully-qualified domain name when using --nginx.') + } + + const certbotEmail = process.env.CERTBOT_EMAIL?.trim() + if (!certbotEmail) { + throw new Error( + 'CERTBOT_EMAIL 
environment variable is required when using --nginx (example: CERTBOT_EMAIL=you@example.com).', + ) + } + + const nginxConfDir = getProjectPath('nginx', 'conf.d') + const nginxTemplate = join(nginxConfDir, 'nostream.conf.template') + const nginxConf = join(nginxConfDir, 'nostream.conf') + + const templateContent = fs.readFileSync(nginxTemplate, 'utf-8') + const rendered = templateContent.replaceAll('${RELAY_DOMAIN}', relayDomain) + fs.writeFileSync(nginxConf, rendered, { encoding: 'utf-8' }) + + const sslCertDir = getProjectPath('nginx', 'ssl', 'live', relayDomain) + const fullchainPath = join(sslCertDir, 'fullchain.pem') + const privkeyPath = join(sslCertDir, 'privkey.pem') + + if (!fs.existsSync(fullchainPath) || !fs.existsSync(privkeyPath)) { + fs.mkdirSync(sslCertDir, { recursive: true }) + + const code = await runCommand('openssl', [ + 'req', + '-x509', + '-nodes', + '-newkey', + 'rsa:2048', + '-days', + '1', + '-keyout', + privkeyPath, + '-out', + fullchainPath, + '-subj', + `/CN=${relayDomain}`, + ]) + + if (code !== 0) { + throw new Error('Failed to generate self-signed SSL certificate. 
Ensure openssl is installed and retry.') + } + } +} + +export const runStart = async (options: StartOptions, passthrough: string[]): Promise => { + ensureNotRoot() + + const explicitPortFlag = process.argv.slice(2).some((arg) => arg === '--port' || arg.startsWith('--port=')) + const hasPort = typeof options.port === 'number' && Number.isFinite(options.port) + if (explicitPortFlag && !hasPort) { + throw new Error('Port must be a safe integer between 1 and 65535') + } + + if (hasPort) { + if (!Number.isSafeInteger(options.port) || options.port < 1 || options.port > 65535) { + throw new Error('Port must be a safe integer between 1 and 65535') + } + } + + logStep('Preparing configuration') + ensureConfigBootstrap() + + const composeFiles = ['docker-compose.yml'] + + if (options.tor) { + ensureTorDataDir() + composeFiles.push('docker-compose.tor.yml') + } + + if (options.i2p) { + ensureI2PDataDir() + composeFiles.push('docker-compose.i2p.yml') + } + + if (options.nginx) { + await ensureNginxBootstrap() + composeFiles.push('docker-compose.nginx.yml') + } + + let overrideFile: string | undefined + if (hasPort) { + overrideFile = createPortOverrideComposeFile(options.port) + composeFiles.push(overrideFile) + } + + const env: NodeJS.ProcessEnv = {} + + if (options.debug) { + env.DEBUG = process.env.DEBUG || 'primary:*,worker:*,knex:*' + } + + const spinner = ora('Starting relay...').start() + const composePassthrough = passthrough.filter((arg) => arg !== '--') + const upArgs = ['up', '--build', '--remove-orphans', ...(options.detach ? 
['-d'] : []), ...composePassthrough]

  try {
    const code = await runDockerCompose({
      files: composeFiles,
      args: upArgs,
      env,
    })

    if (code === 0) {
      spinner.succeed('Relay start command completed')
    } else {
      spinner.fail('Relay start command failed')
    }

    return code
  } finally {
    // Always remove the temporary port-override compose file, even on failure.
    if (overrideFile && fs.existsSync(overrideFile)) {
      fs.unlinkSync(overrideFile)
    }
  }
}
diff --git a/src/cli/commands/stop.ts b/src/cli/commands/stop.ts
new file mode 100644
index 00000000..73c0179b
--- /dev/null
+++ b/src/cli/commands/stop.ts
import ora from 'ora'

import { StopOptions } from '../types'
import { runDockerCompose } from '../utils/docker'

/**
 * `nostream stop` — run `docker compose down` over the selected overlay files.
 * With no overlay flags (and no --all) every known overlay is included, so a
 * plain `nostream stop` tears down whatever combination `start` launched.
 */
export const runStop = async (options: StopOptions, passthrough: string[]): Promise<number> => {
  const composeFiles = ['docker-compose.yml']

  const includeAll = options.all || (!options.tor && !options.i2p && !options.nginx && !options.local)

  if (includeAll || options.tor) {
    composeFiles.push('docker-compose.tor.yml')
  }

  if (includeAll || options.i2p) {
    composeFiles.push('docker-compose.i2p.yml')
  }

  if (includeAll || options.nginx) {
    composeFiles.push('docker-compose.nginx.yml')
  }

  if (includeAll || options.local) {
    composeFiles.push('docker-compose.local.yml')
  }

  const spinner = ora('Stopping relay...').start()
  const code = await runDockerCompose({
    files: composeFiles,
    args: ['down', ...passthrough],
  })

  if (code === 0) {
    spinner.succeed('Relay stop command completed')
  } else {
    spinner.fail('Relay stop command failed')
  }

  return code
}
diff --git a/src/cli/commands/update.ts b/src/cli/commands/update.ts
new file mode 100644
index 00000000..6fb3026d
--- /dev/null
+++ b/src/cli/commands/update.ts
import ora from 'ora'

import { runCommand, runCommandWithOutput } from '../utils/process'
import { runStart } from './start'
import { runStop } from './stop'

// git prints 'No local changes to save' when `stash push` created nothing.
const wasStashCreated = (output: string): boolean => {
  return
!output.includes('No local changes to save') +} + +export const runUpdate = async (passthrough: string[]): Promise => { + const spinner = ora('Updating relay...').start() + + let code = await runStop({ all: true }, []) + if (code !== 0) { + spinner.fail('Update failed while stopping relay') + return code + } + + const stashResult = await runCommandWithOutput('git', ['stash', 'push', '-u', '-m', 'nostream-cli-update']) + if (stashResult.code !== 0) { + spinner.fail('Update failed while stashing local changes') + return stashResult.code + } + + const stashOutput = `${stashResult.stdout}\n${stashResult.stderr}` + const stashCreated = wasStashCreated(stashOutput) + + code = await runCommand('git', ['pull']) + if (code !== 0) { + if (stashCreated) { + const restoreCode = await runCommand('git', ['stash', 'pop']) + if (restoreCode === 0) { + spinner.fail('Update failed while pulling latest changes. Restored stashed local changes.') + return code + } + + spinner.fail( + 'Update failed while pulling latest changes, and restoring stashed local changes also failed. 
Run `git stash list` then `git stash pop` manually.', + ) + return restoreCode + } + + spinner.fail('Update failed while pulling latest changes.') + return code + } + + if (stashCreated) { + code = await runCommand('git', ['stash', 'pop']) + if (code !== 0) { + spinner.fail('Update pulled latest changes, but reapplying stashed changes failed') + return code + } + } + + code = await runStart({}, passthrough) + if (code !== 0) { + spinner.fail('Update finished pull, but restart failed') + return code + } + + spinner.succeed('Relay updated and restarted') + return 0 +} diff --git a/src/cli/index.ts b/src/cli/index.ts new file mode 100644 index 00000000..8366eabe --- /dev/null +++ b/src/cli/index.ts @@ -0,0 +1,480 @@ +#!/usr/bin/env node +const { cac } = require('cac') + +import packageJson from '../../package.json' +import { runStart } from './commands/start' +import { runStop } from './commands/stop' +import { runInfo } from './commands/info' +import { runImport } from './commands/import' +import { runExport } from './commands/export' +import { runSetup } from './commands/setup' +import { runSeed } from './commands/seed' +import { runUpdate } from './commands/update' +import { + runConfigEnvGet, + runConfigEnvList, + runConfigEnvSet, + runConfigEnvValidate, + runConfigGet, + runConfigList, + runConfigSet, + runConfigValidate, +} from './commands/config' +import { + runDevDbClean, + runDevDbReset, + runDevDockerClean, + runDevSeedRelay, + runDevTestCli, + runDevTestIntegration, + runDevTestUnit, +} from './commands/dev' +import { runTui } from './tui/main' +import { logError, logInfo } from './utils/output' + +class CliUsageError extends Error {} + +const printHandledError = (message: string): void => { + logError(`Error: ${message}`) +} + +const isStructuredExportFormat = (value: string): value is 'json' | 'jsonl' => { + return value === 'json' || value === 'jsonl' +} + +const isCompressionExportFormat = (value: string): value is 'gzip' | 'gz' | 'xz' => { + return 
value === 'gzip' || value === 'gz' || value === 'xz' +} + +const extractFormatValues = (args: string[]): string[] => { + const formats: string[] = [] + + for (let index = 0; index < args.length; index += 1) { + const arg = args[index] + + if (arg === '--format') { + const value = args[index + 1] + if (typeof value === 'string') { + formats.push(value) + } + index += 1 + continue + } + + if (arg.startsWith('--format=')) { + const value = arg.slice('--format='.length) + if (value.length) { + formats.push(value) + } + } + } + + return formats +} + +const cli = cac('nostream') + +const configSubHelp: Record = { + list: 'Usage: nostream config list', + get: 'Usage: nostream config get ', + set: 'Usage: nostream config set [--type inferred|json] [--validate|--no-validate] [--restart]', + validate: 'Usage: nostream config validate', + env: 'Usage: nostream config env [args] [--show-secrets]', +} + +const configEnvSubHelp: Record = { + list: 'Usage: nostream config env list [--show-secrets]', + get: 'Usage: nostream config env get [--show-secrets]', + set: 'Usage: nostream config env set ', + validate: 'Usage: nostream config env validate', +} + +const devSubHelp: Record = { + 'db:clean': 'Usage: nostream dev db:clean [--all|--older-than=|--kinds=1,7,4] [--dry-run] [--force]', + 'db:reset': 'Usage: nostream dev db:reset [--yes]', + 'seed:relay': 'Usage: nostream dev seed:relay', + 'docker:clean': 'Usage: nostream dev docker:clean [--yes]', + 'test:unit': 'Usage: nostream dev test:unit', + 'test:cli': 'Usage: nostream dev test:cli', + 'test:integration': 'Usage: nostream dev test:integration', +} + +const withErrorBoundary = + (handler: (...args: T) => Promise | number) => + async (...args: T) => { + try { + const code = await handler(...args) + if (typeof code === 'number' && code !== 0) { + process.exitCode = code + } + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error) + const usageError = error instanceof CliUsageError + const lastArg = args[args.length - 1] + const jsonMode = + Boolean(lastArg) && + typeof lastArg === 'object' && + !Array.isArray(lastArg) && + (lastArg as Record).json === true + + if (jsonMode) { + process.stderr.write(`${JSON.stringify({ error: { message, code: usageError ? 2 : 1 } })}\n`) + } else { + printHandledError(message) + } + process.exitCode = usageError ? 2 : 1 + } + } + +cli + .command('start [...args]', 'Start Nostream (Docker Compose)') + .option('--tor', 'Enable Tor compose overlay') + .option('--i2p', 'Enable I2P compose overlay') + .option('--nginx', 'Enable Nginx reverse proxy overlay') + .option('--debug', 'Enable DEBUG logging') + .option('-d, --detach', 'Start in detached mode') + .option('--port ', 'Override exposed relay port', { type: [Number] }) + .action( + withErrorBoundary(async (args: unknown, options: unknown) => { + const resolved = options as Record + const normalizedPort = Array.isArray(resolved.port) ? 
resolved.port[0] : resolved.port + return runStart({ ...(resolved as any), port: normalizedPort as number | undefined }, args as string[]) + }), + ) + +cli + .command('stop [...args]', 'Stop Nostream') + .option('--tor', 'Include Tor overlay') + .option('--i2p', 'Include I2P overlay') + .option('--nginx', 'Include Nginx overlay') + .option('--local', 'Include local dev overlay') + .option('--all', 'Include all known overlays') + .action( + withErrorBoundary(async (args: unknown, options: unknown) => { + return runStop(options as any, args as string[]) + }), + ) + +cli + .command('info', 'Show relay/runtime info') + .option('--tor-hostname', 'Print Tor hostname only') + .option('--i2p-hostname', 'Print I2P hostname(s) when available') + .option('--json', 'Print machine-readable JSON') + .action( + withErrorBoundary(async (options: unknown) => { + return runInfo(options as any) + }), + ) + +cli + .command('update [...args]', 'Pull latest git changes and restart relay') + .action( + withErrorBoundary(async (args: unknown) => { + return runUpdate(args as string[]) + }), + ) + +cli + .command('clean', 'Clean Docker resources (legacy script replacement)') + .action( + withErrorBoundary(async () => { + return runDevDockerClean({ yes: true }) + }), + ) + +cli + .command('import [file] [...args]', 'Import events from .jsonl or .json') + .option('--file ', 'Path to .jsonl/.json file (alias to positional arg)') + .option('--batch-size ', 'Batch size', { type: [Number] }) + .action( + withErrorBoundary(async (file: unknown, args: unknown, options: unknown) => { + const passthrough = (args as string[]) ?? [] + const unsupportedFlag = passthrough.find((arg) => arg.startsWith('-')) + if (unsupportedFlag) { + throw new CliUsageError(`Unknown option: ${unsupportedFlag}`) + } + + const resolved = options as Record + const rawBatchSize = Array.isArray(resolved.batchSize) ? 
resolved.batchSize[0] : resolved.batchSize + const normalizedBatchSize = + typeof rawBatchSize === 'number' && Number.isFinite(rawBatchSize) ? rawBatchSize : undefined + const normalizedFile = (resolved.file as string | undefined) ?? (file as string | undefined) + if (normalizedFile && normalizedFile.startsWith('-')) { + throw new CliUsageError(`Unknown option: ${normalizedFile}`) + } + + return runImport( + { + ...(resolved as any), + file: normalizedFile, + batchSize: normalizedBatchSize as number | undefined, + }, + passthrough, + ) + }), + ) + +cli + .command('export [output] [...args]', 'Export events to file') + .option('--output ', 'Output path (alias to positional arg)') + .option('-z, --compress', 'Enable compression (legacy compatibility)') + .option('--format ', 'Export format (jsonl|json|gzip|gz|xz)') + .action( + withErrorBoundary(async (output: unknown, args: unknown, options: unknown) => { + const passthrough = (args as string[]) ?? [] + const resolved = options as Record + + const formatCandidates = new Set(extractFormatValues(passthrough)) + if (typeof resolved.format === 'string' && resolved.format.length > 0) { + formatCandidates.add(resolved.format) + } + + const unknownFormats = [...formatCandidates].filter( + (format) => !isStructuredExportFormat(format) && !isCompressionExportFormat(format), + ) + if (unknownFormats.length > 0) { + throw new CliUsageError( + `Unsupported format: ${unknownFormats[0]}. Supported values: json, jsonl, gzip, gz, xz`, + ) + } + + const structuredFormats = [...formatCandidates].filter(isStructuredExportFormat) + const compressionFormats = [...formatCandidates].filter(isCompressionExportFormat) + + if (structuredFormats.length > 1) { + throw new CliUsageError('Conflicting structured export formats were provided. Use only one of: json, jsonl') + } + + const compressionFamilies = new Set(compressionFormats.map((format) => (format === 'xz' ? 
'xz' : 'gzip'))) + if (compressionFamilies.size > 1) { + throw new CliUsageError( + 'Conflicting compression formats were provided. Use only one of: gzip/gz or xz', + ) + } + + if (structuredFormats.length > 0 && compressionFormats.length > 0) { + throw new CliUsageError('Cannot combine structured export format (json/jsonl) with compression format (gzip/gz/xz).') + } + + const compress = + Boolean(resolved.compress) || passthrough.includes('--compress') || passthrough.includes('-z') + if (structuredFormats.length > 0 && compress) { + throw new CliUsageError('Cannot combine --compress with --format json/jsonl.') + } + + return runExport( + { + ...(resolved as any), + output: (resolved.output as string | undefined) ?? (output as string | undefined), + format: structuredFormats[0] as 'json' | 'jsonl' | undefined, + compress, + compressionFormat: compressionFormats[0] as 'gzip' | 'gz' | 'xz' | undefined, + }, + passthrough, + ) + }), + ) + +cli + .command('seed', 'Seed relay with test data') + .option('--count ', 'Number of events to seed', { type: [Number] }) + .action( + withErrorBoundary(async (options: unknown) => { + const resolved = options as Record + const normalizedCount = Array.isArray(resolved.count) ? 
resolved.count[0] : resolved.count + return runSeed({ ...(resolved as any), count: normalizedCount as number | undefined }) + }), + ) + +cli + .command('setup', 'Initial interactive setup') + .option('-y, --yes', 'Non-interactive defaults') + .option('--start', 'Start relay after setup') + .action(withErrorBoundary(async (options: unknown) => runSetup(options as any))) + +cli + .command('config [...args]', 'Manage settings') + .option('--restart', 'Restart relay after setting update') + .option('--validate', 'Validate merged settings before write') + .option('--no-validate', 'Skip validation before write') + .option('--type ', 'Value parser: inferred|json') + .option('--show-secrets', 'Show secret values for env commands') + .option('--json', 'Print machine-readable JSON for read commands') + .action( + withErrorBoundary(async (args: unknown, options: unknown) => { + const positional = (args as string[]) ?? [] + const command = positional[0] + const resolved = options as Record + const json = Boolean(resolved.json) + + if (resolved.help && command === 'env') { + const envCommand = positional[1] + if (envCommand && configEnvSubHelp[envCommand]) { + logInfo(configEnvSubHelp[envCommand]) + return 0 + } + + logInfo(configSubHelp.env) + return 0 + } + + if (resolved.help && command && configSubHelp[command]) { + logInfo(configSubHelp[command]) + return 0 + } + + if (command === 'env') { + const envCommand = positional[1] + const showSecrets = Boolean(resolved.showSecrets) + + switch (envCommand) { + case 'list': + return runConfigEnvList({ showSecrets }) + case 'get': + if (!positional[2]) { + throw new CliUsageError(configEnvSubHelp.get) + } + return runConfigEnvGet(positional[2], { showSecrets }) + case 'set': + if (!positional[2] || positional[3] === undefined) { + throw new CliUsageError(configEnvSubHelp.set) + } + return runConfigEnvSet(positional[2], positional[3]) + case 'validate': + return runConfigEnvValidate() + default: + logInfo(configSubHelp.env) + return 
2 + } + } + + switch (command) { + case 'list': + return runConfigList({ json }) + case 'get': + if (!positional[1]) { + throw new CliUsageError(configSubHelp.get) + } + return runConfigGet(positional[1], { json }) + case 'set': { + if (!positional[1] || positional[2] === undefined) { + throw new CliUsageError(configSubHelp.set) + } + + const valueType = ((resolved.type as string | undefined) ?? 'inferred') as 'inferred' | 'json' + if (valueType !== 'inferred' && valueType !== 'json') { + throw new CliUsageError(`Unsupported type: ${valueType}. Supported values: inferred, json`) + } + + return runConfigSet(positional[1], positional[2], { + restart: Boolean(resolved.restart), + validate: resolved.validate !== false, + valueType, + }) + } + case 'validate': + return runConfigValidate() + default: + logInfo('Usage: nostream config [args]') + return 2 + } + }), + ) + +cli + .command('dev [...args]', 'Development utilities') + .option('-y, --yes', 'Skip confirmation') + .action( + withErrorBoundary(async (args: unknown, options: unknown) => { + const positional = (args as string[]) ?? 
[] + const command = positional[0] + const resolved = options as Record + + if (resolved.help && command && devSubHelp[command]) { + logInfo(devSubHelp[command]) + return 0 + } + + switch (command) { + case 'db:clean': + return runDevDbClean(positional.slice(1), resolved as any) + case 'db:reset': + return runDevDbReset(resolved as any) + case 'seed:relay': + return runDevSeedRelay() + case 'docker:clean': + return runDevDockerClean(resolved as any) + case 'test:unit': + return runDevTestUnit() + case 'test:cli': + return runDevTestCli() + case 'test:integration': + return runDevTestIntegration() + default: + logInfo( + 'Usage: nostream dev [args]', + ) + return 2 + } + }), + ) + +cli.help() +cli.version(packageJson.version) + +withErrorBoundary(async () => { + const userArgs = process.argv.slice(2) + const knownTopLevel = new Set([ + 'start', + 'stop', + 'info', + 'import', + 'export', + 'seed', + 'setup', + 'config', + 'dev', + 'update', + 'clean', + ]) + + if (userArgs.length >= 2 && userArgs.includes('--help')) { + if (userArgs[0] === 'config' && userArgs[1] === 'env') { + if (userArgs[2] && configEnvSubHelp[userArgs[2]]) { + logInfo(configEnvSubHelp[userArgs[2]]) + return 0 + } + + logInfo(configSubHelp.env) + return 0 + } + + if (userArgs[0] === 'config' && configSubHelp[userArgs[1]]) { + logInfo(configSubHelp[userArgs[1]]) + return 0 + } + + if (userArgs[0] === 'dev' && devSubHelp[userArgs[1]]) { + logInfo(devSubHelp[userArgs[1]]) + return 0 + } + } + + if (userArgs.length > 0 && !userArgs[0].startsWith('-') && !knownTopLevel.has(userArgs[0])) { + logError(`Unknown command: ${userArgs[0]}`) + cli.outputHelp() + return 2 + } + + if (userArgs.length === 0) { + if (!process.stdin.isTTY || !process.stdout.isTTY) { + cli.outputHelp() + return 0 + } + + return runTui() + } + + await cli.parse(process.argv) + return typeof process.exitCode === 'number' ? 
process.exitCode : 0 +})() diff --git a/src/cli/tui/main.ts b/src/cli/tui/main.ts new file mode 100644 index 00000000..928e9d98 --- /dev/null +++ b/src/cli/tui/main.ts @@ -0,0 +1,59 @@ +import { runInfo } from '../commands/info' +import { runStartMenu } from './menus/start' +import { runStopMenu } from './menus/stop' +import { runConfigureMenu } from './menus/configure' +import { runManageMenu } from './menus/manage' +import { runDevMenu } from './menus/dev' +import { createState } from './state' +import { tuiPrompts } from './prompts' + +export const runTui = async (): Promise => { + const state = createState() + tuiPrompts.intro('Nostream Control Center') + + while (state.running) { + const action = await tuiPrompts.select({ + message: 'What would you like to do?', + options: [ + { value: 'start', label: 'Start relay' }, + { value: 'stop', label: 'Stop relay' }, + { value: 'configure', label: 'Configure settings' }, + { value: 'manage', label: 'Manage data (export/import)' }, + { value: 'dev', label: 'Development tools' }, + { value: 'info', label: 'View relay info' }, + { value: 'exit', label: 'Exit' }, + ], + }) + + if (tuiPrompts.isCancel(action) || action === 'exit') { + state.running = false + break + } + + switch (action) { + case 'start': + await runStartMenu() + break + case 'stop': + await runStopMenu() + break + case 'configure': + await runConfigureMenu() + break + case 'manage': + await runManageMenu() + break + case 'dev': + await runDevMenu() + break + case 'info': + await runInfo({}) + break + default: + tuiPrompts.cancel('Unknown action') + } + } + + tuiPrompts.outro('Goodbye') + return 0 +} diff --git a/src/cli/tui/menus/configure.ts b/src/cli/tui/menus/configure.ts new file mode 100644 index 00000000..8bd13703 --- /dev/null +++ b/src/cli/tui/menus/configure.ts @@ -0,0 +1,413 @@ +import { + getConfigTopLevelCategories, + runConfigGet, + runConfigList, + runConfigSet, + runConfigValidate, +} from '../../commands/config' +import { getByPath, 
loadMergedSettings } from '../../utils/config' +import { tuiPrompts } from '../prompts' + +const toCategoryLabel = (key: string): string => { + return key + .split(/[_\-.]/) + .filter(Boolean) + .map((part) => part.charAt(0).toUpperCase() + part.slice(1)) + .join(' ') +} + +const getCategoryOptions = () => { + const categories = getConfigTopLevelCategories().sort((a, b) => a.localeCompare(b)) + + return [ + ...categories.map((category) => ({ + value: category, + label: toCategoryLabel(category), + })), + { value: 'other', label: 'Other / full path' }, + ] +} + +type GuidedSetting = { + label: string + path: string + type: 'boolean' | 'number' | 'string' | 'select' | 'stringArray' + options?: string[] + placeholder?: string + validate?: (value: string) => string | undefined +} + +type GuidedCategory = { + value: string + label: string + settings: GuidedSetting[] +} + +const requireNonEmpty = (value: string): string | undefined => { + return value.trim() ? undefined : 'Value is required' +} + +const requireSafeNonNegativeInteger = (value: string): string | undefined => { + const trimmed = value.trim() + if (!/^\d+$/.test(trimmed)) { + return 'Value must be a non-negative integer' + } + + const parsed = Number(trimmed) + if (!Number.isSafeInteger(parsed)) { + return 'Value must be a safe integer' + } + + return undefined +} + +const guidedCategories: GuidedCategory[] = [ + { + value: 'payments', + label: 'Payments', + settings: [ + { label: 'Enable payments', path: 'payments.enabled', type: 'boolean' }, + { + label: 'Payment processor', + path: 'payments.processor', + type: 'select', + options: ['zebedee', 'lnbits', 'lnurl', 'nodeless', 'opennode'], + }, + { + label: 'Admission fee enabled', + path: 'payments.feeSchedules.admission[0].enabled', + type: 'boolean', + }, + { + label: 'Admission fee amount (msats)', + path: 'payments.feeSchedules.admission[0].amount', + type: 'number', + validate: requireSafeNonNegativeInteger, + }, + ], + }, + { + value: 'network', + 
label: 'Network', + settings: [ + { + label: 'Relay URL', + path: 'info.relay_url', + type: 'string', + placeholder: 'wss://relay.example.com', + validate: requireNonEmpty, + }, + { + label: 'Relay name', + path: 'info.name', + type: 'string', + placeholder: 'relay.example.com', + validate: requireNonEmpty, + }, + { + label: 'Max payload size', + path: 'network.maxPayloadSize', + type: 'number', + validate: requireSafeNonNegativeInteger, + }, + ], + }, + { + value: 'limits', + label: 'Limits', + settings: [ + { + label: 'Rate limiter strategy', + path: 'limits.rateLimiter.strategy', + type: 'select', + options: ['ewma', 'sliding_window'], + }, + { + label: 'Primary event content max length', + path: 'limits.event.content[0].maxLength', + type: 'number', + validate: requireSafeNonNegativeInteger, + }, + { + label: 'Minimum pubkey balance', + path: 'limits.event.pubkey.minBalance', + type: 'number', + validate: requireSafeNonNegativeInteger, + }, + ], + }, +] + +const formatCurrentValue = (value: unknown): string => { + if (Array.isArray(value)) { + return value.length === 0 ? '[]' : value.join(', ') + } + + if (typeof value === 'string') { + return value + } + + if (value === undefined) { + return 'undefined' + } + + if (value === null) { + return 'null' + } + + if (typeof value === 'object') { + return JSON.stringify(value) + } + + return String(value) +} + +const getGuidedSettingValue = async (setting: GuidedSetting, currentValue: unknown) => { + switch (setting.type) { + case 'boolean': { + const answer = await tuiPrompts.confirm({ + message: `${setting.label} (current: ${formatCurrentValue(currentValue)})`, + initialValue: Boolean(currentValue), + }) + + if (tuiPrompts.isCancel(answer)) { + tuiPrompts.cancel('Cancelled') + return undefined + } + + return { + rawValue: String(answer), + valueType: 'inferred' as const, + } + } + case 'select': { + const options = (setting.options ?? 
[]).map((option) => ({ + value: option, + label: option, + hint: option === currentValue ? 'current' : undefined, + })) + + const answer = await tuiPrompts.select({ + message: `${setting.label} (current: ${formatCurrentValue(currentValue)})`, + options: [...options, { value: 'back', label: 'Back' }], + }) + + if (tuiPrompts.isCancel(answer) || answer === 'back') { + tuiPrompts.cancel('Cancelled') + return undefined + } + + return { + rawValue: answer, + valueType: 'inferred' as const, + } + } + case 'stringArray': { + const defaultValue = Array.isArray(currentValue) ? currentValue.join(', ') : '' + const answer = await tuiPrompts.text({ + message: `${setting.label} (comma-separated)`, + defaultValue, + }) + + if (tuiPrompts.isCancel(answer)) { + tuiPrompts.cancel('Cancelled') + return undefined + } + + const parsed = answer + .split(',') + .map((part) => part.trim()) + .filter(Boolean) + + return { + rawValue: JSON.stringify(parsed), + valueType: 'json' as const, + } + } + default: { + const answer = await tuiPrompts.text({ + message: `${setting.label} (current: ${formatCurrentValue(currentValue)})`, + defaultValue: currentValue === undefined || currentValue === null ? 
'' : String(currentValue), + placeholder: setting.placeholder, + validate: setting.validate, + }) + + if (tuiPrompts.isCancel(answer)) { + tuiPrompts.cancel('Cancelled') + return undefined + } + + return { + rawValue: answer, + valueType: 'inferred' as const, + } + } + } +} + +const runGuidedConfigureMenu = async (): Promise => { + const category = await tuiPrompts.select({ + message: 'Configuration category', + options: [...guidedCategories.map(({ value, label }) => ({ value, label })), { value: 'back', label: 'Back' }], + }) + + if (tuiPrompts.isCancel(category)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (category === 'back') { + return 0 + } + + const selectedCategory = guidedCategories.find((entry) => entry.value === category) + if (!selectedCategory) { + tuiPrompts.cancel('Unknown category') + return 1 + } + + const settings = loadMergedSettings() as unknown as Record + const setting = await tuiPrompts.select({ + message: `${selectedCategory.label} setting`, + options: [ + ...selectedCategory.settings.map((entry) => ({ + value: entry.path, + label: entry.label, + hint: `current: ${formatCurrentValue(getByPath(settings, entry.path))}`, + })), + { value: 'back', label: 'Back' }, + ], + }) + + if (tuiPrompts.isCancel(setting)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (setting === 'back') { + return 0 + } + + const selectedSetting = selectedCategory.settings.find((entry) => entry.path === setting) + if (!selectedSetting) { + tuiPrompts.cancel('Unknown setting') + return 1 + } + + const currentValue = getByPath(settings, selectedSetting.path) + const nextValue = await getGuidedSettingValue(selectedSetting, currentValue) + if (!nextValue) { + return 1 + } + + const confirmedSave = await tuiPrompts.confirm({ + message: `Save ${selectedSetting.label}?`, + initialValue: true, + }) + if (tuiPrompts.isCancel(confirmedSave) || !confirmedSave) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const restart = await tuiPrompts.confirm({ + 
message: 'Restart relay after this setting change?', + initialValue: false, + }) + if (tuiPrompts.isCancel(restart)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + return runConfigSet(selectedSetting.path, nextValue.rawValue, { + restart, + validate: true, + valueType: nextValue.valueType, + }) +} + +export const runConfigureMenu = async (): Promise => { + const action = await tuiPrompts.select({ + message: 'Configuration action', + options: [ + { value: 'list', label: 'List all settings' }, + { value: 'guided', label: 'Guided edit (common settings)' }, + { value: 'get', label: 'Advanced get by dot-path' }, + { value: 'set', label: 'Advanced set by dot-path' }, + { value: 'validate', label: 'Validate settings' }, + { value: 'back', label: 'Back' }, + ], + }) + + if (tuiPrompts.isCancel(action)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (action === 'back') { + return 0 + } + + if (action === 'list') { + return runConfigList() + } + + if (action === 'validate') { + return runConfigValidate() + } + + if (action === 'guided') { + return runGuidedConfigureMenu() + } + + const category = await tuiPrompts.select({ + message: 'Configuration category', + options: [...getCategoryOptions(), { value: 'back', label: 'Back' }], + }) + if (tuiPrompts.isCancel(category)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (category === 'back') { + return 0 + } + + const pathInput = await tuiPrompts.text({ + message: category === 'other' ? 'Full dot-path' : `Path inside ${category} (without "${category}.")`, + placeholder: category === 'other' ? 'payments.enabled' : 'enabled', + validate: (input) => (input.trim() ? undefined : 'Path is required'), + }) + if (tuiPrompts.isCancel(pathInput)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const normalizedPath = pathInput.trim() + const path = category === 'other' ? 
normalizedPath : `${category}.${normalizedPath}` + + const confirmedPath = await tuiPrompts.confirm({ + message: `Use path: ${path}?`, + initialValue: true, + }) + if (tuiPrompts.isCancel(confirmedPath) || !confirmedPath) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + if (action === 'get') { + return runConfigGet(path) + } + + const value = await tuiPrompts.text({ message: 'New value (true/false/number/string/json)' }) + if (tuiPrompts.isCancel(value)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const restart = await tuiPrompts.confirm({ + message: 'Restart relay after this setting change?', + initialValue: false, + }) + if (tuiPrompts.isCancel(restart)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + return runConfigSet(path, value, { restart, validate: true, valueType: 'inferred' }) +} diff --git a/src/cli/tui/menus/dev.ts b/src/cli/tui/menus/dev.ts new file mode 100644 index 00000000..d576561a --- /dev/null +++ b/src/cli/tui/menus/dev.ts @@ -0,0 +1,98 @@ +import { runDevDbClean, runDevDbReset, runDevDockerClean, runDevSeedRelay } from '../../commands/dev' +import { tuiPrompts } from '../prompts' +import ora from 'ora' + +const confirmDanger = async (message: string): Promise => { + const answer = await tuiPrompts.confirm({ + message: `Destructive action: ${message}`, + initialValue: false, + }) + + if (tuiPrompts.isCancel(answer) || !answer) { + tuiPrompts.cancel('Cancelled') + return false + } + + return true +} + +export const runDevMenu = async (): Promise => { + const action = await tuiPrompts.select({ + message: 'Development utilities', + options: [ + { value: 'db:clean', label: 'Clean DB (events)' }, + { value: 'db:reset', label: 'Reset DB (rollback+migrate)' }, + { value: 'seed:relay', label: 'Seed relay data' }, + { value: 'docker:clean', label: 'Docker system/volume clean' }, + { value: 'back', label: 'Back' }, + ], + }) + + if (tuiPrompts.isCancel(action)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (action === 'back') 
{ + return 0 + } + + switch (action) { + case 'db:clean': { + const confirmed = await confirmDanger('delete events from the database') + if (!confirmed) { + return 1 + } + + const spinner = ora('Cleaning database...').start() + const code = await runDevDbClean(['--all', '--force']) + if (code === 0) { + spinner.succeed('Database clean completed') + } else { + spinner.fail('Database clean failed') + } + return code + } + case 'db:reset': { + const confirmed = await confirmDanger('reset database and rerun migrations') + if (!confirmed) { + return 1 + } + + const spinner = ora('Resetting database...').start() + const code = await runDevDbReset({ yes: true }) + if (code === 0) { + spinner.succeed('Database reset completed') + } else { + spinner.fail('Database reset failed') + } + return code + } + case 'seed:relay': { + const spinner = ora('Seeding relay...').start() + const code = await runDevSeedRelay() + if (code === 0) { + spinner.succeed('Relay seed completed') + } else { + spinner.fail('Relay seed failed') + } + return code + } + case 'docker:clean': { + const confirmed = await confirmDanger('remove unused Docker images and volumes') + if (!confirmed) { + return 1 + } + + const spinner = ora('Cleaning Docker resources...').start() + const code = await runDevDockerClean({ yes: true }) + if (code === 0) { + spinner.succeed('Docker clean completed') + } else { + spinner.fail('Docker clean failed') + } + return code + } + default: + return 1 + } +} diff --git a/src/cli/tui/menus/manage.ts b/src/cli/tui/menus/manage.ts new file mode 100644 index 00000000..9995aa78 --- /dev/null +++ b/src/cli/tui/menus/manage.ts @@ -0,0 +1,124 @@ +import { runExport } from '../../commands/export' +import { runImport } from '../../commands/import' +import { logError } from '../../utils/output' +import { tuiPrompts } from '../prompts' +import ora from 'ora' + +export const runManageMenu = async (): Promise => { + const action = await tuiPrompts.select({ + message: 'Data management', + 
options: [ + { value: 'export', label: 'Export events' }, + { value: 'import', label: 'Import events' }, + { value: 'back', label: 'Back' }, + ], + }) + + if (tuiPrompts.isCancel(action)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (action === 'back') { + return 0 + } + + if (action === 'export') { + const format = await tuiPrompts.select({ + message: 'Output format', + options: [ + { value: 'jsonl', label: 'JSON Lines (.jsonl)' }, + { value: 'json', label: 'JSON array (.json)' }, + { value: 'back', label: 'Back' }, + ], + }) + if (tuiPrompts.isCancel(format)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (format === 'back') { + return 0 + } + + const output = await tuiPrompts.text({ + message: 'Output filename', + defaultValue: format === 'json' ? 'events.json' : 'events.jsonl', + }) + if (tuiPrompts.isCancel(output)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const confirmed = await tuiPrompts.confirm({ + message: `Export events to ${output}?`, + initialValue: true, + }) + if (tuiPrompts.isCancel(confirmed) || !confirmed) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const spinner = ora('Exporting events...').start() + const code = await runExport({ output, format: format as 'json' | 'jsonl' }, []) + if (code === 0) { + spinner.succeed('Export completed') + } else { + spinner.fail('Export failed') + } + return code + } + + const format = await tuiPrompts.select({ + message: 'Input format', + options: [ + { value: 'jsonl', label: 'JSON Lines (.jsonl)' }, + { value: 'json', label: 'JSON array (.json)' }, + { value: 'back', label: 'Back' }, + ], + }) + if (tuiPrompts.isCancel(format)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (format === 'back') { + return 0 + } + + const file = await tuiPrompts.text({ + message: 'Input file path', + defaultValue: format === 'json' ? 
'events.json' : 'events.jsonl', + }) + if (tuiPrompts.isCancel(file)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const batchSizeRaw = await tuiPrompts.text({ message: 'Batch size', defaultValue: '1000' }) + if (tuiPrompts.isCancel(batchSizeRaw)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const batchSize = Number(batchSizeRaw) + if (!Number.isSafeInteger(batchSize) || batchSize <= 0) { + logError('Batch size must be a positive integer') + return 1 + } + + const confirmed = await tuiPrompts.confirm({ + message: `Import events from ${file}?`, + initialValue: true, + }) + if (tuiPrompts.isCancel(confirmed) || !confirmed) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const spinner = ora('Importing events...').start() + const code = await runImport({ file, batchSize }, []) + if (code === 0) { + spinner.succeed('Import completed') + } else { + spinner.fail('Import failed') + } + return code +} diff --git a/src/cli/tui/menus/start.ts b/src/cli/tui/menus/start.ts new file mode 100644 index 00000000..29027db3 --- /dev/null +++ b/src/cli/tui/menus/start.ts @@ -0,0 +1,84 @@ +import { runStart } from '../../commands/start' +import { tuiPrompts } from '../prompts' +import ora from 'ora' + +export const runStartMenu = async (): Promise => { + const action = await tuiPrompts.select({ + message: 'Start relay', + options: [ + { value: 'continue', label: 'Continue' }, + { value: 'back', label: 'Back' }, + ], + }) + if (tuiPrompts.isCancel(action)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + if (action === 'back') { + return 0 + } + + const tor = await tuiPrompts.confirm({ message: 'Enable Tor?', initialValue: false }) + if (tuiPrompts.isCancel(tor)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const i2p = await tuiPrompts.confirm({ message: 'Enable I2P?', initialValue: false }) + if (tuiPrompts.isCancel(i2p)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const debug = await tuiPrompts.confirm({ message: 'Enable debug logs?', 
initialValue: false }) + if (tuiPrompts.isCancel(debug)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const useCustomPort = await tuiPrompts.confirm({ message: 'Override relay port?', initialValue: false }) + if (tuiPrompts.isCancel(useCustomPort)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + let port: number | undefined + if (useCustomPort) { + const portInput = await tuiPrompts.text({ + message: 'Relay port (1-65535)', + defaultValue: '8008', + validate: (input) => { + const parsed = Number(input) + if (!Number.isSafeInteger(parsed) || parsed < 1 || parsed > 65535) { + return 'Port must be a safe integer between 1 and 65535' + } + return undefined + }, + }) + if (tuiPrompts.isCancel(portInput)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + port = Number(portInput) + } + + const confirmed = await tuiPrompts.confirm({ + message: `Start relay${tor ? ' with Tor' : ''}${i2p ? `${tor ? ' + ' : ' with '}I2P` : ''}${debug ? ' (debug)' : ''}${port ? ` on port ${port}` : ''}?`, + initialValue: true, + }) + if (tuiPrompts.isCancel(confirmed) || !confirmed) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const spinner = ora('Starting relay...').start() + const code = await runStart({ tor, i2p, debug, port }, []) + if (code === 0) { + spinner.succeed('Relay start command completed') + } else { + spinner.fail('Relay start command failed') + } + + return code +} diff --git a/src/cli/tui/menus/stop.ts b/src/cli/tui/menus/stop.ts new file mode 100644 index 00000000..47f2c455 --- /dev/null +++ b/src/cli/tui/menus/stop.ts @@ -0,0 +1,27 @@ +import ora from 'ora' + +import { runStop } from '../../commands/stop' +import { tuiPrompts } from '../prompts' + +export const runStopMenu = async (): Promise => { + const nginx = await tuiPrompts.confirm({ + message: 'Include Nginx stack while stopping?', + initialValue: false, + }) + + if (tuiPrompts.isCancel(nginx)) { + tuiPrompts.cancel('Cancelled') + return 1 + } + + const spinner = ora('Stopping 
relay...').start() + const code = await runStop({ tor: true, i2p: true, local: true, nginx }, []) + + if (code === 0) { + spinner.succeed('Relay stop command completed') + } else { + spinner.fail('Relay stop command failed') + } + + return code +} diff --git a/src/cli/tui/prompts.ts b/src/cli/tui/prompts.ts new file mode 100644 index 00000000..b49dde13 --- /dev/null +++ b/src/cli/tui/prompts.ts @@ -0,0 +1,11 @@ +import { cancel, confirm, intro, isCancel, outro, select, text } from '@clack/prompts' + +export const tuiPrompts = { + cancel, + confirm, + intro, + isCancel, + outro, + select, + text, +} diff --git a/src/cli/tui/state.ts b/src/cli/tui/state.ts new file mode 100644 index 00000000..87bec975 --- /dev/null +++ b/src/cli/tui/state.ts @@ -0,0 +1,7 @@ +export type TuiState = { + running: boolean +} + +export const createState = (): TuiState => ({ + running: true, +}) diff --git a/src/cli/types.ts b/src/cli/types.ts new file mode 100644 index 00000000..d84b7624 --- /dev/null +++ b/src/cli/types.ts @@ -0,0 +1,27 @@ +export type CommandContext = { + cwd: string +} + +export type CommandHandler> = (options: T, rawArgs: string[]) => Promise + +export type StartOptions = { + tor?: boolean + i2p?: boolean + nginx?: boolean + debug?: boolean + port?: number + detach?: boolean +} + +export type StopOptions = { + tor?: boolean + i2p?: boolean + nginx?: boolean + local?: boolean + all?: boolean +} + +export type SetupOptions = { + yes?: boolean + start?: boolean +} diff --git a/src/cli/utils/bootstrap.ts b/src/cli/utils/bootstrap.ts new file mode 100644 index 00000000..3fce9904 --- /dev/null +++ b/src/cli/utils/bootstrap.ts @@ -0,0 +1,38 @@ +import fs from 'fs' +import { homedir } from 'os' +import { join } from 'path' + +import { getConfigBaseDir, getDefaultSettingsFilePath, getProjectPath, getSettingsFilePath } from './paths' + +export const ensureNotRoot = (): void => { + if (typeof process.geteuid === 'function' && process.geteuid() === 0) { + throw new 
Error('Nostream should not be run as root.') + } +} + +export const ensureConfigBootstrap = (): void => { + const configDir = getConfigBaseDir() + const settingsFile = getSettingsFilePath() + const defaultsFile = getDefaultSettingsFilePath() + + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }) + } + + if (!fs.existsSync(settingsFile)) { + fs.copyFileSync(defaultsFile, settingsFile) + } +} + +export const ensureTorDataDir = (): void => { + fs.mkdirSync(getProjectPath('.nostr', 'tor', 'data'), { recursive: true }) +} + +export const ensureI2PDataDir = (): void => { + fs.mkdirSync(getProjectPath('.nostr', 'i2p', 'data'), { recursive: true }) +} + +export const getTorHostnamePath = (): string => getProjectPath('.nostr', 'tor', 'data', 'nostream', 'hostname') + +export const getOnionKeyPath = (): string => + join(process.env.NOSTR_CONFIG_DIR ?? join(homedir(), '.nostr'), 'v3_onion_private_key') diff --git a/src/cli/utils/config.ts b/src/cli/utils/config.ts new file mode 100644 index 00000000..33cb768d --- /dev/null +++ b/src/cli/utils/config.ts @@ -0,0 +1,420 @@ +import fs from 'fs' +import yaml from 'js-yaml' +import { mergeDeepRight } from 'ramda' + +import { Settings } from '../../@types/settings' +import { getConfigBaseDir, getDefaultSettingsFilePath, getSettingsFilePath } from './paths' + +export type ValidationIssue = { + path: string + message: string +} + +type PathToken = + | { + type: 'key' + key: string + } + | { + type: 'index' + index: number + } + +const isPlainObject = (value: unknown): value is Record => { + return Boolean(value) && typeof value === 'object' && !Array.isArray(value) +} + +const parsePath = (path: string): PathToken[] => { + const input = path.trim() + + if (!input) { + throw new Error('Path is required') + } + + const tokens: PathToken[] = [] + const segments = input.split('.').map((part) => part.trim()) + + for (const segment of segments) { + if (!segment) { + throw new Error(`Invalid path segment in: 
${path}`) + } + + const match = segment.match(/^([A-Za-z_][A-Za-z0-9_]*)(\[(\d+)\])*$/) + if (!match) { + throw new Error(`Invalid path segment: ${segment}`) + } + + tokens.push({ type: 'key', key: match[1] }) + + const indexes = [...segment.matchAll(/\[(\d+)\]/g)] + for (const entry of indexes) { + tokens.push({ + type: 'index', + index: Number(entry[1]), + }) + } + } + + return tokens +} + +const formatPathTokens = (tokens: PathToken[]): string => { + let out = '' + + for (const token of tokens) { + if (token.type === 'key') { + out = out ? `${out}.${token.key}` : token.key + continue + } + + out = `${out}[${token.index}]` + } + + return out +} + +export const parseValue = (raw: string): unknown => { + const trimmed = raw.trim() + + if (trimmed === 'true') { + return true + } + + if (trimmed === 'false') { + return false + } + + if (trimmed === 'null') { + return null + } + + if (/^-?\d+$/.test(trimmed)) { + const asNumber = Number(trimmed) + if (Number.isSafeInteger(asNumber)) { + return asNumber + } + } + + if (/^-?\d+n$/.test(trimmed)) { + return BigInt(trimmed.slice(0, -1)) + } + + if ((trimmed.startsWith('{') && trimmed.endsWith('}')) || (trimmed.startsWith('[') && trimmed.endsWith(']'))) { + try { + return JSON.parse(trimmed) + } catch { + return raw + } + } + + return raw +} + +export const parseTypedValue = (raw: string, type: 'inferred' | 'json' = 'inferred'): unknown => { + if (type === 'json') { + try { + return JSON.parse(raw) + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error) + throw new Error(`Invalid JSON value: ${message}`) + } + } + + return parseValue(raw) +} + +const toSerializable = (value: unknown): unknown => { + if (typeof value === 'bigint') { + return value.toString() + } + + if (Array.isArray(value)) { + return value.map((entry) => toSerializable(entry)) + } + + if (isPlainObject(value)) { + return Object.fromEntries(Object.entries(value).map(([key, entry]) => [key, toSerializable(entry)])) + } + + return value +} + +const validateShape = (schema: unknown, candidate: unknown, path: PathToken[], issues: ValidationIssue[]): void => { + if (schema === undefined || candidate === undefined) { + return + } + + const renderedPath = formatPathTokens(path) || '$' + + if (Array.isArray(schema)) { + if (!Array.isArray(candidate)) { + issues.push({ + path: renderedPath, + message: `Expected array, got ${typeof candidate}`, + }) + return + } + + if (schema.length === 0) { + return + } + + candidate.forEach((entry, index) => { + const matchesAny = schema.some((schemaEntry) => { + const localIssues: ValidationIssue[] = [] + validateShape(schemaEntry, entry, [...path, { type: 'index', index }], localIssues) + return localIssues.length === 0 + }) + + if (!matchesAny) { + issues.push({ + path: formatPathTokens([...path, { type: 'index', index }]), + message: 'Array element does not match expected schema shape', + }) + } + }) + return + } + + if (isPlainObject(schema)) { + if (!isPlainObject(candidate)) { + issues.push({ + path: renderedPath, + message: `Expected object, got ${typeof candidate}`, + }) + return + } + + for (const key of Object.keys(candidate)) { + if (!(key in schema)) { + issues.push({ + path: formatPathTokens([...path, { type: 'key', key }]), + message: 'Unknown setting key', + }) + } + } + + for (const key of Object.keys(schema)) { + validateShape((schema as Record)[key], (candidate as Record)[key], [...path, { type: 'key', key }], issues) + } + + return + } + + if (candidate === null && schema 
!== null) { + issues.push({ + path: renderedPath, + message: `Expected ${typeof schema}, got null`, + }) + return + } + + if (schema !== null && typeof schema !== typeof candidate) { + issues.push({ + path: renderedPath, + message: `Expected ${typeof schema}, got ${typeof candidate}`, + }) + } +} + +const pathExistsInSchema = (schema: unknown, tokens: PathToken[]): boolean => { + let current: unknown = schema + + for (const token of tokens) { + if (token.type === 'key') { + if (!isPlainObject(current) || !(token.key in current)) { + return false + } + current = (current as Record)[token.key] + continue + } + + if (!Array.isArray(current)) { + return false + } + + current = current[0] + } + + return true +} + +export const ensureSettingsExists = (): void => { + const configDir = getConfigBaseDir() + const settingsPath = getSettingsFilePath() + const defaultsPath = getDefaultSettingsFilePath() + + if (!fs.existsSync(configDir)) { + fs.mkdirSync(configDir, { recursive: true }) + } + + if (!fs.existsSync(settingsPath)) { + fs.copyFileSync(defaultsPath, settingsPath) + } +} + +export const loadDefaults = (): Settings => { + const defaultsRaw = fs.readFileSync(getDefaultSettingsFilePath(), 'utf-8') + return yaml.load(defaultsRaw) as Settings +} + +export const loadUserSettings = (): Settings => { + ensureSettingsExists() + const raw = fs.readFileSync(getSettingsFilePath(), 'utf-8') + return (yaml.load(raw) as Settings) ?? 
({} as Settings) +} + +export const loadMergedSettings = (): Settings => { + return mergeDeepRight(loadDefaults(), loadUserSettings()) as Settings +} + +export const saveSettings = (settings: Settings): void => { + ensureSettingsExists() + const serialized = yaml.dump(toSerializable(settings), { lineWidth: 120 }) + fs.writeFileSync(getSettingsFilePath(), serialized, 'utf-8') +} + +export const getByPath = (settings: unknown, path: string): unknown => { + const tokens = parsePath(path) + let current: unknown = settings + + for (const token of tokens) { + if (token.type === 'key') { + if (!isPlainObject(current)) { + return undefined + } + current = current[token.key] + continue + } + + if (!Array.isArray(current)) { + return undefined + } + + current = current[token.index] + } + + return current +} + +const ensureArrayLength = (target: unknown[], minimumLength: number): void => { + while (target.length <= minimumLength) { + target.push(undefined) + } +} + +export const setByPath = (settings: Record, path: string, value: unknown): Record => { + const tokens = parsePath(path) + const clone: Record = structuredClone(settings) + + if (tokens.length === 0) { + throw new Error('Path is required') + } + + let current: unknown = clone + + for (let i = 0; i < tokens.length - 1; i++) { + const token = tokens[i] + const nextToken = tokens[i + 1] + + if (token.type === 'key') { + if (!isPlainObject(current)) { + throw new Error(`Cannot set key ${token.key} on non-object path`) + } + + const existing = current[token.key] + if (existing === undefined) { + current[token.key] = nextToken.type === 'index' ? 
[] : {} + } else if (nextToken.type === 'index' && !Array.isArray(existing)) { + current[token.key] = [] + } else if (nextToken.type === 'key' && !isPlainObject(existing)) { + current[token.key] = {} + } + + current = current[token.key] + continue + } + + if (!Array.isArray(current)) { + throw new Error(`Cannot index non-array path at [${token.index}]`) + } + + ensureArrayLength(current, token.index) + + const existing = current[token.index] + if (existing === undefined) { + current[token.index] = nextToken.type === 'index' ? [] : {} + } else if (nextToken.type === 'index' && !Array.isArray(existing)) { + current[token.index] = [] + } else if (nextToken.type === 'key' && !isPlainObject(existing)) { + current[token.index] = {} + } + + current = current[token.index] + } + + const last = tokens[tokens.length - 1] + + if (last.type === 'key') { + if (!isPlainObject(current)) { + throw new Error(`Cannot set key ${last.key} on non-object path`) + } + + current[last.key] = value + return clone + } + + if (!Array.isArray(current)) { + throw new Error(`Cannot index non-array path at [${last.index}]`) + } + + ensureArrayLength(current, last.index) + current[last.index] = value + + return clone +} + +export const validatePathAgainstDefaults = (path: string): ValidationIssue[] => { + const defaults = loadDefaults() as unknown + const tokens = parsePath(path) + + if (pathExistsInSchema(defaults, tokens)) { + return [] + } + + return [ + { + path, + message: 'Path does not exist in default settings schema', + }, + ] +} + +export const validateSettings = (settings: Settings): ValidationIssue[] => { + const issues: ValidationIssue[] = [] + + if (!settings.info?.relay_url) { + issues.push({ path: 'info.relay_url', message: 'relay_url is required' }) + } + + if (!settings.info?.name) { + issues.push({ path: 'info.name', message: 'name is required' }) + } + + if (!settings.network) { + issues.push({ path: 'network', message: 'network section is required' }) + } + + if 
(settings.payments?.enabled && !settings.payments.processor) { + issues.push({ path: 'payments.processor', message: 'processor is required when payments are enabled' }) + } + + const strategy = settings.limits?.rateLimiter?.strategy + if (strategy && strategy !== 'ewma' && strategy !== 'sliding_window') { + issues.push({ path: 'limits.rateLimiter.strategy', message: 'strategy must be ewma or sliding_window' }) + } + + validateShape(loadDefaults(), settings, [], issues) + + return issues +} diff --git a/src/cli/utils/docker.ts b/src/cli/utils/docker.ts new file mode 100644 index 00000000..8cf24fce --- /dev/null +++ b/src/cli/utils/docker.ts @@ -0,0 +1,47 @@ +import fs from 'fs' +import os from 'os' +import { join } from 'path' + +import { getProjectPath } from './paths' +import { runCommand } from './process' + +export type ComposeOptions = { + files: string[] + args: string[] + env?: NodeJS.ProcessEnv +} + +export const resolveComposeFile = (filename: string): string => getProjectPath(filename) + +export const buildComposeArgs = (files: string[], args: string[]): string[] => { + const out: string[] = [] + + for (const file of files) { + const fullPath = resolveComposeFile(file) + if (fs.existsSync(fullPath)) { + out.push('-f', fullPath) + } + } + + return [...out, ...args] +} + +export const runDockerCompose = async ({ files, args, env }: ComposeOptions): Promise => { + const composeArgs = buildComposeArgs(files, args) + return runCommand('docker', ['compose', ...composeArgs], { env }) +} + +export const createPortOverrideComposeFile = (port: number): string => { + const tempFile = join(os.tmpdir(), `nostream-port-override-${process.pid}-${Date.now()}.yml`) + const content = [ + 'services:', + ' nostream:', + ' environment:', + ` RELAY_PORT: ${port}`, + ' ports:', + ` - 127.0.0.1:${port}:${port}`, + ].join('\n') + + fs.writeFileSync(tempFile, content, { encoding: 'utf-8' }) + return tempFile +} diff --git a/src/cli/utils/env-config.ts b/src/cli/utils/env-config.ts 
new file mode 100644 index 00000000..fb4704c7 --- /dev/null +++ b/src/cli/utils/env-config.ts @@ -0,0 +1,215 @@ +import fs from 'fs' + +import { getEnvFilePath } from './paths' + +export type EnvValidationIssue = { + path: string + message: string +} + +type ParsedEnvLine = { + index: number + key: string + value: string +} + +const ENV_LINE_REGEX = /^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)$/ + +const SUPPORTED_ENV_KEYS = new Set([ + 'SECRET', + 'RELAY_PORT', + 'RELAY_PRIVATE_KEY', + 'WORKER_COUNT', + 'DB_URI', + 'DB_HOST', + 'DB_PORT', + 'DB_USER', + 'DB_PASSWORD', + 'DB_NAME', + 'DB_MIN_POOL_SIZE', + 'DB_MAX_POOL_SIZE', + 'DB_ACQUIRE_CONNECTION_TIMEOUT', + 'READ_REPLICA_ENABLED', + 'READ_REPLICAS', + 'TOR_HOST', + 'TOR_CONTROL_PORT', + 'TOR_PASSWORD', + 'HIDDEN_SERVICE_PORT', + 'REDIS_URI', + 'REDIS_HOST', + 'REDIS_PORT', + 'REDIS_USER', + 'REDIS_PASSWORD', + 'NOSTR_CONFIG_DIR', + 'DEBUG', + 'ZEBEDEE_API_KEY', + 'NODELESS_API_KEY', + 'NODELESS_WEBHOOK_SECRET', + 'OPENNODE_API_KEY', + 'LNBITS_API_KEY', + 'LOG_LEVEL', +]) + +const RR_KEY_REGEX = /^RR\d+_DB_(HOST|PORT|USER|PASSWORD|NAME|MIN_POOL_SIZE|MAX_POOL_SIZE|ACQUIRE_CONNECTION_TIMEOUT)$/ + +const INTEGER_KEYS = new Set([ + 'RELAY_PORT', + 'WORKER_COUNT', + 'DB_PORT', + 'DB_MIN_POOL_SIZE', + 'DB_MAX_POOL_SIZE', + 'DB_ACQUIRE_CONNECTION_TIMEOUT', + 'READ_REPLICAS', + 'TOR_CONTROL_PORT', + 'HIDDEN_SERVICE_PORT', + 'REDIS_PORT', +]) + +const BOOLEAN_KEYS = new Set(['READ_REPLICA_ENABLED']) + +const RR_INTEGER_KEY_REGEX = /^RR\d+_DB_(PORT|MIN_POOL_SIZE|MAX_POOL_SIZE|ACQUIRE_CONNECTION_TIMEOUT)$/ + +const SECRET_KEY_REGEX = /(SECRET|PASSWORD|API_KEY|PRIVATE_KEY)/i + +const parseEnvFile = (): { lines: string[]; parsed: ParsedEnvLine[] } => { + const envPath = getEnvFilePath() + + if (!fs.existsSync(envPath)) { + return { + lines: [], + parsed: [], + } + } + + const lines = fs.readFileSync(envPath, 'utf-8').split(/\r?\n/) + const parsed: ParsedEnvLine[] = [] + + for (let index = 0; index < lines.length; index += 1) { + 
const line = lines[index] + if (!line || line.trim().startsWith('#')) { + continue + } + + const match = line.match(ENV_LINE_REGEX) + if (!match) { + continue + } + + parsed.push({ + index, + key: match[1], + value: match[2], + }) + } + + return { + lines, + parsed, + } +} + +export const isSupportedEnvKey = (key: string): boolean => { + return SUPPORTED_ENV_KEYS.has(key) || RR_KEY_REGEX.test(key) +} + +const validateInteger = (key: string, value: string): string | undefined => { + if (!/^-?\d+$/.test(value.trim())) { + return `${key} must be an integer` + } + + const parsed = Number(value) + if (!Number.isSafeInteger(parsed)) { + return `${key} must be a safe integer` + } + + return undefined +} + +const validateBoolean = (key: string, value: string): string | undefined => { + const normalized = value.trim().toLowerCase() + if (normalized === 'true' || normalized === 'false') { + return undefined + } + + return `${key} must be true or false` +} + +export const validateEnvPair = (key: string, value: string): string | undefined => { + if (!isSupportedEnvKey(key)) { + return `${key} is not a supported environment setting` + } + + if (INTEGER_KEYS.has(key) || RR_INTEGER_KEY_REGEX.test(key)) { + return validateInteger(key, value) + } + + if (BOOLEAN_KEYS.has(key)) { + return validateBoolean(key, value) + } + + return undefined +} + +export const readEnvValues = (): Record => { + const { parsed } = parseEnvFile() + const values: Record = {} + + for (const line of parsed) { + values[line.key] = line.value + } + + return values +} + +export const upsertEnvValue = (key: string, value: string): void => { + const envPath = getEnvFilePath() + const { lines, parsed } = parseEnvFile() + + const existing = parsed.find((line) => line.key === key) + const replacement = `${key}=${value}` + + if (existing) { + lines[existing.index] = replacement + } else { + if (lines.length > 0 && lines[lines.length - 1].trim() !== '') { + lines.push('') + } + lines.push(replacement) + } + + 
fs.writeFileSync(envPath, lines.join('\n').replace(/\n?$/, '\n'), 'utf-8') +} + +export const validateEnvValues = (values: Record): EnvValidationIssue[] => { + const issues: EnvValidationIssue[] = [] + + for (const [key, value] of Object.entries(values)) { + const issue = validateEnvPair(key, value) + if (!issue) { + continue + } + + issues.push({ + path: key, + message: issue, + }) + } + + return issues +} + +export const isSecretEnvKey = (key: string): boolean => { + return SECRET_KEY_REGEX.test(key) +} + +export const maskSecretValue = (value: string): string => { + if (!value) { + return '***' + } + + if (value.length <= 4) { + return '*'.repeat(value.length) + } + + return `${value.slice(0, 2)}***${value.slice(-2)}` +} diff --git a/src/cli/utils/output.ts b/src/cli/utils/output.ts new file mode 100644 index 00000000..1dcf0210 --- /dev/null +++ b/src/cli/utils/output.ts @@ -0,0 +1,31 @@ +import { bold, cyan, red, yellow, green } from 'colorette' + +const writeStdout = (message: string): void => { + process.stdout.write(`${message}\n`) +} + +const writeStderr = (message: string): void => { + process.stderr.write(`${message}\n`) +} + +export const logStep = (message: string): void => { + writeStdout(cyan(`• ${message}`)) +} + +export const logInfo = (message: string): void => { + writeStdout(message) +} + +export const logSuccess = (message: string): void => { + writeStdout(green(message)) +} + +export const logWarn = (message: string): void => { + writeStderr(yellow(message)) +} + +export const logError = (message: string): void => { + writeStderr(red(message)) +} + +export const title = (label: string): string => bold(label) diff --git a/src/cli/utils/paths.ts b/src/cli/utils/paths.ts new file mode 100644 index 00000000..33f2673d --- /dev/null +++ b/src/cli/utils/paths.ts @@ -0,0 +1,13 @@ +import { join } from 'path' + +export const getProjectRoot = (): string => process.cwd() + +export const getProjectPath = (...parts: string[]): string => 
join(getProjectRoot(), ...parts) + +export const getConfigBaseDir = (): string => process.env.NOSTR_CONFIG_DIR ?? getProjectPath('.nostr') + +export const getSettingsFilePath = (): string => join(getConfigBaseDir(), 'settings.yaml') + +export const getDefaultSettingsFilePath = (): string => getProjectPath('resources', 'default-settings.yaml') + +export const getEnvFilePath = (): string => getProjectPath('.env') diff --git a/src/cli/utils/process.ts b/src/cli/utils/process.ts new file mode 100644 index 00000000..a574de08 --- /dev/null +++ b/src/cli/utils/process.ts @@ -0,0 +1,81 @@ +import { spawn } from 'child_process' + +export type RunOptions = { + cwd?: string + env?: NodeJS.ProcessEnv + stdio?: 'inherit' | 'pipe' + timeoutMs?: number +} + +export const runCommand = (command: string, args: string[], options: RunOptions = {}): Promise => { + return new Promise((resolve, reject) => { + const child = spawn(command, args, { + cwd: options.cwd, + env: { ...process.env, ...(options.env ?? {}) }, + stdio: options.stdio ?? 'inherit', + shell: false, + }) + + const timer = + typeof options.timeoutMs === 'number' + ? setTimeout(() => { + child.kill('SIGTERM') + }, options.timeoutMs) + : undefined + + child.on('error', reject) + child.on('close', (code) => { + if (timer) { + clearTimeout(timer) + } + + resolve(code ?? 1) + }) + }) +} + +export const runCommandWithOutput = ( + command: string, + args: string[], + options: RunOptions = {}, +): Promise<{ code: number; stdout: string; stderr: string }> => { + return new Promise((resolve, reject) => { + let stdout = '' + let stderr = '' + + const child = spawn(command, args, { + cwd: options.cwd, + env: { ...process.env, ...(options.env ?? {}) }, + stdio: 'pipe', + shell: false, + }) + + const timer = + typeof options.timeoutMs === 'number' + ? 
setTimeout(() => { + child.kill('SIGTERM') + }, options.timeoutMs) + : undefined + + child.stdout.on('data', (chunk) => { + stdout += chunk.toString() + }) + + child.stderr.on('data', (chunk) => { + stderr += chunk.toString() + }) + + child.on('error', reject) + child.on('close', (code) => { + if (timer) { + clearTimeout(timer) + } + + resolve({ + code: code ?? 1, + stdout, + stderr, + }) + }) + }) +} diff --git a/src/import-events.ts b/src/import-events.ts index f674ad72..8288bd7f 100644 --- a/src/import-events.ts +++ b/src/import-events.ts @@ -1,12 +1,8 @@ -import { resolve } from 'path' +import { extname, resolve } from 'path' import fs from 'fs' -import { - CompressionFormat, - createDecompressionStream, - detectCompressionFormat, -} from './utils/compression' +import { CompressionFormat, createDecompressionStream, detectCompressionFormat } from './utils/compression' import { createEventBatchPersister, EventImportLineError, @@ -22,6 +18,18 @@ interface CliOptions { showHelp: boolean } +type ImportFileFormat = 'jsonl' | 'json' + +type RunImportOptions = { + json?: boolean +} + +type InputFileSpec = { + absolutePath: string + compressionFormat?: CompressionFormat + format: ImportFileFormat +} + const DEFAULT_BATCH_SIZE = 1000 const MAX_ERROR_LOGS = 20 @@ -32,10 +40,11 @@ const formatProgress = (stats: EventImportStats): string => { } const printUsage = (): void => { - console.log('Usage: pnpm run import [--batch-size ]') - console.log('Example: pnpm run import ./events.jsonl --batch-size 1000') - console.log('Example: pnpm run import ./events.jsonl.gz') - console.log('Example: pnpm run import ./events.jsonl.xz') + console.log('Usage: nostream import [--batch-size ]') + console.log('Example: nostream import ./events.jsonl --batch-size 1000') + console.log('Example: nostream import ./events.json --batch-size 1000') + console.log('Example: nostream import ./events.jsonl.gz --batch-size 1000') + console.log('Example: nostream import ./events.jsonl.xz --batch-size 
1000') } const parseBatchSize = (value: string): number => { @@ -91,7 +100,7 @@ export const parseCliArgs = (args: string[]): CliOptions => { } if (!filePath) { - throw new Error('Missing input file path') + throw new Error('Missing path to .jsonl or .json file') } return { @@ -101,7 +110,21 @@ export const parseCliArgs = (args: string[]): CliOptions => { } } -const ensureValidInputFile = (filePath: string): string => { +const inferCompressedFormat = (absolutePath: string): ImportFileFormat | undefined => { + const normalized = absolutePath.toLowerCase() + + if (normalized.endsWith('.jsonl.gz') || normalized.endsWith('.jsonl.xz')) { + return 'jsonl' + } + + if (normalized.endsWith('.json.gz') || normalized.endsWith('.json.xz')) { + return 'json' + } + + return undefined +} + +const ensureValidInputFile = async (filePath: string): Promise => { const absolutePath = resolve(process.cwd(), filePath) if (!fs.existsSync(absolutePath)) { @@ -113,32 +136,48 @@ const ensureValidInputFile = (filePath: string): string => { throw new Error(`Input path is not a file: ${absolutePath}`) } - return absolutePath -} + const compressionFormat = await detectCompressionFormat(absolutePath) + + if (compressionFormat) { + const format = inferCompressedFormat(absolutePath) + if (!format) { + throw new Error('Compressed input filename must end with .jsonl.gz, .jsonl.xz, .json.gz, or .json.xz') + } -const getCompressionLabel = (format: CompressionFormat): string => { - switch (format) { - case CompressionFormat.GZIP: - return 'gzip' - case CompressionFormat.XZ: - return 'xz' - default: - return String(format) + return { + absolutePath, + compressionFormat, + format, + } } -} -const createImportStream = async (absoluteFilePath: string) => { - const compressionFormat = await detectCompressionFormat(absoluteFilePath) - const source = fs.createReadStream(absoluteFilePath) + const extension = extname(absolutePath).toLowerCase() - if (!compressionFormat) { + if (extension === '.jsonl') { return { 
- compressionFormat, - stream: source, + absolutePath, + format: 'jsonl', + } + } + + if (extension === '.json') { + return { + absolutePath, + format: 'json', } } - const decompressor = createDecompressionStream(compressionFormat) + throw new Error('Input file must have a .jsonl or .json extension') +} + +const createImportStream = (inputFile: InputFileSpec): NodeJS.ReadableStream => { + const source = fs.createReadStream(inputFile.absolutePath) + + if (!inputFile.compressionFormat) { + return source + } + + const decompressor = createDecompressionStream(inputFile.compressionFormat) source.on('error', (error) => { if (!decompressor.destroyed) { @@ -146,30 +185,37 @@ const createImportStream = async (absoluteFilePath: string) => { } }) - const closeSource = () => { + decompressor.on('close', () => { if (!source.destroyed) { source.destroy() } - } + }) - decompressor.on('close', closeSource) - decompressor.on('error', closeSource) + decompressor.on('error', () => { + if (!source.destroyed) { + source.destroy() + } + }) - return { - compressionFormat, - stream: source.pipe(decompressor), - } + return source.pipe(decompressor) } -const run = async (): Promise => { - const options = parseCliArgs(process.argv.slice(2)) +export const runImportEvents = async ( + args: string[] = process.argv.slice(2), + runOptions: RunImportOptions = {}, +): Promise => { + const options = parseCliArgs(args) if (options.showHelp) { printUsage() - return + return 0 } - const absoluteFilePath = ensureValidInputFile(options.filePath) + const inputFile = await ensureValidInputFile(options.filePath) + + if (inputFile.compressionFormat && inputFile.format === 'json') { + throw new Error('Compressed JSON array import is not supported. 
Use .json (uncompressed) or .jsonl.gz/.jsonl.xz.') + } const dbClient = getMasterDbClient() const eventRepository = new EventRepository(dbClient, dbClient) @@ -195,16 +241,22 @@ const run = async (): Promise => { const startedAt = Date.now() try { - const { stream, compressionFormat } = await createImportStream(absoluteFilePath) - if (compressionFormat) { - console.log(`Detected ${getCompressionLabel(compressionFormat)} compression. Decompressing on-the-fly...`) + if (inputFile.compressionFormat) { + console.log(`Detected ${inputFile.compressionFormat} compression. Decompressing on-the-fly...`) } - const stats = await importer.importFromReadable(stream, { - batchSize: options.batchSize, - onLineError, - onProgress, - }) + const stats = + inputFile.format === 'json' + ? await importer.importFromJsonArray(inputFile.absolutePath, { + batchSize: options.batchSize, + onLineError, + onProgress, + }) + : await importer.importFromReadable(createImportStream(inputFile), { + batchSize: options.batchSize, + onLineError, + onProgress, + }) if (suppressedErrors > 0) { console.warn(`Suppressed ${formatNumber(suppressedErrors)} additional line errors`) @@ -212,21 +264,41 @@ const run = async (): Promise => { const elapsedSeconds = ((Date.now() - startedAt) / 1000).toFixed(2) - console.log(`Import completed in ${elapsedSeconds}s`) - console.log(formatProgress(stats)) + if (runOptions.json) { + console.log( + JSON.stringify( + { + elapsedSeconds: Number(elapsedSeconds), + ...stats, + suppressedErrors, + }, + null, + 2, + ), + ) + } else { + console.log(`Import completed in ${elapsedSeconds}s`) + console.log(formatProgress(stats)) + } + + return 0 } finally { await dbClient.destroy() } } if (require.main === module) { - run().catch((error: unknown) => { - if (error instanceof Error) { - console.error(`Import failed: ${error.message}`) - } else { - console.error('Import failed with unknown error') - } + runImportEvents() + .then((exitCode) => { + process.exitCode = exitCode + }) + 
.catch((error: unknown) => { + if (error instanceof Error) { + console.error(`Import failed: ${error.message}`) + } else { + console.error('Import failed with unknown error') + } - process.exit(1) - }) + process.exit(1) + }) } diff --git a/src/scripts/export-events.ts b/src/scripts/export-events.ts index 349414ad..bbff5282 100644 --- a/src/scripts/export-events.ts +++ b/src/scripts/export-events.ts @@ -1,7 +1,7 @@ import 'pg-query-stream' import fs from 'fs' -import path from 'path' +import path, { extname } from 'path' import { pipeline } from 'stream/promises' import { Transform } from 'stream' @@ -20,6 +20,11 @@ type ExportCliOptions = { showHelp: boolean } +type ExportOptions = { + json?: boolean + format?: 'jsonl' | 'json' +} + const DEFAULT_OUTPUT_FILE_PATH = 'events.jsonl' const MIN_ELAPSED_SECONDS = 0.001 @@ -76,9 +81,9 @@ const formatCount = (value: number): string => { return Number.isInteger(rounded) ? rounded.toLocaleString('en-US') : rounded.toLocaleString('en-US', { - maximumFractionDigits: 2, - minimumFractionDigits: 2, - }) + maximumFractionDigits: 2, + minimumFractionDigits: 2, + }) } const getOptionValue = (option: string, args: string[], index: number): [string, number] => { @@ -187,14 +192,83 @@ type EventRow = { event_signature: Buffer } -async function exportEvents(): Promise { - const options = parseCliArgs(process.argv.slice(2)) - if (options.showHelp) { +const resolveExportFormat = (format?: string): 'jsonl' | 'json' => { + if (!format) { + return 'jsonl' + } + + if (format === 'jsonl' || format === 'json') { + return format + } + + throw new Error(`Unsupported format: ${format}. Supported values: json, jsonl`) +} + +const resolveOutputPath = (filename: string | undefined, format: 'jsonl' | 'json'): string => { + const fallback = format === 'json' ? 'events.json' : 'events.jsonl' + const outputPath = path.resolve(filename || fallback) + const expectedExtension = format === 'json' ? 
'.json' : '.jsonl' + + if (extname(outputPath).toLowerCase() !== expectedExtension) { + throw new Error(`Output file extension must be ${expectedExtension} when using --format ${format}`) + } + + return outputPath +} + +const toEvent = (row: EventRow) => ({ + id: row.event_id.toString('hex'), + pubkey: row.event_pubkey.toString('hex'), + created_at: row.event_created_at, + kind: row.event_kind, + tags: Array.isArray(row.event_tags) ? row.event_tags : [], + content: row.event_content, + sig: row.event_signature.toString('hex'), +}) + +const createFormatterTransform = ( + format: 'jsonl' | 'json', + onExported: () => void, +): Transform => { + if (format === 'jsonl') { + return new Transform({ + objectMode: true, + transform(row: EventRow, _encoding, callback) { + onExported() + callback(null, JSON.stringify(toEvent(row)) + '\n') + }, + }) + } + + let hasRows = false + return new Transform({ + objectMode: true, + transform(row: EventRow, _encoding, callback) { + const prefix = hasRows ? ',\n' : '[\n' + hasRows = true + onExported() + callback(null, prefix + JSON.stringify(toEvent(row))) + }, + flush(callback) { + callback(null, hasRows ? '\n]\n' : '[]\n') + }, + }) +} + +export async function runExportEvents(args: string[] = process.argv.slice(2), options: ExportOptions = {}): Promise { + const useStructuredFormat = Boolean(options.format) + const structuredFormat = resolveExportFormat(options.format) + const cliOptions = useStructuredFormat ? undefined : parseCliArgs(args) + + if (!useStructuredFormat && cliOptions?.showHelp) { printUsage() - return + return 0 } - const outputPath = path.resolve(options.outputFilePath) + const outputPath = useStructuredFormat + ? resolveOutputPath(args[0], structuredFormat) + : path.resolve(cliOptions?.outputFilePath ?? 
DEFAULT_OUTPUT_FILE_PATH) + const db = getMasterDbClient() const abortController = new AbortController() let interruptedBySignal: NodeJS.Signals | undefined @@ -216,25 +290,28 @@ async function exportEvents(): Promise { const firstEvent = await db('events').select('event_id').whereNull('deleted_at').first() if (abortController.signal.aborted) { - return + return 130 } if (!firstEvent) { - console.log('No events to export.') - return + if (options.json) { + console.log(JSON.stringify({ exported: 0, outputPath, empty: true }, null, 2)) + } else { + console.log('No events to export.') + } + return 0 } - if (options.format) { - console.log(`Exporting events to ${outputPath} using ${getCompressionLabel(options.format)} compression`) + if (useStructuredFormat) { + console.log(`Exporting events to ${outputPath}`) + } else if (cliOptions?.format) { + console.log(`Exporting events to ${outputPath} using ${getCompressionLabel(cliOptions.format)} compression`) } else { console.log(`Exporting events to ${outputPath}`) } const startedAt = Date.now() const output = fs.createWriteStream(outputPath) - const compressionStream = createCompressionStream(options.format) - let exported = 0 - let rawBytes = 0 const dbStream = db('events') .select( @@ -251,25 +328,52 @@ async function exportEvents(): Promise { .orderBy('event_id', 'asc') .stream() + let exported = 0 + + if (useStructuredFormat) { + const formatter = createFormatterTransform(structuredFormat, () => { + exported += 1 + if (exported % 10000 === 0) { + console.log(`Exported ${exported} events...`) + } + }) + + await pipeline(dbStream, formatter, output, { + signal: abortController.signal, + }) + + if (options.json) { + console.log( + JSON.stringify( + { + exported, + outputPath, + format: structuredFormat, + }, + null, + 2, + ), + ) + } else { + console.log(`Export complete: ${exported} events written to ${outputPath} (${structuredFormat})`) + } + + return 0 + } + + const compressionFormat = cliOptions?.format + const 
compressionStream = createCompressionStream(compressionFormat) + let rawBytes = 0 + const toJsonLine = new Transform({ objectMode: true, transform(row: EventRow, _encoding, callback) { - const event = { - id: row.event_id.toString('hex'), - pubkey: row.event_pubkey.toString('hex'), - created_at: row.event_created_at, - kind: row.event_kind, - tags: Array.isArray(row.event_tags) ? row.event_tags : [], - content: row.event_content, - sig: row.event_signature.toString('hex'), - } - - exported++ + exported += 1 if (exported % 10000 === 0) { console.log(`Exported ${exported} events...`) } - const line = JSON.stringify(event) + '\n' + const line = JSON.stringify(toEvent(row)) + '\n' rawBytes += Buffer.byteLength(line) callback(null, line) }, @@ -296,11 +400,13 @@ async function exportEvents(): Promise { console.log( `Throughput: ${formatCount(eventRate)} events/s | ${formatBytes(rawRate)}/s raw | ${formatBytes(outputRate)}/s output`, ) + + return 0 } catch (error) { if (abortController.signal.aborted) { console.log(`Export interrupted by ${interruptedBySignal ?? 
'signal'}.`) process.exitCode = 130 - return + return 130 } throw error @@ -312,7 +418,7 @@ async function exportEvents(): Promise { } if (require.main === module) { - exportEvents().catch((error) => { + runExportEvents().catch((error) => { console.error('Export failed:', error.message) process.exit(1) }) diff --git a/src/services/event-import-service.ts b/src/services/event-import-service.ts index c7071ce9..68f8cdf7 100644 --- a/src/services/event-import-service.ts +++ b/src/services/event-import-service.ts @@ -1,6 +1,10 @@ import fs from 'fs' import readline from 'readline' +const streamArray = require('stream-json/streamers/stream-array.js') as { + withParserAsStream: () => NodeJS.ReadWriteStream +} + import { getEventExpiration, isDeleteEvent, @@ -61,6 +65,12 @@ export interface EventImportOptions { onProgress?: (stats: EventImportStats) => void } +type EventImportCandidate = { + candidate?: unknown + parseError?: unknown + recordNumber: number +} + const getErrorMessage = (error: unknown): string => { if (error instanceof Error) { return error.message @@ -71,7 +81,7 @@ const getErrorMessage = (error: unknown): string => { const isDestroyableStream = ( stream: NodeJS.ReadableStream, -): stream is NodeJS.ReadableStream & { destroy: () => void } => { +): stream is NodeJS.ReadableStream & { destroy: (error?: Error) => void } => { const candidate = stream as { destroy?: unknown } return typeof candidate.destroy === 'function' @@ -79,61 +89,57 @@ const isDestroyableStream = ( export const createEventBatchPersister = (eventRepository: IEventRepository) => - async (events: Event[]): Promise => { - if (!events.length) { - return 0 - } - - let inserted = 0 + async (events: Event[]): Promise => { + if (!events.length) { + return 0 + } - const regularEvents: Event[] = [] - const replaceableEvents: Event[] = [] + let inserted = 0 - for (const event of events) { - if (isEphemeralEvent(event)) { - continue - } + const regularEvents: Event[] = [] + const replaceableEvents: 
Event[] = [] - if (isDeleteEvent(event)) { - // flush pending batches before applying deletes - inserted += await eventRepository.createMany(regularEvents.splice(0)) - inserted += await eventRepository.upsertMany(replaceableEvents.splice(0)) - - const eventIdsToDelete = event.tags.reduce( - (ids, tag) => - tag.length >= 2 - && tag[0] === EventTags.Event - && /^[0-9a-f]{64}$/.test(tag[1]) - ? [...ids, tag[1]] - : ids, - [] as string[] - ) - - if (eventIdsToDelete.length) { - await eventRepository.deleteByPubkeyAndIds(event.pubkey, eventIdsToDelete) - } + for (const event of events) { + if (isEphemeralEvent(event)) { + continue + } - inserted += await eventRepository.create(enrichEventMetadata(event)) - continue - } + if (isDeleteEvent(event)) { + // flush pending batches before applying deletes + inserted += await eventRepository.createMany(regularEvents.splice(0)) + inserted += await eventRepository.upsertMany(replaceableEvents.splice(0)) - const enrichedEvent = enrichEventMetadata(event) + const eventIdsToDelete = event.tags.reduce( + (ids, tag) => + tag.length >= 2 && tag[0] === EventTags.Event && /^[0-9a-f]{64}$/.test(tag[1]) ? 
[...ids, tag[1]] : ids, + [] as string[], + ) - if (isReplaceableEvent(event) || isParameterizedReplaceableEvent(event)) { - replaceableEvents.push(enrichedEvent) - continue + if (eventIdsToDelete.length) { + await eventRepository.deleteByPubkeyAndIds(event.pubkey, eventIdsToDelete) } - regularEvents.push(enrichedEvent) + inserted += await eventRepository.create(enrichEventMetadata(event)) + continue } - // flush remaining - inserted += await eventRepository.createMany(regularEvents) - inserted += await eventRepository.upsertMany(replaceableEvents) + const enrichedEvent = enrichEventMetadata(event) + + if (isReplaceableEvent(event) || isParameterizedReplaceableEvent(event)) { + replaceableEvents.push(enrichedEvent) + continue + } - return inserted + regularEvents.push(enrichedEvent) } + // flush remaining + inserted += await eventRepository.createMany(regularEvents) + inserted += await eventRepository.upsertMany(replaceableEvents) + + return inserted + } + export class EventImportService { public constructor( private readonly persistBatch: (events: Event[]) => Promise, @@ -143,11 +149,87 @@ export class EventImportService { input: NodeJS.ReadableStream, options: EventImportOptions = {}, ): Promise { - const batchSize = ( - typeof options.batchSize === 'number' - && Number.isInteger(options.batchSize) - && options.batchSize > 0 - ) ? 
options.batchSize : DEFAULT_BATCH_SIZE + return this.importFromCandidates(this.readJsonlCandidatesFromStream(input), options) + } + + public async importFromJsonl(filePath: string, options: EventImportOptions = {}): Promise { + const stream = fs.createReadStream(filePath, { + encoding: 'utf-8', + }) + + return this.importFromReadable(stream, options) + } + + public async importFromJsonArray(filePath: string, options: EventImportOptions = {}): Promise { + return this.importFromCandidates(this.readJsonArrayCandidates(filePath), options) + } + + private async *readJsonlCandidatesFromStream(input: NodeJS.ReadableStream): AsyncGenerator { + const lineReader = readline.createInterface({ + crlfDelay: Infinity, + input, + }) + + let lineNumber = 0 + + try { + for await (const line of lineReader) { + lineNumber += 1 + + const trimmedLine = line.trim() + if (!trimmedLine.length) { + continue + } + + try { + yield { + recordNumber: lineNumber, + candidate: JSON.parse(trimmedLine), + } + } catch (error) { + yield { + recordNumber: lineNumber, + parseError: error, + } + } + } + } finally { + lineReader.close() + if (isDestroyableStream(input)) { + input.destroy() + } + } + } + + private async *readJsonArrayCandidates(filePath: string): AsyncGenerator { + const source = fs.createReadStream(filePath, { + encoding: 'utf-8', + }) + const arrayStream = streamArray.withParserAsStream() + const pipeline = source.pipe(arrayStream) + + try { + for await (const chunk of pipeline as AsyncIterable<{ key: number; value: unknown }>) { + yield { + recordNumber: chunk.key + 1, + candidate: chunk.value, + } + } + } catch (error) { + throw new Error(`Invalid JSON array input: ${getErrorMessage(error)}`) + } finally { + source.destroy() + } + } + + private async importFromCandidates( + candidates: AsyncIterable, + options: EventImportOptions = {}, + ): Promise { + const batchSize = + typeof options.batchSize === 'number' && Number.isInteger(options.batchSize) && options.batchSize > 0 + ? 
options.batchSize + : DEFAULT_BATCH_SIZE const onLineError = options.onLineError ?? (() => undefined) const onProgress = options.onProgress ?? (() => undefined) @@ -162,8 +244,6 @@ export class EventImportService { skipped: 0, } - let lineNumber = 0 - const flushBatch = async () => { if (!batch.length) { return @@ -173,9 +253,7 @@ export class EventImportService { const inserted = await this.persistBatch(batch) if (!Number.isInteger(inserted) || inserted < 0 || inserted > currentBatchSize) { - throw new Error( - `Invalid insert count (${inserted}) for batch size ${currentBatchSize}`, - ) + throw new Error(`Invalid insert count (${inserted}) for batch size ${currentBatchSize}`) } stats.inserted += inserted @@ -185,69 +263,49 @@ export class EventImportService { onProgress({ ...stats }) } - const lineReader = readline.createInterface({ - crlfDelay: Infinity, - input, - }) - - try { - for await (const line of lineReader) { - lineNumber += 1 - - const trimmedLine = line.trim() - if (!trimmedLine.length) { - continue - } - + for await (const { recordNumber, candidate, parseError } of candidates) { + if (parseError) { stats.processed += 1 + stats.errors += 1 + onLineError({ + lineNumber: recordNumber, + reason: getErrorMessage(parseError), + }) + continue + } - let event: Event - try { - event = validateEventSchema(JSON.parse(trimmedLine)) as Event + stats.processed += 1 - if (!await isEventIdValid(event)) { - throw new Error('invalid: event id does not match') - } - - if (!await isEventSignatureValid(event)) { - throw new Error('invalid: event signature verification failed') - } - } catch (error) { - stats.errors += 1 - onLineError({ - lineNumber, - reason: getErrorMessage(error), - }) + let event: Event + try { + event = validateEventSchema(candidate) as Event - continue + if (!(await isEventIdValid(event))) { + throw new Error('invalid: event id does not match') } - batch.push(event) - - if (batch.length >= batchSize) { - await flushBatch() + if (!(await 
isEventSignatureValid(event))) { + throw new Error('invalid: event signature verification failed') } + } catch (error) { + stats.errors += 1 + onLineError({ + lineNumber: recordNumber, + reason: getErrorMessage(error), + }) + + continue } - await flushBatch() + batch.push(event) - return stats - } finally { - lineReader.close() - if (isDestroyableStream(input)) { - input.destroy() + if (batch.length >= batchSize) { + await flushBatch() } } - } - public async importFromJsonl( - filePath: string, - options: EventImportOptions = {}, - ): Promise { - const stream = fs.createReadStream(filePath, { - encoding: 'utf-8', - }) + await flushBatch() - return this.importFromReadable(stream, options) + return stats } } diff --git a/test/unit/cli/cli.integration.spec.ts b/test/unit/cli/cli.integration.spec.ts new file mode 100644 index 00000000..69462de2 --- /dev/null +++ b/test/unit/cli/cli.integration.spec.ts @@ -0,0 +1,488 @@ +import { expect } from 'chai' +import fs from 'fs' +import os from 'os' +import path from 'path' +import { spawn } from 'child_process' + +const projectRoot = process.cwd() + +type CliResult = { + code: number + stdout: string + stderr: string +} + +const runCli = (args: string[], env: NodeJS.ProcessEnv = {}): Promise => { + return new Promise((resolve, reject) => { + const child = spawn('node', ['dist/src/cli/index.js', ...args], { + cwd: projectRoot, + env: { + ...process.env, + ...env, + }, + stdio: 'pipe', + }) + + let stdout = '' + let stderr = '' + + child.stdout.on('data', (chunk) => { + stdout += chunk.toString() + }) + + child.stderr.on('data', (chunk) => { + stderr += chunk.toString() + }) + + child.on('error', reject) + child.on('close', (code) => { + resolve({ + code: code ?? 
1, + stdout, + stderr, + }) + }) + }) +} + +const runPnpmCli = (args: string[], env: NodeJS.ProcessEnv = {}): Promise => { + return new Promise((resolve, reject) => { + const child = spawn('pnpm', ['run', 'cli', ...args], { + cwd: projectRoot, + env: { + ...process.env, + ...env, + }, + stdio: 'pipe', + }) + + let stdout = '' + let stderr = '' + + child.stdout.on('data', (chunk) => { + stdout += chunk.toString() + }) + + child.stderr.on('data', (chunk) => { + stderr += chunk.toString() + }) + + child.on('error', reject) + child.on('close', (code) => { + resolve({ + code: code ?? 1, + stdout, + stderr, + }) + }) + }) +} + +const createShimCommand = (dir: string, name: string, scriptBody: string) => { + const target = path.join(dir, name) + fs.writeFileSync( + target, + ['#!/usr/bin/env bash', 'set -euo pipefail', scriptBody].join('\n'), + 'utf-8', + ) + fs.chmodSync(target, 0o755) +} + +const parsePackJsonOutput = (output: string): T => { + const start = output.search(/^\s*[\[{]/m) + if (start === -1) { + throw new Error(`No JSON payload found in pack output: ${output}`) + } + return JSON.parse(output.slice(start).trim()) as T +} + +const runCommand = (command: string, args: string[]): Promise => { + return new Promise((resolve, reject) => { + const child = spawn(command, args, { + cwd: projectRoot, + env: process.env, + stdio: 'pipe', + }) + + let stdout = '' + let stderr = '' + + child.stdout.on('data', (chunk) => { + stdout += chunk.toString() + }) + + child.stderr.on('data', (chunk) => { + stderr += chunk.toString() + }) + + child.on('error', reject) + child.on('close', (code) => { + resolve({ + code: code ?? 
1, + stdout, + stderr, + }) + }) + }) +} + +describe('cli integration (spawn)', function () { + this.timeout(30000) + + it('shows top-level help', async () => { + const result = await runCli(['--help']) + + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Usage:') + expect(result.stdout).to.include('config [...args]') + expect(result.stdout).to.include('update [...args]') + expect(result.stdout).to.include('clean') + }) + + it('supports pnpm run cli as the documented entry point', async () => { + const result = await runPnpmCli(['--help']) + + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Usage:') + expect(result.stdout).to.include('start [...args]') + }) + + it('keeps package bin mapping aligned with TypeScript build output path', () => { + const pkg = JSON.parse(fs.readFileSync(path.join(projectRoot, 'package.json'), 'utf-8')) as { + files?: string[] + bin?: string | { nostream?: string } + } + const binPath = typeof pkg.bin === 'string' ? pkg.bin : pkg.bin?.nostream + expect(binPath).to.equal('./dist/src/cli/index.js') + expect(pkg.files).to.include('dist') + }) + + it('packs the built CLI and runtime assets required for installation', async () => { + const result = await runCommand('pnpm', ['pack', '--dry-run', '--json']) + + expect(result.code).to.equal(0) + const packSummary = parsePackJsonOutput<{ + files: Array<{ path: string }> + }>(result.stdout) + const packedFiles = new Set(packSummary.files.map((file) => file.path)) + + expect(packedFiles.has('package.json')).to.equal(true) + expect(packedFiles.has('dist/src/cli/index.js')).to.equal(true) + expect(packedFiles.has('resources/default-settings.yaml')).to.equal(true) + expect(packedFiles.has('docker-compose.yml')).to.equal(true) + }) + + it('shows nested subcommand help', async () => { + const configGet = await runCli(['config', 'get', '--help']) + const devClean = await runCli(['dev', 'db:clean', '--help']) + + expect(configGet.code).to.equal(0) + 
expect(configGet.stdout).to.include('Usage: nostream config get ') + + expect(devClean.code).to.equal(0) + expect(devClean.stdout).to.include('Usage: nostream dev db:clean') + }) + + it('returns usage exit code for unknown command', async () => { + const result = await runCli(['nope']) + + expect(result.code).to.equal(2) + expect(result.stdout).to.include('Usage:') + }) + + it('prints help and exits 0 with no args in non-interactive mode', async () => { + const result = await runCli([]) + + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Usage:') + }) + + it('supports config set/get with indexed path and validation controls', async () => { + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-config-')) + + const setIndexed = await runCli( + ['config', 'set', 'limits.event.content[0].maxLength', '2048'], + { NOSTR_CONFIG_DIR: configDir }, + ) + expect(setIndexed.code).to.equal(0) + + const getIndexed = await runCli( + ['config', 'get', 'limits.event.content[0].maxLength'], + { NOSTR_CONFIG_DIR: configDir }, + ) + expect(getIndexed.code).to.equal(0) + expect(getIndexed.stdout).to.include('2048') + + const setInvalidValidated = await runCli( + ['config', 'set', 'limits.rateLimiter.strategy', 'broken-strategy'], + { NOSTR_CONFIG_DIR: configDir }, + ) + expect(setInvalidValidated.code).to.equal(1) + + const getStrategyAfterReject = await runCli( + ['config', 'get', 'limits.rateLimiter.strategy'], + { NOSTR_CONFIG_DIR: configDir }, + ) + expect(getStrategyAfterReject.code).to.equal(0) + expect(getStrategyAfterReject.stdout).to.include('ewma') + + const setInvalidNoValidate = await runCli( + ['config', 'set', 'limits.rateLimiter.strategy', 'broken-strategy', '--no-validate'], + { NOSTR_CONFIG_DIR: configDir }, + ) + expect(setInvalidNoValidate.code).to.equal(0) + + const getStrategyAfterNoValidate = await runCli( + ['config', 'get', 'limits.rateLimiter.strategy'], + { NOSTR_CONFIG_DIR: configDir }, + ) + 
expect(getStrategyAfterNoValidate.code).to.equal(0) + expect(getStrategyAfterNoValidate.stdout).to.include('broken-strategy') + }) + + it('supports config set JSON mode', async () => { + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-json-')) + + const setResult = await runCli( + ['config', 'set', 'nip05.domainWhitelist', '["example.com","relay.io"]', '--type', 'json'], + { NOSTR_CONFIG_DIR: configDir }, + ) + + expect(setResult.code).to.equal(0) + + const getResult = await runCli( + ['config', 'get', 'nip05.domainWhitelist'], + { NOSTR_CONFIG_DIR: configDir }, + ) + + expect(getResult.code).to.equal(0) + expect(getResult.stdout).to.include('example.com') + }) + + it('supports import/export aliases and format flags in help', async () => { + const importHelp = await runCli(['import', '--help']) + const exportHelp = await runCli(['export', '--help']) + const startHelp = await runCli(['start', '--help']) + const infoHelp = await runCli(['info', '--help']) + + expect(importHelp.code).to.equal(0) + expect(importHelp.stdout).to.include('--file ') + expect(importHelp.stdout).to.include('Path to .jsonl/.json file') + + expect(exportHelp.code).to.equal(0) + expect(exportHelp.stdout).to.include('--output ') + expect(exportHelp.stdout).to.include('--compress') + expect(exportHelp.stdout).to.include('--format ') + expect(exportHelp.stdout).to.include('jsonl|json|gzip|gz|xz') + + expect(startHelp.code).to.equal(0) + expect(startHelp.stdout).to.include('--nginx') + + expect(infoHelp.code).to.equal(0) + expect(infoHelp.stdout).to.include('--i2p-hostname') + expect(infoHelp.stdout).to.include('--json') + }) + + it('supports json output for info and config reads', async () => { + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-json-read-')) + + const infoResult = await runCli(['info', '--json'], { NOSTR_CONFIG_DIR: configDir }) + expect(infoResult.code).to.equal(0) + expect(() => JSON.parse(infoResult.stdout)).to.not.throw() + 
expect(JSON.parse(infoResult.stdout)).to.have.property('relay') + + const configListResult = await runCli(['config', 'list', '--json'], { NOSTR_CONFIG_DIR: configDir }) + expect(configListResult.code).to.equal(0) + expect(() => JSON.parse(configListResult.stdout)).to.not.throw() + expect(JSON.parse(configListResult.stdout)).to.have.property('payments') + + const configGetResult = await runCli(['config', 'get', 'payments.enabled', '--json'], { + NOSTR_CONFIG_DIR: configDir, + }) + expect(configGetResult.code).to.equal(0) + expect(JSON.parse(configGetResult.stdout)).to.equal(false) + }) + + it('prints json errors for read failures in json mode', async () => { + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-json-error-')) + + const configGetResult = await runCli(['config', 'get', 'payments.fakeField', '--json'], { + NOSTR_CONFIG_DIR: configDir, + }) + expect(configGetResult.code).to.equal(1) + expect(JSON.parse(configGetResult.stderr)).to.deep.equal({ + error: { + message: 'Path not found: payments.fakeField', + code: 1, + }, + }) + }) + + it('validates nginx start requirements', async () => { + const result = await runCli(['start', '--nginx']) + + expect(result.code).to.equal(1) + expect(result.stderr).to.include('RELAY_DOMAIN environment variable is required when using --nginx') + }) + + it('returns usage exit code for unsupported/unknown format flags', async () => { + const importResult = await runCli(['import', '--format', 'yaml']) + const exportResult = await runCli(['export', '--format', 'yaml']) + const conflictingExportResult = await runCli(['export', '--format', 'json', '--compress']) + + expect(importResult.code).to.equal(1) + expect(importResult.stderr).to.include('Unknown option `--format`') + + expect(exportResult.code).to.equal(2) + expect(exportResult.stderr).to.include('Error: Unsupported format: yaml. 
Supported values: json, jsonl, gzip, gz, xz') + expect(exportResult.stderr).to.include('Unsupported format: yaml') + + expect(conflictingExportResult.code).to.equal(2) + expect(conflictingExportResult.stderr).to.include('Cannot combine --compress with --format json/jsonl') + }) + + it('rejects out-of-range start port values', async () => { + const result = await runCli(['start', '--port', '70000']) + + expect(result.code).to.equal(1) + expect(result.stderr).to.include('Port must be a safe integer between 1 and 65535') + }) + + it('invokes docker compose stack through start command using shims', async () => { + const shimDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-docker-')) + const logPath = path.join(shimDir, 'docker.log') + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-config-')) + + createShimCommand( + shimDir, + 'docker', + [ + `echo "$*" >> "${logPath}"`, + 'exit 0', + ].join('\n'), + ) + + const result = await runCli(['start', '--tor', '--i2p', '--debug'], { + PATH: `${shimDir}:${process.env.PATH}`, + NOSTR_CONFIG_DIR: configDir, + }) + + expect(result.code).to.equal(0) + + const logs = fs.readFileSync(logPath, 'utf-8') + expect(logs).to.include('compose') + expect(logs).to.include('docker-compose.tor.yml') + expect(logs).to.include('docker-compose.i2p.yml') + expect(logs).to.include('up --build --remove-orphans') + }) + + it('cleans temporary port override compose files after start', async () => { + const shimDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-port-')) + const logPath = path.join(shimDir, 'docker.log') + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-port-config-')) + + createShimCommand( + shimDir, + 'docker', + [ + `echo "$*" >> "${logPath}"`, + 'exit 0', + ].join('\n'), + ) + + const before = fs + .readdirSync(os.tmpdir()) + .filter((name) => name.startsWith('nostream-port-override-') && name.endsWith('.yml')).length + + const result = await 
runCli(['start', '--port', '9999'], { + PATH: `${shimDir}:${process.env.PATH}`, + NOSTR_CONFIG_DIR: configDir, + }) + + const after = fs + .readdirSync(os.tmpdir()) + .filter((name) => name.startsWith('nostream-port-override-') && name.endsWith('.yml')).length + + expect(result.code).to.equal(0) + expect(after).to.equal(before) + }) + + it('supports config env subcommand help', async () => { + const result = await runCli(['config', 'env', '--help']) + + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Usage: nostream config env ') + }) + + it('runs legacy clean replacement command', async () => { + const shimDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-clean-')) + const logPath = path.join(shimDir, 'docker.log') + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-clean-config-')) + + createShimCommand( + shimDir, + 'docker', + [ + `echo "$*" >> "${logPath}"`, + 'exit 0', + ].join('\n'), + ) + + const result = await runCli(['clean'], { + PATH: `${shimDir}:${process.env.PATH}`, + NOSTR_CONFIG_DIR: configDir, + }) + + expect(result.code).to.equal(0) + + const logs = fs.readFileSync(logPath, 'utf-8') + expect(logs).to.include('compose') + expect(logs).to.include('down') + expect(logs).to.include('system prune -a -f') + expect(logs).to.include('volume prune -f') + }) + + it('runs legacy update replacement command', async () => { + const shimDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-update-')) + const dockerLogPath = path.join(shimDir, 'docker.log') + const gitLogPath = path.join(shimDir, 'git.log') + const configDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-cli-shim-update-config-')) + + createShimCommand( + shimDir, + 'docker', + [ + `echo "$*" >> "${dockerLogPath}"`, + 'exit 0', + ].join('\n'), + ) + + createShimCommand( + shimDir, + 'git', + [ + `echo "$*" >> "${gitLogPath}"`, + 'if [[ "$1" == "stash" && "$2" == "push" ]]; then', + ' echo "No local changes to save"', + 'fi', + 
'exit 0', + ].join('\n'), + ) + + const result = await runCli(['update'], { + PATH: `${shimDir}:${process.env.PATH}`, + NOSTR_CONFIG_DIR: configDir, + }) + + expect(result.code).to.equal(0) + + const dockerLogs = fs.readFileSync(dockerLogPath, 'utf-8') + expect(dockerLogs).to.include('compose') + expect(dockerLogs).to.include('down') + expect(dockerLogs).to.include('up --build --remove-orphans') + + const gitLogs = fs.readFileSync(gitLogPath, 'utf-8') + expect(gitLogs).to.include('stash push -u -m nostream-cli-update') + expect(gitLogs).to.include('pull') + }) +}) diff --git a/test/unit/cli/commands.spec.ts b/test/unit/cli/commands.spec.ts new file mode 100644 index 00000000..adc4dfaf --- /dev/null +++ b/test/unit/cli/commands.spec.ts @@ -0,0 +1,18 @@ +import { expect } from 'chai' + +import { isSupportedEnvKey, validateEnvPair } from '../../../src/cli/utils/env-config' + +describe('cli env config helpers', () => { + it('accepts supported env keys', () => { + expect(isSupportedEnvKey('RELAY_PORT')).to.equal(true) + expect(isSupportedEnvKey('RR0_DB_HOST')).to.equal(true) + expect(isSupportedEnvKey('UNKNOWN_KEY')).to.equal(false) + }) + + it('validates numeric and boolean env values', () => { + expect(validateEnvPair('RELAY_PORT', '8008')).to.equal(undefined) + expect(validateEnvPair('RELAY_PORT', 'bad')).to.include('must be an integer') + expect(validateEnvPair('READ_REPLICA_ENABLED', 'true')).to.equal(undefined) + expect(validateEnvPair('READ_REPLICA_ENABLED', 'yes')).to.include('must be true or false') + }) +}) diff --git a/test/unit/cli/config.spec.ts b/test/unit/cli/config.spec.ts new file mode 100644 index 00000000..e65e678c --- /dev/null +++ b/test/unit/cli/config.spec.ts @@ -0,0 +1,78 @@ +import { expect } from 'chai' + +import { + getByPath, + parseTypedValue, + parseValue, + setByPath, + validatePathAgainstDefaults, + validateSettings, +} from '../../../src/cli/utils/config' + +describe('cli config utils', () => { + it('parses primitive values', () => { + 
expect(parseValue('true')).to.equal(true) + expect(parseValue('false')).to.equal(false) + expect(parseValue('42')).to.equal(42) + expect(parseValue('42n')).to.equal(42n) + expect(parseValue('null')).to.equal(null) + expect(parseValue('hello')).to.equal('hello') + }) + + it('parses typed json values', () => { + expect(parseTypedValue('{"enabled":true}', 'json')).to.deep.equal({ enabled: true }) + expect(parseTypedValue('[1,2,3]', 'json')).to.deep.equal([1, 2, 3]) + expect(() => parseTypedValue('{', 'json')).to.throw('Invalid JSON value') + }) + + it('sets and gets dot-path values', () => { + const input = { + payments: { + enabled: false, + }, + } + + const updated = setByPath(input as any, 'payments.enabled', true) + + expect(getByPath(updated, 'payments.enabled')).to.equal(true) + expect(getByPath(updated, 'payments')).to.deep.equal({ enabled: true }) + expect(getByPath(updated, 'payments.processor')).to.equal(undefined) + }) + + it('supports indexed path syntax', () => { + const input = { + limits: { + event: { + content: [ + { + maxLength: 100, + }, + ], + }, + }, + } + + const updated = setByPath(input as any, 'limits.event.content[0].maxLength', 500) + + expect(getByPath(updated, 'limits.event.content[0].maxLength')).to.equal(500) + }) + + it('rejects malformed path syntax', () => { + expect(() => setByPath({} as any, 'payments[]', true)).to.throw('Invalid path segment') + }) + + it('validates known paths against defaults', () => { + expect(validatePathAgainstDefaults('payments.enabled')).to.deep.equal([]) + expect(validatePathAgainstDefaults('limits.event.content[0].maxLength')).to.deep.equal([]) + + const issues = validatePathAgainstDefaults('payments.fakeField') + expect(issues[0].message).to.include('does not exist') + }) + + it('validates basic required fields', () => { + const issues = validateSettings({} as any) + + expect(issues.some((issue) => issue.path === 'info.relay_url')).to.equal(true) + expect(issues.some((issue) => issue.path === 
'network')).to.equal(true) + }) +}) diff --git a/test/unit/cli/docker.spec.ts b/test/unit/cli/docker.spec.ts new file mode 100644 index 00000000..964b257d --- /dev/null +++ b/test/unit/cli/docker.spec.ts @@ -0,0 +1,34 @@ +import { expect } from 'chai' +import fs from 'fs' +import os from 'os' +import path from 'path' +import sinon from 'sinon' + +import { buildComposeArgs, createPortOverrideComposeFile } from '../../../src/cli/utils/docker' + +describe('cli docker utils', () => { + afterEach(() => { + sinon.restore() + }) + + it('includes only existing compose files', () => { + const existsSyncStub = sinon.stub(fs, 'existsSync').callsFake((input) => String(input).includes('docker-compose.yml')) + + const args = buildComposeArgs(['docker-compose.yml', 'docker-compose.tor.yml'], ['up']) + + expect(existsSyncStub.called).to.equal(true) + expect(args).to.include('up') + expect(args).to.include(path.join(process.cwd(), 'docker-compose.yml')) + expect(args).to.not.include(path.join(process.cwd(), 'docker-compose.tor.yml')) + }) + + it('creates a temporary port override compose file', () => { + const tempFile = createPortOverrideComposeFile(9999) + const content = fs.readFileSync(tempFile, 'utf-8') + + expect(tempFile.startsWith(os.tmpdir())).to.equal(true) + expect(content).to.include('127.0.0.1:9999:9999') + + fs.unlinkSync(tempFile) + }) +}) diff --git a/test/unit/cli/docs.spec.ts b/test/unit/cli/docs.spec.ts new file mode 100644 index 00000000..c4bbf565 --- /dev/null +++ b/test/unit/cli/docs.spec.ts @@ -0,0 +1,41 @@ +import { expect } from 'chai' +import fs from 'fs' +import path from 'path' + +describe('cli documentation alignment', () => { + const projectRoot = process.cwd() + + it('documents removed legacy wrapper scripts explicitly', () => { + const readme = fs.readFileSync(path.join(projectRoot, 'README.md'), 'utf-8') + const cliDoc = fs.readFileSync(path.join(projectRoot, 'CLI.md'), 'utf-8') + + expect(readme).to.include('The old shell wrapper scripts are no 
longer shipped in `scripts/`.') + expect(cliDoc).to.include('The old shell wrapper scripts are no longer shipped in `scripts/`.') + }) + + it('documents the guided TUI configure flow and fallback behavior', () => { + const cliDoc = fs.readFileSync(path.join(projectRoot, 'CLI.md'), 'utf-8') + + expect(cliDoc).to.include('When run with no arguments in an interactive terminal, `nostream` launches an interactive TUI.') + expect(cliDoc).to.include('Configure menu offers guided editing for common categories such as payments, network, and limits.') + expect(cliDoc).to.include('Advanced dot-path get/set remains available for full settings access.') + }) + + it('does not ship removed legacy wrapper scripts', () => { + const removedWrappers = [ + 'start', + 'start_with_tor', + 'start_with_i2p', + 'start_with_nginx', + 'stop', + 'print_tor_hostname', + 'print_i2p_hostname', + 'update', + 'clean', + ] + + for (const wrapper of removedWrappers) { + expect(fs.existsSync(path.join(projectRoot, 'scripts', wrapper))).to.equal(false, wrapper) + } + }) +}) diff --git a/test/unit/cli/export-command.spec.ts b/test/unit/cli/export-command.spec.ts new file mode 100644 index 00000000..8a6f1a9e --- /dev/null +++ b/test/unit/cli/export-command.spec.ts @@ -0,0 +1,46 @@ +import { expect } from 'chai' +import sinon from 'sinon' + +import * as exportEventsModule from '../../../src/scripts/export-events' +import { runExport } from '../../../src/cli/commands/export' + +describe('runExport command adapter', () => { + afterEach(() => { + sinon.restore() + }) + + it('forwards legacy compression flags to export-events runtime', async () => { + const runExportEventsStub = sinon.stub(exportEventsModule, 'runExportEvents').resolves(0) + + const code = await runExport( + { + output: 'backup.jsonl.gz', + compress: true, + compressionFormat: 'gzip', + }, + [], + ) + + expect(code).to.equal(0) + expect(runExportEventsStub.calledOnce).to.equal(true) + 
expect(runExportEventsStub.firstCall.args[0]).to.deep.equal(['backup.jsonl.gz', '--compress', '--format', 'gzip']) + expect(runExportEventsStub.firstCall.args[1]).to.deep.equal({ format: undefined }) + }) + + it('keeps structured format in options while removing handled raw args', async () => { + const runExportEventsStub = sinon.stub(exportEventsModule, 'runExportEvents').resolves(0) + + const code = await runExport( + { + output: 'backup.json', + format: 'json', + }, + ['--format', 'json', '--compress', '-z', '--unknown-flag'], + ) + + expect(code).to.equal(0) + expect(runExportEventsStub.calledOnce).to.equal(true) + expect(runExportEventsStub.firstCall.args[0]).to.deep.equal(['backup.json', '--unknown-flag']) + expect(runExportEventsStub.firstCall.args[1]).to.deep.equal({ format: 'json' }) + }) +}) diff --git a/test/unit/cli/export.spec.ts b/test/unit/cli/export.spec.ts new file mode 100644 index 00000000..f4194b85 --- /dev/null +++ b/test/unit/cli/export.spec.ts @@ -0,0 +1,117 @@ +import { expect } from 'chai' +import fs from 'fs' +import os from 'os' +import path from 'path' +import { Readable } from 'stream' +import sinon from 'sinon' + +import * as dbClient from '../../../src/database/client' +import { runExportEvents } from '../../../src/scripts/export-events' + +type EventRow = { + event_id: Buffer + event_pubkey: Buffer + event_kind: number + event_created_at: number + event_content: string + event_tags: unknown[] | null + event_signature: Buffer +} + +const createRow = (idHex: string, createdAt: number): EventRow => ({ + event_id: Buffer.from(idHex, 'hex'), + event_pubkey: Buffer.from('11'.repeat(32), 'hex'), + event_kind: 1, + event_created_at: createdAt, + event_content: `event-${createdAt}`, + event_tags: [['p', 'abc']], + event_signature: Buffer.from('22'.repeat(64), 'hex'), +}) + +const createMockDb = (rows: EventRow[]) => { + const makeQuery = () => ({ + select() { + return this + }, + whereNull() { + return this + }, + orderBy() { + return this + 
}, + first: async () => (rows[0] ? { event_id: rows[0].event_id } : undefined), + stream: () => Readable.from(rows), + }) + + const db = ((table: string) => { + if (table !== 'events') { + throw new Error(`Unexpected table: ${table}`) + } + + return makeQuery() + }) as unknown as ((table: string) => ReturnType) & { destroy: () => Promise } + + db.destroy = async () => {} + return db +} + +describe('cli export formats', () => { + afterEach(() => { + sinon.restore() + }) + + it('exports JSON array format when --format json is selected', async () => { + const rows = [createRow('aa'.repeat(32), 100), createRow('bb'.repeat(32), 200)] + sinon.stub(dbClient, 'getMasterDbClient').returns(createMockDb(rows) as any) + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-export-json-')) + const outputPath = path.join(tempDir, 'events.json') + + const code = await runExportEvents([outputPath], { format: 'json' }) + expect(code).to.equal(0) + + const fileContent = fs.readFileSync(outputPath, 'utf-8') + const parsed = JSON.parse(fileContent) as Array<{ id: string; kind: number }> + expect(parsed).to.have.length(2) + expect(parsed[0].id).to.equal('aa'.repeat(32)) + expect(parsed[1].id).to.equal('bb'.repeat(32)) + expect(parsed[0].kind).to.equal(1) + }) + + it('exports JSON Lines format by default', async () => { + const rows = [createRow('cc'.repeat(32), 300)] + sinon.stub(dbClient, 'getMasterDbClient').returns(createMockDb(rows) as any) + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-export-jsonl-')) + const outputPath = path.join(tempDir, 'events.jsonl') + + const code = await runExportEvents([outputPath], { format: 'jsonl' }) + expect(code).to.equal(0) + + const lines = fs + .readFileSync(outputPath, 'utf-8') + .trim() + .split('\n') + .filter(Boolean) + + expect(lines).to.have.length(1) + const first = JSON.parse(lines[0]) as { id: string } + expect(first.id).to.equal('cc'.repeat(32)) + }) + + it('rejects mismatched output extension for 
selected format', async () => { + const rows = [createRow('dd'.repeat(32), 400)] + sinon.stub(dbClient, 'getMasterDbClient').returns(createMockDb(rows) as any) + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-export-invalid-ext-')) + const outputPath = path.join(tempDir, 'events.json') + + try { + await runExportEvents([outputPath], { format: 'jsonl' }) + expect.fail('Expected runExportEvents to throw for mismatched extension') + } catch (error) { + const message = error instanceof Error ? error.message : String(error) + expect(message).to.include('Output file extension must be .jsonl when using --format jsonl') + } + }) +}) diff --git a/test/unit/cli/import.runtime.spec.ts b/test/unit/cli/import.runtime.spec.ts new file mode 100644 index 00000000..26af1772 --- /dev/null +++ b/test/unit/cli/import.runtime.spec.ts @@ -0,0 +1,86 @@ +import { expect } from 'chai' +import fs from 'fs' +import os from 'os' +import path from 'path' +import sinon from 'sinon' + +import { runImportEvents } from '../../../src/import-events' +import * as dbClient from '../../../src/database/client' +import { EventImportService, EventImportStats } from '../../../src/services/event-import-service' + +const makeTempFile = (name: string, content: string): string => { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-import-runtime-')) + const filePath = path.join(dir, name) + fs.writeFileSync(filePath, content, 'utf-8') + return filePath +} + +const stubDb = () => { + return { + destroy: sinon.stub().resolves(), + } as any +} + +const emptyStats = (): EventImportStats => ({ + errors: 0, + inserted: 0, + processed: 0, + skipped: 0, +}) + +describe('import runtime routing', () => { + afterEach(() => { + sinon.restore() + }) + + it('routes .jsonl input to importFromJsonl', async () => { + const filePath = makeTempFile('events.jsonl', '{"x":1}\n') + + sinon.stub(dbClient, 'getMasterDbClient').returns(stubDb()) + const jsonlStub = 
sinon.stub(EventImportService.prototype, 'importFromReadable').resolves(emptyStats()) + const jsonArrayStub = sinon.stub(EventImportService.prototype, 'importFromJsonArray').resolves(emptyStats()) + + const code = await runImportEvents([filePath]) + + expect(code).to.equal(0) + expect(jsonlStub.calledOnce).to.equal(true) + expect(jsonArrayStub.called).to.equal(false) + }) + + it('routes .json input to importFromJsonArray', async () => { + const filePath = makeTempFile('events.json', '[]') + + sinon.stub(dbClient, 'getMasterDbClient').returns(stubDb()) + const jsonlStub = sinon.stub(EventImportService.prototype, 'importFromJsonl').resolves(emptyStats()) + const jsonArrayStub = sinon.stub(EventImportService.prototype, 'importFromJsonArray').resolves(emptyStats()) + + const code = await runImportEvents([filePath]) + + expect(code).to.equal(0) + expect(jsonArrayStub.calledOnce).to.equal(true) + expect(jsonlStub.called).to.equal(false) + }) + + it('rejects unsupported input extensions', async () => { + const filePath = makeTempFile('events.txt', '') + + try { + await runImportEvents([filePath]) + expect.fail('Expected unsupported extension to throw') + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error) + expect(message).to.include('Input file must have a .jsonl or .json extension') + } + }) + + it('prints help with .json and .jsonl usage', async () => { + const logStub = sinon.stub(console, 'log') + + const code = await runImportEvents(['--help']) + + expect(code).to.equal(0) + const output = logStub.getCalls().map((call) => call.args.join(' ')).join('\n') + expect(output).to.include('file.jsonl|file.json') + expect(output).to.include('nostream import ./events.json --batch-size 1000') + }) +}) diff --git a/test/unit/cli/info.spec.ts b/test/unit/cli/info.spec.ts new file mode 100644 index 00000000..ffe0bd83 --- /dev/null +++ b/test/unit/cli/info.spec.ts @@ -0,0 +1,115 @@ +const { expect } = require('chai') +const fs = require('fs') +const path = require('path') +const sinon = require('sinon') + +const infoCommand = require('../../../dist/src/cli/commands/info.js') +const configUtils = require('../../../dist/src/cli/utils/config.js') +const processUtils = require('../../../dist/src/cli/utils/process.js') + +describe('runInfo', () => { + const keysFile = path.join(process.cwd(), '.nostr', 'i2p', 'data', 'nostream.dat') + + let stdout = '' + let stderr = '' + + beforeEach(() => { + sinon.stub(configUtils, 'loadMergedSettings').returns({}) + sinon.stub(process.stdout, 'write').callsFake(((chunk: string | Uint8Array) => { + stdout += String(chunk) + return true + }) as any) + sinon.stub(process.stderr, 'write').callsFake(((chunk: string | Uint8Array) => { + stderr += String(chunk) + return true + }) as any) + }) + + afterEach(() => { + stdout = '' + stderr = '' + sinon.restore() + }) + + it('prints detected I2P hostnames as JSON', async () => { + sinon.stub(fs, 'existsSync').callsFake((target) => String(target).endsWith('nostream.dat')) + sinon + .stub(processUtils, 'runCommandWithOutput') + .onFirstCall() + .resolves({ code: 1, stdout: '', stderr: '' }) + .onSecondCall() + .resolves({ + code: 0, + stdout: 
'alphaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.b32.i2p\n', + stderr: 'betabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.b32.i2p\n', + }) + + const code = await infoCommand.runInfo({ i2pHostname: true, json: true }) + + expect(code).to.equal(0) + expect(JSON.parse(stdout)).to.deep.equal({ + i2pHostnames: [ + 'alphaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.b32.i2p', + 'betabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.b32.i2p', + ], + }) + expect(stderr).to.equal('') + }) + + it('prints a JSON error when I2P keys are missing', async () => { + sinon.stub(fs, 'existsSync').returns(false) + sinon.stub(processUtils, 'runCommandWithOutput').resolves({ code: 1, stdout: '', stderr: '' }) + + const code = await infoCommand.runInfo({ i2pHostname: true, json: true }) + + expect(code).to.equal(1) + expect(JSON.parse(stderr)).to.deep.equal({ + error: { + message: `I2P destination keys not found. Is the i2pd container running? Expected: ${keysFile}`, + code: 1, + }, + }) + expect(stdout).to.equal('') + }) + + it('prints JSON guidance when no I2P hostname can be extracted', async () => { + sinon.stub(fs, 'existsSync').callsFake((target) => String(target).endsWith('nostream.dat')) + sinon + .stub(processUtils, 'runCommandWithOutput') + .onFirstCall() + .resolves({ code: 1, stdout: '', stderr: '' }) + .onSecondCall() + .resolves({ code: 0, stdout: '', stderr: '' }) + + const code = await infoCommand.runInfo({ i2pHostname: true, json: true }) + + expect(code).to.equal(0) + expect(JSON.parse(stdout)).to.deep.equal({ + i2pHostnames: [], + keysFile, + guidance: { + webConsoleUrl: 'http://127.0.0.1:7070/?page=i2p_tunnels', + consoleQueryCommand: + "docker exec i2pd wget -qO- 'http://127.0.0.1:7070/?page=i2p_tunnels' | grep -oE '[a-z2-7]{52}\\\\.b32\\\\.i2p' | sort -u", + }, + }) + expect(stderr).to.equal('') + }) + + it('keeps non-json I2P hostname output human-readable', async () => { + sinon.stub(fs, 'existsSync').callsFake((target) => 
String(target).endsWith('nostream.dat')) + sinon + .stub(processUtils, 'runCommandWithOutput') + .onFirstCall() + .resolves({ code: 1, stdout: '', stderr: '' }) + .onSecondCall() + .resolves({ code: 0, stdout: '', stderr: '' }) + + const code = await infoCommand.runInfo({ i2pHostname: true }) + + expect(code).to.equal(0) + expect(stdout).to.include(`I2P destination keys exist at: ${keysFile}`) + expect(stdout).to.include('To find your nostream .b32.i2p address, use one of these methods:') + expect(stderr).to.equal('') + }) +}) diff --git a/test/unit/cli/setup.spec.ts b/test/unit/cli/setup.spec.ts new file mode 100644 index 00000000..24a61030 --- /dev/null +++ b/test/unit/cli/setup.spec.ts @@ -0,0 +1,117 @@ +import { expect } from 'chai' +import fs from 'fs' +import os from 'os' +import path from 'path' +import sinon from 'sinon' + +const setupCommand: typeof import('../../../dist/src/cli/commands/setup.js') = require('../../../dist/src/cli/commands/setup.js') + +describe('runSetup', () => { + const originalCwd = process.cwd() + const originalSecret = process.env.SECRET + const originalStdinIsTTY = process.stdin.isTTY + + let tempDir: string + + const writeDefaultSettings = () => { + fs.mkdirSync(path.join(tempDir, 'resources'), { recursive: true }) + fs.writeFileSync(path.join(tempDir, 'resources', 'default-settings.yaml'), 'payments:\n enabled: false\n', 'utf-8') + } + + const readEnv = () => { + return fs.readFileSync(path.join(tempDir, '.env'), 'utf-8') + } + + beforeEach(() => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'nostream-setup-')) + process.chdir(tempDir) + writeDefaultSettings() + delete process.env.SECRET + }) + + afterEach(() => { + sinon.restore() + process.chdir(originalCwd) + process.stdin.isTTY = originalStdinIsTTY + if (originalSecret === undefined) { + delete process.env.SECRET + } else { + process.env.SECRET = originalSecret + } + + fs.rmSync(tempDir, { recursive: true, force: true }) + }) + + it('copies .env.example and replaces the 
placeholder secret', async () => { + fs.writeFileSync( + path.join(tempDir, '.env.example'), + 'SECRET=change_me_to_something_long_and_random # Generate: openssl rand -hex 128\nFOO=bar\n', + 'utf-8', + ) + process.env.SECRET = 'replacement-secret' + + const code = await setupCommand.runSetup({ yes: true }) + + expect(code).to.equal(0) + const envContents = readEnv() + expect(envContents).to.include('SECRET=replacement-secret # Generate: openssl rand -hex 128') + expect(envContents).to.include('FOO=bar') + expect(envContents.match(/^SECRET=/gm)).to.have.length(1) + }) + + it('preserves an existing non-placeholder secret', async () => { + fs.writeFileSync(path.join(tempDir, '.env'), 'SECRET=real-secret\nFOO=bar\n', 'utf-8') + + const code = await setupCommand.runSetup({ yes: true }) + + expect(code).to.equal(0) + expect(readEnv()).to.equal('SECRET=real-secret\nFOO=bar\n') + }) + + it('fills an empty secret from process.env.SECRET', async () => { + fs.writeFileSync(path.join(tempDir, '.env'), 'SECRET= # existing comment\nFOO=bar\n', 'utf-8') + process.env.SECRET = 'env-secret' + + const code = await setupCommand.runSetup({ yes: true }) + + expect(code).to.equal(0) + const envContents = readEnv() + expect(envContents).to.include('SECRET=env-secret # existing comment') + expect(envContents.match(/^SECRET=/gm)).to.have.length(1) + }) + + it('generates a secure fallback secret in non-interactive mode', async () => { + fs.writeFileSync(path.join(tempDir, '.env.example'), 'SECRET=change_me_to_something_long_and_random\n', 'utf-8') + + const code = await setupCommand.runSetup({ yes: true }) + + expect(code).to.equal(0) + const generatedSecret = readEnv().match(/^SECRET=([a-f0-9]+)$/m)?.[1] + expect(generatedSecret).to.not.equal(undefined) + expect(generatedSecret).to.not.equal('change_me_to_something_long_and_random') + expect(generatedSecret).to.have.length(128) + }) + + it('returns 1 when setup is cancelled while entering the secret and does not continue', async () => { + 
const cancelToken = Symbol('cancel') + + process.stdin.isTTY = true + fs.writeFileSync(path.join(tempDir, '.env.example'), 'SECRET=change_me_to_something_long_and_random\n', 'utf-8') + + const textStub = sinon.stub(setupCommand.setupPrompts, 'text').resolves(cancelToken as any) + const isCancelStub = sinon.stub(setupCommand.setupPrompts, 'isCancel').callsFake((value) => value === cancelToken) + const cancelStub = sinon.stub(setupCommand.setupPrompts, 'cancel') + const confirmStub = sinon.stub(setupCommand.setupPrompts, 'confirm') + const outroStub = sinon.stub(setupCommand.setupPrompts, 'outro') + + const code = await setupCommand.runSetup({ yes: false }) + + expect(code).to.equal(1) + expect(textStub.calledOnce).to.equal(true) + expect(isCancelStub.calledOnceWithExactly(cancelToken)).to.equal(true) + expect(cancelStub.calledOnceWithExactly('Setup cancelled')).to.equal(true) + expect(confirmStub.notCalled).to.equal(true) + expect(outroStub.notCalled).to.equal(true) + expect(readEnv()).to.equal('SECRET=change_me_to_something_long_and_random\n') + }) +}) diff --git a/test/unit/cli/tui.spec.ts b/test/unit/cli/tui.spec.ts new file mode 100644 index 00000000..5d0a71af --- /dev/null +++ b/test/unit/cli/tui.spec.ts @@ -0,0 +1,232 @@ +import { expect } from 'chai' +import sinon from 'sinon' + +import * as configCommands from '../../../src/cli/commands/config' +import * as exportCommand from '../../../src/cli/commands/export' +import * as importCommand from '../../../src/cli/commands/import' +import * as startCommand from '../../../src/cli/commands/start' +import * as configureMenu from '../../../src/cli/tui/menus/configure' +import * as devMenu from '../../../src/cli/tui/menus/dev' +import * as manageMenu from '../../../src/cli/tui/menus/manage' +import * as startMenu from '../../../src/cli/tui/menus/start' +import { tuiPrompts } from '../../../src/cli/tui/prompts' + +describe('cli tui menus', () => { + afterEach(() => { + sinon.restore() + }) + + it('routes configure list 
action', async () => { + sinon.stub(tuiPrompts, 'select').resolves('list' as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + const runList = sinon.stub(configCommands, 'runConfigList').resolves(0) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(0) + expect(runList.calledOnceWithExactly()).to.equal(true) + }) + + it('handles configure cancellation', async () => { + sinon.stub(tuiPrompts, 'select').resolves(Symbol('cancel') as any) + sinon.stub(tuiPrompts, 'isCancel').returns(true) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(1) + }) + + it('returns to previous menu on configure back selection', async () => { + sinon.stub(tuiPrompts, 'select').resolves('back' as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(0) + }) + + it('routes guided configure values into config set', async () => { + sinon + .stub(tuiPrompts, 'select') + .onFirstCall() + .resolves('guided' as any) + .onSecondCall() + .resolves('payments' as any) + .onThirdCall() + .resolves('payments.processor' as any) + .onCall(3) + .resolves('lnbits' as any) + sinon + .stub(tuiPrompts, 'confirm') + .onFirstCall() + .resolves(true as any) + .onSecondCall() + .resolves(false as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + sinon.stub(configCommands, 'getConfigTopLevelCategories').returns(['payments', 'limits', 'network']) + const runConfigSet = sinon.stub(configCommands, 'runConfigSet').resolves(0) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(0) + expect(runConfigSet.calledOnceWithExactly('payments.processor', 'lnbits', { + restart: false, + validate: true, + valueType: 'inferred', + })).to.equal(true) + }) + + it('rejects invalid guided numeric input before writing', async () => { + sinon + .stub(tuiPrompts, 'select') + .onFirstCall() + .resolves('guided' as any) + .onSecondCall() + 
.resolves('limits' as any) + .onThirdCall() + .resolves('limits.event.content[0].maxLength' as any) + const textStub = sinon.stub(tuiPrompts, 'text').callsFake(async (options: any) => { + expect(options.validate('bad')).to.equal('Value must be a non-negative integer') + expect(options.validate('2048')).to.equal(undefined) + return '2048' + }) + sinon + .stub(tuiPrompts, 'confirm') + .onFirstCall() + .resolves(true as any) + .onSecondCall() + .resolves(false as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + const runConfigSet = sinon.stub(configCommands, 'runConfigSet').resolves(0) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(0) + expect(textStub.calledOnce).to.equal(true) + expect(runConfigSet.calledOnceWithExactly('limits.event.content[0].maxLength', '2048', { + restart: false, + validate: true, + valueType: 'inferred', + })).to.equal(true) + }) + + it('keeps advanced configure get action available', async () => { + sinon + .stub(tuiPrompts, 'select') + .onFirstCall() + .resolves('get' as any) + .onSecondCall() + .resolves('other' as any) + sinon + .stub(tuiPrompts, 'text') + .resolves('payments.enabled' as any) + sinon.stub(tuiPrompts, 'confirm').resolves(true as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + const runGet = sinon.stub(configCommands, 'runConfigGet').resolves(0) + + const code = await configureMenu.runConfigureMenu() + + expect(code).to.equal(0) + expect(runGet.calledOnceWithExactly('payments.enabled')).to.equal(true) + }) + + it('routes start menu prompt values into start command', async () => { + sinon.stub(tuiPrompts, 'select').resolves('continue' as any) + sinon + .stub(tuiPrompts, 'confirm') + .onFirstCall() + .resolves(true as any) + .onSecondCall() + .resolves(false as any) + .onThirdCall() + .resolves(true as any) + .onCall(3) + .resolves(false as any) + .onCall(4) + .resolves(true as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const runStart = 
sinon.stub(startCommand, 'runStart').resolves(0) + + const code = await startMenu.runStartMenu() + + expect(code).to.equal(0) + expect(runStart.calledOnce).to.equal(true) + expect(runStart.firstCall.args[0]).to.deep.equal({ + tor: true, + i2p: false, + debug: true, + port: undefined, + }) + }) + + it('returns to previous menu on start back selection', async () => { + sinon.stub(tuiPrompts, 'select').resolves('back' as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const code = await startMenu.runStartMenu() + + expect(code).to.equal(0) + }) + + it('maps manage export format selection to export file format', async () => { + sinon + .stub(tuiPrompts, 'select') + .onFirstCall() + .resolves('export' as any) + .onSecondCall() + .resolves('json' as any) + sinon.stub(tuiPrompts, 'text').resolves('events.json' as any) + sinon.stub(tuiPrompts, 'confirm').resolves(true as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const runExport = sinon.stub(exportCommand, 'runExport').resolves(0) + + const code = await manageMenu.runManageMenu() + + expect(code).to.equal(0) + expect(runExport.calledOnceWithExactly({ output: 'events.json', format: 'json' }, [])).to.equal(true) + }) + + it('maps manage import format selection to import file defaults', async () => { + sinon + .stub(tuiPrompts, 'select') + .onFirstCall() + .resolves('import' as any) + .onSecondCall() + .resolves('json' as any) + sinon + .stub(tuiPrompts, 'text') + .onFirstCall() + .resolves('events.json' as any) + .onSecondCall() + .resolves('500' as any) + sinon.stub(tuiPrompts, 'confirm').resolves(true as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const runImport = sinon.stub(importCommand, 'runImport').resolves(0) + + const code = await manageMenu.runManageMenu() + + expect(code).to.equal(0) + expect(runImport.calledOnceWithExactly({ file: 'events.json', batchSize: 500 }, [])).to.equal(true) + }) + + it('returns to previous menu on manage back selection', async () => { + 
sinon.stub(tuiPrompts, 'select').resolves('back' as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const code = await manageMenu.runManageMenu() + + expect(code).to.equal(0) + }) + + it('returns to previous menu on dev back selection', async () => { + sinon.stub(tuiPrompts, 'select').resolves('back' as any) + sinon.stub(tuiPrompts, 'isCancel').returns(false) + + const code = await devMenu.runDevMenu() + + expect(code).to.equal(0) + }) +}) diff --git a/test/unit/cli/update.spec.ts b/test/unit/cli/update.spec.ts new file mode 100644 index 00000000..6c5d9308 --- /dev/null +++ b/test/unit/cli/update.spec.ts @@ -0,0 +1,59 @@ +import { expect } from 'chai' +import sinon from 'sinon' + +import { runUpdate } from '../../../src/cli/commands/update' +import * as processUtils from '../../../src/cli/utils/process' +import * as startCommand from '../../../src/cli/commands/start' +import * as stopCommand from '../../../src/cli/commands/stop' + +describe('runUpdate', () => { + afterEach(() => { + sinon.restore() + }) + + it('attempts to restore stash when pull fails and stash was created', async () => { + sinon.stub(stopCommand, 'runStop').resolves(0) + const runStartStub = sinon.stub(startCommand, 'runStart').resolves(0) + sinon.stub(processUtils, 'runCommandWithOutput').resolves({ + code: 0, + stdout: 'Saved working directory and index state WIP on main: abc123', + stderr: '', + }) + const runCommandStub = sinon + .stub(processUtils, 'runCommand') + .onFirstCall() + .resolves(1) + .onSecondCall() + .resolves(0) + + const code = await runUpdate([]) + + expect(code).to.equal(1) + expect(runCommandStub.firstCall.args).to.deep.equal(['git', ['pull']]) + expect(runCommandStub.secondCall.args).to.deep.equal(['git', ['stash', 'pop']]) + expect(runStartStub.called).to.equal(false) + }) + + it('returns restore failure code when pull and stash restore both fail', async () => { + sinon.stub(stopCommand, 'runStop').resolves(0) + const runStartStub = sinon.stub(startCommand, 
'runStart').resolves(0) + sinon.stub(processUtils, 'runCommandWithOutput').resolves({ + code: 0, + stdout: 'Saved working directory and index state WIP on main: abc123', + stderr: '', + }) + const runCommandStub = sinon + .stub(processUtils, 'runCommand') + .onFirstCall() + .resolves(1) + .onSecondCall() + .resolves(2) + + const code = await runUpdate([]) + + expect(code).to.equal(2) + expect(runCommandStub.firstCall.args).to.deep.equal(['git', ['pull']]) + expect(runCommandStub.secondCall.args).to.deep.equal(['git', ['stash', 'pop']]) + expect(runStartStub.called).to.equal(false) + }) +}) diff --git a/test/unit/import-events.spec.ts b/test/unit/import-events.spec.ts index 00cf5779..1c1756a3 100644 --- a/test/unit/import-events.spec.ts +++ b/test/unit/import-events.spec.ts @@ -44,7 +44,7 @@ describe('parseCliArgs (import-events)', () => { }) it('throws when input file path is missing', () => { - expect(() => parseCliArgs([])).to.throw('Missing input file path') + expect(() => parseCliArgs([])).to.throw('Missing path to .jsonl or .json file') }) it('throws on unknown options including short options', () => { diff --git a/test/unit/seeds/0000-events.spec.ts b/test/unit/seeds/0000-events.spec.ts new file mode 100644 index 00000000..f9f77c8c --- /dev/null +++ b/test/unit/seeds/0000-events.spec.ts @@ -0,0 +1,92 @@ +import { expect } from 'chai' + +import { isEventIdValid, isEventSignatureValid } from '../../../src/utils/event' + +const seedScript = require('../../../seeds/0000-events') +const sourceEvents = require('../../../seeds/events.json') + +type EventRow = { + event_id: Buffer + event_pubkey: Buffer + event_kind: number + event_created_at: number + event_content: string + event_tags: string + event_signature: Buffer +} + +const runSeed = async (requestedCount?: number): Promise => { + if (typeof requestedCount === 'number') { + process.env.NOSTREAM_SEED_COUNT = String(requestedCount) + } else { + delete process.env.NOSTREAM_SEED_COUNT + } + + let rows: EventRow[] 
= [] + + const knex = ((table: string) => { + if (table !== 'events') { + throw new Error(`Unexpected table: ${table}`) + } + + return { + del: async () => undefined, + } + }) as any + + knex.batchInsert = async (_table: string, insertedRows: EventRow[]) => { + rows = insertedRows + } + + await seedScript.seed(knex) + + return rows +} + +describe('seeds/0000-events', () => { + const originalSeedCount = process.env.NOSTREAM_SEED_COUNT + + afterEach(() => { + if (originalSeedCount === undefined) { + delete process.env.NOSTREAM_SEED_COUNT + return + } + + process.env.NOSTREAM_SEED_COUNT = originalSeedCount + }) + + it('keeps default seed behavior when NOSTREAM_SEED_COUNT is not set', async () => { + const rows = await runSeed() + + expect(rows.length).to.equal(sourceEvents.length) + expect(rows[0].event_id.toString('hex')).to.equal(sourceEvents[0].id) + expect(rows[0].event_pubkey.toString('hex')).to.equal(sourceEvents[0].pubkey) + expect(rows[0].event_signature.toString('hex')).to.equal(sourceEvents[0].sig) + }) + + it('generates deterministic valid events when NOSTREAM_SEED_COUNT is set', async () => { + const firstRunRows = await runSeed(5) + const secondRunRows = await runSeed(5) + + expect(firstRunRows.length).to.equal(5) + expect(secondRunRows.length).to.equal(5) + expect(firstRunRows.map((row) => row.event_id.toString('hex'))).to.deep.equal( + secondRunRows.map((row) => row.event_id.toString('hex')), + ) + + for (const row of firstRunRows) { + const event = { + id: row.event_id.toString('hex'), + pubkey: row.event_pubkey.toString('hex'), + created_at: row.event_created_at, + kind: row.event_kind, + tags: JSON.parse(row.event_tags), + content: row.event_content, + sig: row.event_signature.toString('hex'), + } + + expect(await isEventIdValid(event)).to.equal(true) + expect(await isEventSignatureValid(event)).to.equal(true) + } + }) +}) diff --git a/test/unit/services/event-import-service.spec.ts b/test/unit/services/event-import-service.spec.ts index 
359f365e..8aa91870 100644 --- a/test/unit/services/event-import-service.spec.ts +++ b/test/unit/services/event-import-service.spec.ts @@ -31,6 +31,18 @@ describe('EventImportService', () => { return filePath } + const createJsonArrayFile = (value: unknown): string => { + const tmpDir = fs.mkdtempSync(join(os.tmpdir(), 'nostream-import-array-')) + tmpDirs.push(tmpDir) + + const filePath = join(tmpDir, 'events.json') + fs.writeFileSync(filePath, JSON.stringify(value), { + encoding: 'utf-8', + }) + + return filePath + } + afterEach(() => { for (const tmpDir of tmpDirs.splice(0)) { fs.rmSync(tmpDir, { @@ -107,6 +119,44 @@ describe('EventImportService', () => { }) }) + it('imports valid events from JSON array in batches and tracks skipped duplicates', async () => { + const [event] = getEvents() + const filePath = createJsonArrayFile([event, event, event]) + + const batchCalls: Event[][] = [] + const persistBatch = async (events: Event[]): Promise => { + batchCalls.push([...events]) + + if (batchCalls.length === 1) { + return 2 + } + + return 0 + } + + const progressUpdates: EventImportStats[] = [] + + const importer = new EventImportService(persistBatch) + + const stats = await importer.importFromJsonArray(filePath, { + batchSize: 2, + onProgress: (progress) => { + progressUpdates.push(progress) + }, + }) + + expect(stats).to.deep.equal({ + errors: 0, + inserted: 2, + processed: 3, + skipped: 1, + }) + + expect(batchCalls.length).to.equal(2) + expect(progressUpdates.length).to.equal(2) + expect(progressUpdates[progressUpdates.length - 1]).to.deep.equal(stats) + }) + it('counts malformed and invalid events as errors and keeps importing', async () => { const [event] = getEvents() @@ -157,6 +207,50 @@ describe('EventImportService', () => { expect(lineErrors.length).to.equal(3) }) + it('counts malformed and invalid events in JSON array as errors and keeps importing', async () => { + const [event] = getEvents() + + const invalidIdEvent: Event = { + ...event, + content: 
`${event.content} changed`, + } + + const invalidSignatureEvent: Event = { + ...event, + sig: 'f'.repeat(128), + } + + const filePath = createJsonArrayFile([event, 'not-an-event', invalidIdEvent, invalidSignatureEvent]) + + const batchCalls: Event[][] = [] + const persistBatch = async (events: Event[]): Promise => { + batchCalls.push([...events]) + return 1 + } + + const lineErrors: EventImportLineError[] = [] + + const importer = new EventImportService(persistBatch) + + const stats = await importer.importFromJsonArray(filePath, { + batchSize: 10, + onLineError: (lineError) => { + lineErrors.push(lineError) + }, + }) + + expect(stats).to.deep.equal({ + errors: 3, + inserted: 1, + processed: 4, + skipped: 0, + }) + expect(batchCalls.length).to.equal(1) + expect(batchCalls[0].length).to.equal(1) + expect(lineErrors.length).to.equal(3) + expect(lineErrors.map((item) => item.lineNumber)).to.deep.equal([2, 3, 4]) + }) + it('rejects when persistence returns an invalid insert count', async () => { const [event] = getEvents() const filePath = createJsonlFile([JSON.stringify(event)]) @@ -173,6 +267,22 @@ describe('EventImportService', () => { } }) + it('rejects JSON array import when persistence returns an invalid insert count', async () => { + const [event] = getEvents() + const filePath = createJsonArrayFile([event]) + + const persistBatch = async (): Promise => 2 + + const importer = new EventImportService(persistBatch) + + try { + await importer.importFromJsonArray(filePath) + expect.fail('Expected import to reject when persistence returns invalid insert count') + } catch (error) { + expect((error as Error).message).to.include('Invalid insert count') + } + }) + it('propagates persistence failures as import failures', async () => { const [event] = getEvents() const filePath = createJsonlFile([JSON.stringify(event)]) @@ -198,6 +308,60 @@ describe('EventImportService', () => { } }) + it('propagates persistence failures as JSON array import failures', async () => { + const 
[event] = getEvents() + const filePath = createJsonArrayFile([event]) + + const persistBatch = async (): Promise => { + throw new Error('database unavailable') + } + + const lineErrors: EventImportLineError[] = [] + + const importer = new EventImportService(persistBatch) + + try { + await importer.importFromJsonArray(filePath, { + onLineError: (lineError) => { + lineErrors.push(lineError) + }, + }) + expect.fail('Expected import to reject when persistence fails') + } catch (error) { + expect((error as Error).message).to.equal('database unavailable') + expect(lineErrors.length).to.equal(0) + } + }) + + it('fails fast for malformed top-level JSON in JSON array mode', async () => { + const tmpDir = fs.mkdtempSync(join(os.tmpdir(), 'nostream-import-array-malformed-')) + tmpDirs.push(tmpDir) + const filePath = join(tmpDir, 'events.json') + fs.writeFileSync(filePath, '{"broken":', 'utf-8') + + const importer = new EventImportService(async () => 0) + + try { + await importer.importFromJsonArray(filePath) + expect.fail('Expected malformed top-level JSON to fail') + } catch (error) { + expect((error as Error).message).to.include('Invalid JSON array input:') + } + }) + + it('fails fast for non-array top-level JSON in JSON array mode', async () => { + const filePath = createJsonArrayFile({ foo: 'bar' }) + + const importer = new EventImportService(async () => 0) + + try { + await importer.importFromJsonArray(filePath) + expect.fail('Expected non-array top-level JSON to fail') + } catch (error) { + expect((error as Error).message).to.include('Invalid JSON array input:') + } + }) + it('normalizes parameterized replaceable deduplication to first d tag value', async () => { const parameterizedEvent: Event = { id: 'a'.repeat(64),