diff --git a/.claude-plugin/marketplace.json b/.claude-plugin/marketplace.json
index 427fc2e..4fd6dc3 100644
--- a/.claude-plugin/marketplace.json
+++ b/.claude-plugin/marketplace.json
@@ -6,13 +6,13 @@
},
"metadata": {
"description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake",
- "version": "0.6.38"
+ "version": "0.6.37"
},
"plugins": [
{
"name": "hivemind",
"description": "Persistent shared memory powered by Deeplake — captures all session activity and provides cross-session, cross-agent memory search",
- "version": "0.6.38",
+ "version": "0.6.37",
"source": "./claude-code",
"homepage": "https://github.com/activeloopai/hivemind"
}
diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json
index e178805..dddddbd 100644
--- a/.claude-plugin/plugin.json
+++ b/.claude-plugin/plugin.json
@@ -1,7 +1,7 @@
{
"name": "hivemind",
"description": "Cloud-backed persistent memory powered by Deeplake — read, write, and share memory across Claude Code sessions and agents",
- "version": "0.6.38",
+ "version": "0.6.37",
"author": {
"name": "Activeloop",
"url": "https://deeplake.ai"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ece166b..665f2a9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -34,21 +34,6 @@ jobs:
- name: Typecheck
run: npm run typecheck
- - name: Duplication check (jscpd)
- # Threshold 7% is the current baseline (see .jscpd.json). The job
- # fails if a future change pushes duplication above it, so the
- # number is a regression guard — reviewers can see the exact
- # clones in the markdown report uploaded below.
- run: npm run dup
-
- - name: Upload jscpd report
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: jscpd-report
- path: jscpd-report/
- if-no-files-found: ignore
-
- name: Run tests with coverage
# Per-file 80% thresholds for PR #60 files are declared in
# vitest.config.ts under `coverage.thresholds`. Vitest exits non-zero
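For readers tracing the comment above: the per-file thresholds live under coverage.thresholds in vitest.config.ts. A minimal sketch of what such an entry looks like follows; the file path is illustrative, and only the 80% figure comes from the comment itself.

// vitest.config.ts (sketch; path key is a hypothetical example)
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    coverage: {
      thresholds: {
        // Each key scopes its thresholds to matching files; Vitest exits
        // non-zero when a matched file drops below any of these numbers.
        "src/hooks/capture.ts": { lines: 80, functions: 80, branches: 80, statements: 80 },
      },
    },
  },
});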
diff --git a/.gitignore b/.gitignore
index b952b68..4f538ba 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,7 +6,6 @@ tmp/
.env
.env.*
coverage/
-jscpd-report/
bench/
.claude/
CLAUDE.md
diff --git a/.jscpd.json b/.jscpd.json
deleted file mode 100644
index 842983c..0000000
--- a/.jscpd.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "threshold": 7,
- "reporters": ["console", "markdown"],
- "output": "./jscpd-report",
- "ignore": [
- "**/node_modules/**",
- "**/dist/**",
- "**/bundle/**",
- "**/coverage/**",
- "**/*.test.ts",
- "**/tests/**",
- "**/fixtures/**",
- "**/claude-code/.claude-plugin/**",
- "**/codex/.codex-plugin/**"
- ],
- "absolute": false,
- "gitignore": true,
- "format": ["typescript"],
- "minLines": 10,
- "minTokens": 60
-}
diff --git a/claude-code/.claude-plugin/plugin.json b/claude-code/.claude-plugin/plugin.json
index e178805..dddddbd 100644
--- a/claude-code/.claude-plugin/plugin.json
+++ b/claude-code/.claude-plugin/plugin.json
@@ -1,7 +1,7 @@
{
"name": "hivemind",
"description": "Cloud-backed persistent memory powered by Deeplake — read, write, and share memory across Claude Code sessions and agents",
- "version": "0.6.38",
+ "version": "0.6.37",
"author": {
"name": "Activeloop",
"url": "https://deeplake.ai"
diff --git a/claude-code/bundle/capture.js b/claude-code/bundle/capture.js
index 3b5a215..82a4aac 100755
--- a/claude-code/bundle/capture.js
+++ b/claude-code/bundle/capture.js
@@ -2,13 +2,13 @@
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -55,6 +55,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join3 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -76,6 +79,12 @@ function log(tag, msg) {
function sqlStr(value) {
return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
+function sqlIdent(name) {
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+ }
+ return name;
+}
// dist/src/deeplake-api.js
var log2 = (msg) => log("sdk", msg);
@@ -97,8 +106,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes("<html");
+}
@@ ... @@
- await new Promise((resolve) => this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -131,6 +161,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -161,6 +192,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -168,9 +200,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -187,7 +224,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -252,8 +290,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join3(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync2(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -264,22 +355,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -289,6 +383,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -298,48 +394,58 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
-// dist/src/utils/session-path.js
-function buildSessionPath(config, sessionId) {
- const workspace = config.workspaceId ?? "default";
- return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`;
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
}
// dist/src/hooks/summary-state.js
-import { readFileSync as readFileSync2, writeFileSync, writeSync, mkdirSync, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs";
+import { readFileSync as readFileSync3, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs";
import { homedir as homedir3 } from "node:os";
-import { join as join3 } from "node:path";
-var dlog = (msg) => log("summary-state", msg);
-var STATE_DIR = join3(homedir3(), ".claude", "hooks", "summary-state");
+import { join as join4 } from "node:path";
+var STATE_DIR = join4(homedir3(), ".claude", "hooks", "summary-state");
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
function statePath(sessionId) {
- return join3(STATE_DIR, `${sessionId}.json`);
+ return join4(STATE_DIR, `${sessionId}.json`);
}
function lockPath(sessionId) {
- return join3(STATE_DIR, `${sessionId}.lock`);
+ return join4(STATE_DIR, `${sessionId}.lock`);
}
function readState(sessionId) {
const p = statePath(sessionId);
- if (!existsSync2(p))
+ if (!existsSync3(p))
return null;
try {
- return JSON.parse(readFileSync2(p, "utf-8"));
+ return JSON.parse(readFileSync3(p, "utf-8"));
} catch {
return null;
}
}
function writeState(sessionId, state) {
- mkdirSync(STATE_DIR, { recursive: true });
+ mkdirSync2(STATE_DIR, { recursive: true });
const p = statePath(sessionId);
const tmp = `${p}.${process.pid}.${Date.now()}.tmp`;
- writeFileSync(tmp, JSON.stringify(state));
+ writeFileSync2(tmp, JSON.stringify(state));
renameSync(tmp, p);
}
function withRmwLock(sessionId, fn) {
- mkdirSync(STATE_DIR, { recursive: true });
+ mkdirSync2(STATE_DIR, { recursive: true });
const rmwLock = statePath(sessionId) + ".rmw";
const deadline = Date.now() + 2e3;
let fd = null;
@@ -350,11 +456,9 @@ function withRmwLock(sessionId, fn) {
if (e.code !== "EEXIST")
throw e;
if (Date.now() > deadline) {
- dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
continue;
}
@@ -367,8 +471,7 @@ function withRmwLock(sessionId, fn) {
closeSync(fd);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
}
}
@@ -401,20 +504,18 @@ function shouldTrigger(state, cfg, now = Date.now()) {
return false;
}
function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
- mkdirSync(STATE_DIR, { recursive: true });
+ mkdirSync2(STATE_DIR, { recursive: true });
const p = lockPath(sessionId);
- if (existsSync2(p)) {
+ if (existsSync3(p)) {
try {
- const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10);
+ const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10);
if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
return false;
- } catch (readErr) {
- dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
+ } catch {
}
try {
unlinkSync(p);
- } catch (unlinkErr) {
- dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
return false;
}
}
@@ -432,45 +533,15 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
throw e;
}
}
-function releaseLock(sessionId) {
- try {
- unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
- }
- }
-}
// dist/src/hooks/spawn-wiki-worker.js
import { spawn, execSync } from "node:child_process";
-import { fileURLToPath } from "node:url";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
import { dirname, join as join5 } from "node:path";
-import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs";
-import { homedir as homedir4, tmpdir } from "node:os";
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join4 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join4(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync2(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
- }
- };
-}
-
-// dist/src/hooks/spawn-wiki-worker.js
+import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs";
+import { homedir as homedir4, tmpdir as tmpdir2 } from "node:os";
var HOME = homedir4();
-var wikiLogger = makeWikiLogger(join5(HOME, ".claude", "hooks"));
-var WIKI_LOG = wikiLogger.path;
+var WIKI_LOG = join5(HOME, ".claude", "hooks", "deeplake-wiki.log");
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. Think of this as building a knowledge graph, not writing a summary.
SESSION JSONL path: __JSONL__
@@ -523,7 +594,14 @@ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. Future you w
PRIVACY: Never include absolute filesystem paths (e.g. /home/user/..., /Users/..., C:\\\\...) in the summary. Use only project-relative paths or the project name. The Source and Project fields above are already correct \u2014 do not change them.
LENGTH LIMIT: Keep the total summary under 4000 characters. Be dense and concise \u2014 prioritize facts over prose. If a session is short, the summary should be short too.`;
-var wikiLog = wikiLogger.log;
+function wikiLog(msg) {
+ try {
+ mkdirSync3(join5(HOME, ".claude", "hooks"), { recursive: true });
+ appendFileSync2(WIKI_LOG, `[${utcTimestamp()}] ${msg}
+`);
+ } catch {
+ }
+}
function findClaudeBin() {
try {
return execSync("which claude 2>/dev/null", { encoding: "utf-8" }).trim();
@@ -534,10 +612,10 @@ function findClaudeBin() {
function spawnWikiWorker(opts) {
const { config, sessionId, cwd, bundleDir, reason } = opts;
const projectName = cwd.split("/").pop() || "unknown";
- const tmpDir = join5(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
+ const tmpDir = join5(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
mkdirSync3(tmpDir, { recursive: true });
const configFile = join5(tmpDir, "config.json");
- writeFileSync2(configFile, JSON.stringify({
+ writeFileSync3(configFile, JSON.stringify({
apiUrl: config.apiUrl,
token: config.token,
orgId: config.orgId,
@@ -562,24 +640,280 @@ function spawnWikiWorker(opts) {
wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
function bundleDirFromImportMeta(importMetaUrl) {
- return dirname(fileURLToPath(importMetaUrl));
+ return dirname(fileURLToPath2(importMetaUrl));
}
-// dist/src/hooks/capture.js
-var log3 = (msg) => log("capture", msg);
-var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
-async function main() {
- if (!CAPTURE)
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync3, closeSync as closeSync2, existsSync as existsSync4, mkdirSync as mkdirSync4, openSync as openSync2, readFileSync as readFileSync4, readdirSync, renameSync as renameSync2, rmSync, statSync, writeFileSync as writeFileSync4 } from "node:fs";
+import { dirname as dirname2, join as join6 } from "node:path";
+import { homedir as homedir5 } from "node:os";
+var DEFAULT_QUEUE_DIR = join6(homedir5(), ".deeplake", "queue");
+var DEFAULT_MAX_BATCH_ROWS = 50;
+var DEFAULT_STALE_INFLIGHT_MS = 6e4;
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
+var BUSY_WAIT_STEP_MS = 100;
+var SessionWriteDisabledError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "SessionWriteDisabledError";
+ }
+};
+function buildSessionPath(config, sessionId) {
+ return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`;
+}
+function buildQueuedSessionRow(args) {
+ return {
+ id: crypto.randomUUID(),
+ path: args.sessionPath,
+ filename: args.sessionPath.split("/").pop() ?? "",
+ message: args.line,
+ author: args.userName,
+ sizeBytes: Buffer.byteLength(args.line, "utf-8"),
+ project: args.projectName,
+ description: args.description,
+ agent: args.agent,
+ creationDate: args.timestamp,
+ lastUpdateDate: args.timestamp
+ };
+}
+function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync4(queueDir, { recursive: true });
+ const sessionId = extractSessionId(row.path);
+ const queuePath = getQueuePath(queueDir, sessionId);
+ appendFileSync3(queuePath, `${JSON.stringify(row)}
+`);
+ return queuePath;
+}
+function buildSessionInsertSql(sessionsTable, rows) {
+ if (rows.length === 0)
+ throw new Error("buildSessionInsertSql: rows must not be empty");
+ const table = sqlIdent(sessionsTable);
+ const values = rows.map((row) => {
+ const jsonForSql = sqlStr(coerceJsonbPayload(row.message));
+ return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`;
+ }).join(", ");
+ return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`;
+}
+function coerceJsonbPayload(message) {
+ try {
+ return JSON.stringify(JSON.parse(message));
+ } catch {
+ return JSON.stringify({
+ type: "raw_message",
+ content: message
+ });
+ }
+}
+async function flushSessionQueue(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS;
+ const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS;
+ const waitIfBusyMs = opts.waitIfBusyMs ?? 0;
+ const drainAll = opts.drainAll ?? false;
+ mkdirSync4(queueDir, { recursive: true });
+ const queuePath = getQueuePath(queueDir, opts.sessionId);
+ const inflightPath = getInflightPath(queueDir, opts.sessionId);
+ if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) {
+ return existsSync4(queuePath) || existsSync4(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 };
+ }
+ let totalRows = 0;
+ let totalBatches = 0;
+ let flushedAny = false;
+ while (true) {
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ if (existsSync4(inflightPath)) {
+ if (waitIfBusyMs > 0) {
+ await waitForInflightToClear(inflightPath, waitIfBusyMs);
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ }
+ if (existsSync4(inflightPath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 };
+ }
+ }
+ if (!existsSync4(queuePath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ try {
+ renameSync2(queuePath, inflightPath);
+ } catch (e) {
+ if (e?.code === "ENOENT") {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ throw e;
+ }
+ try {
+ const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows);
+ totalRows += rows;
+ totalBatches += batches;
+ flushedAny = flushedAny || rows > 0;
+ } catch (e) {
+ requeueInflight(queuePath, inflightPath);
+ if (e instanceof SessionWriteDisabledError) {
+ return { status: "disabled", rows: totalRows, batches: totalBatches };
+ }
+ throw e;
+ }
+ if (!drainAll) {
+ return { status: "flushed", rows: totalRows, batches: totalBatches };
+ }
+ }
+}
+function getQueuePath(queueDir, sessionId) {
+ return join6(queueDir, `${sessionId}.jsonl`);
+}
+function getInflightPath(queueDir, sessionId) {
+ return join6(queueDir, `${sessionId}.inflight`);
+}
+function extractSessionId(sessionPath) {
+ const filename = sessionPath.split("/").pop() ?? "";
+ return filename.replace(/\.jsonl$/, "").split("_").pop() ?? filename;
+}
+async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) {
+ const rows = readQueuedRows(inflightPath);
+ if (rows.length === 0) {
+ rmSync(inflightPath, { force: true });
+ return { rows: 0, batches: 0 };
+ }
+ let ensured = false;
+ let batches = 0;
+ const queueDir = dirname2(inflightPath);
+ for (let i = 0; i < rows.length; i += maxBatchRows) {
+ const chunk = rows.slice(i, i + maxBatchRows);
+ const sql = buildSessionInsertSql(sessionsTable, chunk);
+ try {
+ await api.query(sql);
+ } catch (e) {
+ if (isSessionWriteAuthError(e)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(e));
+ }
+ if (!ensured && isEnsureSessionsTableRetryable(e)) {
+ try {
+ await api.ensureSessionsTable(sessionsTable);
+ } catch (ensureError) {
+ if (isSessionWriteAuthError(ensureError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(ensureError));
+ }
+ throw ensureError;
+ }
+ ensured = true;
+ try {
+ await api.query(sql);
+ } catch (retryError) {
+ if (isSessionWriteAuthError(retryError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(retryError));
+ }
+ throw retryError;
+ }
+ } else {
+ throw e;
+ }
+ }
+ batches += 1;
+ }
+ clearSessionWriteDisabled(sessionsTable, queueDir);
+ rmSync(inflightPath, { force: true });
+ return { rows: rows.length, batches };
+}
+function readQueuedRows(path) {
+ const raw = readFileSync4(path, "utf-8");
+ return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
+}
+function requeueInflight(queuePath, inflightPath) {
+ if (!existsSync4(inflightPath))
return;
- const input = await readStdin();
- const config = loadConfig();
- if (!config) {
- log3("no config");
+ const inflight = readFileSync4(inflightPath, "utf-8");
+ appendFileSync3(queuePath, inflight);
+ rmSync(inflightPath, { force: true });
+}
+function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) {
+ if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs))
return;
+ requeueInflight(queuePath, inflightPath);
+}
+function isStale(path, staleInflightMs) {
+ return Date.now() - statSync(path).mtimeMs >= staleInflightMs;
+}
+function isEnsureSessionsTableRetryable(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found");
+}
+function isSessionWriteAuthError(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized");
+}
+function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync4(queueDir, { recursive: true });
+ writeFileSync4(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({
+ disabledAt: (/* @__PURE__ */ new Date()).toISOString(),
+ reason,
+ sessionsTable
+ }));
+}
+function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) {
+ rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true });
+}
+function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) {
+ const path = getSessionWriteDisabledPath(queueDir, sessionsTable);
+ if (!existsSync4(path))
+ return false;
+ try {
+ const raw = readFileSync4(path, "utf-8");
+ const state = JSON.parse(raw);
+ const ageMs = Date.now() - new Date(state.disabledAt).getTime();
+ if (Number.isNaN(ageMs) || ageMs >= ttlMs) {
+ rmSync(path, { force: true });
+ return false;
+ }
+ return true;
+ } catch {
+ rmSync(path, { force: true });
+ return false;
+ }
+}
+function getSessionWriteDisabledPath(queueDir, sessionsTable) {
+ return join6(queueDir, `.${sessionsTable}.disabled.json`);
+}
+function errorMessage(error) {
+ return error instanceof Error ? error.message : String(error);
+}
+async function waitForInflightToClear(inflightPath, waitIfBusyMs) {
+ const startedAt = Date.now();
+ while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) {
+ await sleep2(BUSY_WAIT_STEP_MS);
}
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
- const ts = (/* @__PURE__ */ new Date()).toISOString();
+}
+function sleep2(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+
+// dist/src/hooks/query-cache.js
+import { mkdirSync as mkdirSync5, readFileSync as readFileSync5, rmSync as rmSync2, writeFileSync as writeFileSync5 } from "node:fs";
+import { join as join7 } from "node:path";
+import { homedir as homedir6 } from "node:os";
+var log3 = (msg) => log("query-cache", msg);
+var DEFAULT_CACHE_ROOT = join7(homedir6(), ".deeplake", "query-cache");
+function getSessionQueryCacheDir(sessionId, deps = {}) {
+ const { cacheRoot = DEFAULT_CACHE_ROOT } = deps;
+ return join7(cacheRoot, sessionId);
+}
+function clearSessionQueryCache(sessionId, deps = {}) {
+ const { logFn = log3 } = deps;
+ try {
+ rmSync2(getSessionQueryCacheDir(sessionId, deps), { recursive: true, force: true });
+ } catch (e) {
+ logFn(`clear failed for session=${sessionId}: ${e.message}`);
+ }
+}
+
+// dist/src/hooks/capture.js
+var log4 = (msg) => log("capture", msg);
+var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false";
+function buildCaptureEntry(input, timestamp) {
const meta = {
session_id: input.session_id,
transcript_path: input.transcript_path,
@@ -588,20 +922,18 @@ async function main() {
hook_event_name: input.hook_event_name,
agent_id: input.agent_id,
agent_type: input.agent_type,
- timestamp: ts
+ timestamp
};
- let entry;
if (input.prompt !== void 0) {
- log3(`user session=${input.session_id}`);
- entry = {
+ return {
id: crypto.randomUUID(),
...meta,
type: "user_message",
content: input.prompt
};
- } else if (input.tool_name !== void 0) {
- log3(`tool=${input.tool_name} session=${input.session_id}`);
- entry = {
+ }
+ if (input.tool_name !== void 0) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "tool_call",
@@ -610,75 +942,103 @@ async function main() {
tool_input: JSON.stringify(input.tool_input),
tool_response: JSON.stringify(input.tool_response)
};
- } else if (input.last_assistant_message !== void 0) {
- log3(`assistant session=${input.session_id}`);
- entry = {
+ }
+ if (input.last_assistant_message !== void 0) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "assistant_message",
content: input.last_assistant_message,
...input.agent_transcript_path ? { agent_transcript_path: input.agent_transcript_path } : {}
};
- } else {
- log3("unknown event, skipping");
- return;
}
- const sessionPath = buildSessionPath(config, input.session_id);
- const line = JSON.stringify(entry);
- log3(`writing to ${sessionPath}`);
- const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
- const filename = sessionPath.split("/").pop() ?? "";
- const jsonForSql = line.replace(/'/g, "''");
- const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'claude_code', '${ts}', '${ts}')`;
- try {
- await api.query(insertSql);
- } catch (e) {
- if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
- log3("table missing, creating and retrying");
- await api.ensureSessionsTable(sessionsTable);
- await api.query(insertSql);
- } else {
- throw e;
- }
- }
- log3("capture ok \u2192 cloud");
- maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config);
+ return null;
}
-function maybeTriggerPeriodicSummary(sessionId, cwd, config) {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
+function maybeTriggerPeriodicSummary(sessionId, cwd, config, deps = {}) {
+ const { bundleDir = bundleDirFromImportMeta(import.meta.url), wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", logFn = log4, bumpTotalCountFn = bumpTotalCount, loadTriggerConfigFn = loadTriggerConfig, shouldTriggerFn = shouldTrigger, tryAcquireLockFn = tryAcquireLock, wikiLogFn = wikiLog, spawnWikiWorkerFn = spawnWikiWorker } = deps;
+ if (wikiWorker)
return;
try {
- const state = bumpTotalCount(sessionId);
- const cfg = loadTriggerConfig();
- if (!shouldTrigger(state, cfg))
+ const state = bumpTotalCountFn(sessionId);
+ const cfg = loadTriggerConfigFn();
+ if (!shouldTriggerFn(state, cfg))
return;
- if (!tryAcquireLock(sessionId)) {
- log3(`periodic trigger suppressed (lock held) session=${sessionId}`);
+ if (!tryAcquireLockFn(sessionId)) {
+ logFn(`periodic trigger suppressed (lock held) session=${sessionId}`);
return;
}
- wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
- try {
- spawnWikiWorker({
- config,
- sessionId,
- cwd,
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "Periodic"
- });
- } catch (e) {
- log3(`periodic spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr) {
- log3(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`);
- }
- throw e;
- }
+ wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
+ spawnWikiWorkerFn({
+ config,
+ sessionId,
+ cwd,
+ bundleDir,
+ reason: "Periodic"
+ });
} catch (e) {
- log3(`periodic trigger error: ${e.message}`);
+ logFn(`periodic trigger error: ${e.message}`);
+ }
+}
+async function runCaptureHook(input, deps = {}) {
+ const { captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, flushSessionQueueFn = flushSessionQueue, clearSessionQueryCacheFn = clearSessionQueryCache, maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, logFn = log4 } = deps;
+ if (!captureEnabled)
+ return { status: "disabled" };
+ if (!config) {
+ logFn("no config");
+ return { status: "no_config" };
+ }
+ const ts = now();
+ const entry = buildCaptureEntry(input, ts);
+ if (!entry) {
+ logFn("unknown event, skipping");
+ return { status: "ignored" };
+ }
+ if (input.prompt !== void 0)
+ logFn(`user session=${input.session_id}`);
+ else if (input.tool_name !== void 0)
+ logFn(`tool=${input.tool_name} session=${input.session_id}`);
+ else
+ logFn(`assistant session=${input.session_id}`);
+ if (input.hook_event_name === "UserPromptSubmit") {
+ clearSessionQueryCacheFn(input.session_id);
+ }
+ const sessionPath = buildSessionPath(config, input.session_id);
+ const line = JSON.stringify(entry);
+ const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
+ appendQueuedSessionRowFn(buildQueuedSessionRowFn({
+ sessionPath,
+ line,
+ userName: config.userName,
+ projectName,
+ description: input.hook_event_name ?? "",
+ agent: "claude_code",
+ timestamp: ts
+ }));
+ logFn(`queued ${input.hook_event_name ?? "event"} for ${sessionPath}`);
+ maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config);
+ if (input.hook_event_name === "Stop" || input.hook_event_name === "SubagentStop") {
+ const result = await flushSessionQueueFn(createApi(config), {
+ sessionId: input.session_id,
+ sessionsTable: config.sessionsTableName,
+ drainAll: true
+ });
+ logFn(`flush ${result.status}: rows=${result.rows} batches=${result.batches}`);
+ return { status: "queued", entry, flushStatus: result.status };
}
+ return { status: "queued", entry };
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+async function main() {
+ const input = await readStdin();
+ await runCaptureHook(input);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log4(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ buildCaptureEntry,
+ maybeTriggerPeriodicSummary,
+ runCaptureHook
+};
diff --git a/claude-code/bundle/commands/auth-login.js b/claude-code/bundle/commands/auth-login.js
index 6d4cb13..ff5e179 100755
--- a/claude-code/bundle/commands/auth-login.js
+++ b/claude-code/bundle/commands/auth-login.js
@@ -239,6 +239,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -278,9 +281,30 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes("<html");
+}
@@ ... @@
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -433,8 +465,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join4(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
+ writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -445,22 +530,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -470,6 +558,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -479,7 +569,10 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
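The ensureLookupIndex helpers that this file (and capture.js above) gain are, in effect, a local TTL cache: a marker file records when an index was last confirmed, so repeat CREATE INDEX round-trips are skipped while the marker is fresh. A condensed sketch of the pattern, assuming a throwaway marker directory (the bundle's real location comes from getIndexMarkerDir(), which is outside this excerpt):

import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

const MARKER_DIR = join(tmpdir(), "demo-index-markers"); // stand-in for getIndexMarkerDir()
const TTL_MS = 6 * 60 * 60_000;                          // six hours, mirroring INDEX_MARKER_TTL_MS

function isFresh(key: string): boolean {
  const p = join(MARKER_DIR, `${key}.json`);
  if (!existsSync(p)) return false;
  try {
    const { updatedAt } = JSON.parse(readFileSync(p, "utf-8"));
    const age = Date.now() - new Date(updatedAt).getTime();
    return Number.isFinite(age) && age <= TTL_MS; // NaN or expired markers fail
  } catch {
    return false; // unreadable marker counts as stale
  }
}

async function ensureOnce(key: string, create: () => Promise<void>): Promise<void> {
  if (isFresh(key)) return; // skip the network round-trip
  await create();           // e.g. CREATE INDEX IF NOT EXISTS ...
  mkdirSync(MARKER_DIR, { recursive: true });
  writeFileSync(join(MARKER_DIR, `${key}.json`), JSON.stringify({ updatedAt: new Date().toISOString() }));
}

Treating an unreadable or expired marker as stale keeps the cache purely an optimization: the worst case is one redundant CREATE INDEX IF NOT EXISTS.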
diff --git a/claude-code/bundle/pre-tool-use.js b/claude-code/bundle/pre-tool-use.js
index cb59c9c..e316382 100755
--- a/claude-code/bundle/pre-tool-use.js
+++ b/claude-code/bundle/pre-tool-use.js
@@ -1,21 +1,19 @@
#!/usr/bin/env node
// dist/src/hooks/pre-tool-use.js
-import { existsSync as existsSync2 } from "node:fs";
-import { join as join3 } from "node:path";
-import { homedir as homedir3 } from "node:os";
-import { fileURLToPath } from "node:url";
-import { dirname } from "node:path";
+import { existsSync as existsSync3 } from "node:fs";
+import { join as join6, dirname } from "node:path";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -62,6 +60,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join3 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -104,8 +105,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes("<html");
+}
@@ ... @@
- await new Promise((resolve) => this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -138,6 +160,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -168,6 +191,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -175,9 +199,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -194,7 +223,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -259,8 +289,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join3(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync2(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -271,22 +354,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -296,6 +382,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -305,10 +393,27 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
// dist/src/shell/grep-core.js
var TOOL_INPUT_FIELDS = [
"command",
@@ -518,29 +623,127 @@ function normalizeContent(path, raw) {
return raw;
return out;
}
+function buildPathCondition(targetPath) {
+ if (!targetPath || targetPath === "/")
+ return "";
+ const clean = targetPath.replace(/\/+$/, "");
+ if (/[*?]/.test(clean)) {
+ const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_");
+ return `path LIKE '${likePattern}'`;
+ }
+ const base = clean.split("/").pop() ?? "";
+ if (base.includes(".")) {
+ return `path = '${sqlStr(clean)}'`;
+ }
+ return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+}
async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
- const { pathFilter, contentScanOnly, likeOp, escapedPattern } = opts;
+ const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts;
const limit = opts.limit ?? 100;
- const memFilter = contentScanOnly ? "" : ` AND summary::text ${likeOp} '%${escapedPattern}%'`;
- const sessFilter = contentScanOnly ? "" : ` AND message::text ${likeOp} '%${escapedPattern}%'`;
- const memQuery = `SELECT path, summary::text AS content FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
- const sessQuery = `SELECT path, message::text AS content FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
- const [memRows, sessRows] = await Promise.all([
- api.query(memQuery).catch(() => []),
- api.query(sessQuery).catch(() => [])
- ]);
- const rows = [];
- for (const r of memRows)
- rows.push({ path: String(r.path), content: String(r.content ?? "") });
- for (const r of sessRows)
- rows.push({ path: String(r.path), content: String(r.content ?? "") });
- return rows;
+ const filterPatterns = contentScanOnly ? prefilterPatterns && prefilterPatterns.length > 0 ? prefilterPatterns : prefilterPattern ? [prefilterPattern] : [] : [escapedPattern];
+ const memFilter = buildContentFilter("summary::text", likeOp, filterPatterns);
+ const sessFilter = buildContentFilter("message::text", likeOp, filterPatterns);
+ const memQuery = `SELECT path, summary::text AS content, 0 AS source_order, '' AS creation_date FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
+ const sessQuery = `SELECT path, message::text AS content, 1 AS source_order, COALESCE(creation_date::text, '') AS creation_date FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
+ const rows = await api.query(`SELECT path, content, source_order, creation_date FROM ((${memQuery}) UNION ALL (${sessQuery})) AS combined ORDER BY path, source_order, creation_date`);
+ return rows.map((row) => ({
+ path: String(row["path"]),
+ content: String(row["content"] ?? "")
+ }));
}
function buildPathFilter(targetPath) {
- if (!targetPath || targetPath === "/")
+ const condition = buildPathCondition(targetPath);
+ return condition ? ` AND ${condition}` : "";
+}
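+// Pull the longest literal run out of a simple regex, treating ".*" as a separator; bail out (null) on classes, anchors, or other metacharacters, or when the best literal is under 2 chars.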
+function extractRegexLiteralPrefilter(pattern) {
+ if (!pattern)
+ return null;
+ const parts = [];
+ let current = "";
+ for (let i = 0; i < pattern.length; i++) {
+ const ch = pattern[i];
+ if (ch === "\\") {
+ const next = pattern[i + 1];
+ if (!next)
+ return null;
+ if (/[dDsSwWbBAZzGkKpP]/.test(next))
+ return null;
+ current += next;
+ i++;
+ continue;
+ }
+ if (ch === ".") {
+ if (pattern[i + 1] === "*") {
+ if (current)
+ parts.push(current);
+ current = "";
+ i++;
+ continue;
+ }
+ return null;
+ }
+ if ("|()[]{}+?^$".includes(ch) || ch === "*")
+ return null;
+ current += ch;
+ }
+ if (current)
+ parts.push(current);
+ const literal = parts.reduce((best, part) => part.length > best.length ? part : best, "");
+ return literal.length >= 2 ? literal : null;
+}
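+// Split a flat top-level alternation (a|b|c) into one LIKE prefilter literal per branch; groups, classes, or a branch without a usable literal disqualify the pattern.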
+function extractRegexAlternationPrefilters(pattern) {
+ if (!pattern.includes("|"))
+ return null;
+ const parts = [];
+ let current = "";
+ let escaped = false;
+ for (let i = 0; i < pattern.length; i++) {
+ const ch = pattern[i];
+ if (escaped) {
+ current += `\\${ch}`;
+ escaped = false;
+ continue;
+ }
+ if (ch === "\\") {
+ escaped = true;
+ continue;
+ }
+ if (ch === "|") {
+ if (!current)
+ return null;
+ parts.push(current);
+ current = "";
+ continue;
+ }
+ if ("()[]{}^$".includes(ch))
+ return null;
+ current += ch;
+ }
+ if (escaped || !current)
+ return null;
+ parts.push(current);
+ const literals = parts.map((part) => extractRegexLiteralPrefilter(part));
+ // Every branch needs a literal, or the SQL prefilter could drop rows matching the literal-free branch.
+ if (literals.some((literal) => literal === null))
+ return null;
+ return [...new Set(literals)];
+}
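+// Decide the search strategy: regex metacharacters force a client-side scan with SQL literal prefilters; plain patterns match directly via LIKE/ILIKE.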
+function buildGrepSearchOptions(params, targetPath) {
+ const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
+ const literalPrefilter = hasRegexMeta ? extractRegexLiteralPrefilter(params.pattern) : null;
+ const alternationPrefilters = hasRegexMeta ? extractRegexAlternationPrefilters(params.pattern) : null;
+ return {
+ pathFilter: buildPathFilter(targetPath),
+ contentScanOnly: hasRegexMeta,
+ likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
+ escapedPattern: sqlLike(params.pattern),
+ prefilterPattern: literalPrefilter ? sqlLike(literalPrefilter) : void 0,
+ prefilterPatterns: alternationPrefilters?.map((literal) => sqlLike(literal))
+ };
+}
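+// OR together one LIKE clause per pattern; an empty pattern list means no content filter.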
+function buildContentFilter(column, likeOp, patterns) {
+ if (patterns.length === 0)
return "";
- const clean = targetPath.replace(/\/+$/, "");
- return ` AND (path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+ if (patterns.length === 1)
+ return ` AND ${column} ${likeOp} '%${patterns[0]}%'`;
+ return ` AND (${patterns.map((pattern) => `${column} ${likeOp} '%${pattern}%'`).join(" OR ")})`;
}
function compileGrepRegex(params) {
let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
@@ -584,13 +787,7 @@ function refineGrepMatches(rows, params, forceMultiFilePrefix) {
return output;
}
async function grepBothTables(api, memoryTable, sessionsTable, params, targetPath) {
- const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
- const rows = await searchDeeplakeTables(api, memoryTable, sessionsTable, {
- pathFilter: buildPathFilter(targetPath),
- contentScanOnly: hasRegexMeta,
- likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
- escapedPattern: sqlLike(params.pattern)
- });
+ const rows = await searchDeeplakeTables(api, memoryTable, sessionsTable, buildGrepSearchOptions(params, targetPath));
const seen = /* @__PURE__ */ new Set();
const unique = rows.filter((r) => seen.has(r.path) ? false : (seen.add(r.path), true));
const normalized = unique.map((r) => ({ path: r.path, content: normalizeContent(r.path, r.content) }));
@@ -598,67 +795,157 @@ async function grepBothTables(api, memoryTable, sessionsTable, params, targetPat
}
// dist/src/hooks/grep-direct.js
-function parseBashGrep(cmd) {
- const first = cmd.trim().split(/\s*\|\s*/)[0];
- if (!/^(grep|egrep|fgrep)\b/.test(first))
- return null;
- const isFixed = first.startsWith("fgrep");
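+// Return the first pipeline stage of a command, respecting quotes and backslash escapes; null on an unterminated quote.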
+function splitFirstPipelineStage(cmd) {
+ const input = cmd.trim();
+ let quote = null;
+ let escaped = false;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (escaped) {
+ escaped = false;
+ continue;
+ }
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ continue;
+ }
+ if (ch === "\\" && quote === '"') {
+ escaped = true;
+ }
+ continue;
+ }
+ if (ch === "\\") {
+ escaped = true;
+ continue;
+ }
+ if (ch === "'" || ch === '"') {
+ quote = ch;
+ continue;
+ }
+ if (ch === "|")
+ return input.slice(0, i).trim();
+ }
+ return quote ? null : input;
+}
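+// Shell-style word splitting for the grep stage (single/double quotes, escapes); null on an unterminated quote.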
+function tokenizeGrepStage(input) {
const tokens = [];
- let pos = 0;
- while (pos < first.length) {
- if (first[pos] === " " || first[pos] === "\t") {
- pos++;
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ } else if (ch === "\\" && quote === '"' && i + 1 < input.length) {
+ current += input[++i];
+ } else {
+ current += ch;
+ }
continue;
}
- if (first[pos] === "'" || first[pos] === '"') {
- const q = first[pos];
- let end = pos + 1;
- while (end < first.length && first[end] !== q)
- end++;
- tokens.push(first.slice(pos + 1, end));
- pos = end + 1;
- } else {
- let end = pos;
- while (end < first.length && first[end] !== " " && first[end] !== "\t")
- end++;
- tokens.push(first.slice(pos, end));
- pos = end;
+ if (ch === "'" || ch === '"') {
+ quote = ch;
+ continue;
+ }
+ if (ch === "\\" && i + 1 < input.length) {
+ current += input[++i];
+ continue;
}
+ if (/\s/.test(ch)) {
+ if (current) {
+ tokens.push(current);
+ current = "";
+ }
+ continue;
+ }
+ current += ch;
}
+ if (quote)
+ return null;
+ if (current)
+ tokens.push(current);
+ return tokens;
+}
+function parseBashGrep(cmd) {
+ const first = splitFirstPipelineStage(cmd);
+ if (!first)
+ return null;
+ if (!/^(grep|egrep|fgrep)\b/.test(first))
+ return null;
+ const isFixed = first.startsWith("fgrep");
+ const tokens = tokenizeGrepStage(first);
+ if (!tokens || tokens.length === 0)
+ return null;
let ignoreCase = false, wordMatch = false, filesOnly = false, countOnly = false, lineNumber = false, invertMatch = false, fixedString = isFixed;
+ const explicitPatterns = [];
let ti = 1;
- while (ti < tokens.length && tokens[ti].startsWith("-") && tokens[ti] !== "--") {
- const flag = tokens[ti];
- if (flag.startsWith("--")) {
+ while (ti < tokens.length) {
+ const token = tokens[ti];
+ if (token === "--") {
+ ti++;
+ break;
+ }
+ if (!token.startsWith("-") || token === "-")
+ break;
+ if (token.startsWith("--")) {
+ const [flag, inlineValue] = token.split("=", 2);
const handlers = {
"--ignore-case": () => {
ignoreCase = true;
+ return false;
},
"--word-regexp": () => {
wordMatch = true;
+ return false;
},
"--files-with-matches": () => {
filesOnly = true;
+ return false;
},
"--count": () => {
countOnly = true;
+ return false;
},
"--line-number": () => {
lineNumber = true;
+ return false;
},
"--invert-match": () => {
invertMatch = true;
+ return false;
},
"--fixed-strings": () => {
fixedString = true;
+ return false;
+ },
+ "--after-context": () => inlineValue === void 0,
+ "--before-context": () => inlineValue === void 0,
+ "--context": () => inlineValue === void 0,
+ "--max-count": () => inlineValue === void 0,
+ "--regexp": () => {
+ if (inlineValue !== void 0) {
+ explicitPatterns.push(inlineValue);
+ return false;
+ }
+ return true;
}
};
- handlers[flag]?.();
+ const consumeNext = handlers[flag]?.() ?? false;
+ if (consumeNext) {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ if (flag === "--regexp")
+ explicitPatterns.push(tokens[ti]);
+ }
ti++;
continue;
}
- for (const c of flag.slice(1)) {
- switch (c) {
+ const shortFlags = token.slice(1);
+ for (let i = 0; i < shortFlags.length; i++) {
+ const flag = shortFlags[i];
+ switch (flag) {
case "i":
ignoreCase = true;
break;
@@ -680,19 +967,48 @@ function parseBashGrep(cmd) {
case "F":
fixedString = true;
break;
+ case "r":
+ case "R":
+ case "E":
+ break;
+ case "A":
+ case "B":
+ case "C":
+ case "m":
+ if (i === shortFlags.length - 1) {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ }
+ i = shortFlags.length;
+ break;
+ case "e": {
+ const inlineValue = shortFlags.slice(i + 1);
+ if (inlineValue) {
+ explicitPatterns.push(inlineValue);
+ } else {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ explicitPatterns.push(tokens[ti]);
+ }
+ i = shortFlags.length;
+ break;
+ }
+ default:
+ break;
}
}
ti++;
}
- if (ti < tokens.length && tokens[ti] === "--")
- ti++;
- if (ti >= tokens.length)
+ const pattern = explicitPatterns.length > 0 ? explicitPatterns[0] : tokens[ti];
+ if (!pattern)
return null;
- let target = tokens[ti + 1] ?? "/";
+ let target = explicitPatterns.length > 0 ? tokens[ti] ?? "/" : tokens[ti + 1] ?? "/";
if (target === "." || target === "./")
target = "/";
return {
- pattern: tokens[ti],
+ pattern,
targetPath: target,
ignoreCase,
wordMatch,
@@ -720,15 +1036,640 @@ async function handleGrepDirect(api, table, sessionsTable, params) {
return output.join("\n") || "(no matches)";
}
-// dist/src/hooks/pre-tool-use.js
-var log3 = (msg) => log("pre", msg);
-var MEMORY_PATH = join3(homedir3(), ".deeplake", "memory");
+// dist/src/hooks/virtual-table-query.js
+function normalizeSessionPart(path, content) {
+ return normalizeContent(path, content);
+}
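+// Render the synthetic /index.md: one line per stored session summary with date, project, and trimmed description.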
+function buildVirtualIndexContent(rows) {
+ const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""];
+ for (const row of rows) {
+ const path = row["path"];
+ const project = row["project"] || "";
+ const description = (row["description"] || "").slice(0, 120);
+ const date = (row["creation_date"] || "").slice(0, 10);
+ lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`);
+ }
+ return lines.join("\n");
+}
+function buildUnionQuery(memoryQuery, sessionsQuery) {
+ return `SELECT path, content, size_bytes, creation_date, source_order FROM ((${memoryQuery}) UNION ALL (${sessionsQuery})) AS combined ORDER BY path, source_order, creation_date`;
+}
+function buildInList(paths) {
+ return paths.map((path) => `'${sqlStr(path)}'`).join(", ");
+}
+function buildDirFilter(dirs) {
+ const cleaned = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))];
+ if (cleaned.length === 0 || cleaned.includes("/"))
+ return "";
+ const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%'`);
+ return ` WHERE ${clauses.join(" OR ")}`;
+}
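+// Prefer the single UNION ALL round-trip; if the backend rejects it, fall back to querying each table independently.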
+async function queryUnionRows(api, memoryQuery, sessionsQuery) {
+ const unionQuery = buildUnionQuery(memoryQuery, sessionsQuery);
+ try {
+ return await api.query(unionQuery);
+ } catch {
+ const [memoryRows, sessionRows] = await Promise.all([
+ api.query(memoryQuery).catch(() => []),
+ api.query(sessionsQuery).catch(() => [])
+ ]);
+ return [...memoryRows, ...sessionRows];
+ }
+}
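+// Batch-read virtual paths from both tables: a memory summary wins; otherwise normalized session parts are joined; a missing /index.md is synthesized.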
+async function readVirtualPathContents(api, memoryTable, sessionsTable, virtualPaths) {
+ const uniquePaths = [...new Set(virtualPaths)];
+ const result = new Map(uniquePaths.map((path) => [path, null]));
+ if (uniquePaths.length === 0)
+ return result;
+ const inList = buildInList(uniquePaths);
+ const rows = await queryUnionRows(api, `SELECT path, summary::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path IN (${inList})`, `SELECT path, message::text AS content, NULL::bigint AS size_bytes, COALESCE(creation_date::text, '') AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path IN (${inList})`);
+ const memoryHits = /* @__PURE__ */ new Map();
+ const sessionHits = /* @__PURE__ */ new Map();
+ for (const row of rows) {
+ const path = row["path"];
+ const content = row["content"];
+ const sourceOrder = Number(row["source_order"] ?? 0);
+ if (typeof path !== "string" || typeof content !== "string")
+ continue;
+ if (sourceOrder === 0) {
+ memoryHits.set(path, content);
+ } else {
+ const current = sessionHits.get(path) ?? [];
+ current.push(normalizeSessionPart(path, content));
+ sessionHits.set(path, current);
+ }
+ }
+ for (const path of uniquePaths) {
+ if (memoryHits.has(path)) {
+ result.set(path, memoryHits.get(path) ?? null);
+ continue;
+ }
+ const sessionParts = sessionHits.get(path) ?? [];
+ if (sessionParts.length > 0) {
+ result.set(path, sessionParts.join("\n"));
+ }
+ }
+ if (result.get("/index.md") === null && uniquePaths.includes("/index.md")) {
+ const rows2 = await api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []);
+ result.set("/index.md", buildVirtualIndexContent(rows2));
+ }
+ return result;
+}
+async function listVirtualPathRowsForDirs(api, memoryTable, sessionsTable, dirs) {
+ const uniqueDirs = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))];
+ const filter = buildDirFilter(uniqueDirs);
+ const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}"${filter}`, `SELECT path, NULL::text AS content, size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}"${filter}`);
+ const deduped = dedupeRowsByPath(rows.map((row) => ({
+ path: row["path"],
+ size_bytes: row["size_bytes"]
+ })));
+ const byDir = /* @__PURE__ */ new Map();
+ for (const dir of uniqueDirs)
+ byDir.set(dir, []);
+ for (const row of deduped) {
+ const path = row["path"];
+ if (typeof path !== "string")
+ continue;
+ for (const dir of uniqueDirs) {
+ const prefix = dir === "/" ? "/" : `${dir}/`;
+ if (dir === "/" || path.startsWith(prefix)) {
+ byDir.get(dir)?.push(row);
+ }
+ }
+ }
+ return byDir;
+}
+async function readVirtualPathContent(api, memoryTable, sessionsTable, virtualPath) {
+ return (await readVirtualPathContents(api, memoryTable, sessionsTable, [virtualPath])).get(virtualPath) ?? null;
+}
+async function listVirtualPathRows(api, memoryTable, sessionsTable, dir) {
+ return (await listVirtualPathRowsForDirs(api, memoryTable, sessionsTable, [dir])).get(dir.replace(/\/+$/, "") || "/") ?? [];
+}
+async function findVirtualPaths(api, memoryTable, sessionsTable, dir, filenamePattern) {
+ const normalizedDir = dir.replace(/\/+$/, "") || "/";
+ const likePath = `${sqlLike(normalizedDir === "/" ? "" : normalizedDir)}/%`;
+ const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`);
+ return [...new Set(rows.map((row) => row["path"]).filter((value) => typeof value === "string" && value.length > 0))];
+}
+function dedupeRowsByPath(rows) {
+ const seen = /* @__PURE__ */ new Set();
+ const unique = [];
+ for (const row of rows) {
+ const path = typeof row["path"] === "string" ? row["path"] : "";
+ if (!path || seen.has(path))
+ continue;
+ seen.add(path);
+ unique.push(row);
+ }
+ return unique;
+}
+
+// dist/src/hooks/bash-command-compiler.js
+function isQuoted(ch) {
+ return ch === "'" || ch === '"';
+}
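+// Split on any top-level occurrence of the given operators, ignoring quoted regions; null on an unterminated quote.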
+function splitTopLevel(input, operators) {
+ const parts = [];
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote)
+ quote = null;
+ current += ch;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ current += ch;
+ continue;
+ }
+ const matched = operators.find((op) => input.startsWith(op, i));
+ if (matched) {
+ const trimmed2 = current.trim();
+ if (trimmed2)
+ parts.push(trimmed2);
+ current = "";
+ i += matched.length - 1;
+ continue;
+ }
+ current += ch;
+ }
+ if (quote)
+ return null;
+ const trimmed = current.trim();
+ if (trimmed)
+ parts.push(trimmed);
+ return parts;
+}
+function tokenizeShellWords(input) {
+ const tokens = [];
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ } else if (ch === "\\" && quote === '"' && i + 1 < input.length) {
+ current += input[++i];
+ } else {
+ current += ch;
+ }
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+ if (/\s/.test(ch)) {
+ if (current) {
+ tokens.push(current);
+ current = "";
+ }
+ continue;
+ }
+ current += ch;
+ }
+ if (quote)
+ return null;
+ if (current)
+ tokens.push(current);
+ return tokens;
+}
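+// Expand one {a,b,c} list or {1..5} numeric range per call, recursing until no braces remain.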
+function expandBraceToken(token) {
+ const match = token.match(/\{([^{}]+)\}/);
+ if (!match)
+ return [token];
+ const [expr] = match;
+ const prefix = token.slice(0, match.index);
+ const suffix = token.slice((match.index ?? 0) + expr.length);
+ let variants = [];
+ const numericRange = match[1].match(/^(-?\d+)\.\.(-?\d+)$/);
+ if (numericRange) {
+ const start = Number(numericRange[1]);
+ const end = Number(numericRange[2]);
+ const step = start <= end ? 1 : -1;
+ for (let value = start; step > 0 ? value <= end : value >= end; value += step) {
+ variants.push(String(value));
+ }
+ } else {
+ variants = match[1].split(",");
+ }
+ return variants.flatMap((variant) => expandBraceToken(`${prefix}${variant}${suffix}`));
+}
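+// Only "2>/dev/null" (recorded as ignoreMissing) and "2>&1" are tolerated; strip them before further parsing.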
+function stripAllowedModifiers(segment) {
+ const ignoreMissing = /\s2>\/dev\/null\s*$/.test(segment);
+ const clean = segment.replace(/\s2>\/dev\/null\s*$/g, "").replace(/\s2>&1\s*/g, " ").trim();
+ return { clean, ignoreMissing };
+}
+function hasUnsupportedRedirection(segment) {
+ let quote = null;
+ for (let i = 0; i < segment.length; i++) {
+ const ch = segment[i];
+ if (quote) {
+ if (ch === quote)
+ quote = null;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+ if (ch === ">" || ch === "<")
+ return true;
+ }
+ return false;
+}
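+// Parse a head/tail stage into { lineLimit, fromEnd }: bare invocations default to 10 lines; "-N" and "-n N" are honored; unsupported shapes return null.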
+function parseHeadTailStage(stage) {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || tokens.length === 0)
+ return null;
+ const [cmd, ...rest] = tokens;
+ if (cmd !== "head" && cmd !== "tail")
+ return null;
+ if (rest.length === 0)
+ return { lineLimit: 10, fromEnd: cmd === "tail" };
+ if (rest.length === 1) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count)) {
+ return { lineLimit: 10, fromEnd: cmd === "tail" };
+ }
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && /^-\d+$/.test(rest[0])) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 3 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ return null;
+}
+function isValidPipelineHeadTailStage(stage) {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || tokens[0] !== "head" && tokens[0] !== "tail")
+ return false;
+ if (tokens.length === 1)
+ return true;
+ if (tokens.length === 2)
+ return /^-\d+$/.test(tokens[1]);
+ if (tokens.length === 3)
+ return tokens[1] === "-n" && /^-?\d+$/.test(tokens[2]);
+ return false;
+}
+function parseFindNamePatterns(tokens) {
+ const patterns = [];
+ for (let i = 2; i < tokens.length; i++) {
+ const token = tokens[i];
+ if (token === "-type") {
+ i += 1;
+ continue;
+ }
+ if (token === "-o")
+ continue;
+ if (token === "-name") {
+ const pattern = tokens[i + 1];
+ if (!pattern)
+ return null;
+ patterns.push(pattern);
+ i += 1;
+ continue;
+ }
+ return null;
+ }
+ return patterns.length > 0 ? patterns : null;
+}
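+// Compile one segment (echo, cat, head/tail, wc -l, ls, find [| xargs grep] [| head], grep [| head]) into a plan node; null means it cannot be served virtually.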
+function parseCompiledSegment(segment) {
+ const { clean, ignoreMissing } = stripAllowedModifiers(segment);
+ if (hasUnsupportedRedirection(clean))
+ return null;
+ const pipeline = splitTopLevel(clean, ["|"]);
+ if (!pipeline || pipeline.length === 0)
+ return null;
+ const tokens = tokenizeShellWords(pipeline[0]);
+ if (!tokens || tokens.length === 0)
+ return null;
+ if (tokens[0] === "echo" && pipeline.length === 1) {
+ const text = tokens.slice(1).join(" ");
+ return { kind: "echo", text };
+ }
+ if (tokens[0] === "cat") {
+ const paths = tokens.slice(1).flatMap(expandBraceToken);
+ if (paths.length === 0)
+ return null;
+ let lineLimit = 0;
+ let fromEnd = false;
+ let countLines2 = false;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2)
+ return null;
+ const pipeStage = pipeline[1].trim();
+ if (/^wc\s+-l\s*$/.test(pipeStage)) {
+ if (paths.length !== 1)
+ return null;
+ countLines2 = true;
+ } else {
+ if (!isValidPipelineHeadTailStage(pipeStage))
+ return null;
+ const headTail = parseHeadTailStage(pipeStage);
+ if (!headTail)
+ return null;
+ lineLimit = headTail.lineLimit;
+ fromEnd = headTail.fromEnd;
+ }
+ }
+ return { kind: "cat", paths, lineLimit, fromEnd, countLines: countLines2, ignoreMissing };
+ }
+ if (tokens[0] === "head" || tokens[0] === "tail") {
+ if (pipeline.length !== 1)
+ return null;
+ const parsed = parseHeadTailStage(clean);
+ if (!parsed)
+ return null;
+ const headTokens = tokenizeShellWords(clean);
+ if (!headTokens)
+ return null;
+ if (headTokens[1] === "-n" && headTokens.length < 4 || /^-\d+$/.test(headTokens[1] ?? "") && headTokens.length < 3 || headTokens.length === 2 && /^-?\d+$/.test(headTokens[1] ?? ""))
+ return null;
+ const path = headTokens[headTokens.length - 1];
+ if (path === "head" || path === "tail" || path === "-n")
+ return null;
+ return {
+ kind: "cat",
+ paths: expandBraceToken(path),
+ lineLimit: parsed.lineLimit,
+ fromEnd: parsed.fromEnd,
+ countLines: false,
+ ignoreMissing
+ };
+ }
+ if (tokens[0] === "wc" && tokens[1] === "-l" && pipeline.length === 1 && tokens[2]) {
+ return {
+ kind: "cat",
+ paths: expandBraceToken(tokens[2]),
+ lineLimit: 0,
+ fromEnd: false,
+ countLines: true,
+ ignoreMissing
+ };
+ }
+ if (tokens[0] === "ls" && pipeline.length === 1) {
+ const dirs = tokens.slice(1).filter((token) => !token.startsWith("-")).flatMap(expandBraceToken);
+ const longFormat = tokens.some((token) => token.startsWith("-") && token.includes("l"));
+ return { kind: "ls", dirs: dirs.length > 0 ? dirs : ["/"], longFormat };
+ }
+ if (tokens[0] === "find") {
+ if (pipeline.length > 3)
+ return null;
+ const dir = tokens[1];
+ if (!dir)
+ return null;
+ const patterns = parseFindNamePatterns(tokens);
+ if (!patterns)
+ return null;
+ const countOnly = pipeline.length === 2 && /^wc\s+-l\s*$/.test(pipeline[1].trim());
+ if (countOnly) {
+ if (patterns.length !== 1)
+ return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+ if (pipeline.length >= 2) {
+ const xargsTokens = tokenizeShellWords(pipeline[1].trim());
+ if (!xargsTokens || xargsTokens[0] !== "xargs")
+ return null;
+ const xargsArgs = xargsTokens.slice(1);
+ while (xargsArgs[0] && xargsArgs[0].startsWith("-")) {
+ if (xargsArgs[0] === "-r") {
+ xargsArgs.shift();
+ continue;
+ }
+ return null;
+ }
+ const grepCmd = xargsArgs.join(" ");
+ const grepParams2 = parseBashGrep(grepCmd);
+ if (!grepParams2)
+ return null;
+ let lineLimit = 0;
+ if (pipeline.length === 3) {
+ const headStage = pipeline[2].trim();
+ if (!isValidPipelineHeadTailStage(headStage))
+ return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd)
+ return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "find_grep", dir, patterns, params: grepParams2, lineLimit };
+ }
+ if (patterns.length !== 1)
+ return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+ const grepParams = parseBashGrep(clean);
+ if (grepParams) {
+ let lineLimit = 0;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2)
+ return null;
+ const headStage = pipeline[1].trim();
+ if (!isValidPipelineHeadTailStage(headStage))
+ return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd)
+ return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "grep", params: grepParams, lineLimit };
+ }
+ return null;
+}
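+// Compile a full command by splitting on && ; and newlines; "||" or any non-compilable segment aborts the whole plan.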
+function parseCompiledBashCommand(cmd) {
+ if (cmd.includes("||"))
+ return null;
+ const segments = splitTopLevel(cmd, ["&&", ";", "\n"]);
+ if (!segments || segments.length === 0)
+ return null;
+ const parsed = segments.map(parseCompiledSegment);
+ if (parsed.some((segment) => segment === null))
+ return null;
+ return parsed;
+}
+function applyLineWindow(content, lineLimit, fromEnd) {
+ if (lineLimit <= 0)
+ return content;
+ const lines = content.split("\n");
+ return (fromEnd ? lines.slice(-lineLimit) : lines.slice(0, lineLimit)).join("\n");
+}
+function countLines(content) {
+ return content === "" ? 0 : content.split("\n").length;
+}
+function renderDirectoryListing(dir, rows, longFormat) {
+ const entries = /* @__PURE__ */ new Map();
+ const prefix = dir === "/" ? "/" : `${dir}/`;
+ for (const row of rows) {
+ const path = row["path"];
+ if (!path.startsWith(prefix) && dir !== "/")
+ continue;
+ const rest = dir === "/" ? path.slice(1) : path.slice(prefix.length);
+ const slash = rest.indexOf("/");
+ const name = slash === -1 ? rest : rest.slice(0, slash);
+ if (!name)
+ continue;
+ const existing = entries.get(name);
+ if (slash !== -1) {
+ if (!existing)
+ entries.set(name, { isDir: true, size: 0 });
+ } else {
+ entries.set(name, { isDir: false, size: Number(row["size_bytes"] ?? 0) });
+ }
+ }
+ if (entries.size === 0)
+ return `ls: cannot access '${dir}': No such file or directory`;
+ const lines = [];
+ for (const [name, info] of [...entries].sort((a, b) => a[0].localeCompare(b[0]))) {
+ if (longFormat) {
+ const type = info.isDir ? "drwxr-xr-x" : "-rw-r--r--";
+ const size = String(info.isDir ? 0 : info.size).padStart(6);
+ lines.push(`${type} 1 user user ${size} ${name}${info.isDir ? "/" : ""}`);
+ } else {
+ lines.push(name + (info.isDir ? "/" : ""));
+ }
+ }
+ return lines.join("\n");
+}
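+// Execute a compiled plan: batch all cat reads and ls listings up front, then render each segment; null falls back to the real shell path.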
+async function executeCompiledBashCommand(api, memoryTable, sessionsTable, cmd, deps = {}) {
+ const { readVirtualPathContentsFn = readVirtualPathContents, listVirtualPathRowsForDirsFn = listVirtualPathRowsForDirs, findVirtualPathsFn = findVirtualPaths, handleGrepDirectFn = handleGrepDirect } = deps;
+ const plan = parseCompiledBashCommand(cmd);
+ if (!plan)
+ return null;
+ const readPaths = [...new Set(plan.flatMap((segment) => segment.kind === "cat" ? segment.paths : []))];
+ const listDirs = [...new Set(plan.flatMap((segment) => segment.kind === "ls" ? segment.dirs.map((dir) => dir.replace(/\/+$/, "") || "/") : []))];
+ const contentMap = readPaths.length > 0 ? await readVirtualPathContentsFn(api, memoryTable, sessionsTable, readPaths) : /* @__PURE__ */ new Map();
+ const dirRowsMap = listDirs.length > 0 ? await listVirtualPathRowsForDirsFn(api, memoryTable, sessionsTable, listDirs) : /* @__PURE__ */ new Map();
+ const outputs = [];
+ for (const segment of plan) {
+ if (segment.kind === "echo") {
+ outputs.push(segment.text);
+ continue;
+ }
+ if (segment.kind === "cat") {
+ const contents = [];
+ for (const path of segment.paths) {
+ const content = contentMap.get(path) ?? null;
+ if (content === null) {
+ if (segment.ignoreMissing)
+ continue;
+ return null;
+ }
+ contents.push(content);
+ }
+ const combined = contents.join("");
+ if (segment.countLines) {
+ outputs.push(`${countLines(combined)} ${segment.paths[0]}`);
+ } else {
+ outputs.push(applyLineWindow(combined, segment.lineLimit, segment.fromEnd));
+ }
+ continue;
+ }
+ if (segment.kind === "ls") {
+ for (const dir of segment.dirs) {
+ outputs.push(renderDirectoryListing(dir.replace(/\/+$/, "") || "/", dirRowsMap.get(dir.replace(/\/+$/, "") || "/") ?? [], segment.longFormat));
+ }
+ continue;
+ }
+ if (segment.kind === "find") {
+ const filenamePattern = sqlLike(segment.pattern).replace(/\*/g, "%").replace(/\?/g, "_");
+ const paths = await findVirtualPathsFn(api, memoryTable, sessionsTable, segment.dir.replace(/\/+$/, "") || "/", filenamePattern);
+ outputs.push(segment.countOnly ? String(paths.length) : paths.join("\n") || "(no matches)");
+ continue;
+ }
+ if (segment.kind === "find_grep") {
+ const dir = segment.dir.replace(/\/+$/, "") || "/";
+ const candidateBatches = await Promise.all(segment.patterns.map((pattern) => findVirtualPathsFn(api, memoryTable, sessionsTable, dir, sqlLike(pattern).replace(/\*/g, "%").replace(/\?/g, "_"))));
+ const candidatePaths = [...new Set(candidateBatches.flat())];
+ if (candidatePaths.length === 0) {
+ outputs.push("(no matches)");
+ continue;
+ }
+ const candidateContents = await readVirtualPathContentsFn(api, memoryTable, sessionsTable, candidatePaths);
+ const matched = refineGrepMatches(candidatePaths.flatMap((path) => {
+ const content = candidateContents.get(path);
+ if (content === null || content === void 0)
+ return [];
+ return [{ path, content: normalizeContent(path, content) }];
+ }), segment.params);
+ const limited = segment.lineLimit > 0 ? matched.slice(0, segment.lineLimit) : matched;
+ outputs.push(limited.join("\n") || "(no matches)");
+ continue;
+ }
+ if (segment.kind === "grep") {
+ const result = await handleGrepDirectFn(api, memoryTable, sessionsTable, segment.params);
+ if (result === null)
+ return null;
+ if (segment.lineLimit > 0) {
+ outputs.push(result.split("\n").slice(0, segment.lineLimit).join("\n"));
+ } else {
+ outputs.push(result);
+ }
+ continue;
+ }
+ }
+ return outputs.join("\n");
+}
+
+// dist/src/hooks/query-cache.js
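+// Per-session on-disk cache for the synthesized /index.md so repeated index reads skip a network round-trip.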
+import { mkdirSync as mkdirSync2, readFileSync as readFileSync3, rmSync, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { homedir as homedir3 } from "node:os";
+var log3 = (msg) => log("query-cache", msg);
+var DEFAULT_CACHE_ROOT = join4(homedir3(), ".deeplake", "query-cache");
+var INDEX_CACHE_FILE = "index.md";
+function getSessionQueryCacheDir(sessionId, deps = {}) {
+ const { cacheRoot = DEFAULT_CACHE_ROOT } = deps;
+ return join4(cacheRoot, sessionId);
+}
+function readCachedIndexContent(sessionId, deps = {}) {
+ const { logFn = log3 } = deps;
+ try {
+ return readFileSync3(join4(getSessionQueryCacheDir(sessionId, deps), INDEX_CACHE_FILE), "utf-8");
+ } catch (e) {
+ if (e?.code === "ENOENT")
+ return null;
+ logFn(`read failed for session=${sessionId}: ${e.message}`);
+ return null;
+ }
+}
+function writeCachedIndexContent(sessionId, content, deps = {}) {
+ const { logFn = log3 } = deps;
+ try {
+ const dir = getSessionQueryCacheDir(sessionId, deps);
+ mkdirSync2(dir, { recursive: true });
+ writeFileSync2(join4(dir, INDEX_CACHE_FILE), content, "utf-8");
+ } catch (e) {
+ logFn(`write failed for session=${sessionId}: ${e.message}`);
+ }
+}
+
+// dist/src/hooks/memory-path-utils.js
+import { homedir as homedir4 } from "node:os";
+import { join as join5 } from "node:path";
+var MEMORY_PATH = join5(homedir4(), ".deeplake", "memory");
var TILDE_PATH = "~/.deeplake/memory";
var HOME_VAR_PATH = "$HOME/.deeplake/memory";
-var __bundleDir = dirname(fileURLToPath(import.meta.url));
-var SHELL_BUNDLE = existsSync2(join3(__bundleDir, "shell", "deeplake-shell.js")) ? join3(__bundleDir, "shell", "deeplake-shell.js") : join3(__bundleDir, "..", "shell", "deeplake-shell.js");
var SAFE_BUILTINS = /* @__PURE__ */ new Set([
- // filesystem
"cat",
"ls",
"cp",
@@ -744,7 +1685,6 @@ var SAFE_BUILTINS = /* @__PURE__ */ new Set([
"du",
"tree",
"file",
- // text processing
"grep",
"egrep",
"fgrep",
@@ -771,31 +1711,24 @@ var SAFE_BUILTINS = /* @__PURE__ */ new Set([
"diff",
"strings",
"split",
- // search
"find",
"xargs",
"which",
- // data formats
"jq",
"yq",
"xan",
"base64",
"od",
- // archives
"tar",
"gzip",
"gunzip",
"zcat",
- // hashing
"md5sum",
"sha1sum",
"sha256sum",
- // output/io
"echo",
"printf",
"tee",
- "cat",
- // path/env
"pwd",
"cd",
"basename",
@@ -804,7 +1737,6 @@ var SAFE_BUILTINS = /* @__PURE__ */ new Set([
"printenv",
"hostname",
"whoami",
- // misc
"date",
"seq",
"expr",
@@ -819,7 +1751,6 @@ var SAFE_BUILTINS = /* @__PURE__ */ new Set([
"history",
"help",
"clear",
- // shell control flow
"for",
"while",
"do",
@@ -849,6 +1780,22 @@ function touchesMemory(p) {
function rewritePaths(cmd) {
return cmd.replace(new RegExp(MEMORY_PATH.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + "/?", "g"), "/").replace(/~\/.deeplake\/memory\/?/g, "/").replace(/\$HOME\/.deeplake\/memory\/?/g, "/").replace(/"\$HOME\/.deeplake\/memory\/?"/g, '"/"');
}
+
+// dist/src/hooks/pre-tool-use.js
+var log4 = (msg) => log("pre", msg);
+var __bundleDir = dirname(fileURLToPath2(import.meta.url));
+var SHELL_BUNDLE = existsSync3(join6(__bundleDir, "shell", "deeplake-shell.js")) ? join6(__bundleDir, "shell", "deeplake-shell.js") : join6(__bundleDir, "..", "shell", "deeplake-shell.js");
+function getReadTargetPath(toolInput) {
+ const rawPath = toolInput.file_path ?? toolInput.path;
+ return rawPath ? rawPath : null;
+}
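+// Heuristic: a path whose final segment has no dot is treated as a directory, so Read on it becomes ls.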
+function isLikelyDirectoryPath(virtualPath) {
+ const normalized = virtualPath.replace(/\/+$/, "") || "/";
+ if (normalized === "/")
+ return true;
+ const base = normalized.split("/").pop() ?? "";
+ return !base.includes(".");
+}
function getShellCommand(toolName, toolInput) {
switch (toolName) {
case "Grep": {
@@ -865,10 +1812,10 @@ function getShellCommand(toolName, toolInput) {
break;
}
case "Read": {
- const fp = toolInput.file_path;
+ const fp = getReadTargetPath(toolInput);
if (fp && touchesMemory(fp)) {
- const virtualPath = rewritePaths(fp) || "/";
- return `cat ${virtualPath}`;
+ const rewritten = rewritePaths(fp) || "/";
+ return `${isLikelyDirectoryPath(rewritten) ? "ls" : "cat"} ${rewritten}`;
}
break;
}
@@ -876,34 +1823,24 @@ function getShellCommand(toolName, toolInput) {
const cmd = toolInput.command;
if (!cmd || !touchesMemory(cmd))
break;
- {
- const rewritten = rewritePaths(cmd);
- if (!isSafe(rewritten)) {
- log3(`unsafe command blocked: ${rewritten}`);
- return null;
- }
- return rewritten;
+ const rewritten = rewritePaths(cmd);
+ if (!isSafe(rewritten)) {
+ log4(`unsafe command blocked: ${rewritten}`);
+ return null;
}
- break;
+ return rewritten;
}
case "Glob": {
const p = toolInput.path;
- if (p && touchesMemory(p)) {
- return `ls /`;
- }
+ if (p && touchesMemory(p))
+ return "ls /";
break;
}
}
return null;
}
-function emitResult(command, description) {
- console.log(JSON.stringify({
- hookSpecificOutput: {
- hookEventName: "PreToolUse",
- permissionDecision: "allow",
- updatedInput: { command, description }
- }
- }));
+function buildAllowDecision(command, description) {
+ return { command, description };
}
function extractGrepParams(toolName, toolInput, shellCmd) {
if (toolName === "Grep") {
@@ -924,234 +1861,236 @@ function extractGrepParams(toolName, toolInput, shellCmd) {
return parseBashGrep(shellCmd);
return null;
}
-async function main() {
- const input = await readStdin();
- log3(`hook fired: tool=${input.tool_name} input=${JSON.stringify(input.tool_input)}`);
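+// Fallback: run the rewritten command inside the bundled DeepLake shell.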
+function buildFallbackDecision(shellCmd, shellBundle = SHELL_BUNDLE) {
+ return buildAllowDecision(`node "${shellBundle}" -c "${shellCmd.replace(/"/g, '\\"')}"`, `[DeepLake shell] ${shellCmd}`);
+}
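+// Core PreToolUse logic with injectable dependencies for tests; returns an updatedInput allow decision, or null to leave the tool call untouched.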
+async function processPreToolUse(input, deps = {}) {
+ const { config = loadConfig(), createApi = (table2, activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, table2), executeCompiledBashCommandFn = executeCompiledBashCommand, handleGrepDirectFn = handleGrepDirect, readVirtualPathContentsFn = readVirtualPathContents, readVirtualPathContentFn = readVirtualPathContent, listVirtualPathRowsFn = listVirtualPathRows, findVirtualPathsFn = findVirtualPaths, readCachedIndexContentFn = readCachedIndexContent, writeCachedIndexContentFn = writeCachedIndexContent, shellBundle = SHELL_BUNDLE, logFn = log4 } = deps;
const cmd = input.tool_input.command ?? "";
const shellCmd = getShellCommand(input.tool_name, input.tool_input);
- const toolPath = input.tool_input.file_path ?? input.tool_input.path ?? "";
+ const toolPath = getReadTargetPath(input.tool_input) ?? input.tool_input.path ?? "";
if (!shellCmd && (touchesMemory(cmd) || touchesMemory(toolPath))) {
const guidance = "[RETRY REQUIRED] The command you tried is not available for ~/.deeplake/memory/. This virtual filesystem only supports bash builtins: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find, etc. python, python3, node, and curl are NOT available. You MUST rewrite your command using only the bash tools listed above and try again. For example, to parse JSON use: cat file.json | jq '.key'. To count keys: cat file.json | jq 'keys | length'.";
- log3(`unsupported command, returning guidance: ${cmd}`);
- console.log(JSON.stringify({
- hookSpecificOutput: {
- hookEventName: "PreToolUse",
- permissionDecision: "allow",
- updatedInput: {
- command: `echo ${JSON.stringify(guidance)}`,
- description: "[DeepLake] unsupported command \u2014 rewrite using bash builtins"
- }
- }
- }));
- return;
+ logFn(`unsupported command, returning guidance: ${cmd}`);
+ return buildAllowDecision(`echo ${JSON.stringify(guidance)}`, "[DeepLake] unsupported command \u2014 rewrite using bash builtins");
}
if (!shellCmd)
- return;
- const config = loadConfig();
- if (config) {
- const table = process.env["HIVEMIND_TABLE"] ?? "memory";
- const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table);
- try {
- const grepParams = extractGrepParams(input.tool_name, input.tool_input, shellCmd);
- if (grepParams) {
- log3(`direct grep: pattern=${grepParams.pattern} path=${grepParams.targetPath}`);
- const result = await handleGrepDirect(api, table, sessionsTable, grepParams);
- if (result !== null) {
- emitResult(`echo ${JSON.stringify(result)}`, `[DeepLake direct] grep ${grepParams.pattern}`);
- return;
- }
+ return null;
+ if (!config)
+ return buildFallbackDecision(shellCmd, shellBundle);
+ const table = process.env["HIVEMIND_TABLE"] ?? "memory";
+ const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
+ const api = createApi(table, config);
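+ // Serve /index.md from the per-session cache when possible; anything fetched fresh refreshes the cache.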
+ const readVirtualPathContentsWithCache = async (cachePaths) => {
+ const uniquePaths = [...new Set(cachePaths)];
+ const result = new Map(uniquePaths.map((path) => [path, null]));
+ const cachedIndex = uniquePaths.includes("/index.md") ? readCachedIndexContentFn(input.session_id) : null;
+ const remainingPaths = cachedIndex === null ? uniquePaths : uniquePaths.filter((path) => path !== "/index.md");
+ if (cachedIndex !== null) {
+ result.set("/index.md", cachedIndex);
+ }
+ if (remainingPaths.length > 0) {
+ const fetched = await readVirtualPathContentsFn(api, table, sessionsTable, remainingPaths);
+ for (const [path, content] of fetched)
+ result.set(path, content);
+ }
+ const fetchedIndex = result.get("/index.md");
+ if (typeof fetchedIndex === "string") {
+ writeCachedIndexContentFn(input.session_id, fetchedIndex);
+ }
+ return result;
+ };
+ try {
+ if (input.tool_name === "Bash") {
+ const compiled = await executeCompiledBashCommandFn(api, table, sessionsTable, shellCmd, {
+ readVirtualPathContentsFn: async (_api, _memoryTable, _sessionsTable, cachePaths) => readVirtualPathContentsWithCache(cachePaths)
+ });
+ if (compiled !== null) {
+ return buildAllowDecision(`echo ${JSON.stringify(compiled)}`, `[DeepLake compiled] ${shellCmd}`);
}
- {
- let virtualPath = null;
- let lineLimit = 0;
- let fromEnd = false;
- if (input.tool_name === "Read") {
- virtualPath = rewritePaths(input.tool_input.file_path ?? "");
- } else if (input.tool_name === "Bash") {
- const catCmd = shellCmd.replace(/\s+2>\S+/g, "").trim();
- const catPipeHead = catCmd.match(/^cat\s+(\S+?)\s*(?:\|[^|]*)*\|\s*head\s+(?:-n?\s*)?(-?\d+)\s*$/);
- if (catPipeHead) {
- virtualPath = catPipeHead[1];
- lineLimit = Math.abs(parseInt(catPipeHead[2], 10));
- }
- if (!virtualPath) {
- const catMatch = catCmd.match(/^cat\s+(\S+)\s*$/);
- if (catMatch)
- virtualPath = catMatch[1];
- }
- if (!virtualPath) {
- const headMatch = shellCmd.match(/^head\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? shellCmd.match(/^head\s+(\S+)\s*$/);
- if (headMatch) {
- if (headMatch[2]) {
- virtualPath = headMatch[2];
- lineLimit = Math.abs(parseInt(headMatch[1], 10));
- } else {
- virtualPath = headMatch[1];
- lineLimit = 10;
- }
- }
- }
- if (!virtualPath) {
- const tailMatch = shellCmd.match(/^tail\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? shellCmd.match(/^tail\s+(\S+)\s*$/);
- if (tailMatch) {
- fromEnd = true;
- if (tailMatch[2]) {
- virtualPath = tailMatch[2];
- lineLimit = Math.abs(parseInt(tailMatch[1], 10));
- } else {
- virtualPath = tailMatch[1];
- lineLimit = 10;
- }
- }
- }
- if (!virtualPath) {
- const wcMatch = shellCmd.match(/^wc\s+-l\s+(\S+)\s*$/);
- if (wcMatch) {
- virtualPath = wcMatch[1];
- lineLimit = -1;
- }
+ }
+ const grepParams = extractGrepParams(input.tool_name, input.tool_input, shellCmd);
+ if (grepParams) {
+ logFn(`direct grep: pattern=${grepParams.pattern} path=${grepParams.targetPath}`);
+ const result = await handleGrepDirectFn(api, table, sessionsTable, grepParams);
+ if (result !== null)
+ return buildAllowDecision(`echo ${JSON.stringify(result)}`, `[DeepLake direct] grep ${grepParams.pattern}`);
+ }
+ let virtualPath = null;
+ let lineLimit = 0;
+ let fromEnd = false;
+ let lsDir = null;
+ let longFormat = false;
+ if (input.tool_name === "Read") {
+ virtualPath = rewritePaths(getReadTargetPath(input.tool_input) ?? "");
+ if (virtualPath && isLikelyDirectoryPath(virtualPath)) {
+ lsDir = virtualPath.replace(/\/+$/, "") || "/";
+ virtualPath = null;
+ }
+ } else if (input.tool_name === "Bash") {
+ const catCmd = shellCmd.replace(/\s+2>\S+/g, "").trim();
+ const catPipeHead = catCmd.match(/^cat\s+(\S+?)\s*(?:\|[^|]*)*\|\s*head\s+(?:-n?\s*)?(-?\d+)\s*$/);
+ if (catPipeHead) {
+ virtualPath = catPipeHead[1];
+ lineLimit = Math.abs(parseInt(catPipeHead[2], 10));
+ }
+ if (!virtualPath) {
+ const catMatch = catCmd.match(/^cat\s+(\S+)\s*$/);
+ if (catMatch)
+ virtualPath = catMatch[1];
+ }
+ if (!virtualPath) {
+ const headMatch = shellCmd.match(/^head\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? shellCmd.match(/^head\s+(\S+)\s*$/);
+ if (headMatch) {
+ if (headMatch[2]) {
+ virtualPath = headMatch[2];
+ lineLimit = Math.abs(parseInt(headMatch[1], 10));
+ } else {
+ virtualPath = headMatch[1];
+ lineLimit = 10;
}
}
- if (virtualPath && !virtualPath.endsWith("/")) {
- log3(`direct read: ${virtualPath}`);
- let content = null;
- if (virtualPath.startsWith("/sessions/")) {
- try {
- const sessionRows = await api.query(`SELECT message::text AS content FROM "${sessionsTable}" WHERE path = '${sqlStr(virtualPath)}' LIMIT 1`);
- if (sessionRows.length > 0 && sessionRows[0]["content"]) {
- content = sessionRows[0]["content"];
- }
- } catch {
- }
+ }
+ if (!virtualPath) {
+ const tailMatch = shellCmd.match(/^tail\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? shellCmd.match(/^tail\s+(\S+)\s*$/);
+ if (tailMatch) {
+ fromEnd = true;
+ if (tailMatch[2]) {
+ virtualPath = tailMatch[2];
+ lineLimit = Math.abs(parseInt(tailMatch[1], 10));
} else {
- const rows = await api.query(`SELECT summary FROM "${table}" WHERE path = '${sqlStr(virtualPath)}' LIMIT 1`);
- if (rows.length > 0 && rows[0]["summary"]) {
- content = rows[0]["summary"];
- } else if (virtualPath === "/index.md") {
- const idxRows = await api.query(`SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`);
- const lines = ["# Memory Index", "", `${idxRows.length} sessions:`, ""];
- for (const r of idxRows) {
- const p = r["path"];
- const proj = r["project"] || "";
- const desc = (r["description"] || "").slice(0, 120);
- const date = (r["creation_date"] || "").slice(0, 10);
- lines.push(`- [${p}](${p}) ${date} ${proj ? `[${proj}]` : ""} ${desc}`);
- }
- content = lines.join("\n");
- }
- }
- if (content !== null) {
- if (lineLimit === -1) {
- const count = content.split("\n").length;
- emitResult(`echo ${JSON.stringify(`${count} ${virtualPath}`)}`, `[DeepLake direct] wc -l ${virtualPath}`);
- return;
- }
- if (lineLimit > 0) {
- const lines = content.split("\n");
- content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n");
- }
- const label = lineLimit > 0 ? fromEnd ? `tail -${lineLimit}` : `head -${lineLimit}` : "cat";
- emitResult(`echo ${JSON.stringify(content)}`, `[DeepLake direct] ${label} ${virtualPath}`);
- return;
+ virtualPath = tailMatch[1];
+ lineLimit = 10;
}
}
}
- {
- let lsDir = null;
- let longFormat = false;
- if (input.tool_name === "Glob") {
- lsDir = rewritePaths(input.tool_input.path ?? "") || "/";
- } else if (input.tool_name === "Bash") {
- const lsMatch = shellCmd.match(/^ls\s+(?:-([a-zA-Z]+)\s+)?(\S+)?\s*$/);
- if (lsMatch) {
- lsDir = lsMatch[2] ?? "/";
- longFormat = (lsMatch[1] ?? "").includes("l");
- }
+ if (!virtualPath) {
+ const wcMatch = shellCmd.match(/^wc\s+-l\s+(\S+)\s*$/);
+ if (wcMatch) {
+ virtualPath = wcMatch[1];
+ lineLimit = -1;
}
- if (lsDir) {
- const dir = lsDir.replace(/\/+$/, "") || "/";
- log3(`direct ls: ${dir}`);
- const isSessionDir = dir === "/sessions" || dir.startsWith("/sessions/");
- const isRoot = dir === "/";
- const lsQueries = [];
- if (!isSessionDir) {
- lsQueries.push(api.query(`SELECT path, size_bytes FROM "${table}" WHERE path LIKE '${sqlLike(dir === "/" ? "" : dir)}/%' ORDER BY path`).catch(() => []));
- }
- if (isSessionDir || isRoot) {
- lsQueries.push(api.query(`SELECT path, size_bytes FROM "${sessionsTable}" WHERE path LIKE '${sqlLike(dir === "/" ? "" : dir)}/%' ORDER BY path`).catch(() => []));
- }
- const rows = (await Promise.all(lsQueries)).flat();
- const entries = /* @__PURE__ */ new Map();
- const prefix = dir === "/" ? "/" : dir + "/";
- for (const row of rows) {
- const p = row["path"];
- if (!p.startsWith(prefix) && dir !== "/")
- continue;
- const rest = dir === "/" ? p.slice(1) : p.slice(prefix.length);
- const slash = rest.indexOf("/");
- const name = slash === -1 ? rest : rest.slice(0, slash);
- if (!name)
- continue;
- const existing = entries.get(name);
- if (slash !== -1) {
- if (!existing)
- entries.set(name, { isDir: true, size: 0 });
- } else {
- entries.set(name, { isDir: false, size: row["size_bytes"] ?? 0 });
- }
- }
- const lines = [];
- for (const [name, info] of [...entries].sort((a, b) => a[0].localeCompare(b[0]))) {
- if (longFormat) {
- const type = info.isDir ? "drwxr-xr-x" : "-rw-r--r--";
- const size = String(info.isDir ? 0 : info.size).padStart(6);
- lines.push(`${type} 1 user user ${size} ${name}${info.isDir ? "/" : ""}`);
- } else {
- lines.push(name + (info.isDir ? "/" : ""));
- }
- }
- emitResult(`echo ${JSON.stringify(lines.join("\n") || "(empty directory)")}`, `[DeepLake direct] ls ${dir}`);
- return;
+ }
+ }
+ if (virtualPath && !virtualPath.endsWith("/")) {
+ logFn(`direct read: ${virtualPath}`);
+ let content = virtualPath === "/index.md" ? readCachedIndexContentFn(input.session_id) : null;
+ if (content === null) {
+ content = await readVirtualPathContentFn(api, table, sessionsTable, virtualPath);
+ }
+ if (content === null && virtualPath === "/index.md") {
+ const idxRows = await api.query(`SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`);
+ const lines = ["# Memory Index", "", `${idxRows.length} sessions:`, ""];
+ for (const r of idxRows) {
+ const p = r["path"];
+ const proj = r["project"] || "";
+ const desc = (r["description"] || "").slice(0, 120);
+ const date = (r["creation_date"] || "").slice(0, 10);
+ lines.push(`- [${p}](${p}) ${date} ${proj ? `[${proj}]` : ""} ${desc}`);
}
+ content = lines.join("\n");
}
- if (input.tool_name === "Bash") {
- const findMatch = shellCmd.match(/^find\s+(\S+)\s+(?:-type\s+\S+\s+)?-name\s+'([^']+)'/);
- if (findMatch) {
- const dir = findMatch[1].replace(/\/+$/, "") || "/";
- const namePattern = sqlLike(findMatch[2]).replace(/\*/g, "%").replace(/\?/g, "_");
- log3(`direct find: ${dir} -name '${findMatch[2]}'`);
- const isSessionDir = dir === "/sessions" || dir.startsWith("/sessions/");
- const findTable = isSessionDir ? sessionsTable : table;
- const rows = await api.query(`SELECT path FROM "${findTable}" WHERE path LIKE '${sqlLike(dir === "/" ? "" : dir)}/%' AND filename LIKE '${namePattern}' ORDER BY path`);
- let result = rows.map((r) => r["path"]).join("\n") || "";
- if (/\|\s*wc\s+-l\s*$/.test(shellCmd)) {
- result = String(rows.length);
- }
- emitResult(`echo ${JSON.stringify(result || "(no matches)")}`, `[DeepLake direct] find ${dir}`);
- return;
+ if (content !== null) {
+ if (virtualPath === "/index.md") {
+ writeCachedIndexContentFn(input.session_id, content);
}
+ if (lineLimit === -1)
+ return buildAllowDecision(`echo ${JSON.stringify(`${content.split("\n").length} ${virtualPath}`)}`, `[DeepLake direct] wc -l ${virtualPath}`);
+ if (lineLimit > 0) {
+ const lines = content.split("\n");
+ content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n");
+ }
+ const label = lineLimit > 0 ? fromEnd ? `tail -${lineLimit}` : `head -${lineLimit}` : "cat";
+ return buildAllowDecision(`echo ${JSON.stringify(content)}`, `[DeepLake direct] ${label} ${virtualPath}`);
+ }
+ }
+ if (!lsDir && input.tool_name === "Glob") {
+ lsDir = rewritePaths(input.tool_input.path ?? "") || "/";
+ } else if (input.tool_name === "Bash") {
+ const lsMatch = shellCmd.match(/^ls\s+(?:-([a-zA-Z]+)\s+)?(\S+)?\s*$/);
+ if (lsMatch) {
+ lsDir = lsMatch[2] ?? "/";
+ longFormat = (lsMatch[1] ?? "").includes("l");
+ }
+ }
+ if (lsDir) {
+ const dir = lsDir.replace(/\/+$/, "") || "/";
+ logFn(`direct ls: ${dir}`);
+ const rows = await listVirtualPathRowsFn(api, table, sessionsTable, dir);
+ const entries = /* @__PURE__ */ new Map();
+ const prefix = dir === "/" ? "/" : dir + "/";
+ for (const row of rows) {
+ const p = row["path"];
+ if (!p.startsWith(prefix) && dir !== "/")
+ continue;
+ const rest = dir === "/" ? p.slice(1) : p.slice(prefix.length);
+ const slash = rest.indexOf("/");
+ const name = slash === -1 ? rest : rest.slice(0, slash);
+ if (!name)
+ continue;
+ const existing = entries.get(name);
+ if (slash !== -1) {
+ if (!existing)
+ entries.set(name, { isDir: true, size: 0 });
+ } else {
+ entries.set(name, { isDir: false, size: row["size_bytes"] ?? 0 });
+ }
+ }
+ const lines = [];
+ for (const [name, info] of [...entries].sort((a, b) => a[0].localeCompare(b[0]))) {
+ if (longFormat) {
+ const type = info.isDir ? "drwxr-xr-x" : "-rw-r--r--";
+ const size = String(info.isDir ? 0 : info.size).padStart(6);
+ lines.push(`${type} 1 user user ${size} ${name}${info.isDir ? "/" : ""}`);
+ } else {
+ lines.push(name + (info.isDir ? "/" : ""));
+ }
+ }
+ return buildAllowDecision(`echo ${JSON.stringify(lines.join("\n") || "(empty directory)")}`, `[DeepLake direct] ls ${dir}`);
+ }
+ if (input.tool_name === "Bash") {
+ const findMatch = shellCmd.match(/^find\s+(\S+)\s+(?:-type\s+\S+\s+)?-name\s+'([^']+)'/);
+ if (findMatch) {
+ const dir = findMatch[1].replace(/\/+$/, "") || "/";
+ const namePattern = sqlLike(findMatch[2]).replace(/\*/g, "%").replace(/\?/g, "_");
+ logFn(`direct find: ${dir} -name '${findMatch[2]}'`);
+ const paths = await findVirtualPathsFn(api, table, sessionsTable, dir, namePattern);
+ let result = paths.join("\n") || "";
+ if (/\|\s*wc\s+-l\s*$/.test(shellCmd))
+ result = String(paths.length);
+ return buildAllowDecision(`echo ${JSON.stringify(result || "(no matches)")}`, `[DeepLake direct] find ${dir}`);
}
- } catch (e) {
- log3(`direct query failed, falling back to shell: ${e.message}`);
}
+ } catch (e) {
+ logFn(`direct query failed, falling back to shell: ${e.message}`);
}
- log3(`intercepted \u2192 rewriting to shell: ${shellCmd}`);
- const rewrittenCommand = `node "${SHELL_BUNDLE}" -c "${shellCmd.replace(/"/g, '\\"')}"`;
- const output = {
+ return buildFallbackDecision(shellCmd, shellBundle);
+}
+async function main() {
+ const input = await readStdin();
+ const decision = await processPreToolUse(input);
+ if (!decision)
+ return;
+ console.log(JSON.stringify({
hookSpecificOutput: {
hookEventName: "PreToolUse",
permissionDecision: "allow",
- updatedInput: {
- command: rewrittenCommand,
- description: `[DeepLake] ${shellCmd}`
- }
+ updatedInput: decision
}
- };
- log3(`rewritten: ${rewrittenCommand}`);
- console.log(JSON.stringify(output));
+ }));
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log4(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ buildAllowDecision,
+ extractGrepParams,
+ getShellCommand,
+ isSafe,
+ processPreToolUse,
+ rewritePaths,
+ touchesMemory
+};
diff --git a/claude-code/bundle/session-end.js b/claude-code/bundle/session-end.js
index c10f5db..944977c 100755
--- a/claude-code/bundle/session-end.js
+++ b/claude-code/bundle/session-end.js
@@ -2,13 +2,13 @@
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -53,6 +53,12 @@ function loadConfig() {
};
}
+// dist/src/deeplake-api.js
+import { randomUUID } from "node:crypto";
+import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join3 } from "node:path";
+import { tmpdir } from "node:os";
+
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
import { join as join2 } from "node:path";
@@ -69,35 +75,354 @@ function log(tag, msg) {
`);
}
-// dist/src/hooks/spawn-wiki-worker.js
-import { spawn, execSync } from "node:child_process";
-import { fileURLToPath } from "node:url";
-import { dirname, join as join4 } from "node:path";
-import { writeFileSync, mkdirSync as mkdirSync2 } from "node:fs";
-import { homedir as homedir3, tmpdir } from "node:os";
+// dist/src/utils/sql.js
+function sqlStr(value) {
+ return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
+}
+function sqlIdent(name) {
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+ }
+ return name;
+}
-// dist/src/utils/wiki-log.js
-import { mkdirSync, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join3 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join3(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
+// dist/src/deeplake-api.js
+var log2 = (msg) => log("sdk", msg);
+var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
+var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
+function summarizeSql(sql, maxLen = 220) {
+ const compact = sql.replace(/\s+/g, " ").trim();
+ return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact;
+}
+function traceSql(msg) {
+ if (!TRACE_SQL)
+ return;
+ process.stderr.write(`[deeplake-sql] ${msg}
`);
+ if (DEBUG_FILE_LOG)
+ log2(msg);
+}
+var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
+var MAX_RETRIES = 3;
+var BASE_DELAY_MS = 500;
+var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
+function sleep(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+  const body = text.toLowerCase();
+  return body.includes("<html");
+}
+var Semaphore = class {
+  active = 0;
+  waiting = [];
+  constructor(max) {
+    this.max = max;
+  }
+  async acquire() {
+    if (this.active < this.max) {
+      this.active++;
+      return;
+    }
+    await new Promise((resolve2) => this.waiting.push(resolve2));
+ }
+ release() {
+ this.active--;
+ const next = this.waiting.shift();
+ if (next) {
+ this.active++;
+ next();
+ }
+ }
+};
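+// Usage sketch (illustrative): the semaphore caps in-flight work at the
+// limit passed to the constructor; callers must pair acquire with release.
+//   const sem = new Semaphore(5);
+//   await sem.acquire();
+//   try { /* one bounded unit of work */ } finally { sem.release(); }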
+var DeeplakeApi = class {
+ token;
+ apiUrl;
+ orgId;
+ workspaceId;
+ tableName;
+ _pendingRows = [];
+ _sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
+ constructor(token, apiUrl, orgId, workspaceId, tableName) {
+ this.token = token;
+ this.apiUrl = apiUrl;
+ this.orgId = orgId;
+ this.workspaceId = workspaceId;
+ this.tableName = tableName;
+ }
+ /** Execute SQL with retry on transient errors and bounded concurrency. */
+ async query(sql) {
+ const startedAt = Date.now();
+ const summary = summarizeSql(sql);
+ traceSql(`query start: ${summary}`);
+ await this._sem.acquire();
+ try {
+ const rows = await this._queryWithRetry(sql);
+ traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
+ return rows;
+ } catch (e) {
+ const message = e instanceof Error ? e.message : String(e);
+ traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
+ throw e;
+ } finally {
+ this._sem.release();
+ }
+ }
+ async _queryWithRetry(sql) {
+ let lastError;
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
+ let resp;
+ try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
+ resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${this.token}`,
+ "Content-Type": "application/json",
+ "X-Activeloop-Org-Id": this.orgId
+ },
+ signal,
+ body: JSON.stringify({ query: sql })
+ });
+ } catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
+ lastError = e instanceof Error ? e : new Error(String(e));
+ if (attempt < MAX_RETRIES) {
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
+ await sleep(delay);
+ continue;
+ }
+ throw lastError;
+ }
+ if (resp.ok) {
+ const raw = await resp.json();
+ if (!raw?.rows || !raw?.columns)
+ return [];
+ return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
+ }
+ const text = await resp.text().catch(() => "");
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
+ const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
+ log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
+ await sleep(delay);
+ continue;
+ }
+ throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
+ }
+ throw lastError ?? new Error("Query failed: max retries exceeded");
+ }
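+  // Retry timing sketch (derived from the constants above): a failed
+  // attempt n backs off BASE_DELAY_MS * 2^n plus 0-200ms of jitter, i.e.
+  // roughly 500ms, 1s, 2s before the final attempt. A timeout raised by
+  // AbortSignal.timeout(QUERY_TIMEOUT_MS) is treated as fatal, not retried.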
+ // ── Writes ──────────────────────────────────────────────────────────────────
+ /** Queue rows for writing. Call commit() to flush. */
+ appendRows(rows) {
+ this._pendingRows.push(...rows);
+ }
+ /** Flush pending rows via SQL. */
+ async commit() {
+ if (this._pendingRows.length === 0)
+ return;
+ const rows = this._pendingRows;
+ this._pendingRows = [];
+ const CONCURRENCY = 10;
+ for (let i = 0; i < rows.length; i += CONCURRENCY) {
+ const chunk = rows.slice(i, i + CONCURRENCY);
+ await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
+ }
+ log2(`commit: ${rows.length} rows`);
+ }
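+  // Usage sketch (illustrative): rows buffer locally until commit(), which
+  // flushes in windows of 10 concurrent upserts; Promise.allSettled means
+  // one failed row does not abort the rest of its window.
+  //   api.appendRows(rows);  // buffered, no network traffic yet
+  //   await api.commit();    // flushed in windows of 10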
+ async upsertRowSql(row) {
+ const ts = (/* @__PURE__ */ new Date()).toISOString();
+ const cd = row.creationDate ?? ts;
+ const lud = row.lastUpdateDate ?? ts;
+ const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
+ if (exists.length > 0) {
+ let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
+ if (row.project !== void 0)
+ setClauses += `, project = '${sqlStr(row.project)}'`;
+ if (row.description !== void 0)
+ setClauses += `, description = '${sqlStr(row.description)}'`;
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
+ } else {
+ const id = randomUUID();
+ let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
+ let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
+ if (row.project !== void 0) {
+ cols += ", project";
+ vals += `, '${sqlStr(row.project)}'`;
+ }
+ if (row.description !== void 0) {
+ cols += ", description";
+ vals += `, '${sqlStr(row.description)}'`;
+ }
+ await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
+ }
+ }
+ /** Update specific columns on a row by path. */
+ async updateColumns(path, columns) {
+ const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
+ await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
+ }
+ // ── Convenience ─────────────────────────────────────────────────────────────
+ /** Create a BM25 search index on a column. */
+ async createIndex(column) {
+ await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
+ }
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join3(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync2(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
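+  // Marker sketch (illustrative): a successful CREATE INDEX writes a JSON
+  // marker file under getIndexMarkerDir(), keyed by workspace, org, table,
+  // and suffix. While that marker is younger than INDEX_MARKER_TTL_MS (6h
+  // by default), repeat calls return without issuing any SQL.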
+ /** List all tables in the workspace (with retry). */
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
+ for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
+ try {
+ const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
+ headers: {
+ Authorization: `Bearer ${this.token}`,
+ "X-Activeloop-Org-Id": this.orgId
+ }
+ });
+ if (resp.ok) {
+ const data = await resp.json();
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
+ }
+ if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
+ continue;
+ }
+ return { tables: [], cacheable: false };
} catch {
+ if (attempt < MAX_RETRIES) {
+ await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
+ continue;
+ }
+ return { tables: [], cacheable: false };
}
}
- };
+ return { tables: [], cacheable: false };
+ }
+ /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
+ async ensureTable(name) {
+ const tbl = name ?? this.tableName;
+ const tables = await this.listTables();
+ if (!tables.includes(tbl)) {
+ log2(`table "${tbl}" not found, creating`);
+ await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
+ log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
+ }
+ }
+ /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
+ async ensureSessionsTable(name) {
+ const tables = await this.listTables();
+ if (!tables.includes(name)) {
+ log2(`table "${name}" not found, creating`);
+ await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
+ log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
+ }
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
+ }
+};
+
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
}
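+// Behavior sketch (illustrative): isDirectRun compares this module's path
+// against process.argv[1], so
+//   node bundle/session-end.js  -> true  (hook entrypoint, main() runs)
+//   import "./session-end.js"   -> false (library use, exports only)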
// dist/src/hooks/spawn-wiki-worker.js
+import { spawn, execSync } from "node:child_process";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname, join as join4 } from "node:path";
+import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
+import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os";
var HOME = homedir3();
-var wikiLogger = makeWikiLogger(join4(HOME, ".claude", "hooks"));
-var WIKI_LOG = wikiLogger.path;
+var WIKI_LOG = join4(HOME, ".claude", "hooks", "deeplake-wiki.log");
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. Think of this as building a knowledge graph, not writing a summary.
SESSION JSONL path: __JSONL__
@@ -150,7 +475,14 @@ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. Future you w
PRIVACY: Never include absolute filesystem paths (e.g. /home/user/..., /Users/..., C:\\\\...) in the summary. Use only project-relative paths or the project name. The Source and Project fields above are already correct \u2014 do not change them.
LENGTH LIMIT: Keep the total summary under 4000 characters. Be dense and concise \u2014 prioritize facts over prose. If a session is short, the summary should be short too.`;
-var wikiLog = wikiLogger.log;
+function wikiLog(msg) {
+ try {
+ mkdirSync2(join4(HOME, ".claude", "hooks"), { recursive: true });
+ appendFileSync2(WIKI_LOG, `[${utcTimestamp()}] ${msg}
+`);
+ } catch {
+ }
+}
function findClaudeBin() {
try {
return execSync("which claude 2>/dev/null", { encoding: "utf-8" }).trim();
@@ -161,10 +493,10 @@ function findClaudeBin() {
function spawnWikiWorker(opts) {
const { config, sessionId, cwd, bundleDir, reason } = opts;
const projectName = cwd.split("/").pop() || "unknown";
- const tmpDir = join4(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
+ const tmpDir = join4(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
mkdirSync2(tmpDir, { recursive: true });
const configFile = join4(tmpDir, "config.json");
- writeFileSync(configFile, JSON.stringify({
+ writeFileSync2(configFile, JSON.stringify({
apiUrl: config.apiUrl,
token: config.token,
orgId: config.orgId,
@@ -189,102 +521,264 @@ function spawnWikiWorker(opts) {
wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
function bundleDirFromImportMeta(importMetaUrl) {
- return dirname(fileURLToPath(importMetaUrl));
+ return dirname(fileURLToPath2(importMetaUrl));
}
-// dist/src/hooks/summary-state.js
-import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync3, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs";
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync3, closeSync, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync3, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
+import { dirname as dirname2, join as join5 } from "node:path";
import { homedir as homedir4 } from "node:os";
-import { join as join5 } from "node:path";
-var dlog = (msg) => log("summary-state", msg);
-var STATE_DIR = join5(homedir4(), ".claude", "hooks", "summary-state");
-var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
-function lockPath(sessionId) {
- return join5(STATE_DIR, `${sessionId}.lock`);
-}
-function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
- mkdirSync3(STATE_DIR, { recursive: true });
- const p = lockPath(sessionId);
- if (existsSync2(p)) {
+var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue");
+var DEFAULT_MAX_BATCH_ROWS = 50;
+var DEFAULT_STALE_INFLIGHT_MS = 6e4;
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
+var BUSY_WAIT_STEP_MS = 100;
+var SessionWriteDisabledError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "SessionWriteDisabledError";
+ }
+};
+function buildSessionInsertSql(sessionsTable, rows) {
+ if (rows.length === 0)
+ throw new Error("buildSessionInsertSql: rows must not be empty");
+ const table = sqlIdent(sessionsTable);
+ const values = rows.map((row) => {
+ const jsonForSql = sqlStr(coerceJsonbPayload(row.message));
+ return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`;
+ }).join(", ");
+ return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`;
+}
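+// Output sketch (hypothetical values): one queued event row becomes one
+// tuple in a batched INSERT, with the message payload cast to jsonb:
+//   INSERT INTO "sessions" (id, path, filename, message, author, size_bytes,
+//     project, description, agent, creation_date, last_update_date)
+//   VALUES ('a1b2', '/sessions/u/s.jsonl', 's.jsonl', '{"type":"user"}'::jsonb,
+//     'u', 17, 'proj', '', 'claude_code', '2025-01-01T00:00:00Z',
+//     '2025-01-01T00:00:00Z')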
+function coerceJsonbPayload(message) {
+ try {
+ return JSON.stringify(JSON.parse(message));
+ } catch {
+ return JSON.stringify({
+ type: "raw_message",
+ content: message
+ });
+ }
+}
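+// Behavior sketch: valid JSON is re-serialized unchanged; anything else is
+// wrapped so the ::jsonb cast above can never fail on malformed input.
+//   coerceJsonbPayload('{"a":1}')   // -> '{"a":1}'
+//   coerceJsonbPayload('not json')  // -> '{"type":"raw_message","content":"not json"}'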
+async function flushSessionQueue(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS;
+ const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS;
+ const waitIfBusyMs = opts.waitIfBusyMs ?? 0;
+ const drainAll = opts.drainAll ?? false;
+ mkdirSync3(queueDir, { recursive: true });
+ const queuePath = getQueuePath(queueDir, opts.sessionId);
+ const inflightPath = getInflightPath(queueDir, opts.sessionId);
+ if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) {
+ return existsSync3(queuePath) || existsSync3(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 };
+ }
+ let totalRows = 0;
+ let totalBatches = 0;
+ let flushedAny = false;
+ while (true) {
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ if (existsSync3(inflightPath)) {
+ if (waitIfBusyMs > 0) {
+ await waitForInflightToClear(inflightPath, waitIfBusyMs);
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ }
+ if (existsSync3(inflightPath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 };
+ }
+ }
+ if (!existsSync3(queuePath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
try {
- const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10);
- if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
- return false;
- } catch (readErr) {
- dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
+ renameSync(queuePath, inflightPath);
+ } catch (e) {
+ if (e?.code === "ENOENT") {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ throw e;
}
try {
- unlinkSync(p);
- } catch (unlinkErr) {
- dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
- return false;
+ const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows);
+ totalRows += rows;
+ totalBatches += batches;
+ flushedAny = flushedAny || rows > 0;
+ } catch (e) {
+ requeueInflight(queuePath, inflightPath);
+ if (e instanceof SessionWriteDisabledError) {
+ return { status: "disabled", rows: totalRows, batches: totalBatches };
+ }
+ throw e;
+ }
+ if (!drainAll) {
+ return { status: "flushed", rows: totalRows, batches: totalBatches };
}
}
- try {
- const fd = openSync(p, "wx");
+}
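+// Flush protocol sketch (illustrative): each session's queue is a pair of
+// files, and the rename is the atomic hand-off point.
+//   <sessionId>.jsonl     appended by capture hooks, one JSON row per line
+//   <sessionId>.inflight  renamed from .jsonl while a flush is in progress
+// On failure the inflight contents are appended back onto the .jsonl so no
+// rows are lost; with drainAll set, the loop repeats until the queue is empty.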
+function getQueuePath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.jsonl`);
+}
+function getInflightPath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.inflight`);
+}
+async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) {
+ const rows = readQueuedRows(inflightPath);
+ if (rows.length === 0) {
+ rmSync(inflightPath, { force: true });
+ return { rows: 0, batches: 0 };
+ }
+ let ensured = false;
+ let batches = 0;
+ const queueDir = dirname2(inflightPath);
+ for (let i = 0; i < rows.length; i += maxBatchRows) {
+ const chunk = rows.slice(i, i + maxBatchRows);
+ const sql = buildSessionInsertSql(sessionsTable, chunk);
try {
- writeSync(fd, String(Date.now()));
- } finally {
- closeSync(fd);
+ await api.query(sql);
+ } catch (e) {
+ if (isSessionWriteAuthError(e)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(e));
+ }
+ if (!ensured && isEnsureSessionsTableRetryable(e)) {
+ try {
+ await api.ensureSessionsTable(sessionsTable);
+ } catch (ensureError) {
+ if (isSessionWriteAuthError(ensureError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(ensureError));
+ }
+ throw ensureError;
+ }
+ ensured = true;
+ try {
+ await api.query(sql);
+ } catch (retryError) {
+ if (isSessionWriteAuthError(retryError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(retryError));
+ }
+ throw retryError;
+ }
+ } else {
+ throw e;
+ }
}
- return true;
- } catch (e) {
- if (e.code === "EEXIST")
- return false;
- throw e;
+ batches += 1;
}
+ clearSessionWriteDisabled(sessionsTable, queueDir);
+ rmSync(inflightPath, { force: true });
+ return { rows: rows.length, batches };
}
-function releaseLock(sessionId) {
+function readQueuedRows(path) {
+ const raw = readFileSync3(path, "utf-8");
+ return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
+}
+function requeueInflight(queuePath, inflightPath) {
+ if (!existsSync3(inflightPath))
+ return;
+ const inflight = readFileSync3(inflightPath, "utf-8");
+ appendFileSync3(queuePath, inflight);
+ rmSync(inflightPath, { force: true });
+}
+function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) {
+ if (!existsSync3(inflightPath) || !isStale(inflightPath, staleInflightMs))
+ return;
+ requeueInflight(queuePath, inflightPath);
+}
+function isStale(path, staleInflightMs) {
+ return Date.now() - statSync(path).mtimeMs >= staleInflightMs;
+}
+function isEnsureSessionsTableRetryable(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found");
+}
+function isSessionWriteAuthError(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized");
+}
+function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({
+ disabledAt: (/* @__PURE__ */ new Date()).toISOString(),
+ reason,
+ sessionsTable
+ }));
+}
+function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) {
+ rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true });
+}
+function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) {
+ const path = getSessionWriteDisabledPath(queueDir, sessionsTable);
+ if (!existsSync3(path))
+ return false;
try {
- unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
+ const raw = readFileSync3(path, "utf-8");
+ const state = JSON.parse(raw);
+ const ageMs = Date.now() - new Date(state.disabledAt).getTime();
+ if (Number.isNaN(ageMs) || ageMs >= ttlMs) {
+ rmSync(path, { force: true });
+ return false;
}
+ return true;
+ } catch {
+ rmSync(path, { force: true });
+ return false;
+ }
+}
+function getSessionWriteDisabledPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.disabled.json`);
+}
+function errorMessage(error) {
+ return error instanceof Error ? error.message : String(error);
+}
+async function waitForInflightToClear(inflightPath, waitIfBusyMs) {
+ const startedAt = Date.now();
+ while (existsSync3(inflightPath) && Date.now() - startedAt < waitIfBusyMs) {
+ await sleep2(BUSY_WAIT_STEP_MS);
}
}
+function sleep2(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
// dist/src/hooks/session-end.js
-var log2 = (msg) => log("session-end", msg);
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
- return;
- if (process.env.HIVEMIND_CAPTURE === "false")
- return;
- const input = await readStdin();
- const sessionId = input.session_id;
- const cwd = input.cwd ?? "";
- if (!sessionId)
- return;
- const config = loadConfig();
+var log3 = (msg) => log("session-end", msg);
+async function runSessionEndHook(input, deps = {}) {
+ const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", captureEnabled = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false", config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), flushSessionQueueFn = flushSessionQueue, spawnWikiWorkerFn = spawnWikiWorker, wikiLogFn = wikiLog, bundleDir = bundleDirFromImportMeta(import.meta.url), logFn = log3 } = deps;
+ if (wikiWorker || !captureEnabled || !input.session_id)
+ return { status: "skipped" };
if (!config) {
- log2("no config");
- return;
- }
- if (!tryAcquireLock(sessionId)) {
- wikiLog(`SessionEnd: periodic worker already running for ${sessionId}, skipping`);
- return;
- }
- wikiLog(`SessionEnd: triggering summary for ${sessionId}`);
- try {
- spawnWikiWorker({
- config,
- sessionId,
- cwd,
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "SessionEnd"
- });
- } catch (e) {
- log2(`spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr) {
- log2(`releaseLock after spawn failure also failed: ${releaseErr.message}`);
- }
- throw e;
+ logFn("no config");
+ return { status: "no_config" };
}
+ const flush = await flushSessionQueueFn(createApi(config), {
+ sessionId: input.session_id,
+ sessionsTable: config.sessionsTableName,
+ waitIfBusyMs: 5e3,
+ drainAll: true
+ });
+ logFn(`flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`);
+ wikiLogFn(`SessionEnd: triggering summary for ${input.session_id}`);
+ spawnWikiWorkerFn({
+ config,
+ sessionId: input.session_id,
+ cwd: input.cwd ?? "",
+ bundleDir,
+ reason: "SessionEnd"
+ });
+ return { status: "flushed", flushStatus: flush.status };
+}
+async function main() {
+ const input = await readStdin();
+ await runSessionEndHook(input);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log3(`fatal: ${e.message}`);
+ process.exit(0);
+ });
}
-main().catch((e) => {
- log2(`fatal: ${e.message}`);
- process.exit(0);
-});
+export {
+ runSessionEndHook
+};
diff --git a/claude-code/bundle/session-start-setup.js b/claude-code/bundle/session-start-setup.js
index bec63e9..77621bc 100755
--- a/claude-code/bundle/session-start-setup.js
+++ b/claude-code/bundle/session-start-setup.js
@@ -1,10 +1,11 @@
#!/usr/bin/env node
// dist/src/hooks/session-start-setup.js
-import { fileURLToPath } from "node:url";
-import { dirname as dirname2, join as join6 } from "node:path";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname as dirname3, join as join7 } from "node:path";
+import { mkdirSync as mkdirSync5, appendFileSync as appendFileSync3 } from "node:fs";
import { execSync as execSync2 } from "node:child_process";
-import { homedir as homedir4 } from "node:os";
+import { homedir as homedir6 } from "node:os";
// dist/src/commands/auth.js
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs";
@@ -66,6 +67,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -87,6 +91,12 @@ function log(tag, msg) {
function sqlStr(value) {
return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
+function sqlIdent(name) {
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+ }
+ return name;
+}
// dist/src/deeplake-api.js
var log2 = (msg) => log("sdk", msg);
@@ -108,8 +118,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -142,6 +173,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -172,6 +204,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -179,9 +212,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -198,7 +236,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -263,8 +302,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join4(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
+ writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -275,22 +367,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -300,6 +395,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -309,19 +406,22 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -330,140 +430,538 @@ function readStdin() {
});
}
-// dist/src/utils/version-check.js
-import { readFileSync as readFileSync3 } from "node:fs";
-import { dirname, join as join4 } from "node:path";
-var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync2, closeSync, existsSync as existsSync4, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync4, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
+import { dirname, join as join5 } from "node:path";
+import { homedir as homedir4 } from "node:os";
+var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue");
+var DEFAULT_MAX_BATCH_ROWS = 50;
+var DEFAULT_STALE_INFLIGHT_MS = 6e4;
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
+var DEFAULT_DRAIN_LOCK_STALE_MS = 3e4;
+var BUSY_WAIT_STEP_MS = 100;
+var SessionWriteDisabledError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "SessionWriteDisabledError";
+ }
+};
+function buildSessionInsertSql(sessionsTable, rows) {
+ if (rows.length === 0)
+ throw new Error("buildSessionInsertSql: rows must not be empty");
+ const table = sqlIdent(sessionsTable);
+ const values = rows.map((row) => {
+ const jsonForSql = sqlStr(coerceJsonbPayload(row.message));
+ return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`;
+ }).join(", ");
+ return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`;
+}
+function coerceJsonbPayload(message) {
+ try {
+ return JSON.stringify(JSON.parse(message));
+ } catch {
+ return JSON.stringify({
+ type: "raw_message",
+ content: message
+ });
+ }
+}
+async function flushSessionQueue(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS;
+ const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS;
+ const waitIfBusyMs = opts.waitIfBusyMs ?? 0;
+ const drainAll = opts.drainAll ?? false;
+ mkdirSync3(queueDir, { recursive: true });
+ const queuePath = getQueuePath(queueDir, opts.sessionId);
+ const inflightPath = getInflightPath(queueDir, opts.sessionId);
+ if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) {
+ return existsSync4(queuePath) || existsSync4(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 };
+ }
+ let totalRows = 0;
+ let totalBatches = 0;
+ let flushedAny = false;
+ while (true) {
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ if (existsSync4(inflightPath)) {
+ if (waitIfBusyMs > 0) {
+ await waitForInflightToClear(inflightPath, waitIfBusyMs);
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ }
+ if (existsSync4(inflightPath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 };
+ }
+ }
+ if (!existsSync4(queuePath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ try {
+ renameSync(queuePath, inflightPath);
+ } catch (e) {
+ if (e?.code === "ENOENT") {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ throw e;
+ }
+ try {
+ const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows);
+ totalRows += rows;
+ totalBatches += batches;
+ flushedAny = flushedAny || rows > 0;
+ } catch (e) {
+ requeueInflight(queuePath, inflightPath);
+ if (e instanceof SessionWriteDisabledError) {
+ return { status: "disabled", rows: totalRows, batches: totalBatches };
+ }
+ throw e;
+ }
+ if (!drainAll) {
+ return { status: "flushed", rows: totalRows, batches: totalBatches };
+ }
+ }
+}
+async function drainSessionQueues(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ mkdirSync3(queueDir, { recursive: true });
+ const sessionIds = listQueuedSessionIds(queueDir, opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS);
+ let flushedSessions = 0;
+ let rows = 0;
+ let batches = 0;
+ for (const sessionId of sessionIds) {
+ const result = await flushSessionQueue(api, {
+ sessionId,
+ sessionsTable: opts.sessionsTable,
+ queueDir,
+ maxBatchRows: opts.maxBatchRows,
+ allowStaleInflight: true,
+ staleInflightMs: opts.staleInflightMs,
+ drainAll: true
+ });
+ if (result.status === "flushed") {
+ flushedSessions += 1;
+ rows += result.rows;
+ batches += result.batches;
+ }
+ }
+ return {
+ queuedSessions: sessionIds.length,
+ flushedSessions,
+ rows,
+ batches
+ };
+}
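+// Usage sketch (illustrative, hypothetical table name): SessionStart sweeps
+// every queued session id, including stale .inflight leftovers, and flushes
+// each one fully.
+//   const totals = await drainSessionQueues(api, { sessionsTable: "sessions" });
+//   // -> { queuedSessions, flushedSessions, rows, batches }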
+function tryAcquireSessionDrainLock(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, staleMs = DEFAULT_DRAIN_LOCK_STALE_MS) {
+ mkdirSync3(queueDir, { recursive: true });
+ const lockPath = getSessionDrainLockPath(queueDir, sessionsTable);
+ for (let attempt = 0; attempt < 2; attempt++) {
+ try {
+ const fd = openSync(lockPath, "wx");
+ closeSync(fd);
+ return () => rmSync(lockPath, { force: true });
+ } catch (e) {
+ if (e?.code !== "EEXIST")
+ throw e;
+ if (existsSync4(lockPath) && isStale(lockPath, staleMs)) {
+ rmSync(lockPath, { force: true });
+ continue;
+ }
+ return null;
+ }
+ }
+ return null;
+}
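+// Lock sketch (illustrative): openSync(path, "wx") creates the lock file
+// only if it does not exist, so exactly one concurrent caller wins the
+// drain; a lock older than DEFAULT_DRAIN_LOCK_STALE_MS (30s) is removed
+// and re-taken on the second attempt.
+//   const release = tryAcquireSessionDrainLock("sessions");
+//   if (release) { try { /* drain */ } finally { release(); } }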
+function getQueuePath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.jsonl`);
+}
+function getInflightPath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.inflight`);
+}
+async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) {
+ const rows = readQueuedRows(inflightPath);
+ if (rows.length === 0) {
+ rmSync(inflightPath, { force: true });
+ return { rows: 0, batches: 0 };
+ }
+ let ensured = false;
+ let batches = 0;
+ const queueDir = dirname(inflightPath);
+ for (let i = 0; i < rows.length; i += maxBatchRows) {
+ const chunk = rows.slice(i, i + maxBatchRows);
+ const sql = buildSessionInsertSql(sessionsTable, chunk);
+ try {
+ await api.query(sql);
+ } catch (e) {
+ if (isSessionWriteAuthError(e)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(e));
+ }
+ if (!ensured && isEnsureSessionsTableRetryable(e)) {
+ try {
+ await api.ensureSessionsTable(sessionsTable);
+ } catch (ensureError) {
+ if (isSessionWriteAuthError(ensureError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(ensureError));
+ }
+ throw ensureError;
+ }
+ ensured = true;
+ try {
+ await api.query(sql);
+ } catch (retryError) {
+ if (isSessionWriteAuthError(retryError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(retryError));
+ }
+ throw retryError;
+ }
+ } else {
+ throw e;
+ }
+ }
+ batches += 1;
+ }
+ clearSessionWriteDisabled(sessionsTable, queueDir);
+ rmSync(inflightPath, { force: true });
+ return { rows: rows.length, batches };
+}
+function readQueuedRows(path) {
+ const raw = readFileSync4(path, "utf-8");
+ return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
+}
+function requeueInflight(queuePath, inflightPath) {
+ if (!existsSync4(inflightPath))
+ return;
+ const inflight = readFileSync4(inflightPath, "utf-8");
+ appendFileSync2(queuePath, inflight);
+ rmSync(inflightPath, { force: true });
+}
+function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) {
+ if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs))
+ return;
+ requeueInflight(queuePath, inflightPath);
+}
+function isStale(path, staleInflightMs) {
+ return Date.now() - statSync(path).mtimeMs >= staleInflightMs;
+}
+function listQueuedSessionIds(queueDir, staleInflightMs) {
+ const sessionIds = /* @__PURE__ */ new Set();
+ for (const name of readdirSync(queueDir)) {
+ if (name.endsWith(".jsonl")) {
+ sessionIds.add(name.slice(0, -".jsonl".length));
+ } else if (name.endsWith(".inflight")) {
+ const path = join5(queueDir, name);
+ if (isStale(path, staleInflightMs)) {
+ sessionIds.add(name.slice(0, -".inflight".length));
+ }
+ }
+ }
+ return [...sessionIds].sort();
+}
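+// Listing sketch (illustrative): a queue dir holding "a.jsonl" plus a stale
+// "b.inflight" yields ["a", "b"]; fresh .inflight files are skipped since
+// another process is presumed to still be flushing them.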
+function isEnsureSessionsTableRetryable(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found");
+}
+function isSessionWriteAuthError(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized");
+}
+function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({
+ disabledAt: (/* @__PURE__ */ new Date()).toISOString(),
+ reason,
+ sessionsTable
+ }));
+}
+function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) {
+ rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true });
+}
+function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) {
+ const path = getSessionWriteDisabledPath(queueDir, sessionsTable);
+ if (!existsSync4(path))
+ return false;
+ try {
+ const raw = readFileSync4(path, "utf-8");
+ const state = JSON.parse(raw);
+ const ageMs = Date.now() - new Date(state.disabledAt).getTime();
+ if (Number.isNaN(ageMs) || ageMs >= ttlMs) {
+ rmSync(path, { force: true });
+ return false;
+ }
+ return true;
+ } catch {
+ rmSync(path, { force: true });
+ return false;
+ }
+}
+function getSessionWriteDisabledPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.disabled.json`);
+}
+function getSessionDrainLockPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.drain.lock`);
+}
+function errorMessage(error) {
+ return error instanceof Error ? error.message : String(error);
+}
+async function waitForInflightToClear(inflightPath, waitIfBusyMs) {
+ const startedAt = Date.now();
+ while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) {
+ await sleep2(BUSY_WAIT_STEP_MS);
+ }
+}
+function sleep2(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+
+// dist/src/hooks/version-check.js
+import { existsSync as existsSync5, mkdirSync as mkdirSync4, readFileSync as readFileSync5, writeFileSync as writeFileSync4 } from "node:fs";
+import { dirname as dirname2, join as join6 } from "node:path";
+import { homedir as homedir5 } from "node:os";
+var DEFAULT_VERSION_CACHE_PATH = join6(homedir5(), ".deeplake", ".version-check.json");
+var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3;
function getInstalledVersion(bundleDir, pluginManifestDir) {
try {
- const pluginJson = join4(bundleDir, "..", pluginManifestDir, "plugin.json");
- const plugin = JSON.parse(readFileSync3(pluginJson, "utf-8"));
+ const pluginJson = join6(bundleDir, "..", pluginManifestDir, "plugin.json");
+ const plugin = JSON.parse(readFileSync5(pluginJson, "utf-8"));
if (plugin.version)
return plugin.version;
} catch {
}
let dir = bundleDir;
for (let i = 0; i < 5; i++) {
- const candidate = join4(dir, "package.json");
+ const candidate = join6(dir, "package.json");
try {
- const pkg = JSON.parse(readFileSync3(candidate, "utf-8"));
+ const pkg = JSON.parse(readFileSync5(candidate, "utf-8"));
if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version)
return pkg.version;
} catch {
}
- const parent = dirname(dir);
+ const parent = dirname2(dir);
if (parent === dir)
break;
dir = parent;
}
return null;
}
-async function getLatestVersion(timeoutMs = 3e3) {
- try {
- const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) });
- if (!res.ok)
- return null;
- const pkg = await res.json();
- return pkg.version ?? null;
- } catch {
- return null;
- }
-}
function isNewer(latest, current) {
- const parse = (v) => v.split(".").map(Number);
+ const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number);
const [la, lb, lc] = parse(latest);
const [ca, cb, cc] = parse(current);
return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc;
}
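+// Comparison sketch: prerelease tags are stripped before the three-part
+// numeric compare, so
+//   isNewer("0.6.38", "0.6.37")    // -> true
+//   isNewer("0.7.0-beta", "0.7.0") // -> false (compares as 0.7.0 vs 0.7.0)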
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join5 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join5(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync2(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
+function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) {
+ if (!existsSync5(cachePath))
+ return null;
+ try {
+ const parsed = JSON.parse(readFileSync5(cachePath, "utf-8"));
+ if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) {
+ return parsed;
}
- };
+ } catch {
+ }
+ return null;
+}
+function writeVersionCache(entry, cachePath = DEFAULT_VERSION_CACHE_PATH) {
+ mkdirSync4(dirname2(cachePath), { recursive: true });
+ writeFileSync4(cachePath, JSON.stringify(entry));
+}
+function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) {
+ const cached = readVersionCache(cachePath);
+ if (!cached || cached.url !== url)
+ return void 0;
+ if (nowMs - cached.checkedAt > ttlMs)
+ return void 0;
+ return cached.latest;
+}
+async function getLatestVersionCached(opts) {
+ const ttlMs = opts.ttlMs ?? DEFAULT_VERSION_CACHE_TTL_MS;
+ const cachePath = opts.cachePath ?? DEFAULT_VERSION_CACHE_PATH;
+ const nowMs = opts.nowMs ?? Date.now();
+ const fetchImpl = opts.fetchImpl ?? fetch;
+ const fresh = readFreshCachedLatestVersion(opts.url, ttlMs, cachePath, nowMs);
+ if (fresh !== void 0)
+ return fresh;
+ const stale = readVersionCache(cachePath);
+ try {
+ const res = await fetchImpl(opts.url, { signal: AbortSignal.timeout(opts.timeoutMs) });
+ const latest = res.ok ? (await res.json()).version ?? null : stale?.latest ?? null;
+ writeVersionCache({
+ checkedAt: nowMs,
+ latest,
+ url: opts.url
+ }, cachePath);
+ return latest;
+ } catch {
+ const latest = stale?.latest ?? null;
+ writeVersionCache({
+ checkedAt: nowMs,
+ latest,
+ url: opts.url
+ }, cachePath);
+ return latest;
+ }
}
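+// Cache sketch (reading of the logic above): the network is hit at most
+// once per DEFAULT_VERSION_CACHE_TTL_MS (1h); a failed or non-OK fetch
+// falls back to the last cached value and still refreshes checkedAt, so a
+// flaky network cannot trigger a probe on every session start.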
// dist/src/hooks/session-start-setup.js
var log3 = (msg) => log("session-setup", msg);
-var __bundleDir = dirname2(fileURLToPath(import.meta.url));
-var { log: wikiLog } = makeWikiLogger(join6(homedir4(), ".claude", "hooks"));
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
+var __bundleDir = dirname3(fileURLToPath2(import.meta.url));
+var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
+var VERSION_CHECK_TIMEOUT = 3e3;
+var HOME = homedir6();
+var WIKI_LOG = join7(HOME, ".claude", "hooks", "deeplake-wiki.log");
+function wikiLog(msg) {
+ try {
+ mkdirSync5(join7(HOME, ".claude", "hooks"), { recursive: true });
+ appendFileSync3(WIKI_LOG, `[${utcTimestamp()}] ${msg}
+`);
+ } catch {
+ }
+}
+async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) {
+ const summaryPath = `/summaries/${userName}/${sessionId}.md`;
+ const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`);
+ if (existing.length > 0) {
+ wikiLog(`SessionSetup: summary exists for ${sessionId} (resumed)`);
return;
- const input = await readStdin();
- const creds = loadCredentials();
+ }
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const projectName = cwd.split("/").pop() || "unknown";
+ const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`;
+ const content = [
+ `# Session ${sessionId}`,
+ `- **Source**: ${sessionSource}`,
+ `- **Started**: ${now}`,
+ `- **Project**: ${projectName}`,
+ `- **Status**: in-progress`,
+ ""
+ ].join("\n");
+ const filename = `${sessionId}.md`;
+ await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')`);
+ wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`);
+}
+async function runSessionStartSetup(input, deps = {}) {
+ const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.tableName), captureEnabled = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false", drainSessionQueuesFn = drainSessionQueues, isSessionWriteDisabledFn = isSessionWriteDisabled, isSessionWriteAuthErrorFn = isSessionWriteAuthError, markSessionWriteDisabledFn = markSessionWriteDisabled, tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, createPlaceholderFn = createPlaceholder, getInstalledVersionFn = getInstalledVersion, getLatestVersionCachedFn = getLatestVersionCached, isNewerFn = isNewer, execSyncFn = execSync2, logFn = log3, wikiLogFn = wikiLog } = deps;
+ if (wikiWorker)
+ return { status: "skipped" };
if (!creds?.token) {
- log3("no credentials");
- return;
+ logFn("no credentials");
+ return { status: "no_credentials" };
}
if (!creds.userName) {
try {
const { userInfo: userInfo2 } = await import("node:os");
creds.userName = userInfo2().username ?? "unknown";
- saveCredentials(creds);
- log3(`backfilled userName: ${creds.userName}`);
+ saveCredentialsFn(creds);
+ logFn(`backfilled userName: ${creds.userName}`);
} catch {
}
}
- if (input.session_id) {
+ if (input.session_id && config) {
try {
- const config = loadConfig();
- if (config) {
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName);
- await api.ensureTable();
- await api.ensureSessionsTable(config.sessionsTableName);
- log3("setup complete");
+ const api = createApi(config);
+ await api.ensureTable();
+ if (captureEnabled) {
+ if (isSessionWriteDisabledFn(config.sessionsTableName)) {
+ logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`);
+ } else {
+ const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName);
+ if (!releaseDrainLock) {
+ logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`);
+ } else {
+ try {
+ await api.ensureSessionsTable(config.sessionsTableName);
+ const drain = await drainSessionQueuesFn(api, {
+ sessionsTable: config.sessionsTableName
+ });
+ if (drain.flushedSessions > 0) {
+ logFn(`drained ${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`);
+ }
+ } catch (e) {
+ if (isSessionWriteAuthErrorFn(e)) {
+ markSessionWriteDisabledFn(config.sessionsTableName, e.message);
+ logFn(`sessions table unavailable, skipping setup: ${e.message}`);
+ } else {
+ throw e;
+ }
+ } finally {
+ releaseDrainLock();
+ }
+ }
+ }
+ await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId);
}
+ logFn("setup complete");
} catch (e) {
- log3(`setup failed: ${e.message}`);
- wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`);
+ logFn(`setup failed: ${e.message}`);
+ wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`);
}
}
const autoupdate = creds.autoupdate !== false;
try {
- const current = getInstalledVersion(__bundleDir, ".claude-plugin");
+ const current = getInstalledVersionFn(__bundleDir, ".claude-plugin");
if (current) {
- const latest = await getLatestVersion();
- if (latest && isNewer(latest, current)) {
+ const latest = await getLatestVersionCachedFn({
+ url: GITHUB_RAW_PKG,
+ timeoutMs: VERSION_CHECK_TIMEOUT
+ });
+ if (latest && isNewerFn(latest, current)) {
if (autoupdate) {
- log3(`autoupdate: updating ${current} \u2192 ${latest}`);
+ logFn(`autoupdate: updating ${current} \u2192 ${latest}`);
try {
const scopes = ["user", "project", "local", "managed"];
const cmd = scopes.map((s) => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null`).join("; ");
- execSync2(cmd, { stdio: "ignore", timeout: 6e4 });
+ execSyncFn(cmd, { stdio: "ignore", timeout: 6e4 });
process.stderr.write(`\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply.
`);
- log3(`autoupdate succeeded: ${current} \u2192 ${latest}`);
+ logFn(`autoupdate succeeded: ${current} \u2192 ${latest}`);
} catch (e) {
process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually.
`);
- log3(`autoupdate failed: ${e.message}`);
+ logFn(`autoupdate failed: ${e.message}`);
}
} else {
process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade.
`);
- log3(`update available (autoupdate off): ${current} \u2192 ${latest}`);
+ logFn(`update available (autoupdate off): ${current} \u2192 ${latest}`);
}
} else {
- log3(`version up to date: ${current}`);
+ logFn(`version up to date: ${current}`);
}
}
} catch (e) {
- log3(`version check failed: ${e.message}`);
+ logFn(`version check failed: ${e.message}`);
}
+ return { status: "complete" };
+}
+async function main() {
+ const input = await readStdin();
+ await runSessionStartSetup(input);
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
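+// Run the hook only when executed directly; importing the module (e.g. from
+// tests) gets the exports below without side effects.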
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log3(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ createPlaceholder,
+ runSessionStartSetup,
+ wikiLog
+};
diff --git a/claude-code/bundle/session-start.js b/claude-code/bundle/session-start.js
index f136de0..ea84c9c 100755
--- a/claude-code/bundle/session-start.js
+++ b/claude-code/bundle/session-start.js
@@ -1,11 +1,8 @@
#!/usr/bin/env node
// dist/src/hooks/session-start.js
-import { fileURLToPath } from "node:url";
-import { dirname as dirname2, join as join6 } from "node:path";
-import { readdirSync, rmSync } from "node:fs";
-import { execSync as execSync2 } from "node:child_process";
-import { homedir as homedir4 } from "node:os";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname as dirname2, join as join4 } from "node:path";
// dist/src/commands/auth.js
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs";
@@ -29,300 +26,15 @@ function saveCredentials(creds) {
writeFileSync(CREDS_PATH, JSON.stringify({ ...creds, savedAt: (/* @__PURE__ */ new Date()).toISOString() }, null, 2), { mode: 384 });
}
-// dist/src/config.js
-import { readFileSync as readFileSync2, existsSync as existsSync2 } from "node:fs";
-import { join as join2 } from "node:path";
-import { homedir as homedir2, userInfo } from "node:os";
-function loadConfig() {
- const home = homedir2();
- const credPath = join2(home, ".deeplake", "credentials.json");
- let creds = null;
- if (existsSync2(credPath)) {
- try {
- creds = JSON.parse(readFileSync2(credPath, "utf-8"));
- } catch {
- return null;
- }
- }
- const env = process.env;
- if (!env.HIVEMIND_TOKEN && env.DEEPLAKE_TOKEN) {
- process.stderr.write("[hivemind] DEEPLAKE_* env vars are deprecated; use HIVEMIND_* instead\n");
- }
- const token = env.HIVEMIND_TOKEN ?? env.DEEPLAKE_TOKEN ?? creds?.token;
- const orgId = env.HIVEMIND_ORG_ID ?? env.DEEPLAKE_ORG_ID ?? creds?.orgId;
- if (!token || !orgId)
- return null;
- return {
- token,
- orgId,
- orgName: creds?.orgName ?? orgId,
- userName: creds?.userName || userInfo().username || "unknown",
- workspaceId: env.HIVEMIND_WORKSPACE_ID ?? env.DEEPLAKE_WORKSPACE_ID ?? creds?.workspaceId ?? "default",
- apiUrl: env.HIVEMIND_API_URL ?? env.DEEPLAKE_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai",
- tableName: env.HIVEMIND_TABLE ?? env.DEEPLAKE_TABLE ?? "memory",
- sessionsTableName: env.HIVEMIND_SESSIONS_TABLE ?? env.DEEPLAKE_SESSIONS_TABLE ?? "sessions",
- memoryPath: env.HIVEMIND_MEMORY_PATH ?? env.DEEPLAKE_MEMORY_PATH ?? join2(home, ".deeplake", "memory")
- };
-}
-
-// dist/src/deeplake-api.js
-import { randomUUID } from "node:crypto";
-
-// dist/src/utils/debug.js
-import { appendFileSync } from "node:fs";
-import { join as join3 } from "node:path";
-import { homedir as homedir3 } from "node:os";
-var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-var LOG = join3(homedir3(), ".deeplake", "hook-debug.log");
-function utcTimestamp(d = /* @__PURE__ */ new Date()) {
- return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
-}
-function log(tag, msg) {
- if (!DEBUG)
- return;
- appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg}
-`);
-}
-
-// dist/src/utils/sql.js
-function sqlStr(value) {
- return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
-}
-
-// dist/src/deeplake-api.js
-var log2 = (msg) => log("sdk", msg);
-var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-function summarizeSql(sql, maxLen = 220) {
- const compact = sql.replace(/\s+/g, " ").trim();
- return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact;
-}
-function traceSql(msg) {
- if (!TRACE_SQL)
- return;
- process.stderr.write(`[deeplake-sql] ${msg}
-`);
- if (DEBUG_FILE_LOG)
- log2(msg);
-}
-var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
-var MAX_RETRIES = 3;
-var BASE_DELAY_MS = 500;
-var MAX_CONCURRENCY = 5;
-function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
-}
-var Semaphore = class {
- max;
- waiting = [];
- active = 0;
- constructor(max) {
- this.max = max;
- }
- async acquire() {
- if (this.active < this.max) {
- this.active++;
- return;
- }
- await new Promise((resolve) => this.waiting.push(resolve));
- }
- release() {
- this.active--;
- const next = this.waiting.shift();
- if (next) {
- this.active++;
- next();
- }
- }
-};
-var DeeplakeApi = class {
- token;
- apiUrl;
- orgId;
- workspaceId;
- tableName;
- _pendingRows = [];
- _sem = new Semaphore(MAX_CONCURRENCY);
- constructor(token, apiUrl, orgId, workspaceId, tableName) {
- this.token = token;
- this.apiUrl = apiUrl;
- this.orgId = orgId;
- this.workspaceId = workspaceId;
- this.tableName = tableName;
- }
- /** Execute SQL with retry on transient errors and bounded concurrency. */
- async query(sql) {
- const startedAt = Date.now();
- const summary = summarizeSql(sql);
- traceSql(`query start: ${summary}`);
- await this._sem.acquire();
- try {
- const rows = await this._queryWithRetry(sql);
- traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
- return rows;
- } catch (e) {
- const message = e instanceof Error ? e.message : String(e);
- traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
- throw e;
- } finally {
- this._sem.release();
- }
- }
- async _queryWithRetry(sql) {
- let lastError;
- for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
- let resp;
- try {
- resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${this.token}`,
- "Content-Type": "application/json",
- "X-Activeloop-Org-Id": this.orgId
- },
- body: JSON.stringify({ query: sql })
- });
- } catch (e) {
- lastError = e instanceof Error ? e : new Error(String(e));
- if (attempt < MAX_RETRIES) {
- const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
- log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
- await sleep(delay);
- continue;
- }
- throw lastError;
- }
- if (resp.ok) {
- const raw = await resp.json();
- if (!raw?.rows || !raw?.columns)
- return [];
- return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
- }
- const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
- const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
- log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
- await sleep(delay);
- continue;
- }
- throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
- }
- throw lastError ?? new Error("Query failed: max retries exceeded");
- }
- // ── Writes ──────────────────────────────────────────────────────────────────
- /** Queue rows for writing. Call commit() to flush. */
- appendRows(rows) {
- this._pendingRows.push(...rows);
- }
- /** Flush pending rows via SQL. */
- async commit() {
- if (this._pendingRows.length === 0)
- return;
- const rows = this._pendingRows;
- this._pendingRows = [];
- const CONCURRENCY = 10;
- for (let i = 0; i < rows.length; i += CONCURRENCY) {
- const chunk = rows.slice(i, i + CONCURRENCY);
- await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
- }
- log2(`commit: ${rows.length} rows`);
- }
- async upsertRowSql(row) {
- const ts = (/* @__PURE__ */ new Date()).toISOString();
- const cd = row.creationDate ?? ts;
- const lud = row.lastUpdateDate ?? ts;
- const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
- if (exists.length > 0) {
- let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
- if (row.project !== void 0)
- setClauses += `, project = '${sqlStr(row.project)}'`;
- if (row.description !== void 0)
- setClauses += `, description = '${sqlStr(row.description)}'`;
- await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
- } else {
- const id = randomUUID();
- let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
- let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
- if (row.project !== void 0) {
- cols += ", project";
- vals += `, '${sqlStr(row.project)}'`;
- }
- if (row.description !== void 0) {
- cols += ", description";
- vals += `, '${sqlStr(row.description)}'`;
- }
- await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
- }
- }
- /** Update specific columns on a row by path. */
- async updateColumns(path, columns) {
- const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
- await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
- }
- // ── Convenience ─────────────────────────────────────────────────────────────
- /** Create a BM25 search index on a column. */
- async createIndex(column) {
- await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
- }
- /** List all tables in the workspace (with retry). */
- async listTables() {
- for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
- try {
- const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
- headers: {
- Authorization: `Bearer ${this.token}`,
- "X-Activeloop-Org-Id": this.orgId
- }
- });
- if (resp.ok) {
- const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
- }
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
- await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
- continue;
- }
- return [];
- } catch {
- if (attempt < MAX_RETRIES) {
- await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
- continue;
- }
- return [];
- }
- }
- return [];
- }
- /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
- async ensureTable(name) {
- const tbl = name ?? this.tableName;
- const tables = await this.listTables();
- if (!tables.includes(tbl)) {
- log2(`table "${tbl}" not found, creating`);
- await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
- log2(`table "${tbl}" created`);
- }
- }
- /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
- async ensureSessionsTable(name) {
- const tables = await this.listTables();
- if (!tables.includes(name)) {
- log2(`table "${name}" not found, creating`);
- await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
- log2(`table "${name}" created`);
- }
- }
-};
-
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -331,23 +43,52 @@ function readStdin() {
});
}
-// dist/src/utils/version-check.js
-import { readFileSync as readFileSync3 } from "node:fs";
-import { dirname, join as join4 } from "node:path";
-var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
+// dist/src/utils/debug.js
+import { appendFileSync } from "node:fs";
+import { join as join2 } from "node:path";
+import { homedir as homedir2 } from "node:os";
+var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
+var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
+function log(tag, msg) {
+ if (!DEBUG)
+ return;
+ appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg}
+`);
+}
+
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
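+/** True when this module is the Node entrypoint (process.argv[1]), so hooks run main() only on direct execution, not on import. */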
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
+// dist/src/hooks/version-check.js
+import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "node:fs";
+import { dirname, join as join3 } from "node:path";
+import { homedir as homedir3 } from "node:os";
+var DEFAULT_VERSION_CACHE_PATH = join3(homedir3(), ".deeplake", ".version-check.json");
+var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3;
function getInstalledVersion(bundleDir, pluginManifestDir) {
try {
- const pluginJson = join4(bundleDir, "..", pluginManifestDir, "plugin.json");
- const plugin = JSON.parse(readFileSync3(pluginJson, "utf-8"));
+ const pluginJson = join3(bundleDir, "..", pluginManifestDir, "plugin.json");
+ const plugin = JSON.parse(readFileSync2(pluginJson, "utf-8"));
if (plugin.version)
return plugin.version;
} catch {
}
let dir = bundleDir;
for (let i = 0; i < 5; i++) {
- const candidate = join4(dir, "package.json");
+ const candidate = join3(dir, "package.json");
try {
- const pkg = JSON.parse(readFileSync3(candidate, "utf-8"));
+ const pkg = JSON.parse(readFileSync2(candidate, "utf-8"));
if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version)
return pkg.version;
} catch {
@@ -359,47 +100,38 @@ function getInstalledVersion(bundleDir, pluginManifestDir) {
}
return null;
}
-async function getLatestVersion(timeoutMs = 3e3) {
- try {
- const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) });
- if (!res.ok)
- return null;
- const pkg = await res.json();
- return pkg.version ?? null;
- } catch {
- return null;
- }
-}
function isNewer(latest, current) {
- const parse = (v) => v.split(".").map(Number);
+ const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number);
const [la, lb, lc] = parse(latest);
const [ca, cb, cc] = parse(current);
return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc;
}
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join5 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join5(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync2(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
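+/** Reads the on-disk version-check cache, returning null for missing or malformed entries. */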
+function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) {
+ if (!existsSync2(cachePath))
+ return null;
+ try {
+ const parsed = JSON.parse(readFileSync2(cachePath, "utf-8"));
+ if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) {
+ return parsed;
}
- };
+ } catch {
+ }
+ return null;
+}
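+/**
+ * Returns the cached latest-version value when the entry matches `url` and is
+ * younger than `ttlMs`; `undefined` means stale or missing (caller must fetch),
+ * while a cached `null` (a previously failed fetch) is passed through as-is.
+ */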
+function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) {
+ const cached = readVersionCache(cachePath);
+ if (!cached || cached.url !== url)
+ return void 0;
+ if (nowMs - cached.checkedAt > ttlMs)
+ return void 0;
+ return cached.latest;
}
// dist/src/hooks/session-start.js
-var log3 = (msg) => log("session-start", msg);
-var __bundleDir = dirname2(fileURLToPath(import.meta.url));
-var AUTH_CMD = join6(__bundleDir, "commands", "auth-login.js");
-var context = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information:
+var log2 = (msg) => log("session-start", msg);
+var __bundleDir = dirname2(fileURLToPath2(import.meta.url));
+var AUTH_CMD = join4(__bundleDir, "commands", "auth-login.js");
+var CLAUDE_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information:
1. Your built-in memory (~/.claude/) \u2014 personal per-project notes
2. Deeplake global memory (~/.deeplake/memory/) \u2014 global memory shared across all sessions, users, and agents in the org
@@ -407,9 +139,19 @@ var context = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH w
Deeplake memory structure:
- ~/.deeplake/memory/index.md \u2014 START HERE, table of all sessions
- ~/.deeplake/memory/summaries/username/*.md \u2014 AI-generated wiki summaries per session
-- ~/.deeplake/memory/sessions/username/*.jsonl \u2014 raw session data (last resort)
-
-SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw JSONL if summaries don't have enough detail. Do NOT jump straight to JSONL files.
+- ~/.deeplake/memory/sessions/{author}/* \u2014 raw session data (last resort)
+
+SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw session files if summaries don't have enough detail. Do NOT jump straight to raw session files.
+When index.md points to likely candidate files, read those exact summaries or session files directly before broadening into grep variants, synonym greps, or wide exploratory scans.
+Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots when the question is about Deeplake memory.
+TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date.
+TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding.
+ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging.
+NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files.
+NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent.
+SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases.
+RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found."
Search command: Grep pattern="keyword" path="~/.deeplake/memory"
@@ -429,142 +171,71 @@ IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, etc.) to
LIMITS: Do NOT spawn subagents to read deeplake memory. If a file returns empty after 2 attempts, skip it and move on. Report what you found rather than exhaustively retrying.
Debugging: Set HIVEMIND_DEBUG=1 to enable verbose logging to ~/.deeplake/hook-debug.log`;
-var HOME = homedir4();
-var { log: wikiLog } = makeWikiLogger(join6(HOME, ".claude", "hooks"));
-async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) {
- const summaryPath = `/summaries/${userName}/${sessionId}.md`;
- const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`);
- if (existing.length > 0) {
- wikiLog(`SessionStart: summary exists for ${sessionId} (resumed)`);
- return;
+var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
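+/** Builds the SessionStart additionalContext: the memory-search instructions with the auth command substituted, plus a login/version status footer. */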
+function buildSessionStartAdditionalContext(args) {
+ const resolvedContext = CLAUDE_SESSION_START_CONTEXT.replace(/HIVEMIND_AUTH_CMD/g, args.authCommand);
+ let updateNotice = "";
+ if (args.currentVersion) {
+ if (args.latestVersion && isNewer(args.latestVersion, args.currentVersion)) {
+ updateNotice = `
+
+\u2B06\uFE0F Hivemind update available: ${args.currentVersion} \u2192 ${args.latestVersion}.`;
+ } else {
+ updateNotice = `
+
+\u2705 Hivemind v${args.currentVersion}`;
+ }
}
- const now = (/* @__PURE__ */ new Date()).toISOString();
- const projectName = cwd.split("/").pop() ?? "unknown";
- const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`;
- const content = [
- `# Session ${sessionId}`,
- `- **Source**: ${sessionSource}`,
- `- **Started**: ${now}`,
- `- **Project**: ${projectName}`,
- `- **Status**: in-progress`,
- ""
- ].join("\n");
- const filename = `${sessionId}.md`;
- await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')`);
- wikiLog(`SessionStart: created placeholder for ${sessionId} (${cwd})`);
+ return args.creds?.token ? `${resolvedContext}
+
+Logged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${updateNotice}` : `${resolvedContext}
+
+\u26A0\uFE0F Not logged in to Deeplake. Memory search will not work. Ask the user to run /hivemind:login to authenticate.${updateNotice}`;
}
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
- return;
- const input = await readStdin();
- let creds = loadCredentials();
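+/** SessionStart hook body with injectable deps: returns the hookSpecificOutput payload, or null for wiki-worker processes, leaving stdin/stdout handling to main(). */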
+async function runSessionStartHook(_input, deps = {}) {
+ const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, currentVersion = getInstalledVersion(__bundleDir, ".claude-plugin"), latestVersion = currentVersion ? readFreshCachedLatestVersion(GITHUB_RAW_PKG, DEFAULT_VERSION_CACHE_TTL_MS) ?? null : null, authCommand = AUTH_CMD, logFn = log2 } = deps;
+ if (wikiWorker)
+ return null;
if (!creds?.token) {
- log3("no credentials found \u2014 run /hivemind:login to authenticate");
+ logFn("no credentials found \u2014 run /hivemind:login to authenticate");
} else {
- log3(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
+ logFn(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
if (creds.token && !creds.userName) {
try {
- const { userInfo: userInfo2 } = await import("node:os");
- creds.userName = userInfo2().username ?? "unknown";
- saveCredentials(creds);
- log3(`backfilled and persisted userName: ${creds.userName}`);
+ const { userInfo } = await import("node:os");
+ creds.userName = userInfo().username ?? "unknown";
+ saveCredentialsFn(creds);
+ logFn(`backfilled and persisted userName: ${creds.userName}`);
} catch {
}
}
}
- const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
- if (input.session_id && creds?.token) {
- try {
- const config = loadConfig();
- if (config) {
- const table = config.tableName;
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table);
- await api.ensureTable();
- await api.ensureSessionsTable(sessionsTable);
- if (captureEnabled) {
- await createPlaceholder(api, table, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId);
- log3("placeholder created");
- } else {
- log3("placeholder skipped (HIVEMIND_CAPTURE=false)");
- }
- }
- } catch (e) {
- log3(`placeholder failed: ${e.message}`);
- wikiLog(`SessionStart: placeholder failed for ${input.session_id}: ${e.message}`);
- }
- }
- const autoupdate = creds?.autoupdate !== false;
- let updateNotice = "";
- try {
- const current = getInstalledVersion(__bundleDir, ".claude-plugin");
- if (current) {
- const latest = await getLatestVersion();
- if (latest && isNewer(latest, current)) {
- if (autoupdate) {
- log3(`autoupdate: updating ${current} \u2192 ${latest}`);
- try {
- const scopes = ["user", "project", "local", "managed"];
- const cmd = scopes.map((s) => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null || true`).join("; ");
- execSync2(cmd, { stdio: "ignore", timeout: 6e4 });
- try {
- const cacheParent = join6(homedir4(), ".claude", "plugins", "cache", "hivemind", "hivemind");
- const entries = readdirSync(cacheParent, { withFileTypes: true });
- for (const e of entries) {
- if (e.isDirectory() && e.name !== latest) {
- rmSync(join6(cacheParent, e.name), { recursive: true, force: true });
- log3(`cache cleanup: removed old version ${e.name}`);
- }
- }
- } catch (e) {
- log3(`cache cleanup failed: ${e.message}`);
- }
- updateNotice = `
-
-\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply.`;
- process.stderr.write(`\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply.
-`);
- log3(`autoupdate succeeded: ${current} \u2192 ${latest}`);
- } catch (e) {
- updateNotice = `
-
-\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually.`;
- process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually.
-`);
- log3(`autoupdate failed: ${e.message}`);
- }
- } else {
- updateNotice = `
-
-\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade.`;
- process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade.
-`);
- log3(`update available (autoupdate off): ${current} \u2192 ${latest}`);
- }
- } else {
- log3(`version up to date: ${current}`);
- updateNotice = `
-
-\u2705 Hivemind v${current} (up to date)`;
- }
- }
- } catch (e) {
- log3(`version check failed: ${e.message}`);
- }
- const resolvedContext = context.replace(/HIVEMIND_AUTH_CMD/g, AUTH_CMD);
- const additionalContext = creds?.token ? `${resolvedContext}
-
-Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${updateNotice}` : `${resolvedContext}
-
-\u26A0\uFE0F Not logged in to Deeplake. Memory search will not work. Ask the user to run /hivemind:login to authenticate.${updateNotice}`;
- console.log(JSON.stringify({
+ return {
hookSpecificOutput: {
hookEventName: "SessionStart",
- additionalContext
+ additionalContext: buildSessionStartAdditionalContext({
+ authCommand,
+ creds,
+ currentVersion,
+ latestVersion
+ })
}
- }));
+ };
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+async function main() {
+ await readStdin();
+ const result = await runSessionStartHook({});
+ if (result)
+ console.log(JSON.stringify(result));
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log2(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ CLAUDE_SESSION_START_CONTEXT,
+ buildSessionStartAdditionalContext,
+ runSessionStartHook
+};
diff --git a/claude-code/bundle/shell/deeplake-shell.js b/claude-code/bundle/shell/deeplake-shell.js
index 2d0b237..5872059 100755
--- a/claude-code/bundle/shell/deeplake-shell.js
+++ b/claude-code/bundle/shell/deeplake-shell.js
@@ -46081,14 +46081,14 @@ var require_turndown_cjs = __commonJS({
} else if (node.nodeType === 1) {
replacement = replacementForNode.call(self2, node);
}
- return join6(output, replacement);
+ return join7(output, replacement);
}, "");
}
function postProcess(output) {
var self2 = this;
this.rules.forEach(function(rule) {
if (typeof rule.append === "function") {
- output = join6(output, rule.append(self2.options));
+ output = join7(output, rule.append(self2.options));
}
});
return output.replace(/^[\t\r\n]+/, "").replace(/[\t\r\n\s]+$/, "");
@@ -46100,7 +46100,7 @@ var require_turndown_cjs = __commonJS({
if (whitespace.leading || whitespace.trailing) content = content.trim();
return whitespace.leading + rule.replacement(content, node, this.options) + whitespace.trailing;
}
- function join6(output, replacement) {
+ function join7(output, replacement) {
var s12 = trimTrailingNewlines(output);
var s22 = trimLeadingNewlines(replacement);
var nls = Math.max(output.length - s12.length, replacement.length - s22.length);
@@ -66758,6 +66758,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join6 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -66800,9 +66803,30 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms3) {
return new Promise((resolve5) => setTimeout(resolve5, ms3));
}
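+/** Heuristic timeout detection: matches AbortError plus any error whose name or message mentions a timeout. */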
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
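+/** Treats duplicate-key / "already exists" errors from concurrent CREATE INDEX calls as benign. */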
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
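+/** Matches INSERTs whose column list starts (id, path, filename, message, ...) — i.e. session-capture writes — so only those get the relaxed 401/403 retry below. */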
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+  return body.includes("<html");
+}
      return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i11) => [col, row[i11]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -66955,8 +66987,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
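+  /** Derives a SQL-safe index name from the table name and suffix. */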
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join6(getIndexMarkerDir(), `${markerKey}.json`);
+ }
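+  /** True when a local marker file records an index creation within the TTL, letting later sessions skip the CREATE INDEX round-trip. */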
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
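+  /** Idempotently creates a lookup index: skips when a fresh marker exists, and treats duplicate-index races as success. */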
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e6) {
+ if (isDuplicateIndexError(e6)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e6.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -66967,22 +67052,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t6) => t6.table_name);
+ return {
+ tables: (data.tables ?? []).map((t6) => t6.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -66992,6 +67080,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -67001,673 +67091,1090 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
// dist/src/shell/deeplake-fs.js
import { basename as basename4, posix } from "node:path";
import { randomUUID as randomUUID2 } from "node:crypto";
-var BATCH_SIZE = 10;
-var FLUSH_DEBOUNCE_MS = 200;
-function normPath(p22) {
- const r10 = posix.normalize(p22.startsWith("/") ? p22 : "/" + p22);
- return r10 === "/" ? r10 : r10.replace(/\/$/, "");
+
+// dist/src/shell/grep-core.js
+var TOOL_INPUT_FIELDS = [
+ "command",
+ "file_path",
+ "path",
+ "pattern",
+ "prompt",
+ "subagent_type",
+ "query",
+ "url",
+ "notebook_path",
+ "old_string",
+ "new_string",
+ "content",
+ "skill",
+ "args",
+ "taskId",
+ "status",
+ "subject",
+ "description",
+ "to",
+ "message",
+ "summary",
+ "max_results"
+];
+var TOOL_RESPONSE_DROP = /* @__PURE__ */ new Set([
+ // Note: `stderr` is intentionally NOT in this set. The `stdout` high-signal
+ // branch below already de-dupes it for the common case (appends as suffix
+ // when non-empty). If a tool response has ONLY `stderr` and no `stdout`
+ // (hard-failure on some tools), the generic cleanup preserves it so the
+ // error message reaches Claude instead of collapsing to `[ok]`.
+ "interrupted",
+ "isImage",
+ "noOutputExpected",
+ "type",
+ "structuredPatch",
+ "userModified",
+ "originalFile",
+ "replaceAll",
+ "totalDurationMs",
+ "totalTokens",
+ "totalToolUseCount",
+ "usage",
+ "toolStats",
+ "durationMs",
+ "durationSeconds",
+ "bytes",
+ "code",
+ "codeText",
+ "agentId",
+ "agentType",
+ "verificationNudgeNeeded",
+ "numLines",
+ "numFiles",
+ "truncated",
+ "statusChange",
+ "updatedFields",
+ "isAgent",
+ "success"
+]);
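+// Best-effort JSON parse: only attempts strings that look like objects or
+// arrays, returning the input unchanged on failure.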
+function maybeParseJson(v27) {
+ if (typeof v27 !== "string")
+ return v27;
+ const s10 = v27.trim();
+ if (s10[0] !== "{" && s10[0] !== "[")
+ return v27;
+ try {
+ return JSON.parse(s10);
+ } catch {
+ return v27;
+ }
}
-function parentOf(p22) {
- const i11 = p22.lastIndexOf("/");
- return i11 <= 0 ? "/" : p22.slice(0, i11);
+function snakeCase(k17) {
+ return k17.replace(/([A-Z])/g, "_$1").toLowerCase();
}
-function guessMime(filename) {
- const ext2 = filename.split(".").pop()?.toLowerCase() ?? "";
- return {
- json: "application/json",
- md: "text/markdown",
- txt: "text/plain",
- js: "text/javascript",
- ts: "text/typescript",
- html: "text/html",
- css: "text/css"
- }[ext2] ?? "text/plain";
+function camelCase(k17) {
+ return k17.replace(/_([a-z])/g, (_16, c15) => c15.toUpperCase());
}
-function fsErr(code, msg, path2) {
- return Object.assign(new Error(`${code}: ${msg}, '${path2}'`), { code });
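+// Renders tool_input as "key: value" lines, keeping only the known
+// high-signal fields above plus a few pagination knobs.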
+function formatToolInput(raw) {
+ const p22 = maybeParseJson(raw);
+ if (typeof p22 !== "object" || p22 === null)
+ return String(p22 ?? "");
+ const parts = [];
+ for (const k17 of TOOL_INPUT_FIELDS) {
+ if (p22[k17] === void 0)
+ continue;
+ const v27 = p22[k17];
+ parts.push(`${k17}: ${typeof v27 === "string" ? v27 : JSON.stringify(v27)}`);
+ }
+ for (const k17 of ["glob", "output_mode", "limit", "offset"]) {
+ if (p22[k17] !== void 0)
+ parts.push(`${k17}: ${p22[k17]}`);
+ }
+ return parts.length ? parts.join("\n") : JSON.stringify(p22);
}
-var DeeplakeFs = class _DeeplakeFs {
- client;
- table;
- mountPoint;
- // path → Buffer (content) or null (exists but not fetched yet)
- files = /* @__PURE__ */ new Map();
- meta = /* @__PURE__ */ new Map();
- // dir path → Set of immediate child names
- dirs = /* @__PURE__ */ new Map();
- // batched writes pending SQL flush
- pending = /* @__PURE__ */ new Map();
- // paths that have been flushed (INSERT) at least once — subsequent flushes use UPDATE
- flushed = /* @__PURE__ */ new Set();
- /** Number of files loaded from the server during bootstrap. */
- get fileCount() {
- return this.files.size;
+function formatToolResponse(raw, inp, toolName) {
+ const r10 = maybeParseJson(raw);
+ if (typeof r10 !== "object" || r10 === null)
+ return String(r10 ?? "");
+ if (toolName === "Edit" || toolName === "Write" || toolName === "MultiEdit") {
+ return r10.filePath ? `[wrote ${r10.filePath}]` : "[ok]";
}
- flushTimer = null;
- // serialize flushes
- flushChain = Promise.resolve();
- // Paths that live in the sessions table (multi-row, read by concatenation)
- sessionPaths = /* @__PURE__ */ new Set();
- sessionsTable = null;
- constructor(client, table, mountPoint) {
- this.client = client;
- this.table = table;
- this.mountPoint = mountPoint;
- this.dirs.set(mountPoint, /* @__PURE__ */ new Set());
- if (mountPoint !== "/")
- this.dirs.set("/", /* @__PURE__ */ new Set([mountPoint.slice(1)]));
+ if (typeof r10.stdout === "string") {
+ const stderr = r10.stderr;
+ return r10.stdout + (stderr ? `
+stderr: ${stderr}` : "");
}
- static async create(client, table, mount = "/memory", sessionsTable) {
- const fs3 = new _DeeplakeFs(client, table, mount);
- fs3.sessionsTable = sessionsTable ?? null;
- await client.ensureTable();
- let sessionSyncOk = true;
- const memoryBootstrap = (async () => {
- const sql = `SELECT path, size_bytes, mime_type FROM "${table}" ORDER BY path`;
- try {
- const rows = await client.query(sql);
- for (const row of rows) {
- const p22 = row["path"];
- fs3.files.set(p22, null);
- fs3.meta.set(p22, {
- size: Number(row["size_bytes"] ?? 0),
- mime: row["mime_type"] ?? "application/octet-stream",
- mtime: /* @__PURE__ */ new Date()
- });
- fs3.addToTree(p22);
- fs3.flushed.add(p22);
- }
- } catch {
- }
- })();
- const sessionsBootstrap = sessionsTable && sessionSyncOk ? (async () => {
- try {
- const sessionRows = await client.query(`SELECT path, SUM(size_bytes) as total_size FROM "${sessionsTable}" GROUP BY path ORDER BY path`);
- for (const row of sessionRows) {
- const p22 = row["path"];
- if (!fs3.files.has(p22)) {
- fs3.files.set(p22, null);
- fs3.meta.set(p22, {
- size: Number(row["total_size"] ?? 0),
- mime: "application/x-ndjson",
- mtime: /* @__PURE__ */ new Date()
- });
- fs3.addToTree(p22);
- }
- fs3.sessionPaths.add(p22);
- }
- } catch {
- }
- })() : Promise.resolve();
- await Promise.all([memoryBootstrap, sessionsBootstrap]);
- return fs3;
+ if (typeof r10.content === "string")
+ return r10.content;
+ if (r10.file && typeof r10.file === "object") {
+ const f11 = r10.file;
+ if (typeof f11.content === "string")
+ return `[${f11.filePath ?? ""}]
+${f11.content}`;
+ if (typeof f11.base64 === "string")
+ return `[binary ${f11.filePath ?? ""}: ${f11.base64.length} base64 chars]`;
}
- // ── tree management ───────────────────────────────────────────────────────
- addToTree(filePath) {
- const segs = filePath.split("/").filter(Boolean);
- for (let d15 = 0; d15 < segs.length; d15++) {
- const dir = d15 === 0 ? "/" : "/" + segs.slice(0, d15).join("/");
- if (!this.dirs.has(dir))
- this.dirs.set(dir, /* @__PURE__ */ new Set());
- this.dirs.get(dir).add(segs[d15]);
- }
+ if (Array.isArray(r10.filenames))
+ return r10.filenames.join("\n");
+ if (Array.isArray(r10.matches)) {
+ return r10.matches.map((m26) => typeof m26 === "string" ? m26 : JSON.stringify(m26)).join("\n");
}
- removeFromTree(filePath) {
- this.files.delete(filePath);
- this.meta.delete(filePath);
- this.pending.delete(filePath);
- this.flushed.delete(filePath);
- const parent = parentOf(filePath);
- this.dirs.get(parent)?.delete(basename4(filePath));
+ if (Array.isArray(r10.results)) {
+ return r10.results.map((x28) => typeof x28 === "string" ? x28 : x28?.title ?? x28?.url ?? JSON.stringify(x28)).join("\n");
}
- // ── flush / write batching ────────────────────────────────────────────────
- scheduleFlush() {
- if (this.flushTimer !== null)
- return;
- this.flushTimer = setTimeout(() => {
- this.flush().catch(() => {
- });
- }, FLUSH_DEBOUNCE_MS);
+ const inpObj = maybeParseJson(inp);
+ const kept = {};
+ for (const [k17, v27] of Object.entries(r10)) {
+ if (TOOL_RESPONSE_DROP.has(k17))
+ continue;
+ if (v27 === "" || v27 === false || v27 == null)
+ continue;
+ if (typeof inpObj === "object" && inpObj) {
+ const inObj = inpObj;
+ if (k17 in inObj && JSON.stringify(inObj[k17]) === JSON.stringify(v27))
+ continue;
+ const snake = snakeCase(k17);
+ if (snake in inObj && JSON.stringify(inObj[snake]) === JSON.stringify(v27))
+ continue;
+ const camel = camelCase(k17);
+ if (camel in inObj && JSON.stringify(inObj[camel]) === JSON.stringify(v27))
+ continue;
+ }
+ kept[k17] = v27;
}
- async flush() {
- this.flushChain = this.flushChain.then(() => this._doFlush());
- return this.flushChain;
+ return Object.keys(kept).length ? JSON.stringify(kept) : "[ok]";
+}
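+// Formats a captured tool call as a compact three-line record:
+// [tool:name], input, response.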
+function formatToolCall(obj) {
+ return `[tool:${obj?.tool_name ?? "?"}]
+input: ${formatToolInput(obj?.tool_input)}
+response: ${formatToolResponse(obj?.tool_response, obj?.tool_input, obj?.tool_name)}`;
+}
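+// Normalizes a raw session row for grep output: turn-style transcripts become
+// "speaker: text" lines, user/assistant/tool events become tagged one-liners,
+// and anything unrecognized passes through unchanged.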
+function normalizeContent(path2, raw) {
+ if (!path2.includes("/sessions/"))
+ return raw;
+ if (!raw || raw[0] !== "{")
+ return raw;
+ let obj;
+ try {
+ obj = JSON.parse(raw);
+ } catch {
+ return raw;
}
- async _doFlush() {
- if (this.pending.size === 0)
- return;
- if (this.flushTimer !== null) {
- clearTimeout(this.flushTimer);
- this.flushTimer = null;
- }
- const rows = [...this.pending.values()];
- this.pending.clear();
- const results = await Promise.allSettled(rows.map((r10) => this.upsertRow(r10)));
- let failures = 0;
- for (let i11 = 0; i11 < results.length; i11++) {
- if (results[i11].status === "rejected") {
- if (!this.pending.has(rows[i11].path)) {
- this.pending.set(rows[i11].path, rows[i11]);
- }
- failures++;
- }
- }
- if (failures > 0) {
- throw new Error(`flush: ${failures}/${rows.length} writes failed and were re-queued`);
+ if (Array.isArray(obj.turns)) {
+ const header = [];
+ if (obj.date_time)
+ header.push(`date: ${obj.date_time}`);
+ if (obj.speakers) {
+ const s10 = obj.speakers;
+ const names = [s10.speaker_a, s10.speaker_b].filter(Boolean).join(", ");
+ if (names)
+ header.push(`speakers: ${names}`);
}
+ const lines = obj.turns.map((t6) => {
+ const sp = String(t6?.speaker ?? t6?.name ?? "?").trim();
+ const tx = String(t6?.text ?? t6?.content ?? "").replace(/\s+/g, " ").trim();
+ const tag = t6?.dia_id ? `[${t6.dia_id}] ` : "";
+ return `${tag}${sp}: ${tx}`;
+ });
+ const out2 = [...header, ...lines].join("\n");
+ return out2.trim() ? out2 : raw;
}
- async upsertRow(r10) {
- const text = sqlStr(r10.contentText);
- const p22 = sqlStr(r10.path);
- const fname = sqlStr(r10.filename);
- const mime = sqlStr(r10.mimeType);
- const ts3 = (/* @__PURE__ */ new Date()).toISOString();
- const cd = r10.creationDate ?? ts3;
- const lud = r10.lastUpdateDate ?? ts3;
- if (this.flushed.has(r10.path)) {
- let setClauses = `filename = '${fname}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`;
- if (r10.project !== void 0)
- setClauses += `, project = '${sqlStr(r10.project)}'`;
- if (r10.description !== void 0)
- setClauses += `, description = '${sqlStr(r10.description)}'`;
- await this.client.query(`UPDATE "${this.table}" SET ${setClauses} WHERE path = '${p22}'`);
- } else {
- const id = randomUUID2();
- const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : "");
- const vals = `'${id}', '${p22}', '${fname}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? `, '${sqlStr(r10.description)}'` : "");
- await this.client.query(`INSERT INTO "${this.table}" (${cols}) VALUES (${vals})`);
- this.flushed.add(r10.path);
- }
+ const stripRecalled = (t6) => {
+ const i11 = t6.indexOf("");
+ if (i11 === -1)
+ return t6;
+ const j14 = t6.lastIndexOf("");
+ if (j14 === -1 || j14 < i11)
+ return t6;
+ const head = t6.slice(0, i11);
+ const tail = t6.slice(j14 + "".length);
+ return (head + tail).replace(/^\s+/, "").replace(/\n{3,}/g, "\n\n");
+ };
+ let out = null;
+ if (obj.type === "user_message") {
+ out = `[user] ${stripRecalled(String(obj.content ?? ""))}`;
+ } else if (obj.type === "assistant_message") {
+ const agent = obj.agent_type ? ` (agent=${obj.agent_type})` : "";
+ out = `[assistant${agent}] ${stripRecalled(String(obj.content ?? ""))}`;
+ } else if (obj.type === "tool_call") {
+ out = formatToolCall(obj);
}
- // ── Virtual index.md generation ────────────────────────────────────────────
- async generateVirtualIndex() {
- const rows = await this.client.query(`SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" WHERE path LIKE '${sqlStr("/summaries/")}%' ORDER BY last_update_date DESC`);
- const sessionPathsByKey = /* @__PURE__ */ new Map();
- for (const sp of this.sessionPaths) {
- const hivemind = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/);
- if (hivemind) {
- sessionPathsByKey.set(hivemind[1], sp.slice(1));
- } else {
- const fname = sp.split("/").pop() ?? "";
- const stem = fname.replace(/\.[^.]+$/, "");
- if (stem)
- sessionPathsByKey.set(stem, sp.slice(1));
- }
+ if (out === null)
+ return raw;
+ const trimmed = out.trim();
+ if (!trimmed || trimmed === "[user]" || trimmed === "[assistant]" || /^\[tool:[^\]]*\]\s+input:\s+\{\}\s+response:\s+\{\}$/.test(trimmed))
+ return raw;
+ return out;
+}
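+// Maps a target path to a SQL predicate: glob characters become LIKE
+// wildcards, paths with a file extension match exactly, and bare directories
+// match themselves plus all descendants.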
+function buildPathCondition(targetPath) {
+ if (!targetPath || targetPath === "/")
+ return "";
+ const clean = targetPath.replace(/\/+$/, "");
+ if (/[*?]/.test(clean)) {
+ const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_");
+ return `path LIKE '${likePattern}'`;
+ }
+ const base = clean.split("/").pop() ?? "";
+ if (base.includes(".")) {
+ return `path = '${sqlStr(clean)}'`;
+ }
+ return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+}
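+// Single UNION ALL query across the memory and sessions tables with shared
+// path and content prefilters; ordering by (path, source_order, creation_date)
+// keeps memory rows ahead of session rows for the same path.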
+async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
+ const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts;
+ const limit = opts.limit ?? 100;
+ const filterPatterns = contentScanOnly ? prefilterPatterns && prefilterPatterns.length > 0 ? prefilterPatterns : prefilterPattern ? [prefilterPattern] : [] : [escapedPattern];
+ const memFilter = buildContentFilter("summary::text", likeOp, filterPatterns);
+ const sessFilter = buildContentFilter("message::text", likeOp, filterPatterns);
+ const memQuery = `SELECT path, summary::text AS content, 0 AS source_order, '' AS creation_date FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
+ const sessQuery = `SELECT path, message::text AS content, 1 AS source_order, COALESCE(creation_date::text, '') AS creation_date FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
+ const rows = await api.query(`SELECT path, content, source_order, creation_date FROM ((${memQuery}) UNION ALL (${sessQuery})) AS combined ORDER BY path, source_order, creation_date`);
+ return rows.map((row) => ({
+ path: String(row["path"]),
+ content: String(row["content"] ?? "")
+ }));
+}
+function buildPathFilter(targetPath) {
+ const condition = buildPathCondition(targetPath);
+ return condition ? ` AND ${condition}` : "";
+}
+function buildPathFilterForTargets(targetPaths) {
+ if (targetPaths.some((targetPath) => !targetPath || targetPath === "/"))
+ return "";
+ const conditions = [...new Set(targetPaths.map((targetPath) => buildPathCondition(targetPath)).filter((condition) => condition.length > 0))];
+ if (conditions.length === 0)
+ return "";
+ if (conditions.length === 1)
+ return ` AND ${conditions[0]}`;
+ return ` AND (${conditions.join(" OR ")})`;
+}
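+// Extracts the longest literal run from a regex for use as a cheap SQL
+// prefilter; returns null when classes, anchors, quantifiers, or unknown
+// escapes would make the literal unsound, or when the best literal is
+// shorter than 2 characters.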
+function extractRegexLiteralPrefilter(pattern) {
+ if (!pattern)
+ return null;
+ const parts = [];
+ let current = "";
+ for (let i11 = 0; i11 < pattern.length; i11++) {
+ const ch = pattern[i11];
+ if (ch === "\\") {
+ const next = pattern[i11 + 1];
+ if (!next)
+ return null;
+ if (/[dDsSwWbBAZzGkKpP]/.test(next))
+ return null;
+ current += next;
+ i11++;
+ continue;
}
- const lines = [
- "# Session Index",
- "",
- "List of all Claude Code sessions with summaries.",
- "",
- "| Session | Conversation | Created | Last Updated | Project | Description |",
- "|---------|-------------|---------|--------------|---------|-------------|"
- ];
- for (const row of rows) {
- const p22 = row["path"];
- const match2 = p22.match(/\/summaries\/([^/]+)\/([^/]+)\.md$/);
- if (!match2)
+ if (ch === ".") {
+ if (pattern[i11 + 1] === "*") {
+ if (current)
+ parts.push(current);
+ current = "";
+ i11++;
continue;
- const summaryUser = match2[1];
- const sessionId = match2[2];
- const relPath = `summaries/${summaryUser}/${sessionId}.md`;
- const baseName = sessionId.replace(/_summary$/, "");
- const convPath = sessionPathsByKey.get(sessionId) ?? sessionPathsByKey.get(baseName);
- const convLink = convPath ? `[messages](${convPath})` : "";
- const project = row["project"] || "";
- const description = row["description"] || "";
- const creationDate = row["creation_date"] || "";
- const lastUpdateDate = row["last_update_date"] || "";
- lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`);
+ }
+ return null;
}
- lines.push("");
- return lines.join("\n");
+ if ("|()[]{}+?^$".includes(ch) || ch === "*")
+ return null;
+ current += ch;
}
- // ── batch prefetch ────────────────────────────────────────────────────────
- /**
- * Prefetch multiple files into the content cache with a single SQL query.
- * Skips paths that are already cached, pending, or session-backed.
- * After this call, subsequent readFile() calls for these paths hit cache.
- */
- async prefetch(paths) {
- const uncached = [];
- for (const raw of paths) {
- const p22 = normPath(raw);
- if (this.files.get(p22) !== null && this.files.get(p22) !== void 0)
- continue;
- if (this.pending.has(p22))
- continue;
- if (this.sessionPaths.has(p22))
- continue;
- if (!this.files.has(p22))
- continue;
- uncached.push(p22);
- }
- if (uncached.length === 0)
- return;
- const inList = uncached.map((p22) => `'${sqlStr(p22)}'`).join(", ");
- const rows = await this.client.query(`SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})`);
- for (const row of rows) {
- const p22 = row["path"];
- const text = row["summary"] ?? "";
- this.files.set(p22, Buffer.from(text, "utf-8"));
+ if (current)
+ parts.push(current);
+ const literal = parts.reduce((best, part) => part.length > best.length ? part : best, "");
+ return literal.length >= 2 ? literal : null;
+}
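+// For a top-level alternation like `foo|bar`, derive one literal prefilter
+// per branch so the SQL filter becomes LIKE '%foo%' OR LIKE '%bar%'.
+// Returns null when a branch is empty or uses grouping/anchors.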
+function extractRegexAlternationPrefilters(pattern) {
+ if (!pattern.includes("|"))
+ return null;
+ const parts = [];
+ let current = "";
+ let escaped = false;
+ for (let i11 = 0; i11 < pattern.length; i11++) {
+ const ch = pattern[i11];
+ if (escaped) {
+ current += `\\${ch}`;
+ escaped = false;
+ continue;
}
- }
- // ── IFileSystem: reads ────────────────────────────────────────────────────
- async readFileBuffer(path2) {
- const p22 = normPath(path2);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- if (!this.files.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- const cached = this.files.get(p22);
- if (cached !== null && cached !== void 0)
- return cached;
- const pend = this.pending.get(p22);
- if (pend) {
- const buf2 = Buffer.from(pend.contentText, "utf-8");
- this.files.set(p22, buf2);
- return buf2;
+ if (ch === "\\") {
+ escaped = true;
+ continue;
}
- if (this.sessionPaths.has(p22) && this.sessionsTable) {
- const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
- if (rows2.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text = rows2.map((r10) => typeof r10["message"] === "string" ? r10["message"] : JSON.stringify(r10["message"])).join("\n");
- const buf2 = Buffer.from(text, "utf-8");
- this.files.set(p22, buf2);
- return buf2;
+ if (ch === "|") {
+ if (!current)
+ return null;
+ parts.push(current);
+ current = "";
+ continue;
}
- const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
- if (rows.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const buf = Buffer.from(rows[0]["summary"] ?? "", "utf-8");
- this.files.set(p22, buf);
- return buf;
+ if ("()[]{}^$".includes(ch))
+ return null;
+ current += ch;
}
- async readFile(path2, _opts) {
- const p22 = normPath(path2);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- if (p22 === "/index.md" && !this.files.has(p22)) {
- const realRows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr("/index.md")}' LIMIT 1`);
- if (realRows.length > 0 && realRows[0]["summary"]) {
- const text2 = realRows[0]["summary"];
- const buf2 = Buffer.from(text2, "utf-8");
- this.files.set(p22, buf2);
- return text2;
+ if (escaped || !current)
+ return null;
+ parts.push(current);
+ const literals = [...new Set(parts.map((part) => extractRegexLiteralPrefilter(part)).filter((part) => typeof part === "string" && part.length >= 2))];
+ return literals.length > 0 ? literals : null;
+}
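+// Map a grep request onto SQL: literal patterns filter directly via
+// LIKE/ILIKE; regex patterns trigger a content scan, narrowed by literal
+// prefilters whenever they can be extracted from the pattern.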
+function buildGrepSearchOptions(params, targetPath) {
+ const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
+ const literalPrefilter = hasRegexMeta ? extractRegexLiteralPrefilter(params.pattern) : null;
+ const alternationPrefilters = hasRegexMeta ? extractRegexAlternationPrefilters(params.pattern) : null;
+ return {
+ pathFilter: buildPathFilter(targetPath),
+ contentScanOnly: hasRegexMeta,
+ likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
+ escapedPattern: sqlLike(params.pattern),
+ prefilterPattern: literalPrefilter ? sqlLike(literalPrefilter) : void 0,
+ prefilterPatterns: alternationPrefilters?.map((literal) => sqlLike(literal))
+ };
+}
+function buildContentFilter(column, likeOp, patterns) {
+ if (patterns.length === 0)
+ return "";
+ if (patterns.length === 1)
+ return ` AND ${column} ${likeOp} '%${patterns[0]}%'`;
+ return ` AND (${patterns.map((pattern) => `${column} ${likeOp} '%${pattern}%'`).join(" OR ")})`;
+}
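+// Compile the grep pattern to a JS RegExp, escaping it for fixed-string
+// mode and falling back to an escaped literal match if the pattern is not
+// a valid regex.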
+function compileGrepRegex(params) {
+ let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
+ if (params.wordMatch)
+ reStr = `\\b${reStr}\\b`;
+ try {
+ return new RegExp(reStr, params.ignoreCase ? "i" : "");
+ } catch {
+ return new RegExp(params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), params.ignoreCase ? "i" : "");
+ }
+}
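+// Apply the compiled regex line-by-line to the SQL-prefiltered rows,
+// honoring grep-style flags (invert, files-only, count, line numbers).
+// Paths are prefixed when output spans more than one file.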
+function refineGrepMatches(rows, params, forceMultiFilePrefix) {
+ const re9 = compileGrepRegex(params);
+ const multi = forceMultiFilePrefix ?? rows.length > 1;
+ const output = [];
+ for (const row of rows) {
+ if (!row.content)
+ continue;
+ const lines = row.content.split("\n");
+ const matched = [];
+ for (let i11 = 0; i11 < lines.length; i11++) {
+ const hit = re9.test(lines[i11]);
+ if (hit !== !!params.invertMatch) {
+ if (params.filesOnly) {
+ output.push(row.path);
+ break;
+ }
+ const prefix = multi ? `${row.path}:` : "";
+ const ln3 = params.lineNumber ? `${i11 + 1}:` : "";
+ matched.push(`${prefix}${ln3}${lines[i11]}`);
}
- return this.generateVirtualIndex();
}
- if (!this.files.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- const cached = this.files.get(p22);
- if (cached !== null && cached !== void 0)
- return cached.toString("utf-8");
- const pend = this.pending.get(p22);
- if (pend)
- return pend.contentText;
- if (this.sessionPaths.has(p22) && this.sessionsTable) {
- const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
- if (rows2.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text2 = rows2.map((r10) => typeof r10["message"] === "string" ? r10["message"] : JSON.stringify(r10["message"])).join("\n");
- const buf2 = Buffer.from(text2, "utf-8");
- this.files.set(p22, buf2);
- return text2;
+ if (!params.filesOnly) {
+ if (params.countOnly) {
+ output.push(`${multi ? `${row.path}:` : ""}${matched.length}`);
+ } else {
+ output.push(...matched);
+ }
}
- const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
- if (rows.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text = rows[0]["summary"] ?? "";
- const buf = Buffer.from(text, "utf-8");
- this.files.set(p22, buf);
- return text;
}
- // ── IFileSystem: writes ───────────────────────────────────────────────────
- /** Write a file with optional row-level metadata (project, description, dates). */
- async writeFileWithMeta(path2, content, meta) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- const buf = Buffer.from(text, "utf-8");
- const mime = guessMime(basename4(p22));
- this.files.set(p22, buf);
- this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
- this.addToTree(p22);
- this.pending.set(p22, {
- path: p22,
- filename: basename4(p22),
- contentText: text,
- mimeType: mime,
- sizeBytes: buf.length,
- ...meta
- });
- if (this.pending.size >= BATCH_SIZE)
- await this.flush();
- else
- this.scheduleFlush();
+ return output;
+}
+
+// dist/src/shell/deeplake-fs.js
+var BATCH_SIZE = 10;
+var PREFETCH_BATCH_SIZE = 50;
+var FLUSH_DEBOUNCE_MS = 200;
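+// Normalize to an absolute POSIX path with no trailing slash ("/" stays "/").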
+function normPath(p22) {
+ const r10 = posix.normalize(p22.startsWith("/") ? p22 : "/" + p22);
+ return r10 === "/" ? r10 : r10.replace(/\/$/, "");
+}
+function parentOf(p22) {
+ const i11 = p22.lastIndexOf("/");
+ return i11 <= 0 ? "/" : p22.slice(0, i11);
+}
+function guessMime(filename) {
+ const ext2 = filename.split(".").pop()?.toLowerCase() ?? "";
+ return {
+ json: "application/json",
+ md: "text/markdown",
+ txt: "text/plain",
+ js: "text/javascript",
+ ts: "text/typescript",
+ html: "text/html",
+ css: "text/css"
+ }[ext2] ?? "text/plain";
+}
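+// Session rows store raw JSONL messages; normalize each into the readable
+// transcript form produced by normalizeContent before joining into a file.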
+function normalizeSessionMessage(path2, message) {
+ const raw = typeof message === "string" ? message : JSON.stringify(message);
+ return normalizeContent(path2, raw);
+}
+function joinSessionMessages(path2, messages) {
+ return messages.map((message) => normalizeSessionMessage(path2, message)).join("\n");
+}
+function fsErr(code, msg, path2) {
+ return Object.assign(new Error(`${code}: ${msg}, '${path2}'`), { code });
+}
+var DeeplakeFs = class _DeeplakeFs {
+ client;
+ table;
+ mountPoint;
+ // path → Buffer (content) or null (exists but not fetched yet)
+ files = /* @__PURE__ */ new Map();
+ meta = /* @__PURE__ */ new Map();
+ // dir path → Set of immediate child names
+ dirs = /* @__PURE__ */ new Map();
+ // batched writes pending SQL flush
+ pending = /* @__PURE__ */ new Map();
+ // paths that have been flushed (INSERT) at least once — subsequent flushes use UPDATE
+ flushed = /* @__PURE__ */ new Set();
+  /** Number of file paths currently tracked (bootstrap-loaded plus locally written). */
+ get fileCount() {
+ return this.files.size;
}
- async writeFile(path2, content, _opts) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- const buf = Buffer.from(text, "utf-8");
- const mime = guessMime(basename4(p22));
- this.files.set(p22, buf);
- this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
- this.addToTree(p22);
- this.pending.set(p22, {
- path: p22,
- filename: basename4(p22),
- contentText: text,
- mimeType: mime,
- sizeBytes: buf.length
- });
- if (this.pending.size >= BATCH_SIZE)
- await this.flush();
- else
- this.scheduleFlush();
+ flushTimer = null;
+ // serialize flushes
+ flushChain = Promise.resolve();
+ // Paths that live in the sessions table (multi-row, read by concatenation)
+ sessionPaths = /* @__PURE__ */ new Set();
+ sessionsTable = null;
+ constructor(client, table, mountPoint) {
+ this.client = client;
+ this.table = table;
+ this.mountPoint = mountPoint;
+ this.dirs.set(mountPoint, /* @__PURE__ */ new Set());
+ if (mountPoint !== "/")
+ this.dirs.set("/", /* @__PURE__ */ new Set([mountPoint.slice(1)]));
}
- async appendFile(path2, content, opts) {
- const p22 = normPath(path2);
- const add = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.files.has(p22) || await this.exists(p22).catch(() => false)) {
- const ts3 = (/* @__PURE__ */ new Date()).toISOString();
- await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`);
- this.files.set(p22, null);
- const m26 = this.meta.get(p22);
- if (m26) {
- m26.size += Buffer.byteLength(add, "utf-8");
- m26.mtime = new Date(ts3);
+ static async create(client, table, mount = "/memory", sessionsTable) {
+ const fs3 = new _DeeplakeFs(client, table, mount);
+ fs3.sessionsTable = sessionsTable ?? null;
+ await client.ensureTable();
+ let sessionSyncOk = true;
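+    // Bootstrap both tables in parallel, loading only path + metadata
+    // (content stays null until first read). Errors are swallowed so an
+    // unreachable table degrades to an empty fs instead of failing startup.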
+ const memoryBootstrap = (async () => {
+ const sql = `SELECT path, size_bytes, mime_type FROM "${table}" ORDER BY path`;
+ try {
+ const rows = await client.query(sql);
+ for (const row of rows) {
+ const p22 = row["path"];
+ fs3.files.set(p22, null);
+ fs3.meta.set(p22, {
+ size: Number(row["size_bytes"] ?? 0),
+ mime: row["mime_type"] ?? "application/octet-stream",
+ mtime: /* @__PURE__ */ new Date()
+ });
+ fs3.addToTree(p22);
+ fs3.flushed.add(p22);
+ }
+ } catch {
}
- } else {
- await this.writeFile(p22, content, opts);
- await this.flush();
- }
+ })();
+ const sessionsBootstrap = sessionsTable && sessionSyncOk ? (async () => {
+ try {
+ const sessionRows = await client.query(`SELECT path, SUM(size_bytes) as total_size FROM "${sessionsTable}" GROUP BY path ORDER BY path`);
+ for (const row of sessionRows) {
+ const p22 = row["path"];
+ if (!fs3.files.has(p22)) {
+ fs3.files.set(p22, null);
+ fs3.meta.set(p22, {
+ size: Number(row["total_size"] ?? 0),
+ mime: "application/x-ndjson",
+ mtime: /* @__PURE__ */ new Date()
+ });
+ fs3.addToTree(p22);
+ }
+ fs3.sessionPaths.add(p22);
+ }
+ } catch {
+ }
+ })() : Promise.resolve();
+ await Promise.all([memoryBootstrap, sessionsBootstrap]);
+ return fs3;
}
- // ── IFileSystem: metadata ─────────────────────────────────────────────────
- async exists(path2) {
- const p22 = normPath(path2);
- if (p22 === "/index.md")
- return true;
- return this.files.has(p22) || this.dirs.has(p22);
+ // ── tree management ───────────────────────────────────────────────────────
+ addToTree(filePath) {
+ const segs = filePath.split("/").filter(Boolean);
+ for (let d15 = 0; d15 < segs.length; d15++) {
+ const dir = d15 === 0 ? "/" : "/" + segs.slice(0, d15).join("/");
+ if (!this.dirs.has(dir))
+ this.dirs.set(dir, /* @__PURE__ */ new Set());
+ this.dirs.get(dir).add(segs[d15]);
+ }
}
- async stat(path2) {
- const p22 = normPath(path2);
- const isFile = this.files.has(p22);
- const isDir = this.dirs.has(p22);
- if (p22 === "/index.md" && !isFile && !isDir) {
- return {
- isFile: true,
- isDirectory: false,
- isSymbolicLink: false,
- mode: 420,
- size: 0,
- mtime: /* @__PURE__ */ new Date()
- };
- }
- if (!isFile && !isDir)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const m26 = this.meta.get(p22);
- return {
- isFile: isFile && !isDir,
- isDirectory: isDir,
- isSymbolicLink: false,
- mode: isDir ? 493 : 420,
- size: m26?.size ?? 0,
- mtime: m26?.mtime ?? /* @__PURE__ */ new Date()
- };
- }
- async lstat(path2) {
- return this.stat(path2);
- }
- async chmod(_path, _mode) {
- }
- async utimes(_path, _atime, _mtime) {
- }
- async symlink(_target, linkPath) {
- throw fsErr("EPERM", "operation not permitted", linkPath);
- }
- async link(_src, destPath) {
- throw fsErr("EPERM", "operation not permitted", destPath);
+ removeFromTree(filePath) {
+ this.files.delete(filePath);
+ this.meta.delete(filePath);
+ this.pending.delete(filePath);
+ this.flushed.delete(filePath);
+ const parent = parentOf(filePath);
+ this.dirs.get(parent)?.delete(basename4(filePath));
}
- async readlink(path2) {
- throw fsErr("EINVAL", "invalid argument", path2);
+ // ── flush / write batching ────────────────────────────────────────────────
+ scheduleFlush() {
+ if (this.flushTimer !== null)
+ return;
+ this.flushTimer = setTimeout(() => {
+ this.flush().catch(() => {
+ });
+ }, FLUSH_DEBOUNCE_MS);
}
- async realpath(path2) {
- const p22 = normPath(path2);
- if (p22 === "/index.md")
- return p22;
- if (!this.files.has(p22) && !this.dirs.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- return p22;
+ async flush() {
+ this.flushChain = this.flushChain.then(() => this._doFlush());
+ return this.flushChain;
}
- // ── IFileSystem: directories ──────────────────────────────────────────────
- async mkdir(path2, opts) {
- const p22 = normPath(path2);
- if (this.files.has(p22))
- throw fsErr("EEXIST", "file exists", p22);
- if (this.dirs.has(p22)) {
- if (!opts?.recursive)
- throw fsErr("EEXIST", "file exists", p22);
+ async _doFlush() {
+ if (this.pending.size === 0)
return;
+ if (this.flushTimer !== null) {
+ clearTimeout(this.flushTimer);
+ this.flushTimer = null;
}
- if (!opts?.recursive) {
- const parent2 = parentOf(p22);
- if (!this.dirs.has(parent2))
- throw fsErr("ENOENT", "no such file or directory", parent2);
+ const rows = [...this.pending.values()];
+ this.pending.clear();
+ const results = await Promise.allSettled(rows.map((r10) => this.upsertRow(r10)));
+ let failures = 0;
+ for (let i11 = 0; i11 < results.length; i11++) {
+ if (results[i11].status === "rejected") {
+ if (!this.pending.has(rows[i11].path)) {
+ this.pending.set(rows[i11].path, rows[i11]);
+ }
+ failures++;
+ }
}
- this.dirs.set(p22, /* @__PURE__ */ new Set());
- const parent = parentOf(p22);
- if (!this.dirs.has(parent))
- this.dirs.set(parent, /* @__PURE__ */ new Set());
- this.dirs.get(parent).add(basename4(p22));
- }
- async readdir(path2) {
- const p22 = normPath(path2);
- if (!this.dirs.has(p22))
- throw fsErr("ENOTDIR", "not a directory", p22);
- const entries = [...this.dirs.get(p22) ?? []];
- if (p22 === "/" && !entries.includes("index.md")) {
- entries.push("index.md");
+ if (failures > 0) {
+ throw new Error(`flush: ${failures}/${rows.length} writes failed and were re-queued`);
}
- return entries;
}
- async readdirWithFileTypes(path2) {
- const names = await this.readdir(path2);
- const p22 = normPath(path2);
- return names.map((name) => {
- const child = p22 === "/" ? `/${name}` : `${p22}/${name}`;
- return {
- name,
- isFile: (this.files.has(child) || child === "/index.md") && !this.dirs.has(child),
- isDirectory: this.dirs.has(child),
- isSymbolicLink: false
- };
- });
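+  // First flush for a path is an INSERT with a fresh UUID; later flushes
+  // UPDATE in place, keyed by path. The optional project/description
+  // columns are only written when the caller supplied them.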
+ async upsertRow(r10) {
+ const text = sqlStr(r10.contentText);
+ const p22 = sqlStr(r10.path);
+ const fname = sqlStr(r10.filename);
+ const mime = sqlStr(r10.mimeType);
+ const ts3 = (/* @__PURE__ */ new Date()).toISOString();
+ const cd = r10.creationDate ?? ts3;
+ const lud = r10.lastUpdateDate ?? ts3;
+ if (this.flushed.has(r10.path)) {
+ let setClauses = `filename = '${fname}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`;
+ if (r10.project !== void 0)
+ setClauses += `, project = '${sqlStr(r10.project)}'`;
+ if (r10.description !== void 0)
+ setClauses += `, description = '${sqlStr(r10.description)}'`;
+ await this.client.query(`UPDATE "${this.table}" SET ${setClauses} WHERE path = '${p22}'`);
+ } else {
+ const id = randomUUID2();
+ const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : "");
+ const vals = `'${id}', '${p22}', '${fname}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? `, '${sqlStr(r10.description)}'` : "");
+ await this.client.query(`INSERT INTO "${this.table}" (${cols}) VALUES (${vals})`);
+ this.flushed.add(r10.path);
+ }
}
- // ── IFileSystem: structural mutations ─────────────────────────────────────
- async rm(path2, opts) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (!this.files.has(p22) && !this.dirs.has(p22)) {
- if (opts?.force)
- return;
- throw fsErr("ENOENT", "no such file or directory", p22);
+ // ── Virtual index.md generation ────────────────────────────────────────────
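+  // When no real /index.md row exists, synthesize a markdown table that
+  // links each summary file to its session transcript (matched by id stem).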
+ async generateVirtualIndex() {
+ const rows = await this.client.query(`SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" WHERE path LIKE '${sqlStr("/summaries/")}%' ORDER BY last_update_date DESC`);
+ const sessionPathsByKey = /* @__PURE__ */ new Map();
+ for (const sp of this.sessionPaths) {
+ const hivemind = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/);
+ if (hivemind) {
+ sessionPathsByKey.set(hivemind[1], sp.slice(1));
+ } else {
+ const fname = sp.split("/").pop() ?? "";
+ const stem = fname.replace(/\.[^.]+$/, "");
+ if (stem)
+ sessionPathsByKey.set(stem, sp.slice(1));
+ }
}
- if (this.dirs.has(p22)) {
- const children = this.dirs.get(p22) ?? /* @__PURE__ */ new Set();
- if (children.size > 0 && !opts?.recursive)
- throw fsErr("ENOTEMPTY", "directory not empty", p22);
- const toDelete = [];
- const stack = [p22];
- while (stack.length) {
- const cur = stack.pop();
- for (const child of [...this.dirs.get(cur) ?? []]) {
- const childPath = cur === "/" ? `/${child}` : `${cur}/${child}`;
- if (this.files.has(childPath))
- toDelete.push(childPath);
- if (this.dirs.has(childPath))
- stack.push(childPath);
- }
+ const lines = [
+ "# Session Index",
+ "",
+ "List of all Claude Code sessions with summaries.",
+ "",
+ "| Session | Conversation | Created | Last Updated | Project | Description |",
+ "|---------|-------------|---------|--------------|---------|-------------|"
+ ];
+ for (const row of rows) {
+ const p22 = row["path"];
+ const match2 = p22.match(/\/summaries\/([^/]+)\/([^/]+)\.md$/);
+ if (!match2)
+ continue;
+ const summaryUser = match2[1];
+ const sessionId = match2[2];
+ const relPath = `summaries/${summaryUser}/${sessionId}.md`;
+ const baseName = sessionId.replace(/_summary$/, "");
+ const convPath = sessionPathsByKey.get(sessionId) ?? sessionPathsByKey.get(baseName);
+ const convLink = convPath ? `[messages](${convPath})` : "";
+ const project = row["project"] || "";
+ const description = row["description"] || "";
+ const creationDate = row["creation_date"] || "";
+ const lastUpdateDate = row["last_update_date"] || "";
+ lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`);
+ }
+ lines.push("");
+ return lines.join("\n");
+ }
+ // ── batch prefetch ────────────────────────────────────────────────────────
+  /**
+   * Prefetch multiple files into the content cache with batched SQL queries
+   * (at most PREFETCH_BATCH_SIZE paths per query). Skips paths that are
+   * already cached or pending; session-backed paths are fetched from the
+   * sessions table and joined into a single transcript. After this call,
+   * subsequent readFile() calls for these paths hit cache.
+   */
+ async prefetch(paths) {
+ const uncached = [];
+ const uncachedSessions = [];
+ for (const raw of paths) {
+ const p22 = normPath(raw);
+ if (this.files.get(p22) !== null && this.files.get(p22) !== void 0)
+ continue;
+ if (this.pending.has(p22))
+ continue;
+ if (!this.files.has(p22))
+ continue;
+ if (this.sessionPaths.has(p22)) {
+ uncachedSessions.push(p22);
+ } else {
+ uncached.push(p22);
}
- const safeToDelete = toDelete.filter((fp) => !this.sessionPaths.has(fp));
- for (const fp of safeToDelete)
- this.removeFromTree(fp);
- this.dirs.delete(p22);
- this.dirs.get(parentOf(p22))?.delete(basename4(p22));
- if (safeToDelete.length > 0) {
- const inList = safeToDelete.map((fp) => `'${sqlStr(fp)}'`).join(", ");
- await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`);
+ }
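+    // Fetch memory-table content in chunks of PREFETCH_BATCH_SIZE paths to
+    // keep each IN (...) list bounded.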
+ for (let i11 = 0; i11 < uncached.length; i11 += PREFETCH_BATCH_SIZE) {
+ const chunk = uncached.slice(i11, i11 + PREFETCH_BATCH_SIZE);
+ const inList = chunk.map((p22) => `'${sqlStr(p22)}'`).join(", ");
+ const rows = await this.client.query(`SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})`);
+ for (const row of rows) {
+ const p22 = row["path"];
+ const text = row["summary"] ?? "";
+ this.files.set(p22, Buffer.from(text, "utf-8"));
}
- } else {
- await this.client.query(`DELETE FROM "${this.table}" WHERE path = '${sqlStr(p22)}'`);
- this.removeFromTree(p22);
}
- }
- async cp(src, dest, opts) {
- const s10 = normPath(src), d15 = normPath(dest);
- if (this.sessionPaths.has(d15))
- throw fsErr("EPERM", "session files are read-only", d15);
- if (this.dirs.has(s10) && !this.files.has(s10)) {
- if (!opts?.recursive)
- throw fsErr("EISDIR", "is a directory", s10);
- for (const fp of [...this.files.keys()].filter((k17) => k17 === s10 || k17.startsWith(s10 + "/"))) {
- await this.writeFile(d15 + fp.slice(s10.length), await this.readFileBuffer(fp));
+ if (!this.sessionsTable)
+ return;
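+    // Session files span many rows: group messages per path in creation
+    // order, then cache the joined transcript as the file's content.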
+ for (let i11 = 0; i11 < uncachedSessions.length; i11 += PREFETCH_BATCH_SIZE) {
+ const chunk = uncachedSessions.slice(i11, i11 + PREFETCH_BATCH_SIZE);
+ const inList = chunk.map((p22) => `'${sqlStr(p22)}'`).join(", ");
+ const rows = await this.client.query(`SELECT path, message, creation_date FROM "${this.sessionsTable}" WHERE path IN (${inList}) ORDER BY path, creation_date ASC`);
+ const grouped = /* @__PURE__ */ new Map();
+ for (const row of rows) {
+ const p22 = row["path"];
+ const current = grouped.get(p22) ?? [];
+ current.push(normalizeSessionMessage(p22, row["message"]));
+ grouped.set(p22, current);
+ }
+ for (const [p22, parts] of grouped) {
+ this.files.set(p22, Buffer.from(parts.join("\n"), "utf-8"));
}
- } else {
- await this.writeFile(d15, await this.readFileBuffer(s10));
}
}
- async mv(src, dest) {
- const s10 = normPath(src), d15 = normPath(dest);
- if (this.sessionPaths.has(s10))
- throw fsErr("EPERM", "session files are read-only", s10);
- if (this.sessionPaths.has(d15))
- throw fsErr("EPERM", "session files are read-only", d15);
- await this.cp(src, dest, { recursive: true });
- await this.rm(src, { recursive: true, force: true });
+ // ── IFileSystem: reads ────────────────────────────────────────────────────
+ async readFileBuffer(path2) {
+ const p22 = normPath(path2);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ if (!this.files.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const cached = this.files.get(p22);
+ if (cached !== null && cached !== void 0)
+ return cached;
+ const pend = this.pending.get(p22);
+ if (pend) {
+ const buf2 = Buffer.from(pend.contentText, "utf-8");
+ this.files.set(p22, buf2);
+ return buf2;
+ }
+ if (this.sessionPaths.has(p22) && this.sessionsTable) {
+ const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
+ if (rows2.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text = joinSessionMessages(p22, rows2.map((row) => row["message"]));
+ const buf2 = Buffer.from(text, "utf-8");
+ this.files.set(p22, buf2);
+ return buf2;
+ }
+ const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
+ if (rows.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const buf = Buffer.from(rows[0]["summary"] ?? "", "utf-8");
+ this.files.set(p22, buf);
+ return buf;
}
- resolvePath(base, path2) {
- if (path2.startsWith("/"))
- return normPath(path2);
- return normPath(posix.join(base, path2));
+ async readFile(path2, _opts) {
+ const p22 = normPath(path2);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ if (p22 === "/index.md" && !this.files.has(p22)) {
+ const realRows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr("/index.md")}' LIMIT 1`);
+ if (realRows.length > 0 && realRows[0]["summary"]) {
+ const text2 = realRows[0]["summary"];
+ const buf2 = Buffer.from(text2, "utf-8");
+ this.files.set(p22, buf2);
+ return text2;
+ }
+ return this.generateVirtualIndex();
+ }
+ if (!this.files.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const cached = this.files.get(p22);
+ if (cached !== null && cached !== void 0)
+ return cached.toString("utf-8");
+ const pend = this.pending.get(p22);
+ if (pend)
+ return pend.contentText;
+ if (this.sessionPaths.has(p22) && this.sessionsTable) {
+ const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
+ if (rows2.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text2 = joinSessionMessages(p22, rows2.map((row) => row["message"]));
+ const buf2 = Buffer.from(text2, "utf-8");
+ this.files.set(p22, buf2);
+ return text2;
+ }
+ const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
+ if (rows.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text = rows[0]["summary"] ?? "";
+ const buf = Buffer.from(text, "utf-8");
+ this.files.set(p22, buf);
+ return text;
}
- getAllPaths() {
- return [.../* @__PURE__ */ new Set([...this.files.keys(), ...this.dirs.keys()])];
+ // ── IFileSystem: writes ───────────────────────────────────────────────────
+ /** Write a file with optional row-level metadata (project, description, dates). */
+ async writeFileWithMeta(path2, content, meta) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ const buf = Buffer.from(text, "utf-8");
+ const mime = guessMime(basename4(p22));
+ this.files.set(p22, buf);
+ this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
+ this.addToTree(p22);
+ this.pending.set(p22, {
+ path: p22,
+ filename: basename4(p22),
+ contentText: text,
+ mimeType: mime,
+ sizeBytes: buf.length,
+ ...meta
+ });
+ if (this.pending.size >= BATCH_SIZE)
+ await this.flush();
+ else
+ this.scheduleFlush();
}
-};
-
-// node_modules/yargs-parser/build/lib/index.js
-import { format } from "util";
-import { normalize, resolve as resolve4 } from "path";
-
-// node_modules/yargs-parser/build/lib/string-utils.js
-function camelCase(str) {
- const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase();
- if (!isCamelCase) {
- str = str.toLowerCase();
+ async writeFile(path2, content, _opts) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ const buf = Buffer.from(text, "utf-8");
+ const mime = guessMime(basename4(p22));
+ this.files.set(p22, buf);
+ this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
+ this.addToTree(p22);
+ this.pending.set(p22, {
+ path: p22,
+ filename: basename4(p22),
+ contentText: text,
+ mimeType: mime,
+ sizeBytes: buf.length
+ });
+ if (this.pending.size >= BATCH_SIZE)
+ await this.flush();
+ else
+ this.scheduleFlush();
}
- if (str.indexOf("-") === -1 && str.indexOf("_") === -1) {
- return str;
- } else {
- let camelcase = "";
- let nextChrUpper = false;
- const leadingHyphens = str.match(/^-+/);
- for (let i11 = leadingHyphens ? leadingHyphens[0].length : 0; i11 < str.length; i11++) {
- let chr = str.charAt(i11);
- if (nextChrUpper) {
- nextChrUpper = false;
- chr = chr.toUpperCase();
- }
- if (i11 !== 0 && (chr === "-" || chr === "_")) {
- nextChrUpper = true;
- } else if (chr !== "-" && chr !== "_") {
- camelcase += chr;
+ async appendFile(path2, content, opts) {
+ const p22 = normPath(path2);
+ const add = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.files.has(p22) || await this.exists(p22).catch(() => false)) {
+ const ts3 = (/* @__PURE__ */ new Date()).toISOString();
+ await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`);
+ this.files.set(p22, null);
+ const m26 = this.meta.get(p22);
+ if (m26) {
+ m26.size += Buffer.byteLength(add, "utf-8");
+ m26.mtime = new Date(ts3);
}
- }
- return camelcase;
- }
-}
-function decamelize(str, joinString) {
- const lowercase = str.toLowerCase();
- joinString = joinString || "-";
- let notCamelcase = "";
- for (let i11 = 0; i11 < str.length; i11++) {
- const chrLower = lowercase.charAt(i11);
- const chrString = str.charAt(i11);
- if (chrLower !== chrString && i11 > 0) {
- notCamelcase += `${joinString}${lowercase.charAt(i11)}`;
} else {
- notCamelcase += chrString;
+ await this.writeFile(p22, content, opts);
+ await this.flush();
}
}
- return notCamelcase;
-}
-function looksLikeNumber(x28) {
- if (x28 === null || x28 === void 0)
- return false;
- if (typeof x28 === "number")
- return true;
- if (/^0x[0-9a-f]+$/i.test(x28))
- return true;
- if (/^0[^.]/.test(x28))
- return false;
- return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x28);
-}
-
-// node_modules/yargs-parser/build/lib/tokenize-arg-string.js
-function tokenizeArgString(argString) {
- if (Array.isArray(argString)) {
- return argString.map((e6) => typeof e6 !== "string" ? e6 + "" : e6);
+ // ── IFileSystem: metadata ─────────────────────────────────────────────────
+ async exists(path2) {
+ const p22 = normPath(path2);
+ if (p22 === "/index.md")
+ return true;
+ return this.files.has(p22) || this.dirs.has(p22);
}
- argString = argString.trim();
- let i11 = 0;
- let prevC = null;
- let c15 = null;
- let opening = null;
- const args = [];
- for (let ii2 = 0; ii2 < argString.length; ii2++) {
- prevC = c15;
- c15 = argString.charAt(ii2);
- if (c15 === " " && !opening) {
- if (!(prevC === " ")) {
- i11++;
- }
- continue;
- }
- if (c15 === opening) {
- opening = null;
- } else if ((c15 === "'" || c15 === '"') && !opening) {
- opening = c15;
+ async stat(path2) {
+ const p22 = normPath(path2);
+ const isFile = this.files.has(p22);
+ const isDir = this.dirs.has(p22);
+ if (p22 === "/index.md" && !isFile && !isDir) {
+ return {
+ isFile: true,
+ isDirectory: false,
+ isSymbolicLink: false,
+ mode: 420,
+ size: 0,
+ mtime: /* @__PURE__ */ new Date()
+ };
}
- if (!args[i11])
- args[i11] = "";
- args[i11] += c15;
+ if (!isFile && !isDir)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const m26 = this.meta.get(p22);
+ return {
+ isFile: isFile && !isDir,
+ isDirectory: isDir,
+ isSymbolicLink: false,
+ mode: isDir ? 493 : 420,
+ size: m26?.size ?? 0,
+ mtime: m26?.mtime ?? /* @__PURE__ */ new Date()
+ };
}
- return args;
-}
-
-// node_modules/yargs-parser/build/lib/yargs-parser-types.js
-var DefaultValuesForTypeKey;
-(function(DefaultValuesForTypeKey2) {
- DefaultValuesForTypeKey2["BOOLEAN"] = "boolean";
+ async lstat(path2) {
+ return this.stat(path2);
+ }
+ async chmod(_path, _mode) {
+ }
+ async utimes(_path, _atime, _mtime) {
+ }
+ async symlink(_target, linkPath) {
+ throw fsErr("EPERM", "operation not permitted", linkPath);
+ }
+ async link(_src, destPath) {
+ throw fsErr("EPERM", "operation not permitted", destPath);
+ }
+ async readlink(path2) {
+ throw fsErr("EINVAL", "invalid argument", path2);
+ }
+ async realpath(path2) {
+ const p22 = normPath(path2);
+ if (p22 === "/index.md")
+ return p22;
+ if (!this.files.has(p22) && !this.dirs.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ return p22;
+ }
+ // ── IFileSystem: directories ──────────────────────────────────────────────
+ async mkdir(path2, opts) {
+ const p22 = normPath(path2);
+ if (this.files.has(p22))
+ throw fsErr("EEXIST", "file exists", p22);
+ if (this.dirs.has(p22)) {
+ if (!opts?.recursive)
+ throw fsErr("EEXIST", "file exists", p22);
+ return;
+ }
+ if (!opts?.recursive) {
+ const parent2 = parentOf(p22);
+ if (!this.dirs.has(parent2))
+ throw fsErr("ENOENT", "no such file or directory", parent2);
+ }
+ this.dirs.set(p22, /* @__PURE__ */ new Set());
+ const parent = parentOf(p22);
+ if (!this.dirs.has(parent))
+ this.dirs.set(parent, /* @__PURE__ */ new Set());
+ this.dirs.get(parent).add(basename4(p22));
+ }
+ async readdir(path2) {
+ const p22 = normPath(path2);
+ if (!this.dirs.has(p22))
+ throw fsErr("ENOTDIR", "not a directory", p22);
+ const entries = [...this.dirs.get(p22) ?? []];
+ if (p22 === "/" && !entries.includes("index.md")) {
+ entries.push("index.md");
+ }
+ return entries;
+ }
+ async readdirWithFileTypes(path2) {
+ const names = await this.readdir(path2);
+ const p22 = normPath(path2);
+ return names.map((name) => {
+ const child = p22 === "/" ? `/${name}` : `${p22}/${name}`;
+ return {
+ name,
+ isFile: (this.files.has(child) || child === "/index.md") && !this.dirs.has(child),
+ isDirectory: this.dirs.has(child),
+ isSymbolicLink: false
+ };
+ });
+ }
+ // ── IFileSystem: structural mutations ─────────────────────────────────────
+ async rm(path2, opts) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (!this.files.has(p22) && !this.dirs.has(p22)) {
+ if (opts?.force)
+ return;
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ }
+ if (this.dirs.has(p22)) {
+ const children = this.dirs.get(p22) ?? /* @__PURE__ */ new Set();
+ if (children.size > 0 && !opts?.recursive)
+ throw fsErr("ENOTEMPTY", "directory not empty", p22);
+ const toDelete = [];
+ const stack = [p22];
+ while (stack.length) {
+ const cur = stack.pop();
+ for (const child of [...this.dirs.get(cur) ?? []]) {
+ const childPath = cur === "/" ? `/${child}` : `${cur}/${child}`;
+ if (this.files.has(childPath))
+ toDelete.push(childPath);
+ if (this.dirs.has(childPath))
+ stack.push(childPath);
+ }
+ }
+ const safeToDelete = toDelete.filter((fp) => !this.sessionPaths.has(fp));
+ for (const fp of safeToDelete)
+ this.removeFromTree(fp);
+ this.dirs.delete(p22);
+ this.dirs.get(parentOf(p22))?.delete(basename4(p22));
+ if (safeToDelete.length > 0) {
+ const inList = safeToDelete.map((fp) => `'${sqlStr(fp)}'`).join(", ");
+ await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`);
+ }
+ } else {
+ await this.client.query(`DELETE FROM "${this.table}" WHERE path = '${sqlStr(p22)}'`);
+ this.removeFromTree(p22);
+ }
+ }
+ async cp(src, dest, opts) {
+ const s10 = normPath(src), d15 = normPath(dest);
+ if (this.sessionPaths.has(d15))
+ throw fsErr("EPERM", "session files are read-only", d15);
+ if (this.dirs.has(s10) && !this.files.has(s10)) {
+ if (!opts?.recursive)
+ throw fsErr("EISDIR", "is a directory", s10);
+ for (const fp of [...this.files.keys()].filter((k17) => k17 === s10 || k17.startsWith(s10 + "/"))) {
+ await this.writeFile(d15 + fp.slice(s10.length), await this.readFileBuffer(fp));
+ }
+ } else {
+ await this.writeFile(d15, await this.readFileBuffer(s10));
+ }
+ }
+ async mv(src, dest) {
+ const s10 = normPath(src), d15 = normPath(dest);
+ if (this.sessionPaths.has(s10))
+ throw fsErr("EPERM", "session files are read-only", s10);
+ if (this.sessionPaths.has(d15))
+ throw fsErr("EPERM", "session files are read-only", d15);
+ await this.cp(src, dest, { recursive: true });
+ await this.rm(src, { recursive: true, force: true });
+ }
+ resolvePath(base, path2) {
+ if (path2.startsWith("/"))
+ return normPath(path2);
+ return normPath(posix.join(base, path2));
+ }
+ getAllPaths() {
+ return [.../* @__PURE__ */ new Set([...this.files.keys(), ...this.dirs.keys()])];
+ }
+};
+
+// node_modules/yargs-parser/build/lib/index.js
+import { format } from "util";
+import { normalize, resolve as resolve4 } from "path";
+
+// node_modules/yargs-parser/build/lib/string-utils.js
+function camelCase2(str) {
+ const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase();
+ if (!isCamelCase) {
+ str = str.toLowerCase();
+ }
+ if (str.indexOf("-") === -1 && str.indexOf("_") === -1) {
+ return str;
+ } else {
+ let camelcase = "";
+ let nextChrUpper = false;
+ const leadingHyphens = str.match(/^-+/);
+ for (let i11 = leadingHyphens ? leadingHyphens[0].length : 0; i11 < str.length; i11++) {
+ let chr = str.charAt(i11);
+ if (nextChrUpper) {
+ nextChrUpper = false;
+ chr = chr.toUpperCase();
+ }
+ if (i11 !== 0 && (chr === "-" || chr === "_")) {
+ nextChrUpper = true;
+ } else if (chr !== "-" && chr !== "_") {
+ camelcase += chr;
+ }
+ }
+ return camelcase;
+ }
+}
+function decamelize(str, joinString) {
+ const lowercase = str.toLowerCase();
+ joinString = joinString || "-";
+ let notCamelcase = "";
+ for (let i11 = 0; i11 < str.length; i11++) {
+ const chrLower = lowercase.charAt(i11);
+ const chrString = str.charAt(i11);
+ if (chrLower !== chrString && i11 > 0) {
+ notCamelcase += `${joinString}${lowercase.charAt(i11)}`;
+ } else {
+ notCamelcase += chrString;
+ }
+ }
+ return notCamelcase;
+}
+function looksLikeNumber(x28) {
+ if (x28 === null || x28 === void 0)
+ return false;
+ if (typeof x28 === "number")
+ return true;
+ if (/^0x[0-9a-f]+$/i.test(x28))
+ return true;
+ if (/^0[^.]/.test(x28))
+ return false;
+ return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x28);
+}
+
+// node_modules/yargs-parser/build/lib/tokenize-arg-string.js
+function tokenizeArgString(argString) {
+ if (Array.isArray(argString)) {
+ return argString.map((e6) => typeof e6 !== "string" ? e6 + "" : e6);
+ }
+ argString = argString.trim();
+ let i11 = 0;
+ let prevC = null;
+ let c15 = null;
+ let opening = null;
+ const args = [];
+ for (let ii2 = 0; ii2 < argString.length; ii2++) {
+ prevC = c15;
+ c15 = argString.charAt(ii2);
+ if (c15 === " " && !opening) {
+ if (!(prevC === " ")) {
+ i11++;
+ }
+ continue;
+ }
+ if (c15 === opening) {
+ opening = null;
+ } else if ((c15 === "'" || c15 === '"') && !opening) {
+ opening = c15;
+ }
+ if (!args[i11])
+ args[i11] = "";
+ args[i11] += c15;
+ }
+ return args;
+}
+
+// node_modules/yargs-parser/build/lib/yargs-parser-types.js
+var DefaultValuesForTypeKey;
+(function(DefaultValuesForTypeKey2) {
+ DefaultValuesForTypeKey2["BOOLEAN"] = "boolean";
DefaultValuesForTypeKey2["STRING"] = "string";
DefaultValuesForTypeKey2["NUMBER"] = "number";
DefaultValuesForTypeKey2["ARRAY"] = "array";
@@ -67983,7 +68490,7 @@ var YargsParser = class {
;
[].concat(...Object.keys(aliases).map((k17) => aliases[k17])).forEach((alias) => {
if (configuration["camel-case-expansion"] && alias.includes("-")) {
- delete argv[alias.split(".").map((prop) => camelCase(prop)).join(".")];
+ delete argv[alias.split(".").map((prop) => camelCase2(prop)).join(".")];
}
delete argv[alias];
});
@@ -68065,7 +68572,7 @@ var YargsParser = class {
function setArg(key, val, shouldStripQuotes = inputIsString) {
if (/-/.test(key) && configuration["camel-case-expansion"]) {
const alias = key.split(".").map(function(prop) {
- return camelCase(prop);
+ return camelCase2(prop);
}).join(".");
addNewAlias(key, alias);
}
@@ -68213,7 +68720,7 @@ var YargsParser = class {
if (i11 === 0) {
key = key.substring(prefix.length);
}
- return camelCase(key);
+ return camelCase2(key);
});
if ((configOnly && flags.configs[keys.join(".")] || !configOnly) && !hasKey(argv2, keys)) {
setArg(keys.join("."), env2[envVar]);
@@ -68333,7 +68840,7 @@ var YargsParser = class {
flags.aliases[key] = [].concat(aliases[key] || []);
flags.aliases[key].concat(key).forEach(function(x28) {
if (/-/.test(x28) && configuration["camel-case-expansion"]) {
- const c15 = camelCase(x28);
+ const c15 = camelCase2(x28);
if (c15 !== key && flags.aliases[key].indexOf(c15) === -1) {
flags.aliases[key].push(c15);
newAliases[c15] = true;
@@ -68394,445 +68901,170 @@ var YargsParser = class {
break;
}
}
- return hasAllFlags;
- }
- function isUnknownOptionAsArg(arg) {
- return configuration["unknown-options-as-args"] && isUnknownOption(arg);
- }
- function isUnknownOption(arg) {
- arg = arg.replace(/^-{3,}/, "--");
- if (arg.match(negative)) {
- return false;
- }
- if (hasAllShortFlags(arg)) {
- return false;
- }
- const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/;
- const normalFlag = /^-+([^=]+?)$/;
- const flagEndingInHyphen = /^-+([^=]+?)-$/;
- const flagEndingInDigits = /^-+([^=]+?\d+)$/;
- const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/;
- return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters);
- }
- function defaultValue(key) {
- if (!checkAllAliases(key, flags.bools) && !checkAllAliases(key, flags.counts) && `${key}` in defaults2) {
- return defaults2[key];
- } else {
- return defaultForType(guessType(key));
- }
- }
- function defaultForType(type) {
- const def = {
- [DefaultValuesForTypeKey.BOOLEAN]: true,
- [DefaultValuesForTypeKey.STRING]: "",
- [DefaultValuesForTypeKey.NUMBER]: void 0,
- [DefaultValuesForTypeKey.ARRAY]: []
- };
- return def[type];
- }
- function guessType(key) {
- let type = DefaultValuesForTypeKey.BOOLEAN;
- if (checkAllAliases(key, flags.strings))
- type = DefaultValuesForTypeKey.STRING;
- else if (checkAllAliases(key, flags.numbers))
- type = DefaultValuesForTypeKey.NUMBER;
- else if (checkAllAliases(key, flags.bools))
- type = DefaultValuesForTypeKey.BOOLEAN;
- else if (checkAllAliases(key, flags.arrays))
- type = DefaultValuesForTypeKey.ARRAY;
- return type;
- }
- function isUndefined(num) {
- return num === void 0;
- }
- function checkConfiguration() {
- Object.keys(flags.counts).find((key) => {
- if (checkAllAliases(key, flags.arrays)) {
- error = Error(__("Invalid configuration: %s, opts.count excludes opts.array.", key));
- return true;
- } else if (checkAllAliases(key, flags.nargs)) {
- error = Error(__("Invalid configuration: %s, opts.count excludes opts.narg.", key));
- return true;
- }
- return false;
- });
- }
- return {
- aliases: Object.assign({}, flags.aliases),
- argv: Object.assign(argvReturn, argv),
- configuration,
- defaulted: Object.assign({}, defaulted),
- error,
- newAliases: Object.assign({}, newAliases)
- };
- }
-};
-function combineAliases(aliases) {
- const aliasArrays = [];
- const combined = /* @__PURE__ */ Object.create(null);
- let change = true;
- Object.keys(aliases).forEach(function(key) {
- aliasArrays.push([].concat(aliases[key], key));
- });
- while (change) {
- change = false;
- for (let i11 = 0; i11 < aliasArrays.length; i11++) {
- for (let ii2 = i11 + 1; ii2 < aliasArrays.length; ii2++) {
- const intersect = aliasArrays[i11].filter(function(v27) {
- return aliasArrays[ii2].indexOf(v27) !== -1;
- });
- if (intersect.length) {
- aliasArrays[i11] = aliasArrays[i11].concat(aliasArrays[ii2]);
- aliasArrays.splice(ii2, 1);
- change = true;
- break;
- }
- }
- }
- }
- aliasArrays.forEach(function(aliasArray) {
- aliasArray = aliasArray.filter(function(v27, i11, self2) {
- return self2.indexOf(v27) === i11;
- });
- const lastAlias = aliasArray.pop();
- if (lastAlias !== void 0 && typeof lastAlias === "string") {
- combined[lastAlias] = aliasArray;
- }
- });
- return combined;
-}
-function increment(orig) {
- return orig !== void 0 ? orig + 1 : 1;
-}
-function sanitizeKey(key) {
- if (key === "__proto__")
- return "___proto___";
- return key;
-}
-function stripQuotes(val) {
- return typeof val === "string" && (val[0] === "'" || val[0] === '"') && val[val.length - 1] === val[0] ? val.substring(1, val.length - 1) : val;
-}
-
-// node_modules/yargs-parser/build/lib/index.js
-import { readFileSync as readFileSync2 } from "fs";
-import { createRequire } from "node:module";
-var _a3;
-var _b;
-var _c;
-var minNodeVersion = process && process.env && process.env.YARGS_MIN_NODE_VERSION ? Number(process.env.YARGS_MIN_NODE_VERSION) : 20;
-var nodeVersion = (_b = (_a3 = process === null || process === void 0 ? void 0 : process.versions) === null || _a3 === void 0 ? void 0 : _a3.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1);
-if (nodeVersion) {
- const major = Number(nodeVersion.match(/^([^.]+)/)[1]);
- if (major < minNodeVersion) {
- throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`);
- }
-}
-var env = process ? process.env : {};
-var require2 = createRequire ? createRequire(import.meta.url) : void 0;
-var parser = new YargsParser({
- cwd: process.cwd,
- env: () => {
- return env;
- },
- format,
- normalize,
- resolve: resolve4,
- require: (path2) => {
- if (typeof require2 !== "undefined") {
- return require2(path2);
- } else if (path2.match(/\.json$/)) {
- return JSON.parse(readFileSync2(path2, "utf8"));
- } else {
- throw Error("only .json config files are supported in ESM");
- }
- }
-});
-var yargsParser = function Parser(args, opts) {
- const result = parser.parse(args.slice(), opts);
- return result.argv;
-};
-yargsParser.detailed = function(args, opts) {
- return parser.parse(args.slice(), opts);
-};
-yargsParser.camelCase = camelCase;
-yargsParser.decamelize = decamelize;
-yargsParser.looksLikeNumber = looksLikeNumber;
-var lib_default = yargsParser;
-
-// dist/src/shell/grep-core.js
-var TOOL_INPUT_FIELDS = [
- "command",
- "file_path",
- "path",
- "pattern",
- "prompt",
- "subagent_type",
- "query",
- "url",
- "notebook_path",
- "old_string",
- "new_string",
- "content",
- "skill",
- "args",
- "taskId",
- "status",
- "subject",
- "description",
- "to",
- "message",
- "summary",
- "max_results"
-];
-var TOOL_RESPONSE_DROP = /* @__PURE__ */ new Set([
- // Note: `stderr` is intentionally NOT in this set. The `stdout` high-signal
- // branch below already de-dupes it for the common case (appends as suffix
- // when non-empty). If a tool response has ONLY `stderr` and no `stdout`
- // (hard-failure on some tools), the generic cleanup preserves it so the
- // error message reaches Claude instead of collapsing to `[ok]`.
- "interrupted",
- "isImage",
- "noOutputExpected",
- "type",
- "structuredPatch",
- "userModified",
- "originalFile",
- "replaceAll",
- "totalDurationMs",
- "totalTokens",
- "totalToolUseCount",
- "usage",
- "toolStats",
- "durationMs",
- "durationSeconds",
- "bytes",
- "code",
- "codeText",
- "agentId",
- "agentType",
- "verificationNudgeNeeded",
- "numLines",
- "numFiles",
- "truncated",
- "statusChange",
- "updatedFields",
- "isAgent",
- "success"
-]);
-function maybeParseJson(v27) {
- if (typeof v27 !== "string")
- return v27;
- const s10 = v27.trim();
- if (s10[0] !== "{" && s10[0] !== "[")
- return v27;
- try {
- return JSON.parse(s10);
- } catch {
- return v27;
- }
-}
-function snakeCase(k17) {
- return k17.replace(/([A-Z])/g, "_$1").toLowerCase();
-}
-function camelCase2(k17) {
- return k17.replace(/_([a-z])/g, (_16, c15) => c15.toUpperCase());
-}
-function formatToolInput(raw) {
- const p22 = maybeParseJson(raw);
- if (typeof p22 !== "object" || p22 === null)
- return String(p22 ?? "");
- const parts = [];
- for (const k17 of TOOL_INPUT_FIELDS) {
- if (p22[k17] === void 0)
- continue;
- const v27 = p22[k17];
- parts.push(`${k17}: ${typeof v27 === "string" ? v27 : JSON.stringify(v27)}`);
- }
- for (const k17 of ["glob", "output_mode", "limit", "offset"]) {
- if (p22[k17] !== void 0)
- parts.push(`${k17}: ${p22[k17]}`);
- }
- return parts.length ? parts.join("\n") : JSON.stringify(p22);
-}
-function formatToolResponse(raw, inp, toolName) {
- const r10 = maybeParseJson(raw);
- if (typeof r10 !== "object" || r10 === null)
- return String(r10 ?? "");
- if (toolName === "Edit" || toolName === "Write" || toolName === "MultiEdit") {
- return r10.filePath ? `[wrote ${r10.filePath}]` : "[ok]";
- }
- if (typeof r10.stdout === "string") {
- const stderr = r10.stderr;
- return r10.stdout + (stderr ? `
-stderr: ${stderr}` : "");
- }
- if (typeof r10.content === "string")
- return r10.content;
- if (r10.file && typeof r10.file === "object") {
- const f11 = r10.file;
- if (typeof f11.content === "string")
- return `[${f11.filePath ?? ""}]
-${f11.content}`;
- if (typeof f11.base64 === "string")
- return `[binary ${f11.filePath ?? ""}: ${f11.base64.length} base64 chars]`;
- }
- if (Array.isArray(r10.filenames))
- return r10.filenames.join("\n");
- if (Array.isArray(r10.matches)) {
- return r10.matches.map((m26) => typeof m26 === "string" ? m26 : JSON.stringify(m26)).join("\n");
- }
- if (Array.isArray(r10.results)) {
- return r10.results.map((x28) => typeof x28 === "string" ? x28 : x28?.title ?? x28?.url ?? JSON.stringify(x28)).join("\n");
- }
- const inpObj = maybeParseJson(inp);
- const kept = {};
- for (const [k17, v27] of Object.entries(r10)) {
- if (TOOL_RESPONSE_DROP.has(k17))
- continue;
- if (v27 === "" || v27 === false || v27 == null)
- continue;
- if (typeof inpObj === "object" && inpObj) {
- const inObj = inpObj;
- if (k17 in inObj && JSON.stringify(inObj[k17]) === JSON.stringify(v27))
- continue;
- const snake = snakeCase(k17);
- if (snake in inObj && JSON.stringify(inObj[snake]) === JSON.stringify(v27))
- continue;
- const camel = camelCase2(k17);
- if (camel in inObj && JSON.stringify(inObj[camel]) === JSON.stringify(v27))
- continue;
+ return hasAllFlags;
}
- kept[k17] = v27;
- }
- return Object.keys(kept).length ? JSON.stringify(kept) : "[ok]";
-}
-function formatToolCall(obj) {
- return `[tool:${obj?.tool_name ?? "?"}]
-input: ${formatToolInput(obj?.tool_input)}
-response: ${formatToolResponse(obj?.tool_response, obj?.tool_input, obj?.tool_name)}`;
-}
-function normalizeContent(path2, raw) {
- if (!path2.includes("/sessions/"))
- return raw;
- if (!raw || raw[0] !== "{")
- return raw;
- let obj;
- try {
- obj = JSON.parse(raw);
- } catch {
- return raw;
- }
- if (Array.isArray(obj.turns)) {
- const header = [];
- if (obj.date_time)
- header.push(`date: ${obj.date_time}`);
- if (obj.speakers) {
- const s10 = obj.speakers;
- const names = [s10.speaker_a, s10.speaker_b].filter(Boolean).join(", ");
- if (names)
- header.push(`speakers: ${names}`);
+ function isUnknownOptionAsArg(arg) {
+ return configuration["unknown-options-as-args"] && isUnknownOption(arg);
}
- const lines = obj.turns.map((t6) => {
- const sp = String(t6?.speaker ?? t6?.name ?? "?").trim();
- const tx = String(t6?.text ?? t6?.content ?? "").replace(/\s+/g, " ").trim();
- const tag = t6?.dia_id ? `[${t6.dia_id}] ` : "";
- return `${tag}${sp}: ${tx}`;
- });
- const out2 = [...header, ...lines].join("\n");
- return out2.trim() ? out2 : raw;
+ function isUnknownOption(arg) {
+ arg = arg.replace(/^-{3,}/, "--");
+ if (arg.match(negative)) {
+ return false;
+ }
+ if (hasAllShortFlags(arg)) {
+ return false;
+ }
+ const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/;
+ const normalFlag = /^-+([^=]+?)$/;
+ const flagEndingInHyphen = /^-+([^=]+?)-$/;
+ const flagEndingInDigits = /^-+([^=]+?\d+)$/;
+ const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/;
+ return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters);
+ }
+ function defaultValue(key) {
+ if (!checkAllAliases(key, flags.bools) && !checkAllAliases(key, flags.counts) && `${key}` in defaults2) {
+ return defaults2[key];
+ } else {
+ return defaultForType(guessType(key));
+ }
+ }
+ function defaultForType(type) {
+ const def = {
+ [DefaultValuesForTypeKey.BOOLEAN]: true,
+ [DefaultValuesForTypeKey.STRING]: "",
+ [DefaultValuesForTypeKey.NUMBER]: void 0,
+ [DefaultValuesForTypeKey.ARRAY]: []
+ };
+ return def[type];
+ }
+ function guessType(key) {
+ let type = DefaultValuesForTypeKey.BOOLEAN;
+ if (checkAllAliases(key, flags.strings))
+ type = DefaultValuesForTypeKey.STRING;
+ else if (checkAllAliases(key, flags.numbers))
+ type = DefaultValuesForTypeKey.NUMBER;
+ else if (checkAllAliases(key, flags.bools))
+ type = DefaultValuesForTypeKey.BOOLEAN;
+ else if (checkAllAliases(key, flags.arrays))
+ type = DefaultValuesForTypeKey.ARRAY;
+ return type;
+ }
+ function isUndefined(num) {
+ return num === void 0;
+ }
+ function checkConfiguration() {
+ Object.keys(flags.counts).find((key) => {
+ if (checkAllAliases(key, flags.arrays)) {
+ error = Error(__("Invalid configuration: %s, opts.count excludes opts.array.", key));
+ return true;
+ } else if (checkAllAliases(key, flags.nargs)) {
+ error = Error(__("Invalid configuration: %s, opts.count excludes opts.narg.", key));
+ return true;
+ }
+ return false;
+ });
+ }
+ return {
+ aliases: Object.assign({}, flags.aliases),
+ argv: Object.assign(argvReturn, argv),
+ configuration,
+ defaulted: Object.assign({}, defaulted),
+ error,
+ newAliases: Object.assign({}, newAliases)
+ };
}
- const stripRecalled = (t6) => {
- const i11 = t6.indexOf("");
- if (i11 === -1)
- return t6;
- const j14 = t6.lastIndexOf("");
- if (j14 === -1 || j14 < i11)
- return t6;
- const head = t6.slice(0, i11);
- const tail = t6.slice(j14 + "".length);
- return (head + tail).replace(/^\s+/, "").replace(/\n{3,}/g, "\n\n");
- };
- let out = null;
- if (obj.type === "user_message") {
- out = `[user] ${stripRecalled(String(obj.content ?? ""))}`;
- } else if (obj.type === "assistant_message") {
- const agent = obj.agent_type ? ` (agent=${obj.agent_type})` : "";
- out = `[assistant${agent}] ${stripRecalled(String(obj.content ?? ""))}`;
- } else if (obj.type === "tool_call") {
- out = formatToolCall(obj);
+};
+function combineAliases(aliases) {
+ const aliasArrays = [];
+ const combined = /* @__PURE__ */ Object.create(null);
+ let change = true;
+ Object.keys(aliases).forEach(function(key) {
+ aliasArrays.push([].concat(aliases[key], key));
+ });
+ while (change) {
+ change = false;
+ for (let i11 = 0; i11 < aliasArrays.length; i11++) {
+ for (let ii2 = i11 + 1; ii2 < aliasArrays.length; ii2++) {
+ const intersect = aliasArrays[i11].filter(function(v27) {
+ return aliasArrays[ii2].indexOf(v27) !== -1;
+ });
+ if (intersect.length) {
+ aliasArrays[i11] = aliasArrays[i11].concat(aliasArrays[ii2]);
+ aliasArrays.splice(ii2, 1);
+ change = true;
+ break;
+ }
+ }
+ }
}
- if (out === null)
- return raw;
- const trimmed = out.trim();
- if (!trimmed || trimmed === "[user]" || trimmed === "[assistant]" || /^\[tool:[^\]]*\]\s+input:\s+\{\}\s+response:\s+\{\}$/.test(trimmed))
- return raw;
- return out;
+ aliasArrays.forEach(function(aliasArray) {
+ aliasArray = aliasArray.filter(function(v27, i11, self2) {
+ return self2.indexOf(v27) === i11;
+ });
+ const lastAlias = aliasArray.pop();
+ if (lastAlias !== void 0 && typeof lastAlias === "string") {
+ combined[lastAlias] = aliasArray;
+ }
+ });
+ return combined;
}
-async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
- const { pathFilter, contentScanOnly, likeOp, escapedPattern } = opts;
- const limit = opts.limit ?? 100;
- const memFilter = contentScanOnly ? "" : ` AND summary::text ${likeOp} '%${escapedPattern}%'`;
- const sessFilter = contentScanOnly ? "" : ` AND message::text ${likeOp} '%${escapedPattern}%'`;
- const memQuery = `SELECT path, summary::text AS content FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
- const sessQuery = `SELECT path, message::text AS content FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
- const [memRows, sessRows] = await Promise.all([
- api.query(memQuery).catch(() => []),
- api.query(sessQuery).catch(() => [])
- ]);
- const rows = [];
- for (const r10 of memRows)
- rows.push({ path: String(r10.path), content: String(r10.content ?? "") });
- for (const r10 of sessRows)
- rows.push({ path: String(r10.path), content: String(r10.content ?? "") });
- return rows;
+function increment(orig) {
+ return orig !== void 0 ? orig + 1 : 1;
}
-function buildPathFilter(targetPath) {
- if (!targetPath || targetPath === "/")
- return "";
- const clean = targetPath.replace(/\/+$/, "");
- return ` AND (path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+function sanitizeKey(key) {
+ if (key === "__proto__")
+ return "___proto___";
+ return key;
}
-function compileGrepRegex(params) {
- let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
- if (params.wordMatch)
- reStr = `\\b${reStr}\\b`;
- try {
- return new RegExp(reStr, params.ignoreCase ? "i" : "");
- } catch {
- return new RegExp(params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), params.ignoreCase ? "i" : "");
+function stripQuotes(val) {
+ return typeof val === "string" && (val[0] === "'" || val[0] === '"') && val[val.length - 1] === val[0] ? val.substring(1, val.length - 1) : val;
+}
+
+// node_modules/yargs-parser/build/lib/index.js
+import { readFileSync as readFileSync3 } from "fs";
+import { createRequire } from "node:module";
+var _a3;
+var _b;
+var _c;
+var minNodeVersion = process && process.env && process.env.YARGS_MIN_NODE_VERSION ? Number(process.env.YARGS_MIN_NODE_VERSION) : 20;
+var nodeVersion = (_b = (_a3 = process === null || process === void 0 ? void 0 : process.versions) === null || _a3 === void 0 ? void 0 : _a3.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1);
+if (nodeVersion) {
+ const major = Number(nodeVersion.match(/^([^.]+)/)[1]);
+ if (major < minNodeVersion) {
+ throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`);
}
}
-function refineGrepMatches(rows, params, forceMultiFilePrefix) {
- const re9 = compileGrepRegex(params);
- const multi = forceMultiFilePrefix ?? rows.length > 1;
- const output = [];
- for (const row of rows) {
- if (!row.content)
- continue;
- const lines = row.content.split("\n");
- const matched = [];
- for (let i11 = 0; i11 < lines.length; i11++) {
- const hit = re9.test(lines[i11]);
- if (hit !== !!params.invertMatch) {
- if (params.filesOnly) {
- output.push(row.path);
- break;
- }
- const prefix = multi ? `${row.path}:` : "";
- const ln3 = params.lineNumber ? `${i11 + 1}:` : "";
- matched.push(`${prefix}${ln3}${lines[i11]}`);
- }
- }
- if (!params.filesOnly) {
- if (params.countOnly) {
- output.push(`${multi ? `${row.path}:` : ""}${matched.length}`);
- } else {
- output.push(...matched);
- }
+var env = process ? process.env : {};
+var require2 = createRequire ? createRequire(import.meta.url) : void 0;
+var parser = new YargsParser({
+ cwd: process.cwd,
+ env: () => {
+ return env;
+ },
+ format,
+ normalize,
+ resolve: resolve4,
+ require: (path2) => {
+ if (typeof require2 !== "undefined") {
+ return require2(path2);
+ } else if (path2.match(/\.json$/)) {
+ return JSON.parse(readFileSync3(path2, "utf8"));
+ } else {
+ throw Error("only .json config files are supported in ESM");
}
}
- return output;
-}
+});
+var yargsParser = function Parser(args, opts) {
+ const result = parser.parse(args.slice(), opts);
+ return result.argv;
+};
+yargsParser.detailed = function(args, opts) {
+ return parser.parse(args.slice(), opts);
+};
+yargsParser.camelCase = camelCase2;
+yargsParser.decamelize = decamelize;
+yargsParser.looksLikeNumber = looksLikeNumber;
+var lib_default = yargsParser;
// dist/src/shell/grep-interceptor.js
var MAX_FALLBACK_CANDIDATES = 500;
@@ -68876,23 +69108,18 @@ function createGrepCommand(client, fs3, table, sessionsTable) {
filesOnly: Boolean(parsed.l || parsed["files-with-matches"]),
countOnly: Boolean(parsed.c || parsed["count"])
};
- const likeOp = matchParams.ignoreCase ? "ILIKE" : "LIKE";
- const hasRegexMeta = !matchParams.fixedString && /[.*+?^${}()|[\]\\]/.test(pattern);
- const escapedPattern = sqlLike(pattern);
let rows = [];
try {
- const perTarget = await Promise.race([
- Promise.all(targets.map((t6) => searchDeeplakeTables(client, table, sessionsTable ?? "sessions", {
- pathFilter: buildPathFilter(t6),
- contentScanOnly: hasRegexMeta,
- likeOp,
- escapedPattern,
- limit: 100
- }))),
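+      // Single batched query: one path filter covering every target
+      // replaces the per-target fan-out above, still raced against
+      // the same 3s timeout below.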
+ const searchOptions = {
+ ...buildGrepSearchOptions(matchParams, targets[0] ?? ctx.cwd),
+ pathFilter: buildPathFilterForTargets(targets),
+ limit: 100
+ };
+ const queryRows = await Promise.race([
+ searchDeeplakeTables(client, table, sessionsTable ?? "sessions", searchOptions),
new Promise((_16, reject) => setTimeout(() => reject(new Error("timeout")), 3e3))
]);
- for (const batch of perTarget)
- rows.push(...batch);
+ rows.push(...queryRows);
} catch {
rows = [];
}
diff --git a/claude-code/bundle/wiki-worker.js b/claude-code/bundle/wiki-worker.js
index 02468a3..cd53b4e 100755
--- a/claude-code/bundle/wiki-worker.js
+++ b/claude-code/bundle/wiki-worker.js
@@ -14,18 +14,11 @@ var LOG = join(homedir(), ".deeplake", "hook-debug.log");
function utcTimestamp(d = /* @__PURE__ */ new Date()) {
return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
}
-function log(tag, msg) {
- if (!DEBUG)
- return;
- appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg}
-`);
-}
// dist/src/hooks/summary-state.js
import { readFileSync, writeFileSync, writeSync, mkdirSync, renameSync, existsSync, unlinkSync, openSync, closeSync } from "node:fs";
import { homedir as homedir2 } from "node:os";
import { join as join2 } from "node:path";
-var dlog = (msg) => log("summary-state", msg);
var STATE_DIR = join2(homedir2(), ".claude", "hooks", "summary-state");
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
function statePath(sessionId) {
@@ -63,11 +56,9 @@ function withRmwLock(sessionId, fn) {
if (e.code !== "EEXIST")
throw e;
if (Date.now() > deadline) {
- dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
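+          // best-effort: another process may have already reclaimed
+          // the stale lock (debug logging removed along with dlog)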
}
continue;
}
@@ -80,8 +71,7 @@ function withRmwLock(sessionId, fn) {
closeSync(fd);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
}
}
@@ -98,10 +88,7 @@ function finalizeSummary(sessionId, jsonlLines) {
function releaseLock(sessionId) {
try {
unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
- }
+ } catch {
}
}
@@ -131,7 +118,6 @@ async function uploadSummary(query2, params) {
}
// dist/src/hooks/wiki-worker.js
-var dlog2 = (msg) => log("wiki-worker", msg);
var cfg = JSON.parse(readFileSync2(process.argv[2], "utf-8"));
var tmpDir = cfg.tmpDir;
var tmpJsonl = join3(tmpDir, "session.jsonl");
@@ -179,8 +165,7 @@ async function query(sql, retries = 4) {
function cleanup() {
try {
rmSync(tmpDir, { recursive: true, force: true });
- } catch (cleanupErr) {
- dlog2(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`);
+ } catch {
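+    // best-effort: a failed tmpDir removal is non-fatal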
}
}
async function main() {
@@ -263,8 +248,7 @@ async function main() {
cleanup();
try {
releaseLock(cfg.sessionId);
- } catch (releaseErr) {
- dlog2(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`);
+ } catch {
}
}
}
diff --git a/claude-code/tests/bash-command-compiler.test.ts b/claude-code/tests/bash-command-compiler.test.ts
new file mode 100644
index 0000000..3bb90a7
--- /dev/null
+++ b/claude-code/tests/bash-command-compiler.test.ts
@@ -0,0 +1,457 @@
+import { describe, expect, it, vi } from "vitest";
+import {
+ executeCompiledBashCommand,
+ expandBraceToken,
+ hasUnsupportedRedirection,
+ parseCompiledBashCommand,
+ parseCompiledSegment,
+ splitTopLevel,
+ stripAllowedModifiers,
+ tokenizeShellWords,
+} from "../../src/hooks/bash-command-compiler.js";
+
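+/**
+ * Source-level tests for src/hooks/bash-command-compiler.ts. Covers
+ * the parsing helpers (top-level splitting, shell-word tokenizing,
+ * brace expansion, redirection stripping/detection) and
+ * executeCompiledBashCommand, which compiles read-only cat / ls /
+ * find / grep pipelines into batched virtual-path lookups. Every I/O
+ * seam is injected as a vi.fn() fake, so no Deeplake API is touched.
+ */
+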
+describe("bash-command-compiler parsing", () => {
+ it("splits top-level sequences while respecting quotes", () => {
+ expect(splitTopLevel("cat /a && echo 'x && y' ; ls /b", ["&&", ";"])).toEqual([
+ "cat /a",
+ "echo 'x && y'",
+ "ls /b",
+ ]);
+ expect(splitTopLevel(" && echo hi ; ", ["&&", ";"])).toEqual(["echo hi"]);
+ });
+
+ it("returns null on unterminated quotes", () => {
+ expect(splitTopLevel("echo 'oops", ["&&"])).toBeNull();
+ expect(tokenizeShellWords("echo \"oops")).toBeNull();
+ });
+
+ it("tokenizes shell words with quotes and escapes", () => {
+ expect(tokenizeShellWords("echo \"hello world\" 'again' plain")).toEqual([
+ "echo",
+ "hello world",
+ "again",
+ "plain",
+ ]);
+ expect(tokenizeShellWords("echo \"hello \\\"world\\\"\"")).toEqual([
+ "echo",
+ "hello \"world\"",
+ ]);
+ });
+
+ it("expands numeric and comma brace expressions", () => {
+ expect(expandBraceToken("/part_{1..3}.md")).toEqual([
+ "/part_1.md",
+ "/part_2.md",
+ "/part_3.md",
+ ]);
+ expect(expandBraceToken("/file_{a,b}.md")).toEqual([
+ "/file_a.md",
+ "/file_b.md",
+ ]);
+ expect(expandBraceToken("/plain.md")).toEqual(["/plain.md"]);
+ expect(expandBraceToken("/part_{3..1}.md")).toEqual([
+ "/part_3.md",
+ "/part_2.md",
+ "/part_1.md",
+ ]);
+ });
+
+ it("strips allowed stderr modifiers and detects unsupported redirection", () => {
+ expect(stripAllowedModifiers("cat /a 2>/dev/null")).toEqual({
+ clean: "cat /a",
+ ignoreMissing: true,
+ });
+ expect(stripAllowedModifiers("cat /a 2>&1 | head -2")).toEqual({
+ clean: "cat /a | head -2",
+ ignoreMissing: false,
+ });
+ expect(hasUnsupportedRedirection("echo ok > /x")).toBe(true);
+ expect(hasUnsupportedRedirection("echo '>'")).toBe(false);
+ });
+
+ it("parses supported read-only segments", () => {
+ expect(parseCompiledSegment("echo ---")).toEqual({ kind: "echo", text: "---" });
+ expect(parseCompiledSegment("cat /a /b | head -2")).toEqual({
+ kind: "cat",
+ paths: ["/a", "/b"],
+ lineLimit: 2,
+ fromEnd: false,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("head /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 10,
+ fromEnd: false,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("head -2 /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 2,
+ fromEnd: false,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("tail -n 3 /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 3,
+ fromEnd: true,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("tail -2 /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 2,
+ fromEnd: true,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("head -n 2 /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 2,
+ fromEnd: false,
+ countLines: false,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("wc -l /a")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 0,
+ fromEnd: false,
+ countLines: true,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("cat /a | wc -l")).toEqual({
+ kind: "cat",
+ paths: ["/a"],
+ lineLimit: 0,
+ fromEnd: false,
+ countLines: true,
+ ignoreMissing: false,
+ });
+ expect(parseCompiledSegment("ls -la /summaries/{a,b}")).toEqual({
+ kind: "ls",
+ dirs: ["/summaries/a", "/summaries/b"],
+ longFormat: true,
+ });
+ expect(parseCompiledSegment("ls -l")).toEqual({
+ kind: "ls",
+ dirs: ["/"],
+ longFormat: true,
+ });
+ expect(parseCompiledSegment("ls -a")).toEqual({
+ kind: "ls",
+ dirs: ["/"],
+ longFormat: false,
+ });
+ expect(parseCompiledSegment("find /summaries -name '*.md' | wc -l")).toEqual({
+ kind: "find",
+ dir: "/summaries",
+ pattern: "*.md",
+ countOnly: true,
+ });
+ expect(parseCompiledSegment("grep foo /summaries | head -5")).toEqual({
+ kind: "grep",
+ params: {
+ pattern: "foo",
+ targetPath: "/summaries",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ },
+ lineLimit: 5,
+ });
+ expect(parseCompiledSegment("grep foo /summaries | head")).toEqual({
+ kind: "grep",
+ params: {
+ pattern: "foo",
+ targetPath: "/summaries",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ },
+ lineLimit: 10,
+ });
+ expect(parseCompiledSegment("grep foo /summaries")).toEqual({
+ kind: "grep",
+ params: {
+ pattern: "foo",
+ targetPath: "/summaries",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ },
+ lineLimit: 0,
+ });
+ expect(parseCompiledSegment("find /summaries -type f -name '*.md' -o -name '*.json' | xargs grep -l 'launch' | head -5")).toEqual({
+ kind: "find_grep",
+ dir: "/summaries",
+ patterns: ["*.md", "*.json"],
+ params: {
+ pattern: "launch",
+ targetPath: "/",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: true,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ },
+ lineLimit: 5,
+ });
+ expect(parseCompiledSegment("find /summaries -type f -name '*.md' | xargs -r grep -l launch | head -1")).toEqual({
+ kind: "find_grep",
+ dir: "/summaries",
+ patterns: ["*.md"],
+ params: {
+ pattern: "launch",
+ targetPath: "/",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: true,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ },
+ lineLimit: 1,
+ });
+ });
+
+ it("rejects unsupported segments and command shapes", () => {
+ expect(parseCompiledSegment("cat")).toBeNull();
+ expect(parseCompiledSegment("echo ok > /x")).toBeNull();
+ expect(parseCompiledSegment("cat /a | jq '.x'")).toBeNull();
+ expect(parseCompiledSegment("cat /a /b | wc -l")).toBeNull();
+ expect(parseCompiledSegment("cat /a | head -n nope")).toBeNull();
+ expect(parseCompiledSegment("head -n nope /a")).toBeNull();
+ expect(parseCompiledSegment("head -n 2")).toBeNull();
+ expect(parseCompiledSegment("wc -l")).toBeNull();
+ expect(parseCompiledSegment("find")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' | sort")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' -o -name '*.json'")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' -o -name '*.json' | wc -l")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' | xargs")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' | xargs grep -l foo | head nope")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' | xargs -z grep -l foo")).toBeNull();
+ expect(parseCompiledSegment("find /summaries -name '*.md' | xargs grep -l foo | tail -2")).toBeNull();
+ expect(parseCompiledSegment("grep foo /a | tail -2")).toBeNull();
+ expect(parseCompiledSegment("grep foo /a | head nope")).toBeNull();
+ expect(parseCompiledBashCommand("cat /a || cat /b")).toBeNull();
+ expect(parseCompiledBashCommand("cat /a && echo ok > /x")).toBeNull();
+ });
+});
+
+describe("bash-command-compiler execution", () => {
+ it("batches exact reads and directory listings across compound commands", async () => {
+ const readVirtualPathContentsFn = vi.fn(async () => new Map([
+ ["/a.md", "line1\nline2\nline3\n"],
+ ["/b.md", "tail1\ntail2\n"],
+ ]));
+ const listVirtualPathRowsForDirsFn = vi.fn(async () => new Map([
+ ["/summaries/a", [{ path: "/summaries/a/group/file1.md", size_bytes: 10 }]],
+ ["/summaries/b", [{ path: "/summaries/b/file2.md", size_bytes: 20 }]],
+ ]));
+ const findVirtualPathsFn = vi.fn(async () => ["/summaries/a/file1.md", "/summaries/a/file2.md"]);
+ const handleGrepDirectFn = vi.fn(async () => "/summaries/a/file1.md:needle\n/summaries/a/file2.md:needle");
+
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "cat /{a,b}.md | head -3 && echo --- && ls -la /summaries/{a,b} && find /summaries/a -name '*.md' | wc -l && grep needle /summaries/a | head -1",
+ {
+ readVirtualPathContentsFn: readVirtualPathContentsFn as any,
+ listVirtualPathRowsForDirsFn: listVirtualPathRowsForDirsFn as any,
+ findVirtualPathsFn: findVirtualPathsFn as any,
+ handleGrepDirectFn: handleGrepDirectFn as any,
+ },
+ );
+
+ expect(readVirtualPathContentsFn).toHaveBeenCalledWith(expect.anything(), "memory", "sessions", ["/a.md", "/b.md"]);
+ expect(listVirtualPathRowsForDirsFn).toHaveBeenCalledWith(expect.anything(), "memory", "sessions", ["/summaries/a", "/summaries/b"]);
+ expect(handleGrepDirectFn).toHaveBeenCalledTimes(1);
+ expect(output).toContain("line1\nline2\nline3");
+ expect(output).toContain("---");
+ expect(output).toContain("drwxr-xr-x");
+ expect(output).toContain("group/");
+ expect(output).toContain("2");
+ expect(output).toContain("/summaries/a/file1.md:needle");
+ });
+
+ it("returns null when a required path is missing", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "cat /missing.md",
+ {
+ readVirtualPathContentsFn: vi.fn(async () => new Map([["/missing.md", null]])) as any,
+ },
+ );
+ expect(output).toBeNull();
+ });
+
+ it("ignores missing files when stderr is redirected to /dev/null", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "cat /missing.md 2>/dev/null",
+ {
+ readVirtualPathContentsFn: vi.fn(async () => new Map([["/missing.md", null]])) as any,
+ },
+ );
+ expect(output).toBe("");
+ });
+
+ it("ignores only the missing cat inputs and keeps present content", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "cat /missing.md /present.md 2>/dev/null",
+ {
+ readVirtualPathContentsFn: vi.fn(async () => new Map([
+ ["/missing.md", null],
+ ["/present.md", "ok"],
+ ])) as any,
+ },
+ );
+ expect(output).toBe("ok");
+ });
+
+ it("renders missing directories and supports line-counting", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "wc -l /a.md && ls /missing",
+ {
+ readVirtualPathContentsFn: vi.fn(async () => new Map([["/a.md", "x\ny\nz"]])) as any,
+ listVirtualPathRowsForDirsFn: vi.fn(async () => new Map([["/missing", []]])) as any,
+ },
+ );
+ expect(output).toContain("3 /a.md");
+ expect(output).toContain("No such file or directory");
+ });
+
+ it("renders short ls output, no-match find output, and raw grep output", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "ls /summaries/a && find /summaries/a -name '*.txt' && grep needle /summaries/a",
+ {
+ listVirtualPathRowsForDirsFn: vi.fn(async () => new Map([
+ ["/summaries/a", [{ path: "/summaries/a/file1.md", size_bytes: 10 }]],
+ ])) as any,
+ findVirtualPathsFn: vi.fn(async () => []) as any,
+ handleGrepDirectFn: vi.fn(async () => "/summaries/a/file1.md:needle") as any,
+ },
+ );
+
+ expect(output).toContain("file1.md");
+ expect(output).toContain("(no matches)");
+ expect(output).toContain("/summaries/a/file1.md:needle");
+ });
+
+ it("returns joined find results, line-limited grep, and no-match compiled find+grep output", async () => {
+ const joinedFind = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "find /summaries/a -name '*.md'",
+ {
+ findVirtualPathsFn: vi.fn(async () => ["/summaries/a/file1.md", "/summaries/a/file2.md"]) as any,
+ },
+ );
+ expect(joinedFind).toBe("/summaries/a/file1.md\n/summaries/a/file2.md");
+
+ const grepLimited = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "grep needle /summaries/a | head -1",
+ {
+ handleGrepDirectFn: vi.fn(async () => "/summaries/a/file1.md:needle\n/summaries/a/file2.md:needle") as any,
+ },
+ );
+ expect(grepLimited).toBe("/summaries/a/file1.md:needle");
+
+ const noMatchFindGrep = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "find /summaries -name '*.md' | xargs grep -l launch",
+ {
+ findVirtualPathsFn: vi.fn(async () => []) as any,
+ },
+ );
+ expect(noMatchFindGrep).toBe("(no matches)");
+ });
+
+ it("returns null when a compiled grep returns null", async () => {
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "grep needle /summaries/a",
+ {
+ handleGrepDirectFn: vi.fn(async () => null) as any,
+ },
+ );
+ expect(output).toBeNull();
+ });
+
+ it("compiles find | xargs grep -l | head into batched path reads", async () => {
+ const findVirtualPathsFn = vi.fn()
+ .mockResolvedValueOnce(["/summaries/a.md", "/summaries/shared.json"])
+ .mockResolvedValueOnce(["/summaries/b.json", "/summaries/shared.json"]);
+ const readVirtualPathContentsFn = vi.fn(async () => new Map([
+ ["/summaries/a.md", "launch timeline and notes"],
+ ["/summaries/shared.json", "{\"turns\":[{\"speaker\":\"Alice\",\"text\":\"launch update\"}]}"],
+ ["/summaries/b.json", "No match here"],
+ ]));
+
+ const output = await executeCompiledBashCommand(
+ { query: vi.fn() } as any,
+ "memory",
+ "sessions",
+ "find /summaries -type f -name '*.md' -o -name '*.json' | xargs grep -l 'launch' | head -1",
+ {
+ findVirtualPathsFn: findVirtualPathsFn as any,
+ readVirtualPathContentsFn: readVirtualPathContentsFn as any,
+ },
+ );
+
+ expect(findVirtualPathsFn).toHaveBeenCalledTimes(2);
+ expect(readVirtualPathContentsFn).toHaveBeenCalledWith(
+ expect.anything(),
+ "memory",
+ "sessions",
+ ["/summaries/a.md", "/summaries/shared.json", "/summaries/b.json"],
+ );
+ expect(output).toBe("/summaries/a.md");
+ });
+});
diff --git a/claude-code/tests/capture-hook.test.ts b/claude-code/tests/capture-hook.test.ts
deleted file mode 100644
index c40e8e6..0000000
--- a/claude-code/tests/capture-hook.test.ts
+++ /dev/null
@@ -1,313 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-
-/**
- * Direct source-level tests for src/hooks/capture.ts. The module runs
- * main() at import time; each scenario resets the registry and imports
- * fresh. Mocks: readStdin, loadConfig, DeeplakeApi, spawn-wiki-worker,
- * summary-state. Everything else (SQL assembly, entry shape, meta
- * merging, JSON escaping) runs for real.
- *
- * Coverage target: each event-type branch (prompt / tool / assistant /
- * unknown), the CAPTURE guard, the table-missing retry, the unrelated
- * error re-throw, and every leg of the periodic-trigger helper
- * (threshold not met / met + lock free / met + lock held / spawn
- * throws / outer catch).
- */
-
-const stdinMock = vi.fn();
-const loadConfigMock = vi.fn();
-const spawnMock = vi.fn();
-const wikiLogMock = vi.fn();
-const tryAcquireLockMock = vi.fn();
-const releaseLockMock = vi.fn();
-const bumpTotalCountMock = vi.fn();
-const loadTriggerConfigMock = vi.fn();
-const shouldTriggerMock = vi.fn();
-const debugLogMock = vi.fn();
-const queryMock = vi.fn();
-const ensureSessionsTableMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/hooks/spawn-wiki-worker.js", () => ({
- spawnWikiWorker: (...a: any[]) => spawnMock(...a),
- wikiLog: (...a: any[]) => wikiLogMock(...a),
- bundleDirFromImportMeta: () => "/fake/bundle",
-}));
-vi.mock("../../src/hooks/summary-state.js", () => ({
- tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a),
- releaseLock: (...a: any[]) => releaseLockMock(...a),
- bumpTotalCount: (...a: any[]) => bumpTotalCountMock(...a),
- loadTriggerConfig: (...a: any[]) => loadTriggerConfigMock(...a),
- shouldTrigger: (...a: any[]) => shouldTriggerMock(...a),
-}));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_tag: string, msg: string) => debugLogMock(msg),
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class {
- query(sql: string) { return queryMock(sql); }
- ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); }
- },
-}));
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<void> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- await import("../../src/hooks/capture.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- userName: "alice", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-beforeEach(() => {
- stdinMock.mockReset().mockResolvedValue({
- session_id: "sid-1",
- cwd: "/workspaces/proj",
- hook_event_name: "UserPromptSubmit",
- prompt: "hello",
- });
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- spawnMock.mockReset();
- wikiLogMock.mockReset();
- tryAcquireLockMock.mockReset().mockReturnValue(true);
- releaseLockMock.mockReset();
- bumpTotalCountMock.mockReset().mockReturnValue({
- lastSummaryAt: Date.now(), lastSummaryCount: 0, totalCount: 1,
- });
- loadTriggerConfigMock.mockReset().mockReturnValue({ everyNMessages: 50, everyHours: 2 });
- shouldTriggerMock.mockReset().mockReturnValue(false);
- debugLogMock.mockReset();
- queryMock.mockReset().mockResolvedValue([]);
- ensureSessionsTableMock.mockReset().mockResolvedValue(undefined);
-});
-
-afterEach(() => { vi.restoreAllMocks(); });
-
-describe("capture hook — guard", () => {
- it("returns without touching stdin when HIVEMIND_CAPTURE=false", async () => {
- await runHook({ HIVEMIND_CAPTURE: "false" });
- expect(stdinMock).not.toHaveBeenCalled();
- expect(queryMock).not.toHaveBeenCalled();
- });
-
- it("returns when loadConfig returns null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("no config");
- expect(queryMock).not.toHaveBeenCalled();
- });
-});
-
-describe("capture hook — event-type branches", () => {
- it("user_message: INSERT contains prompt content", async () => {
- await runHook();
- expect(queryMock).toHaveBeenCalledTimes(1);
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toMatch(/INSERT INTO "sessions"/);
- expect(sql).toContain('"type":"user_message"');
- expect(sql).toContain('"content":"hello"');
- expect(debugLogMock).toHaveBeenCalledWith(expect.stringMatching(/^user session=sid-1$/));
- });
-
- it("tool_call: INSERT contains tool_name + serialized input/response", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-2",
- cwd: "/p",
- hook_event_name: "PostToolUse",
- tool_name: "Bash",
- tool_use_id: "tu-1",
- tool_input: { command: "ls" },
- tool_response: { stdout: "file" },
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"tool_call"');
- expect(sql).toContain('"tool_name":"Bash"');
- expect(sql).toContain('tool_input');
- expect(sql).toContain('tool_response');
- expect(debugLogMock).toHaveBeenCalledWith(expect.stringMatching(/^tool=Bash session=sid-2$/));
- });
-
- it("assistant_message without agent_transcript_path", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-3",
- cwd: "/p",
- hook_event_name: "Stop",
- last_assistant_message: "reply text",
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"assistant_message"');
- expect(sql).toContain('"content":"reply text"');
- expect(sql).not.toContain("agent_transcript_path");
- });
-
- it("assistant_message WITH agent_transcript_path", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-4",
- cwd: "/p",
- hook_event_name: "SubagentStop",
- last_assistant_message: "sub reply",
- agent_transcript_path: "/tmp/agent.jsonl",
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"agent_transcript_path":"/tmp/agent.jsonl"');
- });
-
- it("unknown event: skipped, no INSERT", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-x", cwd: "/p", hook_event_name: "WeirdHook",
- // no prompt, no tool_name, no last_assistant_message
- });
- await runHook();
- expect(queryMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith("unknown event, skipping");
- });
-});
-
-describe("capture hook — INSERT fallback + error paths", () => {
- it("creates the sessions table and retries when table is missing", async () => {
- queryMock
- .mockRejectedValueOnce(new Error('relation "sessions" does not exist'))
- .mockResolvedValueOnce([]);
- await runHook();
- expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions");
- expect(queryMock).toHaveBeenCalledTimes(2);
- expect(debugLogMock).toHaveBeenCalledWith("table missing, creating and retrying");
- });
-
- it("creates the sessions table when the API returns 'permission denied'", async () => {
- queryMock
- .mockRejectedValueOnce(new Error("permission denied for relation sessions"))
- .mockResolvedValueOnce([]);
- await runHook();
- expect(ensureSessionsTableMock).toHaveBeenCalled();
- expect(queryMock).toHaveBeenCalledTimes(2);
- });
-
- it("re-throws unrelated errors (caught by main().catch)", async () => {
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- queryMock.mockRejectedValue(new Error("random SQL boom"));
- await runHook();
- // The outer catch wraps the throw into the fatal log and exits.
- expect(debugLogMock).toHaveBeenCalledWith("fatal: random SQL boom");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-describe("capture hook — periodic trigger helper", () => {
- it("does nothing when HIVEMIND_WIKI_WORKER=1 (nested worker)", async () => {
- await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- // The inner call is bypassed — but CAPTURE is also computed at load,
- // so with WIKI_WORKER=1 the capture itself still runs (CAPTURE default
- // is true). We just assert bumpTotalCount was NOT called.
- expect(bumpTotalCountMock).not.toHaveBeenCalled();
- });
-
- it("does not spawn when shouldTrigger returns false", async () => {
- shouldTriggerMock.mockReturnValue(false);
- await runHook();
- expect(bumpTotalCountMock).toHaveBeenCalledTimes(1);
- expect(tryAcquireLockMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("spawns the wiki worker when shouldTrigger=true and lock acquired", async () => {
- shouldTriggerMock.mockReturnValue(true);
- bumpTotalCountMock.mockReturnValue({
- lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 10,
- });
- await runHook();
- expect(tryAcquireLockMock).toHaveBeenCalledWith("sid-1");
- expect(wikiLogMock).toHaveBeenCalledWith(
- expect.stringMatching(/^Periodic: threshold hit \(total=10,/),
- );
- expect(spawnMock).toHaveBeenCalledTimes(1);
- expect(spawnMock.mock.calls[0][0]).toMatchObject({ sessionId: "sid-1", reason: "Periodic" });
- });
-
- it("logs 'periodic trigger suppressed' when the lock is already held", async () => {
- shouldTriggerMock.mockReturnValue(true);
- tryAcquireLockMock.mockReturnValue(false);
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger suppressed (lock held)"),
- );
- });
-
- it("releases the lock if spawnWikiWorker throws", async () => {
- shouldTriggerMock.mockReturnValue(true);
- spawnMock.mockImplementation(() => { throw new Error("spawn failed"); });
- await runHook();
- expect(releaseLockMock).toHaveBeenCalledWith("sid-1");
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: spawn failed"),
- );
- });
-
- it("still swallows the error when releaseLock ALSO throws", async () => {
- shouldTriggerMock.mockReturnValue(true);
- spawnMock.mockImplementation(() => { throw new Error("spawn failed"); });
- releaseLockMock.mockImplementation(() => { throw new Error("release failed"); });
- await runHook();
- // We should still see the outer periodic-trigger error log — the
- // release throw is deliberately swallowed.
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: spawn failed"),
- );
- });
-
- it("catches errors thrown by bumpTotalCount itself (outer try)", async () => {
- bumpTotalCountMock.mockImplementation(() => { throw new Error("bump boom"); });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: bump boom"),
- );
- });
-});
-
-describe("capture hook — defensive fallback branches", () => {
- it("falls back to 'default' workspace when config.workspaceId is undefined", async () => {
- loadConfigMock.mockReturnValue({ ...validConfig, workspaceId: undefined });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- // sessionPath uses workspace; with undefined it should land on 'default'
- expect(sql).toContain("alice_acme_default_sid-1.jsonl");
- });
-
- it("projectName falls back to 'unknown' when cwd is undefined", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-cwd", hook_event_name: "UserPromptSubmit", prompt: "x",
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("'unknown'");
- });
-
- it("hook_event_name defaults to empty string when missing", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-no-evt", cwd: "/p", prompt: "hi",
- // no hook_event_name
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- // description column (hook_event_name ?? '') should land as ''
- // It appears between the projectName and the author — we just
- // assert the INSERT still went through.
- expect(queryMock).toHaveBeenCalledTimes(1);
- expect(sql).toMatch(/'[^']*', 'claude_code'/);
- });
-});
diff --git a/claude-code/tests/codex-capture-hook.test.ts b/claude-code/tests/codex-capture-hook.test.ts
deleted file mode 100644
index 9992182..0000000
--- a/claude-code/tests/codex-capture-hook.test.ts
+++ /dev/null
@@ -1,284 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-
-/**
- * Direct source-level tests for src/hooks/codex/capture.ts. Mirrors the
- * claude-code capture-hook test: mocks the stdin / config / API /
- * summary-state seams and asserts SQL shape, branch coverage for
- * UserPromptSubmit / PostToolUse / unknown, and the periodic trigger
- * helper.
- */
-
-const stdinMock = vi.fn();
-const loadConfigMock = vi.fn();
-const spawnMock = vi.fn();
-const wikiLogMock = vi.fn();
-const tryAcquireLockMock = vi.fn();
-const releaseLockMock = vi.fn();
-const bumpTotalCountMock = vi.fn();
-const loadTriggerConfigMock = vi.fn();
-const shouldTriggerMock = vi.fn();
-const debugLogMock = vi.fn();
-const queryMock = vi.fn();
-const ensureSessionsTableMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/hooks/codex/spawn-wiki-worker.js", () => ({
- spawnCodexWikiWorker: (...a: any[]) => spawnMock(...a),
- wikiLog: (...a: any[]) => wikiLogMock(...a),
- bundleDirFromImportMeta: () => "/fake/codex/bundle",
-}));
-vi.mock("../../src/hooks/summary-state.js", () => ({
- tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a),
- releaseLock: (...a: any[]) => releaseLockMock(...a),
- bumpTotalCount: (...a: any[]) => bumpTotalCountMock(...a),
- loadTriggerConfig: (...a: any[]) => loadTriggerConfigMock(...a),
- shouldTrigger: (...a: any[]) => shouldTriggerMock(...a),
-}));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_tag: string, msg: string) => debugLogMock(msg),
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class {
- query(sql: string) { return queryMock(sql); }
- ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); }
- },
-}));
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<void> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- await import("../../src/hooks/codex/capture.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- userName: "alice", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-beforeEach(() => {
- stdinMock.mockReset().mockResolvedValue({
- session_id: "sid-1",
- cwd: "/workspaces/proj",
- hook_event_name: "UserPromptSubmit",
- model: "gpt-5",
- prompt: "hello",
- });
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- spawnMock.mockReset();
- wikiLogMock.mockReset();
- tryAcquireLockMock.mockReset().mockReturnValue(true);
- releaseLockMock.mockReset();
- bumpTotalCountMock.mockReset().mockReturnValue({
- lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 1,
- });
- loadTriggerConfigMock.mockReset().mockReturnValue({ everyNMessages: 50, everyHours: 2 });
- shouldTriggerMock.mockReset().mockReturnValue(false);
- debugLogMock.mockReset();
- queryMock.mockReset().mockResolvedValue([]);
- ensureSessionsTableMock.mockReset().mockResolvedValue(undefined);
-});
-
-afterEach(() => { vi.restoreAllMocks(); });
-
-describe("codex capture hook — guards", () => {
- it("returns when HIVEMIND_CAPTURE=false", async () => {
- await runHook({ HIVEMIND_CAPTURE: "false" });
- expect(stdinMock).not.toHaveBeenCalled();
- });
-
- it("returns when loadConfig is null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("no config");
- expect(queryMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex capture hook — event-type branches", () => {
- it("user_message: INSERT contains prompt", async () => {
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toMatch(/INSERT INTO "sessions"/);
- expect(sql).toContain('"type":"user_message"');
- expect(sql).toContain('"content":"hello"');
- expect(sql).toContain("'codex'");
- });
-
- it("tool_call: INSERT contains tool_name and model metadata", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-2", cwd: "/p",
- hook_event_name: "PostToolUse",
- model: "gpt-5",
- tool_name: "Bash",
- tool_use_id: "tu-1",
- tool_input: { command: "ls" },
- tool_response: { stdout: "x" },
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"tool_call"');
- expect(sql).toContain('"tool_name":"Bash"');
- expect(sql).toContain('"model":"gpt-5"');
- });
-
- it("unknown hook_event_name → log and skip", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-x", cwd: "/p", hook_event_name: "SomethingElse", model: "m",
- });
- await runHook();
- expect(queryMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith("unknown event: SomethingElse, skipping");
- });
-
- it("UserPromptSubmit without prompt → skipped (defensive)", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-y", cwd: "/p", hook_event_name: "UserPromptSubmit", model: "m",
- });
- await runHook();
- expect(queryMock).not.toHaveBeenCalled();
- });
-
- it("PostToolUse without tool_name → skipped (defensive)", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-z", cwd: "/p", hook_event_name: "PostToolUse", model: "m",
- });
- await runHook();
- expect(queryMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex capture hook — INSERT fallbacks", () => {
- it("retries after creating the sessions table on 'does not exist'", async () => {
- queryMock
- .mockRejectedValueOnce(new Error('relation "sessions" does not exist'))
- .mockResolvedValueOnce([]);
- await runHook();
- expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions");
- expect(queryMock).toHaveBeenCalledTimes(2);
- });
-
- it("retries on 'permission denied' too", async () => {
- queryMock
- .mockRejectedValueOnce(new Error("permission denied"))
- .mockResolvedValueOnce([]);
- await runHook();
- expect(ensureSessionsTableMock).toHaveBeenCalled();
- });
-
- it("re-throws an unrelated SQL error", async () => {
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- queryMock.mockRejectedValue(new Error("syntax error"));
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("fatal: syntax error");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-describe("codex capture hook — periodic trigger", () => {
- it("bypasses the trigger when HIVEMIND_WIKI_WORKER=1", async () => {
- await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(bumpTotalCountMock).not.toHaveBeenCalled();
- });
-
- it("no spawn when shouldTrigger=false", async () => {
- shouldTriggerMock.mockReturnValue(false);
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("spawns when shouldTrigger=true + lock free", async () => {
- shouldTriggerMock.mockReturnValue(true);
- bumpTotalCountMock.mockReturnValue({
- lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 10,
- });
- await runHook();
- expect(spawnMock).toHaveBeenCalledTimes(1);
- expect(spawnMock.mock.calls[0][0]).toMatchObject({ sessionId: "sid-1", reason: "Periodic" });
- });
-
- it("suppresses when lock held", async () => {
- shouldTriggerMock.mockReturnValue(true);
- tryAcquireLockMock.mockReturnValue(false);
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger suppressed (lock held)"),
- );
- });
-
- it("releases the lock when spawn throws", async () => {
- shouldTriggerMock.mockReturnValue(true);
- spawnMock.mockImplementation(() => { throw new Error("spawn boom"); });
- await runHook();
- expect(releaseLockMock).toHaveBeenCalledWith("sid-1");
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: spawn boom"),
- );
- });
-
- it("swallows release failure on top of spawn failure", async () => {
- shouldTriggerMock.mockReturnValue(true);
- spawnMock.mockImplementation(() => { throw new Error("spawn boom"); });
- releaseLockMock.mockImplementation(() => { throw new Error("release boom"); });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: spawn boom"),
- );
- });
-
- it("outer try catches bumpTotalCount throw", async () => {
- bumpTotalCountMock.mockImplementation(() => { throw new Error("bump boom"); });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic trigger error: bump boom"),
- );
- });
-});
-
-describe("codex capture hook — defensive fallbacks", () => {
- it("falls back projectName='unknown' when cwd is '' ", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-c", cwd: "", hook_event_name: "UserPromptSubmit", model: "m", prompt: "x",
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("'unknown'");
- });
-
- it("falls back projectName='unknown' when cwd is undefined at runtime", async () => {
- // The interface types cwd as string, but runtime values can arrive
- // undefined from untyped hook inputs. The ?? fallbacks exist for this.
- stdinMock.mockResolvedValue({
- session_id: "sid-d", hook_event_name: "UserPromptSubmit", model: "m", prompt: "x",
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("'unknown'");
- });
-
- it("passes empty hook_event_name through the description column fallback", async () => {
- // `input.hook_event_name ?? ''` — construct an input where the field
- // is legitimately missing to exercise the nullish coalesce.
- stdinMock.mockResolvedValue({
- session_id: "sid-e", cwd: "/p", model: "m",
- });
- await runHook();
-    // UserPromptSubmit / PostToolUse are the only event types the
-    // codex capture handles. With hook_event_name left undefined, the
-    // gate on hook_event_name === 'UserPromptSubmit' never matches,
-    // so the input falls into "unknown event, skipping" and the
-    // INSERT (with its `?? ''` description fallback) is never built.
-    // Asserting that skip is itself a useful branch.
- expect(queryMock).not.toHaveBeenCalled();
- });
-});
diff --git a/claude-code/tests/codex-session-start-hook.test.ts b/claude-code/tests/codex-session-start-hook.test.ts
deleted file mode 100644
index 5f47909..0000000
--- a/claude-code/tests/codex-session-start-hook.test.ts
+++ /dev/null
@@ -1,175 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { EventEmitter } from "node:events";
-
-/**
- * Source-level tests for src/hooks/codex/session-start.ts. Codex has
- * no async-hook mechanism, so this fast-path hook synchronously reads
- * creds, emits context on stdout, and SPAWNS a detached node process
- * running session-start-setup.js for the heavy work.
- *
- * Mocks: readStdin, loadCredentials, and child_process.spawn. The
- * spawn mock returns a fake child with a writable stdin and an
- * unref() method so the hook body can drive it end-to-end without
- * actually forking a process.
- */
-
-const stdinMock = vi.fn();
-const loadCredsMock = vi.fn();
-const debugLogMock = vi.fn();
-const spawnMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/commands/auth.js", () => ({
- loadCredentials: (...a: any[]) => loadCredsMock(...a),
-}));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_t: string, msg: string) => debugLogMock(msg),
-}));
-vi.mock("node:child_process", async () => {
-  const actual = await vi.importActual<typeof import("node:child_process")>("node:child_process");
- return { ...actual, spawn: (...a: any[]) => spawnMock(...a) };
-});
-
-function makeFakeChild() {
- const stdin = new EventEmitter() as any;
- stdin.write = vi.fn();
- stdin.end = vi.fn();
- return {
- stdin,
- unref: vi.fn(),
- };
-}
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<string | null> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- const collected: string[] = [];
- const originalLog = console.log;
- console.log = (...args: any[]) => { collected.push(args.join(" ")); };
- try {
- await import("../../src/hooks/codex/session-start.js");
- await new Promise(r => setImmediate(r));
- return collected.join("\n") || null;
- } finally {
- console.log = originalLog;
- }
-}
-
-beforeEach(() => {
- stdinMock.mockReset().mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "SessionStart", model: "gpt-5",
- });
- loadCredsMock.mockReset().mockReturnValue({
- token: "tok", orgId: "org-id", orgName: "acme", userName: "alice", workspaceId: "default",
- });
- debugLogMock.mockReset();
- spawnMock.mockReset().mockImplementation(() => makeFakeChild());
-});
-
-afterEach(() => { vi.restoreAllMocks(); });
-
-describe("codex session-start hook — guards", () => {
- it("returns immediately when HIVEMIND_WIKI_WORKER=1 (nested worker)", async () => {
- const out = await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(stdinMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(out).toBeNull();
- });
-
- it("emits not-logged-in context when creds are missing (no token)", async () => {
- loadCredsMock.mockReturnValue(null);
- const out = await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(out).toContain("Not logged in to Deeplake");
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("no credentials found"),
- );
- });
-
- it("logs org name when creds are present", async () => {
- const out = await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("credentials loaded: org=acme"),
- );
- expect(out).toContain("Logged in to Deeplake as org: acme");
- expect(out).toContain("workspace: default");
- });
-
- it("falls back to orgId when orgName is missing", async () => {
- loadCredsMock.mockReturnValue({
- token: "tok", orgId: "org-uuid-123", userName: "alice", workspaceId: "staging",
- });
- const out = await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("credentials loaded: org=org-uuid-123"),
- );
- expect(out).toContain("Logged in to Deeplake as org: org-uuid-123");
- expect(out).toContain("workspace: staging");
- });
-
- it("defaults workspace to 'default' when creds omit workspaceId", async () => {
- loadCredsMock.mockReturnValue({
- token: "tok", orgId: "o", orgName: "acme", userName: "alice",
- });
- const out = await runHook();
- expect(out).toContain("workspace: default");
- });
-});
-
-describe("codex session-start hook — spawn async setup", () => {
- it("spawns session-start-setup.js and feeds the same stdin input", async () => {
- const fake = makeFakeChild();
- spawnMock.mockReturnValue(fake);
- await runHook();
- expect(spawnMock).toHaveBeenCalledTimes(1);
- const [cmd, args, opts] = spawnMock.mock.calls[0];
- expect(cmd).toBe("node");
- expect(args[0]).toContain("session-start-setup.js");
- expect(opts.detached).toBe(true);
- expect(fake.stdin.write).toHaveBeenCalledWith(expect.stringContaining("sid-1"));
- expect(fake.stdin.end).toHaveBeenCalled();
- expect(fake.unref).toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith("spawned async setup process");
- });
-
- it("does not spawn when creds are missing", async () => {
- loadCredsMock.mockReturnValue({ token: "" });
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex session-start hook — fatal catch", () => {
- it("catches a stdin throw and exits 0", async () => {
- stdinMock.mockRejectedValue(new Error("stdin boom"));
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-describe("codex session-start hook — spawn pipes the hook input verbatim", () => {
- it("forwards the full CodexSessionStartInput JSON to the setup process stdin", async () => {
- // The detached setup process parses the SAME stdin input that was
- // fed to this hook. If the contract breaks (e.g. we re-serialize a
- // subset), the async setup would receive a different payload and
- // the placeholder row would have the wrong session/cwd. Assert the
- // exact JSON round-trips.
- const fake = makeFakeChild();
- spawnMock.mockReturnValue(fake);
- const customInput = {
- session_id: "custom-sid", cwd: "/custom/path",
- hook_event_name: "SessionStart", model: "gpt-5", source: "codex-cli",
- };
- stdinMock.mockResolvedValue(customInput);
- await runHook();
- const written = fake.stdin.write.mock.calls[0][0];
- expect(JSON.parse(written)).toMatchObject(customInput);
- });
-});
diff --git a/claude-code/tests/codex-session-start-setup-hook.test.ts b/claude-code/tests/codex-session-start-setup-hook.test.ts
deleted file mode 100644
index 3c05a71..0000000
--- a/claude-code/tests/codex-session-start-setup-hook.test.ts
+++ /dev/null
@@ -1,276 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-
-/**
- * Source-level tests for src/hooks/codex/session-start-setup.ts. The
- * codex async setup hook does the same work as its claude-code
- * counterpart (table setup, placeholder, version check + autoupdate)
- * but with a different autoupdate strategy — it runs a shell pipeline
- * that git clones the release tag into the codex plugin cache.
- *
- * Mocks: readStdin, loadCredentials/saveCredentials, loadConfig,
- * DeeplakeApi (ensureTable, ensureSessionsTable, query), global.fetch,
- * child_process.execSync.
- */
-
-const stdinMock = vi.fn();
-const loadCredsMock = vi.fn();
-const saveCredsMock = vi.fn();
-const loadConfigMock = vi.fn();
-const debugLogMock = vi.fn();
-const ensureTableMock = vi.fn();
-const ensureSessionsTableMock = vi.fn();
-const queryMock = vi.fn();
-const execSyncMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/commands/auth.js", () => ({
- loadCredentials: (...a: any[]) => loadCredsMock(...a),
- saveCredentials: (...a: any[]) => saveCredsMock(...a),
-}));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_t: string, msg: string) => debugLogMock(msg),
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class {
- ensureTable() { return ensureTableMock(); }
- ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); }
- query(sql: string) { return queryMock(sql); }
- },
-}));
-vi.mock("node:child_process", async () => {
-  const actual = await vi.importActual<typeof import("node:child_process")>("node:child_process");
- return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) };
-});
-
-const originalFetch = global.fetch;
-const fetchMock = vi.fn();
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<void> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- // @ts-expect-error
- global.fetch = fetchMock;
- await import("../../src/hooks/codex/session-start-setup.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- userName: "alice", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-beforeEach(() => {
- stdinMock.mockReset().mockResolvedValue({
- session_id: "sid-1", cwd: "/workspaces/proj",
- hook_event_name: "SessionStart", model: "gpt-5",
- });
- loadCredsMock.mockReset().mockReturnValue({
- token: "tok", orgId: "o", orgName: "acme", userName: "alice",
- });
- saveCredsMock.mockReset();
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- debugLogMock.mockReset();
- ensureTableMock.mockReset().mockResolvedValue(undefined);
- ensureSessionsTableMock.mockReset().mockResolvedValue(undefined);
- queryMock.mockReset().mockResolvedValue([]); // placeholder SELECT → empty, INSERT will follow
- execSyncMock.mockReset();
- fetchMock.mockReset().mockResolvedValue({
- ok: true,
- json: async () => ({ version: "0.0.1" }),
- });
-});
-
-afterEach(() => {
- vi.restoreAllMocks();
- // @ts-expect-error
- global.fetch = originalFetch;
-});
-
-describe("codex session-start-setup hook — guards", () => {
- it("returns when HIVEMIND_WIKI_WORKER=1", async () => {
- await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(stdinMock).not.toHaveBeenCalled();
- });
-
- it("returns when no credentials are loaded", async () => {
- loadCredsMock.mockReturnValue(null);
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("no credentials");
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex session-start-setup hook — userName backfill", () => {
- it("backfills userName when missing and saves creds", async () => {
- loadCredsMock.mockReturnValue({ token: "tok", orgId: "o", orgName: "acme" });
- await runHook();
- expect(saveCredsMock).toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringMatching(/^backfilled userName: /),
- );
- });
-
- it("does not save when userName present", async () => {
- await runHook();
- expect(saveCredsMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex session-start-setup hook — placeholder branching", () => {
- it("creates placeholder when none exists (SELECT returns [] → INSERT)", async () => {
- await runHook();
- expect(ensureTableMock).toHaveBeenCalled();
- expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions");
- expect(queryMock).toHaveBeenCalledTimes(2);
- expect(queryMock.mock.calls[0][0]).toMatch(/^SELECT path FROM/);
- expect(queryMock.mock.calls[1][0]).toMatch(/^INSERT INTO/);
- expect(queryMock.mock.calls[1][0]).toContain("'codex'");
- expect(debugLogMock).toHaveBeenCalledWith("setup complete");
- });
-
- it("skips INSERT on resumed session (SELECT returns a row)", async () => {
- queryMock.mockResolvedValueOnce([{ path: "/summaries/alice/sid-1.md" }]);
- await runHook();
- expect(queryMock).toHaveBeenCalledTimes(1);
- });
-
- it("skips placeholder when HIVEMIND_CAPTURE=false but still ensures tables", async () => {
- await runHook({ HIVEMIND_CAPTURE: "false" });
- expect(ensureTableMock).toHaveBeenCalled();
- expect(ensureSessionsTableMock).toHaveBeenCalled();
- expect(queryMock).not.toHaveBeenCalled();
- });
-
- it("swallows setup errors and logs them", async () => {
- ensureTableMock.mockRejectedValue(new Error("table boom"));
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("setup failed: table boom"),
- );
- });
-
- it("skips setup when session_id is empty", async () => {
- stdinMock.mockResolvedValue({
- session_id: "", cwd: "/x", hook_event_name: "SessionStart", model: "m",
- });
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-
- it("skips setup when loadConfig returns null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex session-start-setup hook — version check + autoupdate", () => {
- it("runs the git-clone autoupdate when a newer version is available", async () => {
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0" }),
- });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).toHaveBeenCalled();
-    // The shell pipeline builds the tag from the version — verify the
-    // safe version regex accepted it and that the tag is embedded.
- expect(execSyncMock.mock.calls[0][0]).toContain("v999.0.0");
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("auto-updated"),
- );
- });
-
- it("uses the manual-upgrade message when autoupdate is disabled", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "o", orgName: "acme", userName: "u",
- autoupdate: false,
- });
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("update available"),
- );
- });
-
- it("emits 'Auto-update failed' when execSync throws", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) });
- execSyncMock.mockImplementation(() => { throw new Error("git fail"); });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("Auto-update failed"),
- );
- });
-
- it("tolerates a fetch error (GitHub unreachable)", async () => {
- fetchMock.mockRejectedValue(new Error("offline"));
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex session-start-setup hook — fatal catch", () => {
- it("catches stdin throw and exits 0", async () => {
- stdinMock.mockRejectedValue(new Error("stdin boom"));
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-// Additional branch coverage for version helpers
-describe("codex session-start-setup hook — version helpers edge cases", () => {
- it("fetch ok:false short-circuits getLatestVersion", async () => {
- fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("response without 'version' field falls through to null", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("rejects unsafe version tags without executing git clone", async () => {
-    // The hook builds `v${latest}` and validates against /^v\d+\.\d+\.\d+$/.
-    // Feed a version that fails the regex; the inner try throws the
-    // 'unsafe version tag' guard error, which is caught and reported
-    // on stderr as an 'Auto-update failed' message.
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0-dangerous;rm -rf" }),
- });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("Auto-update failed"),
- );
- });
-
- it("treats latest == current as 'up to date' (isNewer false)", async () => {
- const pkg = JSON.parse(
- require("node:fs").readFileSync(
- require("node:path").join(__dirname, "..", ".claude-plugin", "plugin.json"),
- "utf-8",
- ),
- );
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: pkg.version }) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-});
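
Reviewer note: the autoupdate tests above hinge on one safety property. The version string fetched from GitHub is interpolated into a shell pipeline, so the hook validates `v${latest}` against /^v\d+\.\d+\.\d+$/ before execSync can run. A sketch of that guard, with buildUpdateCommand standing in for the real clone pipeline (the exact command is an assumption):

import { execSync } from "node:child_process";

// Illustrative stand-in: the real hook assembles a git-clone pipeline
// that checks the release tag out into the codex plugin cache.
function buildUpdateCommand(tag: string): string {
  return `git clone --depth 1 --branch ${tag} https://github.com/activeloopai/hivemind /tmp/hivemind-update`;
}

// Reject anything that is not a plain semver tag before it can reach
// the shell: "999.0.0-dangerous;rm -rf" fails here, execSync is never
// called, and the caller reports "Auto-update failed" instead.
function runAutoupdate(latest: string): void {
  const tag = `v${latest}`;
  if (!/^v\d+\.\d+\.\d+$/.test(tag)) {
    throw new Error(`unsafe version tag: ${tag}`);
  }
  execSync(buildUpdateCommand(tag), { stdio: "ignore" });
}
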
diff --git a/claude-code/tests/codex-stop-hook.test.ts b/claude-code/tests/codex-stop-hook.test.ts
deleted file mode 100644
index 0f3cbbf..0000000
--- a/claude-code/tests/codex-stop-hook.test.ts
+++ /dev/null
@@ -1,279 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { mkdtempSync, writeFileSync, rmSync } from "node:fs";
-import { tmpdir } from "node:os";
-import { join } from "node:path";
-
-/**
- * Direct source-level tests for src/hooks/codex/stop.ts. Covers the
- * whole hook: WIKI_WORKER guard, CAPTURE guard (computed at module
- * load — we resetModules per scenario), missing session_id, missing
- * config, transcript parsing (string / array / bad / missing), INSERT
- * failure path, lock held vs free, the spawn call, and the fatal catch.
- */
-
-const stdinMock = vi.fn();
-const loadConfigMock = vi.fn();
-const spawnMock = vi.fn();
-const wikiLogMock = vi.fn();
-const tryAcquireLockMock = vi.fn();
-const releaseLockMock = vi.fn();
-const debugLogMock = vi.fn();
-const queryMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...args: any[]) => stdinMock(...args) }));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...args: any[]) => loadConfigMock(...args) }));
-vi.mock("../../src/hooks/codex/spawn-wiki-worker.js", () => ({
- spawnCodexWikiWorker: (...args: any[]) => spawnMock(...args),
- wikiLog: (...args: any[]) => wikiLogMock(...args),
- bundleDirFromImportMeta: () => "/fake/codex/bundle",
-}));
-vi.mock("../../src/hooks/summary-state.js", () => ({
- tryAcquireLock: (...args: any[]) => tryAcquireLockMock(...args),
- releaseLock: (...args: any[]) => releaseLockMock(...args),
-}));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_tag: string, msg: string) => debugLogMock(msg),
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class { query(sql: string) { return queryMock(sql); } },
-}));
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<void> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- await import("../../src/hooks/codex/stop.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "org", workspaceId: "default",
- userName: "u", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-let tmpDir: string;
-
-beforeEach(() => {
- tmpDir = mkdtempSync(join(tmpdir(), "codex-stop-test-"));
- stdinMock.mockReset().mockResolvedValue({
- session_id: "sid-1", cwd: "/proj/foo", hook_event_name: "Stop", model: "gpt-5",
- transcript_path: null,
- });
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- spawnMock.mockReset();
- wikiLogMock.mockReset();
- tryAcquireLockMock.mockReset().mockReturnValue(true);
- releaseLockMock.mockReset();
- debugLogMock.mockReset();
- queryMock.mockReset().mockResolvedValue([]);
-});
-
-afterEach(() => {
- vi.restoreAllMocks();
- try { rmSync(tmpDir, { recursive: true, force: true }); } catch { /* ignore */ }
-});
-
-describe("codex stop hook — guard paths", () => {
- it("returns immediately when HIVEMIND_WIKI_WORKER=1", async () => {
- await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(stdinMock).not.toHaveBeenCalled();
- expect(queryMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("returns without spawning when session_id is empty", async () => {
- stdinMock.mockResolvedValue({ session_id: "", cwd: "/x", hook_event_name: "Stop", model: "m" });
- await runHook();
- expect(loadConfigMock).not.toHaveBeenCalled();
- expect(queryMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("returns without spawning when loadConfig returns null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(queryMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith("no config");
- });
-
- it("skips capture AND spawn when HIVEMIND_CAPTURE=false", async () => {
- await runHook({ HIVEMIND_CAPTURE: "false" });
- expect(queryMock).not.toHaveBeenCalled();
- expect(tryAcquireLockMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-});
-
-describe("codex stop hook — capture path + INSERT shape", () => {
- it("issues exactly one INSERT against the sessions table on the happy path", async () => {
- await runHook();
- expect(queryMock).toHaveBeenCalledTimes(1);
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toMatch(/^INSERT INTO "sessions"/);
- expect(sql).toContain("'Stop'");
- expect(sql).toContain("'codex'");
- expect(sql).toContain("sid-1");
- expect(sql).toContain("::jsonb");
- expect(debugLogMock).toHaveBeenCalledWith("stop event captured");
- });
-
- it("swallows an INSERT failure and still tries to spawn the wiki worker", async () => {
- queryMock.mockRejectedValue(new Error("network down"));
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("capture failed: network down");
- expect(spawnMock).toHaveBeenCalledTimes(1);
- });
-
- it("derives projectName=unknown when cwd is the empty string", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-x", cwd: "", hook_event_name: "Stop", model: "m", transcript_path: null,
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("'unknown'");
- });
-});
-
-describe("codex stop hook — transcript parsing", () => {
- const writeTranscript = (lines: string[]): string => {
- const p = join(tmpDir, "transcript.jsonl");
- writeFileSync(p, lines.join("\n"));
- return p;
- };
-
- it("extracts the last assistant message when content is a plain string", async () => {
- const path = writeTranscript([
- JSON.stringify({ payload: { role: "user", content: "hi" } }),
- JSON.stringify({ payload: { role: "assistant", content: "hello there" } }),
- ]);
- stdinMock.mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path,
- });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("extracted assistant message from transcript"),
- );
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("hello there");
- expect(sql).toContain('"type":"assistant_message"');
- });
-
- it("extracts from content arrays, joining output_text / text blocks", async () => {
- const path = writeTranscript([
- JSON.stringify({
- payload: {
- role: "assistant",
- content: [
- { type: "output_text", text: "part A" },
- { type: "reasoning", text: "ignored" },
- { type: "text", text: "part B" },
- ],
- },
- }),
- ]);
- stdinMock.mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path,
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain("part A");
- expect(sql).toContain("part B");
- });
-
- it("skips malformed JSONL lines and falls back to assistant_stop when no valid message", async () => {
- const path = writeTranscript([
- "{not json",
- JSON.stringify({ payload: { role: "user", content: "hey" } }),
- ]);
- stdinMock.mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path,
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"assistant_stop"');
- });
-
- it("handles a transcript_path that does not exist on disk (no log, no content)", async () => {
- stdinMock.mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m",
- transcript_path: join(tmpDir, "missing.jsonl"),
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"assistant_stop"');
- expect(debugLogMock).not.toHaveBeenCalledWith(
- expect.stringContaining("extracted assistant message"),
- );
- });
-
- it("treats content as empty when it is neither string nor array (defensive branch)", async () => {
- const path = writeTranscript([
- JSON.stringify({ payload: { role: "assistant", content: { weird: true } } }),
- ]);
- stdinMock.mockResolvedValue({
- session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path,
- });
- await runHook();
- const sql = queryMock.mock.calls[0][0] as string;
- expect(sql).toContain('"type":"assistant_stop"');
- });
-});
-
-describe("codex stop hook — wiki spawn + lock coordination", () => {
- it("skips the wiki spawn with a log line when tryAcquireLock returns false", async () => {
- tryAcquireLockMock.mockReturnValue(false);
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(wikiLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic worker already running for sid-1, skipping"),
- );
- });
-
- it("spawns the codex wiki worker on the happy path with the right arguments", async () => {
- await runHook();
- expect(spawnMock).toHaveBeenCalledTimes(1);
- const arg = spawnMock.mock.calls[0][0];
- expect(arg.sessionId).toBe("sid-1");
- expect(arg.cwd).toBe("/proj/foo");
- expect(arg.reason).toBe("Stop");
- expect(arg.config).toBe(validConfig);
- });
-});
-
-describe("codex stop hook — fatal catch", () => {
- it("catches a thrown readStdin error and exits 0 without crashing", async () => {
- stdinMock.mockRejectedValue(new Error("bad stdin"));
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: bad stdin");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-
- it("releases the lock if spawnCodexWikiWorker throws (no lock leak)", async () => {
- spawnMock.mockImplementation(() => { throw new Error("codex spawn exploded"); });
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(releaseLockMock).toHaveBeenCalledWith("sid-1");
- expect(debugLogMock).toHaveBeenCalledWith("fatal: codex spawn exploded");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-
- it("swallows release errors when spawn also throws (no double-fault)", async () => {
- spawnMock.mockImplementation(() => { throw new Error("codex spawn exploded"); });
- releaseLockMock.mockImplementation(() => { throw new Error("release broken"); });
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: codex spawn exploded");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
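
Reviewer note: the transcript-parsing tests above fix a small contract: scan the JSONL from the end, skip malformed lines, take the last assistant payload, and accept content either as a plain string or as an array whose output_text/text blocks are joined (reasoning blocks ignored). A sketch under those assumptions; lastAssistantMessage is an illustrative name, not the hook's export:

import { existsSync, readFileSync } from "node:fs";

type Block = { type?: string; text?: string };

// Walk the JSONL transcript backwards and return the last assistant
// message, or null when the file is missing or holds no usable entry
// (the caller then records a bare "assistant_stop" event instead).
function lastAssistantMessage(transcriptPath: string): string | null {
  if (!existsSync(transcriptPath)) return null;
  for (const line of readFileSync(transcriptPath, "utf-8").split("\n").reverse()) {
    let entry: any;
    try { entry = JSON.parse(line); } catch { continue; } // skip bad JSONL
    const payload = entry?.payload;
    if (payload?.role !== "assistant") continue;
    const { content } = payload;
    if (typeof content === "string") return content;
    if (Array.isArray(content)) {
      const text = (content as Block[])
        .filter(b => b.type === "output_text" || b.type === "text")
        .map(b => b.text ?? "")
        .join("\n"); // the join separator is an assumption
      return text || null;
    }
    return null; // defensive: content is neither string nor array
  }
  return null;
}
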
diff --git a/claude-code/tests/codex-wiki-worker.test.ts b/claude-code/tests/codex-wiki-worker.test.ts
deleted file mode 100644
index 6a4260a..0000000
--- a/claude-code/tests/codex-wiki-worker.test.ts
+++ /dev/null
@@ -1,358 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { mkdtempSync, rmSync, writeFileSync, readFileSync, mkdirSync } from "node:fs";
-import { tmpdir } from "node:os";
-import { join } from "node:path";
-
-/**
- * Source-level tests for src/hooks/codex/wiki-worker.ts. Mirrors the
- * CC wiki-worker test: mock fetch + execFileSync + summary-state +
- * upload-summary, feed a config file via process.argv[2], drive the
- * module through every branch.
- *
- * Codex-specific differences vs the CC worker:
- * - binary key is `codexBin` (not `claudeBin`)
- * - invoked as `codex exec --dangerously-bypass-approvals-and-sandbox <prompt>`
- * - agent label on upload is `"codex"` (not `"claude_code"`)
- */
-
-const finalizeSummaryMock = vi.fn();
-const releaseLockMock = vi.fn();
-const uploadSummaryMock = vi.fn();
-const execFileSyncMock = vi.fn();
-
-vi.mock("../../src/hooks/summary-state.js", () => ({
- finalizeSummary: (...a: any[]) => finalizeSummaryMock(...a),
- releaseLock: (...a: any[]) => releaseLockMock(...a),
-}));
-vi.mock("../../src/hooks/upload-summary.js", () => ({
- uploadSummary: (...a: any[]) => uploadSummaryMock(...a),
-}));
-vi.mock("node:child_process", async () => {
- const actual = await vi.importActual("node:child_process");
- return { ...actual, execFileSync: (...a: any[]) => execFileSyncMock(...a) };
-});
-
-const originalFetch = global.fetch;
-const fetchMock = vi.fn();
-const originalArgv2 = process.argv[2];
-
-let rootDir: string;
-let tmpDir: string;
-let hooksDir: string;
-let configPath: string;
-
-const defaultConfig = () => ({
- apiUrl: "http://fake.local",
- token: "tok",
- orgId: "org",
- workspaceId: "default",
- memoryTable: "memory",
- sessionsTable: "sessions",
- sessionId: "sid-codex",
- userName: "alice",
- project: "proj",
- tmpDir,
- codexBin: "/fake/codex",
- wikiLog: join(hooksDir, "wiki.log"),
- hooksDir,
- promptTemplate: "JSONL=__JSONL__ SUMMARY=__SUMMARY__ SID=__SESSION_ID__ PROJ=__PROJECT__ OFFSET=__PREV_OFFSET__ LINES=__JSONL_LINES__ SRC=__JSONL_SERVER_PATH__",
-});
-
-function writeConfig(overrides: Partial<ReturnType<typeof defaultConfig>> = {}): void {
- const cfg = { ...defaultConfig(), ...overrides };
- writeFileSync(configPath, JSON.stringify(cfg));
-}
-
-function jsonResp(body: unknown, ok = true, status = 200): Response {
- return {
- ok, status,
- json: async () => body,
- text: async () => typeof body === "string" ? body : JSON.stringify(body),
- } as Response;
-}
-
-async function runWorker(): Promise<void> {
- vi.resetModules();
- // @ts-expect-error
- global.fetch = fetchMock;
- await import("../../src/hooks/codex/wiki-worker.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-beforeEach(() => {
- rootDir = mkdtempSync(join(tmpdir(), "codex-wiki-worker-test-"));
- tmpDir = join(rootDir, "tmp");
- hooksDir = join(rootDir, "hooks");
- mkdirSync(tmpDir, { recursive: true });
- mkdirSync(hooksDir, { recursive: true });
- configPath = join(rootDir, "config.json");
- writeConfig();
- process.argv[2] = configPath;
- fetchMock.mockReset();
- finalizeSummaryMock.mockReset();
- releaseLockMock.mockReset();
- uploadSummaryMock.mockReset().mockResolvedValue({ path: "insert", summaryLength: 80, descLength: 15, sql: "..." });
- execFileSyncMock.mockReset();
-});
-
-afterEach(() => {
- // @ts-expect-error
- global.fetch = originalFetch;
- process.argv[2] = originalArgv2;
- try { rmSync(rootDir, { recursive: true, force: true }); } catch { /* ignore */ }
- vi.restoreAllMocks();
-});
-
-// ═══ early exit ═════════════════════════════════════════════════════════════
-
-describe("codex wiki-worker — no events", () => {
- it("exits early when the sessions table has no rows for this session", async () => {
- fetchMock.mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] }));
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("no session events found — exiting");
- expect(execFileSyncMock).not.toHaveBeenCalled();
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(releaseLockMock).toHaveBeenCalledWith("sid-codex");
- });
-
- it("handles a response with null rows as empty", async () => {
- fetchMock.mockResolvedValue(jsonResp({}));
- await runWorker();
- expect(execFileSyncMock).not.toHaveBeenCalled();
- });
-});
-
-// ═══ happy path ═════════════════════════════════════════════════════════════
-
-describe("codex wiki-worker — happy path", () => {
- const eventRow = [
- { message: JSON.stringify({ type: "user_message", content: "hello codex" }), creation_date: "2026-04-20T00:00:00Z" },
- ];
-
- const mkFetch = (pathRows = 1, hasSummary = false) => {
- return fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message, creation_date")) {
- return jsonResp({ columns: ["message", "creation_date"], rows: eventRow.map(r => [r.message, r.creation_date]) });
- }
- if (sql.startsWith("SELECT DISTINCT path")) {
- return jsonResp({
- columns: ["path"],
- rows: pathRows > 0 ? [["/sessions/alice/alice_org_default_sid-codex.jsonl"]] : [],
- });
- }
- if (sql.startsWith("SELECT summary FROM")) {
- if (hasSummary) {
- return jsonResp({ columns: ["summary"], rows: [["# Session X\n- **JSONL offset**: 7\n\n## What Happened\nprior"]] });
- }
- return jsonResp({ columns: ["summary"], rows: [] });
- }
- throw new Error(`unexpected query: ${sql}`);
- });
- };
-
-  it("runs `codex exec --dangerously-bypass-approvals-and-sandbox <prompt>` and uploads summary", async () => {
- mkFetch();
- let capturedJsonl: string | null = null;
- execFileSyncMock.mockImplementation((bin: string, args: string[]) => {
- expect(bin).toBe("/fake/codex");
- expect(args[0]).toBe("exec");
- expect(args[1]).toBe("--dangerously-bypass-approvals-and-sandbox");
- const prompt = args[2];
- const jsonlPath = prompt.match(/JSONL=(\S+)/)![1];
- capturedJsonl = readFileSync(jsonlPath, "utf-8");
- const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# Session sid-codex\n\n## What Happened\ndone.\n");
- return Buffer.from("");
- });
- await runWorker();
-
- expect(capturedJsonl).toContain('"type":"user_message"');
- expect(capturedJsonl).toContain('"content":"hello codex"');
-
- // codex exec is invoked with HIVEMIND_WIKI_WORKER=1 to prevent the
- // child's own capture hook from recursing back into this worker.
- const execOpts = execFileSyncMock.mock.calls[0][2];
- expect(execOpts.env.HIVEMIND_WIKI_WORKER).toBe("1");
- expect(execOpts.env.HIVEMIND_CAPTURE).toBe("false");
-
- // Upload agent is 'codex' (not 'claude_code')
- expect(uploadSummaryMock).toHaveBeenCalledTimes(1);
- const params = uploadSummaryMock.mock.calls[0][1];
- expect(params.agent).toBe("codex");
- expect(params.sessionId).toBe("sid-codex");
-
- expect(finalizeSummaryMock).toHaveBeenCalledWith("sid-codex", 1);
- expect(releaseLockMock).toHaveBeenCalledWith("sid-codex");
- });
-
- it("parses JSONL offset from an existing summary on resumed session", async () => {
- mkFetch(1, true);
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const prompt = args[2];
- const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# updated\n\n## What Happened\n...\n");
- return Buffer.from("");
- });
- await runWorker();
- const prompt = execFileSyncMock.mock.calls[0][1][2] as string;
- expect(prompt).toContain("OFFSET=7");
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("existing summary found, offset=7");
- });
-
- it("falls back to /sessions/unknown/ when path SELECT empty", async () => {
- mkFetch(0);
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "x\n");
- return Buffer.from("");
- });
- await runWorker();
- const prompt = execFileSyncMock.mock.calls[0][1][2] as string;
- expect(prompt).toContain("SRC=/sessions/unknown/sid-codex.jsonl");
- });
-
- it("serializes JSONB object rows by stringifying them", async () => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message, creation_date")) {
- return jsonResp({
- columns: ["message", "creation_date"],
- rows: [[{ type: "user_message", content: "obj" }, "t"]],
- });
- }
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- let capturedJsonl: string | null = null;
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const jsonlPath = args[2].match(/JSONL=(\S+)/)![1];
- capturedJsonl = readFileSync(jsonlPath, "utf-8");
- const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "x");
- return Buffer.from("");
- });
- await runWorker();
- expect(capturedJsonl).toContain('"type":"user_message"');
- });
-});
-
-// ═══ codex exec failure ════════════════════════════════════════════════════
-
-describe("codex wiki-worker — codex exec failure", () => {
- beforeEach(() => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] });
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- });
-
- it("logs status and skips upload when codex exec throws without producing a summary", async () => {
- const err: any = new Error("codex crashed");
- err.status = 99;
- execFileSyncMock.mockImplementation(() => { throw err; });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("codex exec failed: 99");
- expect(log).toContain("no summary file generated");
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("falls back to err.message when err.status is absent", async () => {
- execFileSyncMock.mockImplementation(() => { throw new Error("no status here"); });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("codex exec failed: no status here");
- });
-});
-
-// ═══ query retry logic ═════════════════════════════════════════════════════
-
-describe("codex wiki-worker — query retry logic", () => {
- beforeEach(() => {
- vi.spyOn(global, "setTimeout").mockImplementation(((cb: any) => {
- cb();
- return 0 as any;
- }) as any);
- });
-
- it("retries on 500 until success", async () => {
- const responses = [
- jsonResp("server error", false, 500),
- jsonResp({ columns: ["message", "creation_date"], rows: [] }),
- ];
- fetchMock.mockImplementation(async () => responses.shift()!);
- await runWorker();
- expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2);
- });
-
-  it("retries on Cloudflare rate-limit class 401/403/429", async () => {
- for (const status of [401, 403, 429]) {
- fetchMock.mockReset();
- fetchMock
- .mockResolvedValueOnce(jsonResp("", false, status))
- .mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] }));
- await runWorker();
- expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2);
- }
- });
-
- it("throws on 400 (non-retryable) and main catches", async () => {
- fetchMock.mockResolvedValue(jsonResp("bad", false, 400));
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toMatch(/fatal: API 400/);
- expect(releaseLockMock).toHaveBeenCalled();
- });
-});
-
-// ═══ finalize + release + empty summary ═══════════════════════════════════
-
-describe("codex wiki-worker — finalize + release edges", () => {
- beforeEach(() => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] });
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# s\n\n## What Happened\nX\n");
- return Buffer.from("");
- });
- });
-
- it("logs sidecar update failure but still releases lock", async () => {
- finalizeSummaryMock.mockImplementation(() => { throw new Error("sidecar boom"); });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("sidecar update failed: sidecar boom");
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("swallows releaseLock throw in finally", async () => {
- releaseLockMock.mockImplementation(() => { throw new Error("release boom"); });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("done");
- });
-
- it("skips upload when summary file is whitespace-only", async () => {
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, " \n\n");
- return Buffer.from("");
- });
- await runWorker();
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(finalizeSummaryMock).not.toHaveBeenCalled();
- });
-});
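
Reviewer note: the retry tests above encode the worker's fetch policy. 500 and the Cloudflare 401/403/429 class are retried, while a 400 throws straight to the fatal handler, which logs and releases the lock. A compact sketch of that loop; the 502/503/504 entries and the backoff shape are assumptions beyond what these tests pin down:

// Statuses worth retrying: transient server errors plus the 401/403/429
// class that Cloudflare rate limiting can return for a healthy backend.
const RETRYABLE = new Set([401, 403, 429, 500, 502, 503, 504]);

async function queryWithRetry(
  url: string,
  body: unknown,
  maxAttempts = 3,
  delayMs = 1000,
): Promise<unknown> {
  for (let attempt = 1; ; attempt++) {
    const res = await fetch(url, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify(body),
    });
    if (res.ok) return res.json();
    // Non-retryable (e.g. 400 bad SQL): surface immediately so main()
    // can log `fatal: API 400 ...` and release the session lock.
    if (!RETRYABLE.has(res.status) || attempt >= maxAttempts) {
      throw new Error(`API ${res.status}: ${await res.text()}`);
    }
    await new Promise(r => setTimeout(r, delayMs * attempt));
  }
}
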
diff --git a/claude-code/tests/deeplake-api.test.ts b/claude-code/tests/deeplake-api.test.ts
index b1276c9..f427bf7 100644
--- a/claude-code/tests/deeplake-api.test.ts
+++ b/claude-code/tests/deeplake-api.test.ts
@@ -1,4 +1,7 @@
import { describe, it, expect, beforeEach, vi, afterEach } from "vitest";
+import { mkdtempSync } from "node:fs";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
import { DeeplakeApi, WriteRow } from "../../src/deeplake-api.js";
// ── Mock fetch ──────────────────────────────────────────────────────────────
@@ -20,6 +23,11 @@ function makeApi(table = "test_table") {
beforeEach(() => {
mockFetch.mockReset();
+ process.env.HIVEMIND_INDEX_MARKER_DIR = mkdtempSync(join(tmpdir(), "hivemind-index-marker-"));
+});
+
+afterEach(() => {
+ delete process.env.HIVEMIND_INDEX_MARKER_DIR;
});
// ── query() ─────────────────────────────────────────────────────────────────
@@ -79,6 +87,23 @@ describe("DeeplakeApi.query", () => {
expect(rows).toEqual([{ x: "ok" }]);
});
+ it("retries transient HTML 403s for session inserts", async () => {
+ mockFetch
+ .mockResolvedValueOnce({
+ ok: false,
+ status: 403,
+ json: async () => ({}),
+        text: async () => "<html>403 Forbidden</html>nginx",
+ })
+ .mockResolvedValueOnce(jsonResponse({}));
+ const api = makeApi();
+ const rows = await api.query(
+ 'INSERT INTO "sessions" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES (\'id\', \'/p\', \'f\', \'{}\'::jsonb, \'u\', 2, \'p\', \'Stop\', \'claude_code\', \'t\', \'t\')',
+ );
+ expect(rows).toEqual([]);
+ expect(mockFetch).toHaveBeenCalledTimes(2);
+ });
+
it("retries on 502/503/504", async () => {
mockFetch
.mockResolvedValueOnce(jsonResponse("", 502))
@@ -120,6 +145,25 @@ describe("DeeplakeApi.query", () => {
await expect(api.query("SELECT 1")).rejects.toThrow("DNS_FAIL");
});
+ it("fails fast on timeout-like fetch errors without retrying", async () => {
+ const timeoutError = new Error("request timed out");
+ timeoutError.name = "TimeoutError";
+ mockFetch.mockRejectedValueOnce(timeoutError);
+ const api = makeApi();
+
+ await expect(api.query("SELECT 1")).rejects.toThrow("Query timeout after 10000ms");
+ expect(mockFetch).toHaveBeenCalledTimes(1);
+ });
+
+ it("passes an abort signal to query fetches", async () => {
+ mockFetch.mockResolvedValueOnce(jsonResponse({ columns: ["x"], rows: [["ok"]] }));
+ const api = makeApi();
+ await api.query("SELECT 1");
+
+ const opts = mockFetch.mock.calls[0][1];
+ expect(opts.signal).toBeInstanceOf(AbortSignal);
+ });
+
it("wraps non-Error fetch exceptions", async () => {
mockFetch.mockRejectedValue("string error");
const api = makeApi();
@@ -328,6 +372,19 @@ describe("DeeplakeApi.listTables", () => {
const api = makeApi();
expect(await api.listTables()).toEqual([]);
});
+
+ it("caches successful results per api instance", async () => {
+ mockFetch.mockResolvedValueOnce({
+ ok: true,
+ status: 200,
+ json: async () => ({ tables: [{ table_name: "memory" }, { table_name: "sessions" }] }),
+ });
+ const api = makeApi();
+
+ expect(await api.listTables()).toEqual(["memory", "sessions"]);
+ expect(await api.listTables()).toEqual(["memory", "sessions"]);
+ expect(mockFetch).toHaveBeenCalledTimes(1);
+ });
});
// ── ensureTable ─────────────────────────────────────────────────────────────
@@ -371,6 +428,28 @@ describe("DeeplakeApi.ensureTable", () => {
const createSql = JSON.parse(mockFetch.mock.calls[1][1].body).query;
expect(createSql).toContain("custom_table");
});
+
+ it("reuses cached listTables across ensureTable and ensureSessionsTable", async () => {
+ mockFetch.mockResolvedValueOnce({
+ ok: true, status: 200,
+ json: async () => ({ tables: [{ table_name: "memory" }] }),
+ });
+ mockFetch.mockResolvedValueOnce(jsonResponse({}));
+ mockFetch.mockResolvedValueOnce(jsonResponse({}));
+ const api = makeApi("memory");
+
+ await api.ensureTable();
+ await api.ensureSessionsTable("sessions");
+
+ expect(mockFetch).toHaveBeenCalledTimes(3);
+ const createSql = JSON.parse(mockFetch.mock.calls[1][1].body).query;
+ expect(createSql).toContain("CREATE TABLE IF NOT EXISTS");
+ expect(createSql).toContain("sessions");
+ const indexSql = JSON.parse(mockFetch.mock.calls[2][1].body).query;
+ expect(indexSql).toContain("CREATE INDEX IF NOT EXISTS");
+ expect(indexSql).toContain("\"path\"");
+ expect(indexSql).toContain("\"creation_date\"");
+ });
});
// ── ensureSessionsTable ─────────────────────────────────────────────────────
@@ -382,6 +461,7 @@ describe("DeeplakeApi.ensureSessionsTable", () => {
json: async () => ({ tables: [] }),
});
mockFetch.mockResolvedValueOnce(jsonResponse({}));
+ mockFetch.mockResolvedValueOnce(jsonResponse({}));
const api = makeApi();
await api.ensureSessionsTable("sessions");
const createSql = JSON.parse(mockFetch.mock.calls[1][1].body).query;
@@ -389,15 +469,50 @@ describe("DeeplakeApi.ensureSessionsTable", () => {
expect(createSql).toContain("sessions");
expect(createSql).toContain("JSONB");
expect(createSql).toContain("USING deeplake");
+ const indexSql = JSON.parse(mockFetch.mock.calls[2][1].body).query;
+ expect(indexSql).toContain("CREATE INDEX IF NOT EXISTS");
+ expect(indexSql).toContain("\"sessions\"");
+ expect(indexSql).toContain("(\"path\", \"creation_date\")");
});
- it("does nothing when sessions table already exists", async () => {
+ it("ensures the lookup index when sessions table already exists", async () => {
mockFetch.mockResolvedValueOnce({
ok: true, status: 200,
json: async () => ({ tables: [{ table_name: "sessions" }] }),
});
+ mockFetch.mockResolvedValueOnce(jsonResponse({}));
const api = makeApi();
await api.ensureSessionsTable("sessions");
- expect(mockFetch).toHaveBeenCalledOnce();
+ expect(mockFetch).toHaveBeenCalledTimes(2);
+ const indexSql = JSON.parse(mockFetch.mock.calls[1][1].body).query;
+ expect(indexSql).toContain("CREATE INDEX IF NOT EXISTS");
+ });
+
+ it("ignores lookup-index creation errors after ensuring the sessions table", async () => {
+ mockFetch.mockResolvedValueOnce({
+ ok: true, status: 200,
+ json: async () => ({ tables: [{ table_name: "sessions" }] }),
+ });
+ mockFetch.mockResolvedValueOnce(jsonResponse("forbidden", 403));
+ const api = makeApi();
+
+ await expect(api.ensureSessionsTable("sessions")).resolves.toBeUndefined();
+ expect(mockFetch).toHaveBeenCalledTimes(2);
+ });
+
+ it("treats duplicate concurrent index creation errors as success and records a local marker", async () => {
+ mockFetch.mockResolvedValueOnce({
+ ok: true, status: 200,
+ json: async () => ({ tables: [{ table_name: "sessions" }] }),
+ });
+ mockFetch.mockResolvedValueOnce(jsonResponse("duplicate key value violates unique constraint \"pg_class_relname_nsp_index\"", 400));
+
+ const api = makeApi();
+ await expect(api.ensureSessionsTable("sessions")).resolves.toBeUndefined();
+
+ mockFetch.mockReset();
+ await api.ensureSessionsTable("sessions");
+
+ expect(mockFetch).not.toHaveBeenCalled();
});
});
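
Reviewer note: the transient-HTML-403 retry test in this file relies on telling a real auth failure apart from an edge-proxy hiccup. The retry should fire only when the 403 body is an HTML error page (nginx/Cloudflare style) and the statement is a session INSERT, which is safe to replay. A sketch of that predicate; isTransientHtml403 and the hard-coded table name are illustrative:

// A 403 whose body is an HTML error page almost certainly came from an
// edge proxy rather than the API's JSON error path. Retry those, but
// only for session INSERTs, where a replay is harmless.
function isTransientHtml403(status: number, bodyText: string, sql: string): boolean {
  return (
    status === 403 &&
    bodyText.toLowerCase().includes("<html") &&
    /^insert into "sessions"/i.test(sql.trim())
  );
}
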
diff --git a/claude-code/tests/deeplake-fs.test.ts b/claude-code/tests/deeplake-fs.test.ts
index 4cbf03b..455b86a 100644
--- a/claude-code/tests/deeplake-fs.test.ts
+++ b/claude-code/tests/deeplake-fs.test.ts
@@ -586,6 +586,63 @@ describe("prefetch", () => {
expect(client.query).not.toHaveBeenCalled();
});
+
+ it("prefetches session-backed files in batches instead of one query per path", async () => {
+ const sessionMessages = new Map([
+ ["/sessions/alice/a.json", [
+ { message: "{\"type\":\"user_message\",\"content\":\"hello\"}", creation_date: "2026-01-01T00:00:00.000Z" },
+ { message: "{\"type\":\"assistant_message\",\"content\":\"hi\"}", creation_date: "2026-01-01T00:00:01.000Z" },
+ ]],
+ ["/sessions/alice/b.json", [
+ { message: "{\"type\":\"user_message\",\"content\":\"bye\"}", creation_date: "2026-01-01T00:00:02.000Z" },
+ ]],
+ ]);
+
+ const client = {
+ ensureTable: vi.fn().mockResolvedValue(undefined),
+ query: vi.fn(async (sql: string) => {
+ if (sql.includes("SELECT path, size_bytes, mime_type")) return [];
+ if (sql.includes("SELECT path, SUM(size_bytes) as total_size")) {
+ return [...sessionMessages.entries()].map(([path, rows]) => ({
+ path,
+ total_size: rows.reduce((sum, row) => sum + Buffer.byteLength(row.message, "utf-8"), 0),
+ }));
+ }
+ if (sql.includes("SELECT path, message, creation_date")) {
+ const inMatch = sql.match(/IN \(([^)]+)\)/);
+ const paths = inMatch
+ ? inMatch[1].split(",").map((value) => value.trim().replace(/^'|'$/g, ""))
+ : [];
+ return paths.flatMap((path) =>
+ (sessionMessages.get(path) ?? []).map((row) => ({
+ path,
+ message: row.message,
+ creation_date: row.creation_date,
+ })),
+ );
+ }
+ if (sql.includes("SELECT message FROM")) return [];
+ return [];
+ }),
+ };
+
+ const fs = await DeeplakeFs.create(client as never, "memory", "/", "sessions");
+ client.query.mockClear();
+
+ await fs.prefetch(["/sessions/alice/a.json", "/sessions/alice/b.json"]);
+
+ const prefetchCalls = (client.query.mock.calls as [string][]).filter(
+ ([sql]) => sql.includes("SELECT path, message, creation_date") && sql.includes("IN ("),
+ );
+ expect(prefetchCalls).toHaveLength(1);
+ expect(prefetchCalls[0][0]).toContain("/sessions/alice/a.json");
+ expect(prefetchCalls[0][0]).toContain("/sessions/alice/b.json");
+
+ client.query.mockClear();
+ expect(await fs.readFile("/sessions/alice/a.json")).toBe("[user] hello\n[assistant] hi");
+ expect(await fs.readFile("/sessions/alice/b.json")).toBe("[user] bye");
+ expect(client.query).not.toHaveBeenCalled();
+ });
});
// ── Upsert: id stability & dates ─────────────────────────────────────────────
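
Reviewer note: the batch-prefetch test above asserts one IN (...) query for N session paths instead of N separate queries. A sketch of the batching shape it pins down; prefetchSessions is an illustrative name and the quoting is simplified:

type SessionRow = { path: string; message: string; creation_date: string };

// One round-trip for the whole batch: fetch every row whose path is in
// the requested set, then group rows back per path for the read cache.
async function prefetchSessions(
  query: (sql: string) => Promise<SessionRow[]>,
  table: string,
  paths: string[],
): Promise<Map<string, SessionRow[]>> {
  const cache = new Map<string, SessionRow[]>();
  if (paths.length === 0) return cache;
  const list = paths.map(p => `'${p.replace(/'/g, "''")}'`).join(", ");
  const rows = await query(
    `SELECT path, message, creation_date FROM "${table}" ` +
    `WHERE path IN (${list}) ORDER BY creation_date`,
  );
  for (const row of rows) {
    const bucket = cache.get(row.path) ?? [];
    bucket.push(row);
    cache.set(row.path, bucket);
  }
  return cache;
}
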
diff --git a/claude-code/tests/grep-core.test.ts b/claude-code/tests/grep-core.test.ts
index 4a3a860..2a9a409 100644
--- a/claude-code/tests/grep-core.test.ts
+++ b/claude-code/tests/grep-core.test.ts
@@ -1,8 +1,12 @@
import { describe, it, expect, vi } from "vitest";
import {
+ buildGrepSearchOptions,
normalizeContent,
buildPathFilter,
+ buildPathFilterForTargets,
compileGrepRegex,
+ extractRegexAlternationPrefilters,
+ extractRegexLiteralPrefilter,
refineGrepMatches,
searchDeeplakeTables,
grepBothTables,
@@ -32,31 +36,31 @@ describe("normalizeContent: passthrough for non-session paths", () => {
});
});
-describe("normalizeContent: LoCoMo benchmark shape", () => {
+describe("normalizeContent: turn-array session shape", () => {
const raw = JSON.stringify({
date_time: "1:56 pm on 8 May, 2023",
- speakers: { speaker_a: "Caroline", speaker_b: "Melanie" },
+ speakers: { speaker_a: "Avery", speaker_b: "Jordan" },
turns: [
- { dia_id: "D1:1", speaker: "Caroline", text: "Hey Mel!" },
- { dia_id: "D1:2", speaker: "Melanie", text: "Hi Caroline." },
+ { dia_id: "D1:1", speaker: "Avery", text: "Hey Jordan!" },
+ { dia_id: "D1:2", speaker: "Jordan", text: "Hi Avery." },
],
});
it("emits date and speakers header", () => {
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).toContain("date: 1:56 pm on 8 May, 2023");
- expect(out).toContain("speakers: Caroline, Melanie");
+ expect(out).toContain("speakers: Avery, Jordan");
});
it("emits one line per turn with dia_id tag", () => {
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
- expect(out).toContain("[D1:1] Caroline: Hey Mel!");
- expect(out).toContain("[D1:2] Melanie: Hi Caroline.");
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
+ expect(out).toContain("[D1:1] Avery: Hey Jordan!");
+ expect(out).toContain("[D1:2] Jordan: Hi Avery.");
});
it("falls back gracefully on turns without speaker/text", () => {
const weird = JSON.stringify({ turns: [{}, { speaker: "X" }] });
- const out = normalizeContent("/sessions/conv_0_session_1.json", weird);
+ const out = normalizeContent("/sessions/alice/chat_1.json", weird);
// Must not crash; includes placeholder `?` for missing speaker
expect(out).toContain("?: ");
expect(out).toContain("X: ");
@@ -67,7 +71,7 @@ describe("normalizeContent: LoCoMo benchmark shape", () => {
turns: [{ speaker: "A", text: "hi" }],
speakers: { speaker_a: "", speaker_b: "" },
});
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).not.toContain("speakers:");
expect(out).toContain("A: hi");
});
@@ -77,32 +81,32 @@ describe("normalizeContent: LoCoMo benchmark shape", () => {
turns: [{ speaker: "A", text: "hi" }],
speakers: { speaker_a: "Alice" },
});
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).toContain("speakers: Alice");
});
it("falls back speaker->name when speaker field is absent on a turn", () => {
- const raw = JSON.stringify({ turns: [{ name: "Caroline", text: "hi" }] });
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
- expect(out).toContain("Caroline: hi");
+ const raw = JSON.stringify({ turns: [{ name: "Avery", text: "hi" }] });
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
+ expect(out).toContain("Avery: hi");
});
it("falls back text->content when text field is absent on a turn", () => {
const raw = JSON.stringify({ turns: [{ speaker: "X", content: "fallback" }] });
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).toContain("X: fallback");
});
it("omits dia_id prefix when the turn has no dia_id", () => {
const raw = JSON.stringify({ turns: [{ speaker: "A", text: "hi" }] });
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).toContain("A: hi");
expect(out).not.toMatch(/\[\]/);
});
it("emits turns without date/speakers when both are missing", () => {
const raw = JSON.stringify({ turns: [{ speaker: "A", text: "hi" }] });
- const out = normalizeContent("/sessions/conv_0_session_1.json", raw);
+ const out = normalizeContent("/sessions/alice/chat_1.json", raw);
expect(out).not.toContain("date:");
expect(out).not.toContain("speakers:");
expect(out).toContain("A: hi");
@@ -111,7 +115,7 @@ describe("normalizeContent: LoCoMo benchmark shape", () => {
it("returns raw when turns produce an empty serialization", () => {
const empty = JSON.stringify({ turns: [] });
// No header, no turns → trimmed output is empty → fallback to raw
- const out = normalizeContent("/sessions/conv_0_session_1.json", empty);
+ const out = normalizeContent("/sessions/alice/chat_1.json", empty);
expect(out).toBe(empty);
});
});
@@ -428,15 +432,44 @@ describe("buildPathFilter", () => {
expect(buildPathFilter("")).toBe("");
});
it("emits equality + prefix match for subpaths", () => {
- const f = buildPathFilter("/summaries/locomo");
- expect(f).toContain("path = '/summaries/locomo'");
- expect(f).toContain("path LIKE '/summaries/locomo/%'");
+ const f = buildPathFilter("/summaries/projects");
+ expect(f).toContain("path = '/summaries/projects'");
+ expect(f).toContain("path LIKE '/summaries/projects/%'");
});
it("strips trailing slashes", () => {
const f = buildPathFilter("/sessions///");
expect(f).toContain("path = '/sessions'");
expect(f).toContain("path LIKE '/sessions/%'");
});
+ it("uses exact matching for likely file targets", () => {
+ expect(buildPathFilter("/summaries/alice/s1.md")).toBe(
+ " AND path = '/summaries/alice/s1.md'",
+ );
+ });
+ it("uses LIKE matching for glob targets instead of exact file matching", () => {
+ expect(buildPathFilter("/summaries/projects/*.md")).toBe(
+ " AND path LIKE '/summaries/projects/%.md'",
+ );
+ const filter = buildPathFilter("/sessions/alice/chat_?.json");
+ expect(filter).toMatch(/^ AND path LIKE '\/sessions\/alice\/chat.*\.json'$/);
+ });
+});
+
+describe("buildPathFilterForTargets", () => {
+ it("returns empty string when any target is root", () => {
+ expect(buildPathFilterForTargets(["/summaries", "/"])).toBe("");
+ });
+
+ it("joins multiple target filters into one OR clause", () => {
+ const filter = buildPathFilterForTargets([
+ "/summaries/alice",
+ "/sessions/bob/chat.jsonl",
+ ]);
+ expect(filter).toContain("path = '/summaries/alice'");
+ expect(filter).toContain("path LIKE '/summaries/alice/%'");
+ expect(filter).toContain("path = '/sessions/bob/chat.jsonl'");
+ expect(filter).toContain(" OR ");
+ });
});
// ── compileGrepRegex ────────────────────────────────────────────────────────
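
Reviewer note: the buildPathFilter / buildPathFilterForTargets tests above settle the target grammar: a root target disables filtering, a directory becomes equality plus a '/%' prefix LIKE, a concrete file becomes a single equality, and glob characters map onto SQL LIKE wildcards. A simplified sketch of that mapping (names and the exact clause wrapping are illustrative):

// Map one grep target onto a SQL path predicate, mirroring the tested
// cases: "/" disables filtering, globs use LIKE, files use =, and
// directories match themselves or anything beneath them.
function pathPredicate(target: string): string {
  const t = target.replace(/\/+$/, "") || "/";
  if (t === "/") return "";
  const esc = t.replace(/'/g, "''");
  if (/[*?]/.test(t)) {
    const like = esc.replace(/\*/g, "%").replace(/\?/g, "_"); // glob → LIKE
    return `path LIKE '${like}'`;
  }
  if (/\.[A-Za-z0-9]+$/.test(t)) {
    return `path = '${esc}'`; // looks like a file: exact match only
  }
  return `(path = '${esc}' OR path LIKE '${esc}/%')`;
}

// Multiple targets OR together; any root target clears the filter.
function pathFilterForTargets(targets: string[]): string {
  const parts = targets.map(pathPredicate);
  if (parts.some(p => p === "")) return "";
  return ` AND (${parts.join(" OR ")})`;
}
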
@@ -569,15 +602,14 @@ describe("refineGrepMatches", () => {
// ── searchDeeplakeTables ─────────────────────────────────────────────────────
describe("searchDeeplakeTables", () => {
- function mockApi(memRows: unknown[], sessRows: unknown[]) {
+ function mockApi(rows: unknown[]) {
const query = vi.fn()
- .mockImplementationOnce(async () => memRows)
- .mockImplementationOnce(async () => sessRows);
+ .mockImplementationOnce(async () => rows);
return { query } as any;
}
- it("issues one LIKE query per table with the escaped pattern and path filter", async () => {
- const api = mockApi([], []);
+ it("issues one UNION ALL query with the escaped pattern and path filter", async () => {
+ const api = mockApi([]);
await searchDeeplakeTables(api, "memory", "sessions", {
pathFilter: " AND (path = '/x' OR path LIKE '/x/%')",
contentScanOnly: false,
@@ -585,33 +617,64 @@ describe("searchDeeplakeTables", () => {
escapedPattern: "foo",
limit: 50,
});
- expect(api.query).toHaveBeenCalledTimes(2);
- const [memCall, sessCall] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
- expect(memCall).toContain('FROM "memory"');
- expect(memCall).toContain("summary::text ILIKE '%foo%'");
- expect(memCall).toContain("LIMIT 50");
- expect(sessCall).toContain('FROM "sessions"');
- expect(sessCall).toContain("message::text ILIKE '%foo%'");
+ expect(api.query).toHaveBeenCalledTimes(1);
+ const sql = api.query.mock.calls[0][0] as string;
+ expect(sql).toContain('FROM "memory"');
+ expect(sql).toContain('FROM "sessions"');
+ expect(sql).toContain("summary::text ILIKE '%foo%'");
+ expect(sql).toContain("message::text ILIKE '%foo%'");
+ expect(sql).toContain("LIMIT 50");
+ expect(sql).toContain("UNION ALL");
});
it("skips LIKE filter when contentScanOnly is true (regex-in-memory mode)", async () => {
- const api = mockApi([], []);
+ const api = mockApi([]);
await searchDeeplakeTables(api, "m", "s", {
pathFilter: "",
contentScanOnly: true,
likeOp: "LIKE",
escapedPattern: "anything",
});
- const [memCall, sessCall] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
- expect(memCall).not.toContain("LIKE");
- expect(sessCall).not.toContain("LIKE");
+ const sql = api.query.mock.calls[0][0] as string;
+ expect(sql).not.toContain("summary::text LIKE");
+ expect(sql).not.toContain("message::text LIKE");
+ });
+
+ it("uses a safe literal prefilter for regex scans when available", async () => {
+ const api = mockApi([]);
+ await searchDeeplakeTables(api, "m", "s", {
+ pathFilter: "",
+ contentScanOnly: true,
+ likeOp: "LIKE",
+ escapedPattern: "foo.*bar",
+ prefilterPattern: "foo",
+ });
+ const sql = api.query.mock.calls[0][0] as string;
+ expect(sql).toContain("summary::text LIKE '%foo%'");
+ expect(sql).toContain("message::text LIKE '%foo%'");
+ });
+
+ it("expands alternation prefilters into OR clauses instead of literal pipes", async () => {
+ const api = mockApi([]);
+ await searchDeeplakeTables(api, "m", "s", {
+ pathFilter: "",
+ contentScanOnly: true,
+ likeOp: "LIKE",
+ escapedPattern: "relationship|partner|married",
+ prefilterPatterns: ["relationship", "partner", "married"],
+ });
+ const sql = api.query.mock.calls[0][0] as string;
+ expect(sql).toContain("summary::text LIKE '%relationship%'");
+ expect(sql).toContain("summary::text LIKE '%partner%'");
+ expect(sql).toContain("summary::text LIKE '%married%'");
+ expect(sql).not.toContain("relationship|partner|married");
});
it("concatenates rows from both tables into {path, content}", async () => {
- const api = mockApi(
- [{ path: "/summaries/a", content: "aaa" }],
- [{ path: "/sessions/b", content: "bbb" }],
- );
+ const api = mockApi([
+ { path: "/summaries/a", content: "aaa" },
+ { path: "/sessions/b", content: "bbb" },
+ ]);
const rows = await searchDeeplakeTables(api, "m", "s", {
pathFilter: "", contentScanOnly: false, likeOp: "LIKE", escapedPattern: "x",
});
@@ -622,7 +685,7 @@ describe("searchDeeplakeTables", () => {
});
it("tolerates null content on memory row (coerces to empty string)", async () => {
- const api = mockApi([{ path: "/a", content: null }], []);
+ const api = mockApi([{ path: "/a", content: null }]);
const rows = await searchDeeplakeTables(api, "m", "s", {
pathFilter: "", contentScanOnly: false, likeOp: "LIKE", escapedPattern: "x",
});
@@ -630,35 +693,22 @@ describe("searchDeeplakeTables", () => {
});
it("tolerates null content on sessions row too", async () => {
- const api = mockApi([], [{ path: "/b", content: null }]);
+ const api = mockApi([{ path: "/b", content: null }]);
const rows = await searchDeeplakeTables(api, "m", "s", {
pathFilter: "", contentScanOnly: false, likeOp: "LIKE", escapedPattern: "x",
});
expect(rows[0]).toEqual({ path: "/b", content: "" });
});
- it("returns partial results when the sessions query fails", async () => {
+ it("keeps grep on a single SQL query when the union query fails", async () => {
const api = {
query: vi.fn()
- .mockImplementationOnce(async () => [{ path: "/a", content: "ok" }])
- .mockImplementationOnce(async () => { throw new Error("boom"); }),
+ .mockRejectedValueOnce(new Error("bad union"))
} as any;
- const rows = await searchDeeplakeTables(api, "m", "s", {
+ await expect(searchDeeplakeTables(api, "m", "s", {
pathFilter: "", contentScanOnly: false, likeOp: "LIKE", escapedPattern: "x",
- });
- expect(rows).toEqual([{ path: "/a", content: "ok" }]);
- });
-
- it("returns partial results when the memory query fails", async () => {
- const api = {
- query: vi.fn()
- .mockImplementationOnce(async () => { throw new Error("boom"); })
- .mockImplementationOnce(async () => [{ path: "/b", content: "ok" }]),
- } as any;
- const rows = await searchDeeplakeTables(api, "m", "s", {
- pathFilter: "", contentScanOnly: false, likeOp: "LIKE", escapedPattern: "x",
- });
- expect(rows).toEqual([{ path: "/b", content: "ok" }]);
+ })).rejects.toThrow("bad union");
+ expect(api.query).toHaveBeenCalledTimes(1);
});
it("defaults limit to 100 when omitted", async () => {
@@ -677,8 +727,7 @@ describe("grepBothTables", () => {
function mockApi(rows: unknown[]) {
return {
query: vi.fn()
- .mockResolvedValueOnce(rows) // memory
- .mockResolvedValueOnce([]), // sessions (empty in these tests)
+ .mockResolvedValueOnce(rows),
} as any;
}
@@ -698,44 +747,151 @@ describe("grepBothTables", () => {
it("deduplicates rows by path when memory and sessions return the same path", async () => {
const api = {
query: vi.fn()
- .mockResolvedValueOnce([{ path: "/shared", content: "foo" }])
- .mockResolvedValueOnce([{ path: "/shared", content: "foo" }]),
+ .mockResolvedValueOnce([{ path: "/shared", content: "foo" }, { path: "/shared", content: "foo" }]),
} as any;
const out = await grepBothTables(api, "m", "s", baseParams, "/");
// only one line for the shared path
expect(out.length).toBe(1);
});
- it("normalizes session JSON before refinement (LoCoMo turns)", async () => {
+ it("normalizes session JSON before refinement (turn-array sessions)", async () => {
const sessionContent = JSON.stringify({
turns: [
- { dia_id: "D1:1", speaker: "Alice", text: "greeting foo here" },
+ { dia_id: "D1:1", speaker: "Alice", text: "project foo update" },
{ dia_id: "D1:2", speaker: "Bob", text: "unrelated" },
],
});
const api = {
query: vi.fn()
- .mockResolvedValueOnce([])
- .mockResolvedValueOnce([{ path: "/sessions/conv_0_session_1.json", content: sessionContent }]),
+ .mockResolvedValueOnce([{ path: "/sessions/alice/chat_1.json", content: sessionContent }]),
} as any;
const out = await grepBothTables(api, "m", "s", baseParams, "/");
// Only the matching turn is returned, not the whole JSON blob
- expect(out.some(l => l.includes("[D1:1] Alice: greeting foo here"))).toBe(true);
+ expect(out.some(l => l.includes("[D1:1] Alice: project foo update"))).toBe(true);
expect(out.some(l => l.includes("unrelated"))).toBe(false);
});
it("uses contentScanOnly when pattern has regex metacharacters", async () => {
const api = mockApi([{ path: "/a", content: "this is a test" }]);
await grepBothTables(api, "m", "s", { ...baseParams, pattern: "t.*t" }, "/");
- const [memSql] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
- expect(memSql).not.toContain("ILIKE");
- expect(memSql).not.toContain("summary::text LIKE");
+ const [sql] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
+ expect(sql).not.toContain("summary::text LIKE");
+ expect(sql).not.toContain("message::text LIKE");
+ });
+
+ it("adds a safe literal prefilter for wildcard regexes with stable anchors", async () => {
+ const api = mockApi([{ path: "/a", content: "foo middle bar" }]);
+ await grepBothTables(api, "m", "s", { ...baseParams, pattern: "foo.*bar" }, "/");
+ const [sql] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
+ expect(sql).toContain("summary::text LIKE '%foo%'");
});
it("routes to ILIKE when ignoreCase is set", async () => {
const api = mockApi([]);
await grepBothTables(api, "m", "s", { ...baseParams, ignoreCase: true }, "/");
- const [memSql] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
- expect(memSql).toContain("ILIKE");
+ const [sql] = api.query.mock.calls.map((c: unknown[]) => c[0] as string);
+ expect(sql).toContain("ILIKE");
+ });
+
+ it("uses a single union query even for scoped target paths", async () => {
+ const api = mockApi([{ path: "/summaries/a.md", content: "foo line" }]);
+ await grepBothTables(api, "memory", "sessions", baseParams, "/summaries");
+ expect(api.query).toHaveBeenCalledTimes(1);
+ const sql = api.query.mock.calls[0][0] as string;
+ expect(sql).toContain('FROM "memory"');
+ expect(sql).toContain('FROM "sessions"');
+ expect(sql).toContain("UNION ALL");
+ });
+});
+
+describe("regex literal prefilter", () => {
+ it("returns null for an empty pattern", () => {
+ expect(extractRegexLiteralPrefilter("")).toBeNull();
+ });
+
+ it("extracts a literal from simple wildcard regexes", () => {
+ expect(extractRegexLiteralPrefilter("foo.*bar")).toBe("foo");
+ expect(extractRegexLiteralPrefilter("prefix.*suffix")).toBe("prefix");
+ expect(extractRegexLiteralPrefilter("x.*suffix")).toBe("suffix");
+ });
+
+ it("returns null for complex regex features", () => {
+ expect(extractRegexLiteralPrefilter("colou?r")).toBeNull();
+ expect(extractRegexLiteralPrefilter("foo|bar")).toBeNull();
+ expect(extractRegexLiteralPrefilter("[ab]foo")).toBeNull();
+ });
+
+ it("handles escaped literals and rejects dangling escapes or bare dots", () => {
+ expect(extractRegexLiteralPrefilter("foo\\.bar")).toBe("foo.bar");
+ expect(extractRegexLiteralPrefilter("\\d+foo")).toBeNull();
+ expect(extractRegexLiteralPrefilter("foo\\")).toBeNull();
+ expect(extractRegexLiteralPrefilter("foo.bar")).toBeNull();
+ });
+
+ it("builds grep search options with regex prefilter when safe", () => {
+ const opts = buildGrepSearchOptions({
+ pattern: "foo.*bar",
+ ignoreCase: true,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ }, "/summaries");
+
+ expect(opts.contentScanOnly).toBe(true);
+ expect(opts.likeOp).toBe("ILIKE");
+ expect(opts.prefilterPattern).toBe("foo");
+ expect(opts.pathFilter).toContain("/summaries");
+ });
+
+ it("extracts safe alternation anchors and carries them into grep search options", () => {
+ expect(extractRegexAlternationPrefilters("relationship|partner|married")).toEqual([
+ "relationship",
+ "partner",
+ "married",
+ ]);
+
+ const opts = buildGrepSearchOptions({
+ pattern: "relationship|partner|married",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: false,
+ }, "/summaries");
+
+ expect(opts.contentScanOnly).toBe(true);
+ expect(opts.prefilterPatterns).toEqual(["relationship", "partner", "married"]);
+ });
+
+ it("rejects alternation prefilters when grouping makes them unsafe", () => {
+ expect(extractRegexAlternationPrefilters("(foo|bar)")).toBeNull();
+ expect(extractRegexAlternationPrefilters("foo|bar.*baz")).toEqual(["foo", "bar"]);
+ });
+
+ it("preserves escaped alternation characters inside a literal branch", () => {
+ expect(extractRegexAlternationPrefilters("foo\\|bar|baz")).toEqual(["foo|bar", "baz"]);
+ expect(extractRegexAlternationPrefilters("foo|bar\\.md")).toEqual(["foo", "bar.md"]);
+ });
+
+ it("keeps fixed-string searches on the SQL-filtered path even with regex metacharacters", () => {
+ const opts = buildGrepSearchOptions({
+ pattern: "foo.*bar",
+ ignoreCase: false,
+ wordMatch: false,
+ filesOnly: false,
+ countOnly: false,
+ lineNumber: false,
+ invertMatch: false,
+ fixedString: true,
+ }, "/summaries/alice/s1.md");
+
+ expect(opts.contentScanOnly).toBe(false);
+ expect(opts.prefilterPattern).toBeUndefined();
+ expect(opts.pathFilter).toBe(" AND path = '/summaries/alice/s1.md'");
});
});
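
These prefilter tests pin down a narrow contract: only `.*`-separated literal runs (plus explicitly escaped metacharacters) are safe to push down into SQL, and the longest run wins, with earlier runs breaking ties. A minimal sketch consistent with the assertions above; the helper name matches the tests, but the metacharacter set and tie-breaking rule are inferred from the expectations, not copied from the source:

```ts
// Sketch only: reconstructs the behaviour the tests assert, not the shipped code.
const META = new Set(["?", "+", "|", "[", "]", "(", ")", "{", "}", "^", "$", ".", "*"]);

export function extractRegexLiteralPrefilter(pattern: string): string | null {
  if (!pattern) return null;
  const segments: string[] = [];
  let current = "";
  for (let i = 0; i < pattern.length; i++) {
    const ch = pattern[i];
    if (ch === "\\") {
      const next = pattern[i + 1];
      if (next === undefined) return null;        // dangling escape: unsafe
      if (/[A-Za-z0-9]/.test(next)) return null;  // \d, \w, ...: character classes
      current += next;                            // \. and \| are plain literals
      i++;
    } else if (ch === "." && pattern[i + 1] === "*") {
      segments.push(current);                     // `.*` separates literal runs
      current = "";
      i++;
    } else if (META.has(ch)) {
      return null;                                // any other metachar: give up
    } else {
      current += ch;
    }
  }
  segments.push(current);
  let best: string | null = null;
  for (const seg of segments) {
    if (seg && (best === null || seg.length > best.length)) best = seg;
  }
  return best;
}
```

The extracted literal is what lands in the `summary::text LIKE '%foo%'` clause asserted above: every match of `foo.*bar` must contain `foo`, so the SQL prefilter can never drop a true match before the exact regex is re-applied in process. The alternation variant presumably runs the same per-branch scan after splitting on unescaped `|`, returning null as soon as any branch (or surrounding grouping) is not a pure literal.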
diff --git a/claude-code/tests/grep-direct.test.ts b/claude-code/tests/grep-direct.test.ts
index df74a0d..0f56c9a 100644
--- a/claude-code/tests/grep-direct.test.ts
+++ b/claude-code/tests/grep-direct.test.ts
@@ -8,16 +8,14 @@ describe("handleGrepDirect", () => {
lineNumber: false, invertMatch: false, fixedString: false,
};
- function mockApi(mem: unknown[], sess: unknown[]) {
+ function mockApi(rows: unknown[]) {
return {
- query: vi.fn()
- .mockImplementationOnce(async () => mem)
- .mockImplementationOnce(async () => sess),
+ query: vi.fn().mockImplementationOnce(async () => rows),
} as any;
}
it("returns null when pattern is empty", async () => {
- const api = mockApi([], []);
+ const api = mockApi([]);
const r = await handleGrepDirect(api, "memory", "sessions", { ...baseParams, pattern: "" });
expect(r).toBeNull();
expect(api.query).not.toHaveBeenCalled();
@@ -26,30 +24,29 @@ describe("handleGrepDirect", () => {
it("delegates to grepBothTables and joins the match lines", async () => {
const api = mockApi(
[{ path: "/summaries/a.md", content: "foo line here\nbar line" }],
- [],
);
const r = await handleGrepDirect(api, "memory", "sessions", baseParams);
expect(r).toBe("foo line here");
});
it("emits '(no matches)' when both tables return nothing", async () => {
- const api = mockApi([], []);
+ const api = mockApi([]);
const r = await handleGrepDirect(api, "memory", "sessions", baseParams);
expect(r).toBe("(no matches)");
});
it("merges results from both memory and sessions", async () => {
- const api = mockApi(
- [{ path: "/summaries/a.md", content: "foo in summary" }],
- [{ path: "/sessions/b.jsonl", content: "foo in session" }],
- );
+ const api = mockApi([
+ { path: "/summaries/a.md", content: "foo in summary" },
+ { path: "/sessions/b.jsonl", content: "foo in session" },
+ ]);
const r = await handleGrepDirect(api, "memory", "sessions", baseParams);
expect(r).toContain("/summaries/a.md:foo in summary");
expect(r).toContain("/sessions/b.jsonl:foo in session");
});
it("applies ignoreCase flag at SQL level (ILIKE)", async () => {
- const api = mockApi([{ path: "/a", content: "Foo" }], []);
+ const api = mockApi([{ path: "/a", content: "Foo" }]);
await handleGrepDirect(api, "memory", "sessions", { ...baseParams, ignoreCase: true });
const sql = api.query.mock.calls[0][0] as string;
expect(sql).toContain("ILIKE");
@@ -93,6 +90,13 @@ describe("parseBashGrep: long options", () => {
expect(r).not.toBeNull();
expect(r!.pattern).toBe("foo");
});
+
+ it("accepts grep no-op long options that take inline numeric values", () => {
+ const r = parseBashGrep("grep --after-context=2 --before-context=3 --context=4 --max-count=1 foo /x");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("foo");
+ expect(r!.targetPath).toBe("/x");
+ });
});
@@ -140,6 +144,10 @@ describe("parseBashGrep", () => {
expect(parseBashGrep("grep -r")).toBeNull();
});
+ it("returns null for unterminated quoted commands", () => {
+ expect(parseBashGrep('grep "unterminated /dir')).toBeNull();
+ });
+
// ── Flag parsing ──
it("parses -i flag", () => {
@@ -227,4 +235,100 @@ describe("parseBashGrep", () => {
expect(r!.pattern).toBe("pattern");
expect(r!.targetPath).toBe("/dir");
});
+
+ it("does not split on alternation pipes inside quotes", () => {
+ const r = parseBashGrep("grep 'book|read' /dir | head -5");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("book|read");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("keeps escaped spaces inside unquoted patterns", () => {
+ const r = parseBashGrep("grep Melanie\\ sunrise /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("Melanie sunrise");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("consumes -A numeric values without treating them as paths", () => {
+ const r = parseBashGrep("grep -A 5 'Caroline' /summaries/");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("Caroline");
+ expect(r!.targetPath).toBe("/summaries/");
+ });
+
+ it("consumes attached -B numeric values without shifting the target path", () => {
+ const r = parseBashGrep("grep -B5 'friends' /sessions/");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("friends");
+ expect(r!.targetPath).toBe("/sessions/");
+ });
+
+ it("consumes -m values without shifting the target path", () => {
+ const r = parseBashGrep("grep -m 1 'single' /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("single");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("uses -e as the explicit pattern source", () => {
+ const r = parseBashGrep("grep -e 'book|read' /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("book|read");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("uses inline -e values as the explicit pattern source", () => {
+ const r = parseBashGrep("grep -ebook /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("book");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("uses --regexp= as the explicit pattern source", () => {
+ const r = parseBashGrep("grep --regexp=book\\|read /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("book|read");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("defaults explicit -e searches to / when no target path is given", () => {
+ const r = parseBashGrep("grep -e 'book|read'");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("book|read");
+ expect(r!.targetPath).toBe("/");
+ });
+
+ it("returns null when a value-taking long option is missing its value", () => {
+ expect(parseBashGrep("grep --after-context")).toBeNull();
+ });
+
+ it("returns null when -A is missing its value", () => {
+ expect(parseBashGrep("grep -A")).toBeNull();
+ });
+
+ it("returns null when -e is missing its value", () => {
+ expect(parseBashGrep("grep -e")).toBeNull();
+ });
+
+ it("tolerates unknown short flags without crashing", () => {
+ const r = parseBashGrep("grep -Z foo /dir");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("foo");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("preserves escaped pipes outside quotes as part of the pattern", () => {
+ const r = parseBashGrep("grep foo\\|bar /dir | head -5");
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe("foo|bar");
+ expect(r!.targetPath).toBe("/dir");
+ });
+
+ it("preserves escaped quotes inside double-quoted patterns", () => {
+ const r = parseBashGrep('grep "foo\\"bar" /dir');
+ expect(r).not.toBeNull();
+ expect(r!.pattern).toBe('foo"bar');
+ expect(r!.targetPath).toBe("/dir");
+ });
});
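
Most of the parseBashGrep additions above are quoting and escaping edge cases. A tokenizer in the following shape would satisfy them; this is an assumed sketch, not the shipped parser, and option handling (`-A 5`, `-ebook`, `--regexp=...`) would happen in a later pass over the tokens:

```ts
// Splits a bash-ish command into tokens, honouring quotes and backslash
// escapes, and stopping at the first unescaped pipe so that trailing
// stages like `| head -5` are ignored. Returns null on malformed input.
function tokenize(cmd: string): string[] | null {
  const tokens: string[] = [];
  let cur = "";
  let quote: '"' | "'" | null = null;
  let started = false;
  for (let i = 0; i < cmd.length; i++) {
    const ch = cmd[i];
    if (quote) {
      if (quote === '"' && ch === "\\" && cmd[i + 1] === '"') { cur += '"'; i++; }
      else if (ch === quote) quote = null;
      else cur += ch;                       // `|` inside quotes stays literal
    } else if (ch === "'" || ch === '"') {
      quote = ch; started = true;
    } else if (ch === "\\") {
      if (i + 1 >= cmd.length) return null; // dangling escape
      cur += cmd[++i]; started = true;      // `\ ` and `\|` join the token
    } else if (ch === "|") {
      break;                                // unescaped pipe: drop the rest
    } else if (/\s/.test(ch)) {
      if (started || cur) { tokens.push(cur); cur = ""; started = false; }
    } else {
      cur += ch; started = true;
    }
  }
  if (quote) return null;                   // unterminated quote
  if (started || cur) tokens.push(cur);
  return tokens;
}
```

Under this sketch, `grep 'book|read' /dir | head -5` tokenizes to `["grep", "book|read", "/dir"]` and `grep "unterminated /dir` returns null, which are exactly the two behaviours the tests above lock in.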
diff --git a/claude-code/tests/grep-interceptor.test.ts b/claude-code/tests/grep-interceptor.test.ts
index a2584ce..ba7e67b 100644
--- a/claude-code/tests/grep-interceptor.test.ts
+++ b/claude-code/tests/grep-interceptor.test.ts
@@ -1,6 +1,7 @@
import { describe, it, expect, vi } from "vitest";
import { createGrepCommand } from "../../src/shell/grep-interceptor.js";
import { DeeplakeFs } from "../../src/shell/deeplake-fs.js";
+import * as grepCore from "../../src/shell/grep-core.js";
// ── Minimal mocks ─────────────────────────────────────────────────────────────
function makeClient(queryResults: Record<string, unknown>[] = []) {
@@ -30,6 +31,31 @@ function makeCtx(fs: DeeplakeFs, cwd = "/memory") {
// cache. Tests below assert that new contract.
describe("grep interceptor", () => {
+ it("returns exitCode=1 when the pattern is missing", async () => {
+ const client = makeClient();
+ const fs = await DeeplakeFs.create(client as never, "test", "/memory");
+ client.query.mockClear();
+ const cmd = createGrepCommand(client as never, fs, "test");
+ const result = await cmd.execute([], makeCtx(fs) as never);
+ expect(result).toEqual({
+ stdout: "",
+ stderr: "grep: missing pattern\n",
+ exitCode: 1,
+ });
+ expect(client.query).not.toHaveBeenCalled();
+ });
+
+ it("returns exitCode=1 when all target paths resolve to nothing", async () => {
+ const client = makeClient();
+ const fs = await DeeplakeFs.create(client as never, "test", "/memory");
+ vi.spyOn(fs, "resolvePath").mockReturnValue("");
+ client.query.mockClear();
+ const cmd = createGrepCommand(client as never, fs, "test");
+ const result = await cmd.execute(["foo", "missing"], makeCtx(fs) as never);
+ expect(result).toEqual({ stdout: "", stderr: "", exitCode: 1 });
+ expect(client.query).not.toHaveBeenCalled();
+ });
+
it("returns exitCode=127 for paths outside mount (pass-through)", async () => {
const client = makeClient();
const fs = await DeeplakeFs.create(client as never, "test", "/memory");
@@ -44,13 +70,11 @@ describe("grep interceptor", () => {
const client = makeClient([{ path: "/memory/a.txt", content: "hello world" }]);
const fs = await DeeplakeFs.create(client as never, "test", "/memory");
client.query.mockClear();
- // Both mem and sess queries should run; return matching content for both.
client.query.mockResolvedValue([{ path: "/memory/a.txt", content: "hello world" }]);
const cmd = createGrepCommand(client as never, fs, "test", "sessions");
const result = await cmd.execute(["hello", "/memory"], makeCtx(fs) as never);
- // At least one call for memory + one for sessions
const sqls = client.query.mock.calls.map((c: unknown[]) => c[0] as string);
expect(sqls.some(s => /FROM "test"/.test(s) && /ILIKE|LIKE/.test(s))).toBe(true);
expect(sqls.some(s => /FROM "sessions"/.test(s) && /ILIKE|LIKE/.test(s))).toBe(true);
@@ -60,6 +84,24 @@ describe("grep interceptor", () => {
expect(result.exitCode).toBe(0);
});
+ it("uses one SQL query even when grep receives multiple target paths", async () => {
+ const client = makeClient([{ path: "/memory/a.txt", content: "hello world" }]);
+ const fs = await DeeplakeFs.create(client as never, "test", "/memory");
+ client.query.mockClear();
+ client.query.mockResolvedValue([{ path: "/memory/a.txt", content: "hello world" }]);
+
+ const cmd = createGrepCommand(client as never, fs, "test", "sessions");
+ const result = await cmd.execute(["hello", "/memory/a", "/memory/b"], makeCtx(fs) as never);
+
+ expect(client.query).toHaveBeenCalledTimes(1);
+ const sql = client.query.mock.calls[0][0] as string;
+ expect(sql).toContain('FROM "test"');
+ expect(sql).toContain('FROM "sessions"');
+ expect(sql).toContain("path = '/memory/a'");
+ expect(sql).toContain("path = '/memory/b'");
+ expect(result.exitCode).toBe(0);
+ });
+
it("falls back to in-memory scan when SQL returns nothing", async () => {
const client = makeClient([]);
const fs = await DeeplakeFs.create(client as never, "test", "/memory");
@@ -162,4 +204,17 @@ describe("grep interceptor", () => {
expect.arrayContaining(["/memory/a.txt", "/memory/b.txt"])
);
});
+
+ it("falls back to the FS cache when the SQL search rejects", async () => {
+ const client = makeClient();
+ const fs = await DeeplakeFs.create(client as never, "test", "/memory");
+ await fs.writeFile("/memory/a.txt", "hello world");
+ vi.spyOn(grepCore, "searchDeeplakeTables").mockRejectedValueOnce(new Error("timeout"));
+
+ const cmd = createGrepCommand(client as never, fs, "test");
+ const result = await cmd.execute(["hello", "/memory"], makeCtx(fs) as never);
+
+ expect(result.exitCode).toBe(0);
+ expect(result.stdout).toContain("hello world");
+ });
});
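
The interceptor tests now assert a single round trip: one UNION ALL statement spanning both tables, with multiple target paths folded into `path = '...'` equality clauses. A hypothetical builder matching those assertions; the quoting and the bare `content` column are simplifications (the grep-core tests show the real query also matches `summary::text` and `message::text` casts):

```ts
// Illustrative only: produces SQL with the shape the tests check for.
function buildUnionSql(
  memoryTable: string,
  sessionsTable: string,
  likeOp: "LIKE" | "ILIKE",
  pattern: string,
  paths: string[],
): string {
  const esc = (s: string) => s.replace(/'/g, "''");
  const pathFilter = paths.length
    ? ` AND (${paths.map((p) => `path = '${esc(p)}'`).join(" OR ")})`
    : "";
  const match = `content ${likeOp} '%${esc(pattern)}%'`;
  return (
    `SELECT path, content FROM "${memoryTable}" WHERE ${match}${pathFilter}` +
    ` UNION ALL ` +
    `SELECT path, content FROM "${sessionsTable}" WHERE ${match}${pathFilter}`
  );
}
```

Collapsing the two per-table queries into one statement is also what makes the new failure-mode tests meaningful: there is exactly one call to count, and when it rejects, the command falls back to the in-process FS cache instead of surfacing the SQL error.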
diff --git a/claude-code/tests/hooks-source.test.ts b/claude-code/tests/hooks-source.test.ts
new file mode 100644
index 0000000..10c4595
--- /dev/null
+++ b/claude-code/tests/hooks-source.test.ts
@@ -0,0 +1,872 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
+import type { Config } from "../../src/config.js";
+import type { Credentials } from "../../src/commands/auth.js";
+import {
+ buildCaptureEntry,
+ maybeTriggerPeriodicSummary,
+ runCaptureHook,
+} from "../../src/hooks/capture.js";
+import {
+ extractGrepParams,
+ getShellCommand,
+ isSafe,
+ processPreToolUse,
+ rewritePaths,
+ touchesMemory,
+} from "../../src/hooks/pre-tool-use.js";
+import {
+ buildSessionStartAdditionalContext,
+ runSessionStartHook,
+} from "../../src/hooks/session-start.js";
+import {
+ createPlaceholder,
+ runSessionStartSetup,
+} from "../../src/hooks/session-start-setup.js";
+import { runSessionEndHook } from "../../src/hooks/session-end.js";
+import { isDirectRun } from "../../src/utils/direct-run.js";
+
+const baseConfig: Config = {
+ token: "token",
+ orgId: "org-1",
+ orgName: "Acme",
+ userName: "alice",
+ workspaceId: "default",
+ apiUrl: "https://api.example.com",
+ tableName: "memory",
+ sessionsTableName: "sessions",
+ memoryPath: "/tmp/.deeplake/memory",
+};
+
+const baseCreds: Credentials = {
+ token: "token",
+ orgId: "org-1",
+ orgName: "Acme",
+ userName: "alice",
+ workspaceId: "default",
+ apiUrl: "https://api.example.com",
+ savedAt: "2026-01-01T00:00:00.000Z",
+};
+
+let originalArgv1: string | undefined;
+
+beforeEach(() => {
+ originalArgv1 = process.argv[1];
+});
+
+afterEach(() => {
+ if (originalArgv1 === undefined) delete process.argv[1];
+ else process.argv[1] = originalArgv1;
+ vi.restoreAllMocks();
+});
+
+describe("direct-run", () => {
+ it("returns true when the current entry matches the module path", () => {
+ process.argv[1] = "/tmp/hook.js";
+ expect(isDirectRun("file:///tmp/hook.js")).toBe(true);
+ });
+
+ it("returns false when the current entry differs", () => {
+ process.argv[1] = "/tmp/other.js";
+ expect(isDirectRun("file:///tmp/hook.js")).toBe(false);
+ });
+
+ it("returns false when there is no entry script", () => {
+ delete process.argv[1];
+ expect(isDirectRun("file:///tmp/hook.js")).toBe(false);
+ });
+
+ it("returns false when the meta url cannot be converted to a file path", () => {
+ process.argv[1] = "/tmp/hook.js";
+ expect(isDirectRun("not-a-valid-file-url")).toBe(false);
+ });
+});
+
+describe("claude capture source", () => {
+ it("builds user, tool, and assistant entries", () => {
+ const user = buildCaptureEntry({
+ session_id: "s1",
+ hook_event_name: "UserPromptSubmit",
+ prompt: "hello",
+ }, "2026-01-01T00:00:00.000Z");
+ const tool = buildCaptureEntry({
+ session_id: "s1",
+ hook_event_name: "PostToolUse",
+ tool_name: "Read",
+ tool_input: { file_path: "/tmp/a.ts" },
+ tool_response: { content: "ok" },
+ tool_use_id: "tu-1",
+ }, "2026-01-01T00:00:01.000Z");
+ const assistant = buildCaptureEntry({
+ session_id: "s1",
+ hook_event_name: "Stop",
+ last_assistant_message: "done",
+ agent_transcript_path: "/tmp/agent.jsonl",
+ }, "2026-01-01T00:00:02.000Z");
+
+ expect(user?.type).toBe("user_message");
+ expect(user?.content).toBe("hello");
+ expect(tool?.type).toBe("tool_call");
+ expect(tool?.tool_name).toBe("Read");
+ expect(JSON.parse(tool?.tool_input as string)).toEqual({ file_path: "/tmp/a.ts" });
+ expect(assistant?.type).toBe("assistant_message");
+ expect(assistant?.agent_transcript_path).toBe("/tmp/agent.jsonl");
+ expect(buildCaptureEntry({ session_id: "s1" }, "2026-01-01T00:00:00.000Z")).toBeNull();
+ });
+
+ it("triggers periodic summaries only when the threshold is met and the lock is acquired", () => {
+ const bump = vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 }));
+ const load = vi.fn(() => ({ everyNMessages: 5, everyHours: 24 }));
+ const should = vi.fn(() => true);
+ const lock = vi.fn(() => true);
+ const spawn = vi.fn();
+ const wiki = vi.fn();
+
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: bump as any,
+ loadTriggerConfigFn: load as any,
+ shouldTriggerFn: should as any,
+ tryAcquireLockFn: lock as any,
+ spawnWikiWorkerFn: spawn as any,
+ wikiLogFn: wiki as any,
+ bundleDir: "/tmp/bundle",
+ });
+
+ expect(spawn).toHaveBeenCalledWith({
+ config: baseConfig,
+ sessionId: "s1",
+ cwd: "/repo",
+ bundleDir: "/tmp/bundle",
+ reason: "Periodic",
+ });
+ expect(wiki).toHaveBeenCalled();
+ });
+
+ it("suppresses periodic summaries when the lock is held", () => {
+ const spawn = vi.fn();
+ const logFn = vi.fn();
+
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 })) as any,
+ loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any,
+ shouldTriggerFn: vi.fn(() => true) as any,
+ tryAcquireLockFn: vi.fn(() => false) as any,
+ spawnWikiWorkerFn: spawn as any,
+ logFn,
+ });
+
+ expect(spawn).not.toHaveBeenCalled();
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("lock held"));
+ });
+
+ it("returns disabled, no_config, ignored, queued, and flushed states", async () => {
+ expect(await runCaptureHook({ session_id: "s1", prompt: "hi" }, {
+ captureEnabled: false,
+ config: baseConfig,
+ })).toEqual({ status: "disabled" });
+
+ expect(await runCaptureHook({ session_id: "s1", prompt: "hi" }, {
+ config: null,
+ })).toEqual({ status: "no_config" });
+
+ expect(await runCaptureHook({ session_id: "s1" }, {
+ config: baseConfig,
+ })).toEqual({ status: "ignored" });
+
+ const append = vi.fn();
+ const maybe = vi.fn();
+ const clear = vi.fn();
+ const queued = await runCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "UserPromptSubmit",
+ prompt: "hi",
+ }, {
+ config: baseConfig,
+ now: () => "2026-01-01T00:00:00.000Z",
+ appendQueuedSessionRowFn: append as any,
+ clearSessionQueryCacheFn: clear as any,
+ maybeTriggerPeriodicSummaryFn: maybe as any,
+ });
+ expect(queued.status).toBe("queued");
+ expect(append).toHaveBeenCalledTimes(1);
+ expect(clear).toHaveBeenCalledWith("s1");
+ expect(maybe).toHaveBeenCalledWith("s1", "/repo", baseConfig);
+
+ const flush = vi.fn(async () => ({ status: "flushed", rows: 2, batches: 1 }));
+ const flushed = await runCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ last_assistant_message: "done",
+ }, {
+ config: baseConfig,
+ now: () => "2026-01-01T00:00:01.000Z",
+ appendQueuedSessionRowFn: vi.fn() as any,
+ flushSessionQueueFn: flush as any,
+ });
+ expect(flushed).toMatchObject({ status: "queued", flushStatus: "flushed" });
+ expect(flush).toHaveBeenCalledTimes(1);
+ });
+
+ it("suppresses periodic summaries when skipped or when the helper throws", () => {
+ const spawn = vi.fn();
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ wikiWorker: true,
+ spawnWikiWorkerFn: spawn as any,
+ });
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => { throw new Error("boom"); }) as any,
+ spawnWikiWorkerFn: spawn as any,
+ logFn: vi.fn(),
+ });
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => ({ totalCount: 1, lastSummaryCount: 1 })) as any,
+ loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any,
+ shouldTriggerFn: vi.fn(() => false) as any,
+ spawnWikiWorkerFn: spawn as any,
+ });
+ expect(spawn).not.toHaveBeenCalled();
+ });
+
+ it("queues assistant events with fallback project and description metadata", async () => {
+ const append = vi.fn();
+ const build = vi.fn((row) => row);
+ const result = await runCaptureHook({
+ session_id: "s1",
+ last_assistant_message: "done",
+ }, {
+ config: baseConfig,
+ appendQueuedSessionRowFn: append as any,
+ buildQueuedSessionRowFn: build as any,
+ maybeTriggerPeriodicSummaryFn: vi.fn() as any,
+ now: () => "2026-01-01T00:00:00.000Z",
+ });
+ expect(result.status).toBe("queued");
+ expect(build).toHaveBeenCalledWith(expect.objectContaining({
+ projectName: "unknown",
+ description: "",
+ }));
+ });
+});
+
+describe("claude pre-tool source", () => {
+ it("detects, rewrites, and validates memory commands", () => {
+ expect(touchesMemory("cat ~/.deeplake/memory/index.md")).toBe(true);
+ expect(rewritePaths("cat ~/.deeplake/memory/index.md")).toBe("cat /index.md");
+ expect(isSafe("cat /index.md | head -20")).toBe(true);
+ expect(isSafe("python3 -c 'print(1)' /index.md")).toBe(false);
+ });
+
+ it("builds shell commands and grep params for supported tools", () => {
+ expect(getShellCommand("Read", { file_path: "~/.deeplake/memory/index.md" })).toBe("cat /index.md");
+ expect(getShellCommand("Read", { path: "~/.deeplake/memory" })).toBe("ls /");
+ expect(getShellCommand("Glob", { path: "~/.deeplake/memory/summaries" })).toBe("ls /");
+ expect(getShellCommand("Bash", { command: "cat ~/.deeplake/memory/index.md" })).toBe("cat /index.md");
+ expect(getShellCommand("Bash", { command: "python3 ~/.deeplake/memory/index.md" })).toBeNull();
+
+ const grep = extractGrepParams("Grep", {
+ pattern: "needle",
+ path: "~/.deeplake/memory/index.md",
+ output_mode: "count",
+ "-i": true,
+ "-n": true,
+ }, "grep -r needle /");
+ expect(grep).toMatchObject({
+ pattern: "needle",
+ targetPath: "/index.md",
+ ignoreCase: true,
+ countOnly: true,
+ lineNumber: true,
+ });
+ });
+
+ it("returns guidance for unsupported memory commands and passthrough for non-memory commands", async () => {
+ const guidance = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "python3 -c 'print(1)' ~/.deeplake/memory" },
+ tool_use_id: "tu-1",
+ }, {
+ config: baseConfig,
+ });
+ expect(guidance?.command).toContain("RETRY REQUIRED");
+
+ const passthrough = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "ls -la /tmp" },
+ tool_use_id: "tu-2",
+ }, {
+ config: baseConfig,
+ });
+ expect(passthrough).toBeNull();
+ });
+
+ it("uses direct grep, direct reads, listings, finds, and shell fallback", async () => {
+ const grepDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Grep",
+ tool_input: {
+ pattern: "needle",
+ path: "~/.deeplake/memory/index.md",
+ output_mode: "files_with_matches",
+ },
+ tool_use_id: "tu-1",
+ }, {
+ config: baseConfig,
+ handleGrepDirectFn: vi.fn(async () => "/index.md:needle") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(grepDecision?.command).toContain("/index.md:needle");
+
+ const api = {
+ query: vi.fn(async () => [
+ {
+ path: "/summaries/alice/s1.md",
+ project: "repo",
+ description: "session summary",
+ creation_date: "2026-01-01T00:00:00.000Z",
+ },
+ ]),
+ };
+ const readDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Read",
+ tool_input: { file_path: "~/.deeplake/memory/index.md" },
+ tool_use_id: "tu-2",
+ }, {
+ config: baseConfig,
+ createApi: vi.fn(() => api as any),
+ readVirtualPathContentFn: vi.fn(async () => null) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(readDecision?.command).toContain("# Memory Index");
+
+ const readDirDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Read",
+ tool_input: { path: "~/.deeplake/memory" },
+ tool_use_id: "tu-2b",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => [
+ { path: "/summaries/alice/s1.md", size_bytes: 42 },
+ ]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(readDirDecision?.command).toContain("summaries/");
+
+ const lsDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "ls -la ~/.deeplake/memory/summaries" },
+ tool_use_id: "tu-3",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => [
+ { path: "/summaries/alice/s1.md", size_bytes: 42 },
+ ]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(lsDecision?.command).toContain("drwxr-xr-x");
+ expect(lsDecision?.command).toContain("alice/");
+
+ const findDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md'" },
+ tool_use_id: "tu-4",
+ }, {
+ config: baseConfig,
+ findVirtualPathsFn: vi.fn(async () => ["/summaries/alice/s1.md"]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(findDecision?.command).toContain("/summaries/alice/s1.md");
+
+ const fallback = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" },
+ tool_use_id: "tu-5",
+ }, {
+ config: null,
+ shellBundle: "/tmp/deeplake-shell.js",
+ });
+ expect(fallback?.command).toContain('node "/tmp/deeplake-shell.js"');
+ });
+
+ it("reuses cached /index.md content for direct and compiled reads within a session", async () => {
+ const readVirtualPathContentFn = vi.fn(async () => "fresh index");
+ const readVirtualPathContentsFn = vi.fn(async (_api, _memory, _sessions, paths: string[]) => new Map(
+ paths.map((path) => [path, path === "/index.md" ? "fresh index" : null]),
+ )) as any;
+ const readCachedIndexContentFn = vi.fn(() => "cached index");
+ const writeCachedIndexContentFn = vi.fn();
+
+ const directDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Read",
+ tool_input: { file_path: "~/.deeplake/memory/index.md" },
+ tool_use_id: "tu-cache-1",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: readCachedIndexContentFn as any,
+ writeCachedIndexContentFn: writeCachedIndexContentFn as any,
+ readVirtualPathContentFn: readVirtualPathContentFn as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(directDecision?.command).toContain("cached index");
+ expect(readVirtualPathContentFn).not.toHaveBeenCalled();
+ expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s1", "cached index");
+
+ const compiledDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" },
+ tool_use_id: "tu-cache-2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: readCachedIndexContentFn as any,
+ writeCachedIndexContentFn: writeCachedIndexContentFn as any,
+ readVirtualPathContentsFn,
+ executeCompiledBashCommandFn: vi.fn(async (_api, _table, _sessions, _cmd, deps) => {
+ const map = await deps.readVirtualPathContentsFn(_api, _table, _sessions, ["/index.md"]);
+ return map.get("/index.md") ?? null;
+ }) as any,
+ });
+ expect(compiledDecision?.command).toContain("cached index");
+ expect(readVirtualPathContentsFn).not.toHaveBeenCalled();
+ });
+
+ it("supports head, tail, wc -l, empty directories, and shell fallback after direct-query errors", async () => {
+ const contentReader = vi.fn(async () => "line1\nline2\nline3");
+
+ const headDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "head -2 ~/.deeplake/memory/index.md" },
+ tool_use_id: "tu-6",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(headDecision?.command).toContain("line1\\nline2");
+
+ const tailDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "tail -2 ~/.deeplake/memory/index.md" },
+ tool_use_id: "tu-7",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(tailDecision?.command).toContain("line2\\nline3");
+
+ const wcDecision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "wc -l ~/.deeplake/memory/index.md" },
+ tool_use_id: "tu-8",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(wcDecision?.command).toContain("3 /index.md");
+
+ const emptyDir = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Glob",
+ tool_input: { path: "~/.deeplake/memory/empty" },
+ tool_use_id: "tu-9",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => []) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(emptyDir?.command).toContain("(empty directory)");
+
+ const fallback = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Grep",
+ tool_input: {
+ pattern: "needle",
+ path: "~/.deeplake/memory/index.md",
+ },
+ tool_use_id: "tu-10",
+ }, {
+ config: baseConfig,
+ handleGrepDirectFn: vi.fn(async () => { throw new Error("boom"); }) as any,
+ shellBundle: "/tmp/deeplake-shell.js",
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(fallback?.description).toContain("DeepLake shell");
+ });
+
+ it("returns compiled output when the bash compiler can satisfy the command directly", async () => {
+ const decision = await processPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" },
+ tool_use_id: "tu-11",
+ }, {
+ config: baseConfig,
+ executeCompiledBashCommandFn: vi.fn(async () => "compiled output") as any,
+ });
+
+ expect(decision?.command).toContain("compiled output");
+ expect(decision?.description).toContain("DeepLake compiled");
+ });
+});
+
+describe("claude session start source", () => {
+ it("builds logged-in and logged-out context with update notices", () => {
+ const loggedIn = buildSessionStartAdditionalContext({
+ authCommand: "/tmp/auth-login.js",
+ creds: baseCreds,
+ currentVersion: "0.6.0",
+ latestVersion: "0.6.0",
+ });
+ const loggedOut = buildSessionStartAdditionalContext({
+ authCommand: "/tmp/auth-login.js",
+ creds: null,
+ currentVersion: "0.6.0",
+ latestVersion: "0.7.0",
+ });
+
+ expect(loggedIn).toContain("Logged in to Deeplake");
+ expect(loggedIn).toContain("Hivemind v0.6.0");
+ expect(loggedIn).toContain("resolve it against that session's own date/date_time metadata");
+ expect(loggedIn).toContain("convert the final answer into an absolute month/date/year");
+ expect(loggedIn).toContain("answer with the smallest exact phrase supported by memory");
+ expect(loggedIn).toContain('Do NOT answer "not found"');
+ expect(loggedOut).toContain("Not logged in to Deeplake");
+ expect(loggedOut).toContain("update available");
+ });
+
+ it("skips in wiki-worker mode and backfills usernames when needed", async () => {
+ expect(await runSessionStartHook({}, { wikiWorker: true })).toBeNull();
+
+ const save = vi.fn();
+ const result = await runSessionStartHook({}, {
+ creds: { ...baseCreds, userName: undefined },
+ saveCredentialsFn: save as any,
+ currentVersion: "0.6.0",
+ latestVersion: "0.6.0",
+ authCommand: "/tmp/auth-login.js",
+ });
+
+ expect(result?.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake");
+ expect(save).toHaveBeenCalledTimes(1);
+ });
+
+ it("logs unauthenticated startup and still returns context", async () => {
+ const logFn = vi.fn();
+ const result = await runSessionStartHook({}, {
+ creds: null,
+ currentVersion: null,
+ latestVersion: null,
+ authCommand: "/tmp/auth-login.js",
+ logFn,
+ });
+
+ expect(result?.hookSpecificOutput.additionalContext).toContain("Not logged in to Deeplake");
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("no credentials"));
+ });
+
+ it("falls back to org id and default workspace when names are missing", () => {
+ const context = buildSessionStartAdditionalContext({
+ authCommand: "/tmp/auth-login.js",
+ creds: { ...baseCreds, orgName: undefined, workspaceId: undefined } as any,
+ currentVersion: null,
+ latestVersion: null,
+ });
+ expect(context).toContain("org-1");
+ expect(context).toContain("workspace: default");
+ expect(context).not.toContain("Hivemind v");
+ });
+
+ it("logs authenticated startup without backfilling when the username is already present", async () => {
+ const logFn = vi.fn();
+ const save = vi.fn();
+ await runSessionStartHook({}, {
+ creds: { ...baseCreds, orgName: undefined },
+ saveCredentialsFn: save as any,
+ currentVersion: "0.6.0",
+ latestVersion: null,
+ authCommand: "/tmp/auth-login.js",
+ logFn,
+ });
+ expect(save).not.toHaveBeenCalled();
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("org=org-1"));
+ });
+});
+
+describe("claude session start setup source", () => {
+ it("creates placeholders only when summaries do not already exist", async () => {
+ const query = vi.fn(async (sql: string) => {
+ if (sql.startsWith("SELECT path")) return [];
+ return [];
+ });
+ const api = { query } as any;
+
+ await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default");
+
+ expect(query).toHaveBeenCalledTimes(2);
+ expect(String(query.mock.calls[1]?.[0])).toContain('INSERT INTO "memory"');
+ expect(String(query.mock.calls[1]?.[0])).toContain("/summaries/alice/s1.md");
+ expect(String(query.mock.calls[1]?.[0])).toContain("/sessions/alice/alice_Acme_default_s1.jsonl");
+
+ query.mockReset();
+ query.mockResolvedValueOnce([{ path: "/summaries/alice/s1.md" }]);
+ await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default");
+ expect(query).toHaveBeenCalledTimes(1);
+ });
+
+ it("handles no credentials, disabled session writes, auth failures, and update notices", async () => {
+ expect(await runSessionStartSetup({ session_id: "s1" }, {
+ creds: null,
+ })).toEqual({ status: "no_credentials" });
+
+ const createApi = vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => undefined),
+ query: vi.fn(async () => []),
+ }) as any);
+ const placeholder = vi.fn(async () => undefined);
+
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi,
+ isSessionWriteDisabledFn: vi.fn(() => true) as any,
+ createPlaceholderFn: placeholder as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any,
+ execSyncFn: vi.fn() as any,
+ });
+ expect(placeholder).toHaveBeenCalledTimes(1);
+ expect(createApi).toHaveBeenCalledTimes(1);
+
+ const markDisabled = vi.fn();
+ const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any);
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: { ...baseCreds, autoupdate: false },
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => { throw new Error("403 Forbidden"); }),
+ query: vi.fn(async () => []),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ isSessionWriteAuthErrorFn: vi.fn(() => true) as any,
+ markSessionWriteDisabledFn: markDisabled as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: vi.fn(async () => undefined) as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any,
+ });
+ expect(markDisabled).toHaveBeenCalledTimes(1);
+ expect(stderr).toHaveBeenCalledWith(expect.stringContaining("update available"));
+ });
+
+ it("backfills usernames, logs drained queues, and handles setup/version failures", async () => {
+ const save = vi.fn();
+ const logFn = vi.fn();
+ const wikiLogFn = vi.fn();
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: { ...baseCreds, userName: undefined, autoupdate: true },
+ saveCredentialsFn: save as any,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => undefined),
+ query: vi.fn(async () => []),
+ }) as any),
+ drainSessionQueuesFn: vi.fn(async () => ({
+ queuedSessions: 1,
+ flushedSessions: 1,
+ rows: 3,
+ batches: 1,
+ })) as any,
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: vi.fn(async () => undefined) as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.6.0") as any,
+ logFn,
+ wikiLogFn,
+ });
+ expect(save).toHaveBeenCalledTimes(1);
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("drained 1 queued session"));
+ expect(logFn).toHaveBeenCalledWith("version up to date: 0.6.0");
+ expect(wikiLogFn).not.toHaveBeenCalledWith(expect.stringContaining("failed"));
+
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => { throw new Error("boom"); }),
+ }) as any),
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => { throw new Error("offline"); }) as any,
+ logFn,
+ wikiLogFn,
+ });
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("setup failed: boom"));
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("version check failed: offline"));
+ expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom"));
+ });
+
+ it("skips duplicate queue drains while another session-start setup is already handling sessions", async () => {
+ const logFn = vi.fn();
+ const createPlaceholderFn = vi.fn(async () => undefined);
+ const ensureSessionsTable = vi.fn(async () => undefined);
+ const drainSessionQueuesFn = vi.fn(async () => ({
+ queuedSessions: 1,
+ flushedSessions: 1,
+ rows: 1,
+ batches: 1,
+ }));
+
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable,
+ query: vi.fn(async () => []),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => null) as any,
+ drainSessionQueuesFn: drainSessionQueuesFn as any,
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ logFn,
+ });
+
+ expect(ensureSessionsTable).not.toHaveBeenCalled();
+ expect(drainSessionQueuesFn).not.toHaveBeenCalled();
+ expect(createPlaceholderFn).toHaveBeenCalledTimes(1);
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("sessions drain already in progress"));
+ });
+
+ it("handles capture-disabled, successful autoupdate, and skipped setup work", async () => {
+ const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any);
+ const execSyncFn = vi.fn();
+ const createPlaceholderFn = vi.fn();
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ captureEnabled: false,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ }) as any),
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any,
+ execSyncFn: execSyncFn as any,
+ });
+ expect(createPlaceholderFn).not.toHaveBeenCalled();
+ expect(execSyncFn).toHaveBeenCalledTimes(1);
+ expect(stderr).toHaveBeenCalledWith(expect.stringContaining("auto-updated"));
+
+ await expect(runSessionStartSetup({ session_id: "", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ })).resolves.toEqual({ status: "complete" });
+ });
+
+ it("treats non-auth session setup errors as setup failures", async () => {
+ const wikiLogFn = vi.fn();
+ const createPlaceholderFn = vi.fn();
+ await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => { throw new Error("boom"); }),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ isSessionWriteAuthErrorFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ wikiLogFn,
+ });
+ expect(createPlaceholderFn).not.toHaveBeenCalled();
+ expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom"));
+ });
+
+ it("skips in wiki-worker mode and handles zero-drain session writes", async () => {
+ expect(await runSessionStartSetup({ session_id: "s1" }, {
+ wikiWorker: true,
+ })).toEqual({ status: "skipped" });
+
+ const createPlaceholderFn = vi.fn(async () => undefined);
+ await runSessionStartSetup({ session_id: "s1", cwd: undefined as any }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => undefined),
+ }) as any),
+ drainSessionQueuesFn: vi.fn(async () => ({
+ queuedSessions: 0,
+ flushedSessions: 0,
+ rows: 0,
+ batches: 0,
+ })) as any,
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ });
+ expect(createPlaceholderFn).toHaveBeenCalledWith(expect.anything(), "memory", "s1", "", "alice", "Acme", "default");
+ });
+});
+
+describe("claude session end source", () => {
+ it("skips when disabled, returns no_config, and flushes when active", async () => {
+ expect(await runSessionEndHook({ session_id: "s1" }, {
+ captureEnabled: false,
+ config: baseConfig,
+ })).toEqual({ status: "skipped" });
+
+ expect(await runSessionEndHook({ session_id: "s1" }, {
+ config: null,
+ })).toEqual({ status: "no_config" });
+
+ const flush = vi.fn(async () => ({ status: "flushed", rows: 3, batches: 1 }));
+ const spawn = vi.fn();
+ const wiki = vi.fn();
+ const result = await runSessionEndHook({ session_id: "s1", cwd: "/repo" }, {
+ config: baseConfig,
+ flushSessionQueueFn: flush as any,
+ spawnWikiWorkerFn: spawn as any,
+ wikiLogFn: wiki as any,
+ bundleDir: "/tmp/bundle",
+ });
+
+ expect(result).toEqual({ status: "flushed", flushStatus: "flushed" });
+ expect(flush).toHaveBeenCalledTimes(1);
+ expect(spawn).toHaveBeenCalledWith({
+ config: baseConfig,
+ sessionId: "s1",
+ cwd: "/repo",
+ bundleDir: "/tmp/bundle",
+ reason: "SessionEnd",
+ });
+ expect(wiki).toHaveBeenCalled();
+ });
+});
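
Everything in hooks-source.test.ts leans on one seam: each hook takes an options bag whose `*Fn` members override the real boundary functions, so the tests can run the full hook body without touching the network or filesystem. A stripped-down sketch of the pattern; the types and defaults here are illustrative, not the real signatures:

```ts
type Config = Record<string, unknown>; // stand-in for the real src/config.ts type

interface SessionEndDeps {
  captureEnabled?: boolean;
  config?: Config | null;
  flushSessionQueueFn?: (sessionId: string) => Promise<{ status: string }>;
  spawnWikiWorkerFn?: (opts: { sessionId: string; cwd: string }) => void;
}

// Production callers pass no deps and get the defaults; tests inject vi.fn()
// doubles and assert on how they were called.
export async function runSessionEndSketch(
  input: { session_id: string; cwd?: string },
  deps: SessionEndDeps = {},
): Promise<{ status: string; flushStatus?: string }> {
  if (deps.captureEnabled === false) return { status: "skipped" };
  if (!deps.config) return { status: "no_config" };
  const flush = deps.flushSessionQueueFn ?? (async () => ({ status: "flushed" }));
  const result = await flush(input.session_id);
  deps.spawnWikiWorkerFn?.({ sessionId: input.session_id, cwd: input.cwd ?? "" });
  return { status: "flushed", flushStatus: result.status };
}
```

The payoff shows in the deletions that follow: the old module-registry tests needed vi.mock, vi.resetModules, and a setImmediate drain to catch a main() that fired at import time, whereas the injected-deps style calls an exported function and asserts on its return value.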
diff --git a/claude-code/tests/periodic-summary-bundles.test.ts b/claude-code/tests/periodic-summary-bundles.test.ts
deleted file mode 100644
index e0ee786..0000000
--- a/claude-code/tests/periodic-summary-bundles.test.ts
+++ /dev/null
@@ -1,129 +0,0 @@
-import { describe, it, expect } from "vitest";
-import { readFileSync, existsSync } from "node:fs";
-import { resolve } from "node:path";
-
-/**
- * Bundle-level anti-regression for the periodic-summary feature. These
- * tests scan the SHIPPED bundles (claude-code + codex) to confirm:
- *
- * 1. The SessionEnd race fix is present: before spawning the worker, the
- * hook checks tryAcquireLock and bails when another worker is running.
- * Two concurrent workers writing the same summary row trip the Deeplake
- * UPDATE-coalescing quirk and drop one write.
- *
- * 2. The periodic trigger in the capture hook also acquires the lock
- * before spawning — same reason.
- *
- * 3. The internal wiki-worker flag uses ONLY the new HIVEMIND_WIKI_WORKER
- * name. DEEPLAKE_WIKI_WORKER was a migration-only fallback and is a
- * plugin-internal signal, so there is no reason to keep it shipped.
- *
- * 4. HIVEMIND_CAPTURE=false is respected everywhere the guard existed —
- * the rename left one path reading the old name only, which we fixed.
- *
- * Source tests (summary-state.test.ts) prove the lock module is correct;
- * these bundle checks prove the build didn't drop the call sites.
- */
-
-const BUNDLE_ROOT = resolve(__dirname, "..", "..");
-
-const SESSION_END_HOOKS: Array<[string, string]> = [
- ["claude-code session-end", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-end.js")],
- ["codex stop", resolve(BUNDLE_ROOT, "codex", "bundle", "stop.js")],
-];
-
-const CAPTURE_HOOKS: Array<[string, string]> = [
- ["claude-code capture", resolve(BUNDLE_ROOT, "claude-code", "bundle", "capture.js")],
- ["codex capture", resolve(BUNDLE_ROOT, "codex", "bundle", "capture.js")],
-];
-
-const ALL_BUNDLES: Array<[string, string]> = [
- ...SESSION_END_HOOKS,
- ...CAPTURE_HOOKS,
- ["claude-code session-start", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-start.js")],
- ["claude-code session-start-setup", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-start-setup.js")],
- ["codex session-start", resolve(BUNDLE_ROOT, "codex", "bundle", "session-start.js")],
- ["codex session-start-setup", resolve(BUNDLE_ROOT, "codex", "bundle", "session-start-setup.js")],
-];
-
-describe("bundles exist", () => {
- it.each(ALL_BUNDLES)("%s bundle file is present", (_label, path) => {
- expect(existsSync(path)).toBe(true);
- });
-});
-
-// ══ SessionEnd-style hooks: must acquire the lock before spawning ══════════
-describe.each(SESSION_END_HOOKS)("%s bundle — race fix", (_label, path) => {
- const src = readFileSync(path, "utf-8");
-
- it("calls tryAcquireLock before spawning the worker", () => {
- expect(src).toMatch(/tryAcquireLock/);
- // The bail-out branch that exists only because of the race fix: when
- // the lock is held, we log and return without spawning.
- expect(src).toMatch(/periodic worker already running/);
- });
-
- it("spawns the wiki worker only on the happy path", () => {
- // Must still reference the spawn helper — a full removal would also
- // match "no race" but would break the feature.
- expect(src).toMatch(/spawn(Codex)?WikiWorker/);
- });
-});
-
-// ══ Capture hooks: periodic trigger also acquires the lock ═════════════════
-describe.each(CAPTURE_HOOKS)("%s bundle — periodic trigger", (_label, path) => {
- const src = readFileSync(path, "utf-8");
-
- it("acquires the lock before spawning from the periodic path", () => {
- expect(src).toMatch(/tryAcquireLock/);
- expect(src).toMatch(/shouldTrigger/);
- expect(src).toMatch(/bumpTotalCount/);
- });
-
- it("references the summary-state helpers (feature wired end-to-end)", () => {
- expect(src).toMatch(/loadTriggerConfig/);
- });
-});
-
-// ══ Internal flag uses only the new name ═══════════════════════════════════
-describe.each(ALL_BUNDLES)("%s bundle — clean env flags", (_label, path) => {
- const src = readFileSync(path, "utf-8");
-
- it("uses HIVEMIND_WIKI_WORKER and not the legacy DEEPLAKE_WIKI_WORKER", () => {
- // HIVEMIND_WIKI_WORKER is the internal signal the wiki worker sets on
- // itself; every hook must gate on it. The old DEEPLAKE_* fallback was
- // pure back-compat noise for an internal flag and is removed.
- if (!src.includes("HIVEMIND_WIKI_WORKER")) {
- // Some bundles don't need the guard (e.g. pure utility bundles) —
- // skip. Every bundle in this suite actually does gate, but be lenient.
- return;
- }
- expect(src).not.toMatch(/DEEPLAKE_WIKI_WORKER/);
- });
-
- it("does not fall back to DEEPLAKE_CAPTURE for the capture-disabled guard", () => {
- // The guard must read HIVEMIND_CAPTURE only. DEEPLAKE_CAPTURE is a
- // pre-rename alias that would mask a user setting HIVEMIND_CAPTURE=false.
- expect(src).not.toMatch(/DEEPLAKE_CAPTURE/);
- });
-});
-
-// ══ summary-state module is inlined into every bundle that needs it ════════
-describe("summary-state helpers are inlined into the hook bundles", () => {
- // SessionEnd-style hooks only need tryAcquireLock (the worker itself
- // releases the lock in its finally block). esbuild tree-shakes
- // releaseLock out of those bundles, which is expected.
- it.each(SESSION_END_HOOKS)("%s bundle inlines tryAcquireLock", (_label, path) => {
- const src = readFileSync(path, "utf-8");
- expect(src).toMatch(/function tryAcquireLock/);
- });
-
- // Capture hooks need both: tryAcquireLock to gate the spawn, and
- // releaseLock as the error-path fallback when spawn throws before the
- // worker takes ownership of the lock.
- it.each(CAPTURE_HOOKS)("%s bundle inlines tryAcquireLock + releaseLock", (_label, path) => {
- const src = readFileSync(path, "utf-8");
- expect(src).toMatch(/function tryAcquireLock/);
- expect(src).toMatch(/function releaseLock/);
- });
-});
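
The deleted bundle suite above existed to prove that one call site survived esbuild: the lock gate in front of the worker spawn. Its replacement in hooks-source.test.ts exercises the same gate at the source level. In miniature, with the signatures assumed from the test expectations rather than read from the source:

```ts
// Sketch of the race-fix gate: acquire the per-session summary lock before
// spawning, and bail when another worker already holds it. Two concurrent
// workers writing the same summary row would trip the Deeplake
// UPDATE-coalescing quirk and silently drop one write.
function maybeSpawnSummaryWorker(
  sessionId: string,
  tryAcquireLock: (sessionId: string) => boolean,
  spawnWikiWorker: (sessionId: string) => void,
  log: (msg: string) => void,
): void {
  if (!tryAcquireLock(sessionId)) {
    log(`periodic worker already running for ${sessionId}, skipping`);
    return;
  }
  log(`triggering summary for ${sessionId}`);
  spawnWikiWorker(sessionId);
}
```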
diff --git a/claude-code/tests/pre-tool-use.test.ts b/claude-code/tests/pre-tool-use.test.ts
index bbf00ba..f5bb682 100644
--- a/claude-code/tests/pre-tool-use.test.ts
+++ b/claude-code/tests/pre-tool-use.test.ts
@@ -305,6 +305,15 @@ describe("pre-tool-use: non-Bash tools targeting memory", () => {
}
});
+ it("intercepts Read using path alias for the memory root", () => {
+ const r = runPreToolUse("Read", { path: "~/.deeplake/memory" });
+ expect(r.empty).toBe(false);
+ if (!r.empty) {
+ expect(r.decision).toBe("allow");
+ expect(r.updatedCommand).toContain("ls /");
+ }
+ });
+
it("intercepts Glob targeting memory path", () => {
const r = runPreToolUse("Glob", { path: "~/.deeplake/memory/", pattern: "*.md" });
expect(r.empty).toBe(false);
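
The new alias test hinges on path rewriting: any `~/.deeplake/memory` reference collapses onto the virtual root. A sketch of that rewrite, with the regexes assumed from the observed input/output pairs rather than taken from pre-tool-use.ts:

```ts
// Hypothetical rewrite helper; two passes so both "file under the mount"
// and "bare mount root" map cleanly.
function rewriteMemoryPaths(command: string): string {
  return command
    .replace(/~\/\.deeplake\/memory\/(\S*)/g, "/$1") // mount subpath -> /subpath
    .replace(/~\/\.deeplake\/memory\b/g, "/");       // bare mount root -> /
}

// rewriteMemoryPaths("cat ~/.deeplake/memory/index.md") === "cat /index.md"
// rewriteMemoryPaths("ls -la ~/.deeplake/memory")       === "ls -la /"
```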
diff --git a/claude-code/tests/query-cache.test.ts b/claude-code/tests/query-cache.test.ts
new file mode 100644
index 0000000..84f62a9
--- /dev/null
+++ b/claude-code/tests/query-cache.test.ts
@@ -0,0 +1,68 @@
+import { mkdtempSync, rmSync } from "node:fs";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
+import { afterEach, describe, expect, it, vi } from "vitest";
+import {
+ clearSessionQueryCache,
+ getSessionQueryCacheDir,
+ readCachedIndexContent,
+ writeCachedIndexContent,
+} from "../../src/hooks/query-cache.js";
+
+describe("query-cache", () => {
+ const tempRoots: string[] = [];
+
+ afterEach(() => {
+ for (const root of tempRoots.splice(0)) {
+ rmSync(root, { recursive: true, force: true });
+ }
+ vi.restoreAllMocks();
+ });
+
+ it("writes and reads cached index content per session", () => {
+ const cacheRoot = mkdtempSync(join(tmpdir(), "hivemind-query-cache-"));
+ tempRoots.push(cacheRoot);
+
+ writeCachedIndexContent("session-1", "# Memory Index", { cacheRoot });
+
+ expect(readCachedIndexContent("session-1", { cacheRoot })).toBe("# Memory Index");
+ expect(getSessionQueryCacheDir("session-1", { cacheRoot })).toBe(join(cacheRoot, "session-1"));
+ });
+
+ it("returns null for missing cache files and logs non-ENOENT read and write failures", () => {
+ const cacheRoot = mkdtempSync(join(tmpdir(), "hivemind-query-cache-"));
+ tempRoots.push(cacheRoot);
+ const logFn = vi.fn();
+
+ expect(readCachedIndexContent("missing", { cacheRoot, logFn })).toBeNull();
+ expect(logFn).not.toHaveBeenCalled();
+
+ expect(readCachedIndexContent("broken", {
+ cacheRoot: "\u0000",
+ logFn,
+ })).toBeNull();
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("read failed"));
+
+ writeCachedIndexContent("blocked", "content", {
+ cacheRoot: "\u0000",
+ logFn,
+ });
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("write failed"));
+ });
+
+ it("clears a session cache directory and swallows removal errors", () => {
+ const cacheRoot = mkdtempSync(join(tmpdir(), "hivemind-query-cache-"));
+ tempRoots.push(cacheRoot);
+ writeCachedIndexContent("session-2", "cached", { cacheRoot });
+
+ clearSessionQueryCache("session-2", { cacheRoot });
+ expect(readCachedIndexContent("session-2", { cacheRoot })).toBeNull();
+
+ const logFn = vi.fn();
+ clearSessionQueryCache("session-2", {
+ cacheRoot: "\u0000",
+ logFn,
+ });
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("clear failed"));
+ });
+});
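
A plausible shape for the helpers these tests exercise; `cacheRoot` and `logFn` are the option names the tests pass, while the on-disk file name is an assumption:

```ts
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";

interface CacheOpts { cacheRoot: string; logFn?: (msg: string) => void; }

// Cached index content lives at <cacheRoot>/<sessionId>/index.md (assumed name).
export function readCachedIndexSketch(sessionId: string, opts: CacheOpts): string | null {
  try {
    return readFileSync(join(opts.cacheRoot, sessionId, "index.md"), "utf-8");
  } catch (err) {
    // A missing cache entry is the normal cold-start case; only unexpected
    // errors (permissions, invalid paths) are worth logging.
    if ((err as NodeJS.ErrnoException).code !== "ENOENT") {
      opts.logFn?.(`query-cache read failed: ${String(err)}`);
    }
    return null;
  }
}

export function writeCachedIndexSketch(sessionId: string, content: string, opts: CacheOpts): void {
  try {
    const dir = join(opts.cacheRoot, sessionId);
    mkdirSync(dir, { recursive: true });
    writeFileSync(join(dir, "index.md"), content, "utf-8");
  } catch (err) {
    opts.logFn?.(`query-cache write failed: ${String(err)}`);
  }
}
```

Per-session caching is what lets the pre-tool-use hook above serve repeated `/index.md` reads without re-querying Deeplake, and clearing the session directory on each captured user message keeps the cache from outliving the turn that populated it.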
diff --git a/claude-code/tests/session-end-hook.test.ts b/claude-code/tests/session-end-hook.test.ts
deleted file mode 100644
index aaf4cff..0000000
--- a/claude-code/tests/session-end-hook.test.ts
+++ /dev/null
@@ -1,165 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-
-/**
- * Direct source-level tests for src/hooks/session-end.ts. The hook's
- * `main()` runs at module import time, so each test resets the module
- * registry, wires mocks, then dynamically imports the module and waits
- * for the main promise chain to settle.
- *
- * Coverage target: every branch of the hook — the WIKI_WORKER / CAPTURE
- * early-exits, empty session_id, missing config, lock held, happy path,
- * and the outer catch for thrown errors.
- *
- * CLAUDE.md rule #2: mock only at the boundary. readStdin, loadConfig,
- * spawnWikiWorker, wikiLog, and tryAcquireLock are the seams. The rest
- * of the hook body runs for real.
- */
-
-const stdinMock = vi.fn();
-const loadConfigMock = vi.fn();
-const spawnMock = vi.fn();
-const wikiLogMock = vi.fn();
-const tryAcquireLockMock = vi.fn();
-const releaseLockMock = vi.fn();
-const debugLogMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/hooks/spawn-wiki-worker.js", () => ({
- spawnWikiWorker: (...a: any[]) => spawnMock(...a),
- wikiLog: (...a: any[]) => wikiLogMock(...a),
- bundleDirFromImportMeta: () => "/fake/bundle",
-}));
-vi.mock("../../src/hooks/summary-state.js", () => ({
- tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a),
- releaseLock: (...a: any[]) => releaseLockMock(...a),
-}));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_tag: string, msg: string) => debugLogMock(msg),
-}));
-
-async function runHook(): Promise<void> {
- vi.resetModules();
- await import("../../src/hooks/session-end.js");
- // main() is async and fires on import; give the microtask queue a
- // chance to drain before we assert on the mocks.
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "o", workspaceId: "default",
- userName: "u", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-beforeEach(() => {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/proj" });
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- spawnMock.mockReset();
- wikiLogMock.mockReset();
- tryAcquireLockMock.mockReset().mockReturnValue(true);
- releaseLockMock.mockReset();
- debugLogMock.mockReset();
-});
-
-afterEach(() => { vi.restoreAllMocks(); });
-
-describe("session-end hook", () => {
- it("returns immediately when HIVEMIND_WIKI_WORKER=1 (nested worker invocation)", async () => {
- process.env.HIVEMIND_WIKI_WORKER = "1";
- await runHook();
- expect(stdinMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(tryAcquireLockMock).not.toHaveBeenCalled();
- });
-
- it("returns immediately when HIVEMIND_CAPTURE=false (opt-out)", async () => {
- process.env.HIVEMIND_CAPTURE = "false";
- await runHook();
- expect(stdinMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("returns without spawning when session_id is missing", async () => {
- stdinMock.mockResolvedValue({ session_id: "", cwd: "/proj" });
- await runHook();
- expect(loadConfigMock).not.toHaveBeenCalled();
- expect(tryAcquireLockMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- });
-
- it("returns without spawning when loadConfig returns null (no credentials)", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(tryAcquireLockMock).not.toHaveBeenCalled();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith("no config");
- });
-
- it("skips spawn with a wiki log line when the periodic worker holds the lock", async () => {
- tryAcquireLockMock.mockReturnValue(false);
- await runHook();
- expect(spawnMock).not.toHaveBeenCalled();
- expect(wikiLogMock).toHaveBeenCalledWith(
- expect.stringContaining("periodic worker already running for sid-1, skipping"),
- );
- });
-
- it("spawns the wiki worker on the happy path and logs 'triggering summary'", async () => {
- await runHook();
- expect(tryAcquireLockMock).toHaveBeenCalledWith("sid-1");
- expect(wikiLogMock).toHaveBeenCalledWith(
- expect.stringContaining("triggering summary for sid-1"),
- );
- expect(spawnMock).toHaveBeenCalledTimes(1);
- const callArg = spawnMock.mock.calls[0][0];
- expect(callArg.sessionId).toBe("sid-1");
- expect(callArg.cwd).toBe("/proj");
- expect(callArg.reason).toBe("SessionEnd");
- expect(callArg.config).toBe(validConfig);
- });
-
- it("falls back to empty cwd when stdin omits the field", async () => {
- stdinMock.mockResolvedValue({ session_id: "sid-2" });
- await runHook();
- expect(spawnMock).toHaveBeenCalledWith(
- expect.objectContaining({ sessionId: "sid-2", cwd: "" }),
- );
- });
-
- it("catches and logs a fatal error from readStdin without crashing the process", async () => {
- const boom = new Error("stdin boom");
- stdinMock.mockRejectedValue(boom);
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- // Let the catch in `main().catch(...)` run.
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-
- it("releases the lock if spawnWikiWorker throws (no lock leak)", async () => {
- spawnMock.mockImplementation(() => { throw new Error("spawn exploded"); });
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- // Let the outer main().catch run.
- await new Promise(r => setImmediate(r));
- expect(releaseLockMock).toHaveBeenCalledWith("sid-1");
- // The throw bubbles to main().catch and logs "fatal: ..."
- expect(debugLogMock).toHaveBeenCalledWith("fatal: spawn exploded");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-
- it("still swallows release errors when spawn throws (no double-fault)", async () => {
- spawnMock.mockImplementation(() => { throw new Error("spawn exploded"); });
- releaseLockMock.mockImplementation(() => { throw new Error("release also broken"); });
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- // Outer fatal is the ORIGINAL spawn failure, not the release failure
- expect(debugLogMock).toHaveBeenCalledWith("fatal: spawn exploded");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
diff --git a/claude-code/tests/session-queue.test.ts b/claude-code/tests/session-queue.test.ts
new file mode 100644
index 0000000..068f41b
--- /dev/null
+++ b/claude-code/tests/session-queue.test.ts
@@ -0,0 +1,579 @@
+import { afterEach, describe, expect, it, vi } from "vitest";
+import {
+ existsSync,
+ mkdtempSync,
+ readFileSync,
+ renameSync,
+ rmSync,
+ utimesSync,
+ writeFileSync,
+} from "node:fs";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
+import {
+ appendQueuedSessionRow,
+ buildQueuedSessionRow,
+ buildSessionInsertSql,
+ buildSessionPath,
+ clearSessionWriteDisabled,
+ drainSessionQueues,
+ flushSessionQueue,
+ isSessionWriteDisabled,
+ isSessionWriteAuthError,
+ markSessionWriteDisabled,
+ type QueuedSessionRow,
+ type SessionQueueApi,
+ tryAcquireSessionDrainLock,
+} from "../../src/hooks/session-queue.js";
+
+const tempDirs: string[] = [];
+
+function makeQueueDir(): string {
+ const dir = mkdtempSync(join(tmpdir(), "hivemind-session-queue-"));
+ tempDirs.push(dir);
+ return dir;
+}
+
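+// makeRow fabricates one queued capture event: buildSessionPath derives
+// the per-user session path from (userName, orgName, workspaceId), and
+// buildQueuedSessionRow wraps the JSONL event line with the metadata the
+// INSERT builder needs. `overrides` lets individual tests poke single
+// fields such as `message`.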
+function makeRow(sessionId: string, seq: number, overrides: Partial<QueuedSessionRow> = {}): QueuedSessionRow {
+ const sessionPath = buildSessionPath(
+ { userName: "alice", orgName: "acme", workspaceId: "default" },
+ sessionId,
+ );
+ const timestamp = `2026-01-01T00:00:${String(seq % 60).padStart(2, "0")}Z`;
+ const line = JSON.stringify({
+ id: `event-${seq}`,
+ session_id: sessionId,
+ hook_event_name: "PostToolUse",
+ timestamp,
+ type: "tool_call",
+ tool_name: "Read",
+ content: `row-${seq}`,
+ });
+
+ return {
+ ...buildQueuedSessionRow({
+ sessionPath,
+ line,
+ userName: "alice",
+ projectName: "repo",
+ description: "PostToolUse",
+ agent: "claude_code",
+ timestamp,
+ }),
+ ...overrides,
+ };
+}
+
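+// makeApi stubs the narrow SessionQueueApi seam the queue code talks to:
+// `query` runs SQL against Deeplake and `ensureSessionsTable` creates
+// the sessions table on demand. Only this network boundary is mocked;
+// files, inflight markers, and locks all hit a real tmp directory.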
+function makeApi(queryImpl?: (sql: string) => Promise<Record<string, unknown>[]>) {
+  const api: SessionQueueApi & {
+    query: ReturnType<typeof vi.fn>;
+    ensureSessionsTable: ReturnType<typeof vi.fn>;
+ } = {
+ query: vi.fn(queryImpl ?? (async () => [])),
+ ensureSessionsTable: vi.fn(async () => undefined),
+ };
+ return api;
+}
+
+afterEach(() => {
+ while (tempDirs.length > 0) {
+ const dir = tempDirs.pop();
+ if (dir) rmSync(dir, { recursive: true, force: true });
+ }
+});
+
+describe("session queue", () => {
+ it("appends one JSONL line per queued row", () => {
+ const queueDir = makeQueueDir();
+ const row = makeRow("session-append", 1);
+
+ const queuePath = appendQueuedSessionRow(row, queueDir);
+ const lines = readFileSync(queuePath, "utf-8").trim().split("\n");
+
+ expect(lines).toHaveLength(1);
+ expect(JSON.parse(lines[0])).toEqual(row);
+ });
+
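+  // The next three tests pin the SQL builder: values land as
+  // single-quote-escaped literals cast with ::jsonb, multi-row batches
+  // join their VALUES tuples with "), (", and a message that is not
+  // valid JSON is wrapped as {"type":"raw_message","content":...} so
+  // the jsonb cast cannot fail server-side.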
+ it("builds a multi-row INSERT that preserves JSONB payloads", () => {
+ const row1 = makeRow("session-sql", 1, {
+ message: JSON.stringify({ content: "it's", path: "C:\\Users\\alice\\file.ts" }),
+ });
+ const row2 = makeRow("session-sql", 2);
+
+ const sql = buildSessionInsertSql("sessions", [row1, row2]);
+
+ expect(sql.match(/::jsonb/g)).toHaveLength(2);
+ expect(sql).toContain("it''s");
+ expect(sql).toContain('"path":"C:');
+ expect(sql).toContain("file.ts");
+ expect(sql).toContain("), (");
+ });
+
+ it("wraps malformed messages in a valid JSON object before casting to jsonb", () => {
+ const row = makeRow("session-sql-fallback", 1, {
+ message: "{not-json",
+ });
+
+ const sql = buildSessionInsertSql("sessions", [row]);
+
+ expect(sql).toContain(`"type":"raw_message"`);
+ expect(sql).toContain(`"content":"{not-json"`);
+ expect(sql).toContain("::jsonb");
+ });
+
+ it("rejects empty INSERT batches", () => {
+ expect(() => buildSessionInsertSql("sessions", [])).toThrow("rows must not be empty");
+ });
+
+ it("returns empty when there is nothing to flush", async () => {
+ const queueDir = makeQueueDir();
+ const api = makeApi();
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-empty",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "empty", rows: 0, batches: 0 });
+ expect(api.query).not.toHaveBeenCalled();
+ });
+
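+  // Flush lifecycle exercised below: the queue file is renamed to
+  // <session>.inflight to claim ownership, rows are inserted in chunks
+  // of maxBatchRows, and both files are gone after a successful drain.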
+ it("flushes a queue in chunked multi-row INSERT batches", async () => {
+ const queueDir = makeQueueDir();
+ const api = makeApi();
+
+ for (let i = 0; i < 51; i++) {
+ appendQueuedSessionRow(makeRow("session-batch", i), queueDir);
+ }
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-batch",
+ sessionsTable: "sessions",
+ queueDir,
+ maxBatchRows: 50,
+ drainAll: true,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 51, batches: 2 });
+ expect(api.query).toHaveBeenCalledTimes(2);
+ expect(api.ensureSessionsTable).not.toHaveBeenCalled();
+ expect(existsSync(join(queueDir, "session-batch.jsonl"))).toBe(false);
+ expect(existsSync(join(queueDir, "session-batch.inflight"))).toBe(false);
+ });
+
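+  // "table ... does not exist" is treated as recoverable: the flusher
+  // calls ensureSessionsTable once and retries the batch exactly once.
+  // Any other failure, from ensure or from the retry itself, is
+  // rethrown (see the two rethrow tests further down).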
+ it("retries once after ensuring the sessions table", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-retry", 1), queueDir);
+
+ let attempts = 0;
+ const api = makeApi(async () => {
+ attempts += 1;
+ if (attempts === 1) throw new Error("table sessions does not exist");
+ return [];
+ });
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-retry",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 1, batches: 1 });
+ expect(api.ensureSessionsTable).toHaveBeenCalledWith("sessions");
+ expect(api.query).toHaveBeenCalledTimes(2);
+ });
+
+ it("removes empty queue files without issuing inserts", async () => {
+ const queueDir = makeQueueDir();
+ writeFileSync(join(queueDir, "session-empty-file.jsonl"), "");
+
+ const api = makeApi();
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-empty-file",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 0, batches: 0 });
+ expect(api.query).not.toHaveBeenCalled();
+ expect(existsSync(join(queueDir, "session-empty-file.inflight"))).toBe(false);
+ });
+
+ it("rethrows non-auth ensureSessionsTable failures", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-ensure-error", 1), queueDir);
+
+ const api = makeApi(async () => {
+ throw new Error("table sessions does not exist");
+ });
+ api.ensureSessionsTable.mockRejectedValueOnce(new Error("dial tcp reset"));
+
+ await expect(flushSessionQueue(api, {
+ sessionId: "session-ensure-error",
+ sessionsTable: "sessions",
+ queueDir,
+ })).rejects.toThrow("dial tcp reset");
+ });
+
+ it("rethrows non-auth retry failures after ensureSessionsTable succeeds", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-retry-error", 1), queueDir);
+
+ let attempts = 0;
+ const api = makeApi(async () => {
+ attempts += 1;
+ if (attempts === 1) throw new Error("table sessions does not exist");
+ throw new Error("network blew up");
+ });
+
+ await expect(flushSessionQueue(api, {
+ sessionId: "session-retry-error",
+ sessionsTable: "sessions",
+ queueDir,
+ })).rejects.toThrow("network blew up");
+ expect(api.ensureSessionsTable).toHaveBeenCalledWith("sessions");
+ });
+
+ it("re-queues failed inflight rows back into the queue", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-fail", 1), queueDir);
+
+ const api = makeApi(async () => {
+ appendQueuedSessionRow(makeRow("session-fail", 2), queueDir);
+ throw new Error("network blew up");
+ });
+
+ await expect(flushSessionQueue(api, {
+ sessionId: "session-fail",
+ sessionsTable: "sessions",
+ queueDir,
+ })).rejects.toThrow("network blew up");
+
+ const lines = readFileSync(join(queueDir, "session-fail.jsonl"), "utf-8").trim().split("\n");
+ expect(lines).toHaveLength(2);
+ const messages = lines.map((line) => JSON.parse(line).message);
+ expect(messages.some((m: string) => m.includes("row-1"))).toBe(true);
+ expect(messages.some((m: string) => m.includes("row-2"))).toBe(true);
+ expect(existsSync(join(queueDir, "session-fail.inflight"))).toBe(false);
+ });
+
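+  // Concurrency protocol: an existing .inflight file means another
+  // flusher owns the queue, so a direct flush reports "busy" or polls
+  // until waitIfBusyMs expires. Failed inflight rows are re-queued (see
+  // above), and only a caller that opts into allowStaleInflight, or the
+  // session-start drain, may reclaim an .inflight older than
+  // staleInflightMs; the tests age files with utimesSync(path, 0, 0).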
+ it("returns busy while another flusher owns the inflight file", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-busy", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-busy.jsonl"),
+ join(queueDir, "session-busy.inflight"),
+ );
+ appendQueuedSessionRow(makeRow("session-busy", 2), queueDir);
+
+ const api = makeApi();
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-busy",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "busy", rows: 0, batches: 0 });
+ expect(api.query).not.toHaveBeenCalled();
+ });
+
+ it("waits for inflight ownership to clear before flushing queued rows", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-wait", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-wait.jsonl"),
+ join(queueDir, "session-wait.inflight"),
+ );
+ appendQueuedSessionRow(makeRow("session-wait", 2), queueDir);
+
+ setTimeout(() => {
+ rmSync(join(queueDir, "session-wait.inflight"), { force: true });
+ }, 50);
+
+ const api = makeApi();
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-wait",
+ sessionsTable: "sessions",
+ queueDir,
+ waitIfBusyMs: 250,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 1, batches: 1 });
+ expect(api.query).toHaveBeenCalledTimes(1);
+ expect((api.query.mock.calls[0]?.[0] as string) ?? "").toContain("row-2");
+ });
+
+ it("drains stale inflight files on session start replay", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-stale", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-stale.jsonl"),
+ join(queueDir, "session-stale.inflight"),
+ );
+ utimesSync(join(queueDir, "session-stale.inflight"), 0, 0);
+
+ const api = makeApi();
+ const result = await drainSessionQueues(api, {
+ sessionsTable: "sessions",
+ queueDir,
+ staleInflightMs: 1,
+ });
+
+ expect(result).toEqual({
+ queuedSessions: 1,
+ flushedSessions: 1,
+ rows: 1,
+ batches: 1,
+ });
+ expect(api.query).toHaveBeenCalledTimes(1);
+ expect(existsSync(join(queueDir, "session-stale.inflight"))).toBe(false);
+ });
+
+ it("drains queued .jsonl sessions on session start replay", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-drain-queued", 1), queueDir);
+
+ const api = makeApi();
+ const result = await drainSessionQueues(api, {
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({
+ queuedSessions: 1,
+ flushedSessions: 1,
+ rows: 1,
+ batches: 1,
+ });
+ });
+
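+  // Auth-failure circuit breaker: a 401/403 during flush writes a
+  // .<table>.disabled.json marker next to the queue files. While the
+  // marker is fresh, flush and drain skip network calls but keep the
+  // queued rows on disk; expired or malformed markers are removed and
+  // writes resume.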
+ it("counts queued sessions even when local auth-disable prevents flushing", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-drain-disabled", 1), queueDir);
+ markSessionWriteDisabled("sessions", "403 Forbidden", queueDir);
+
+ const result = await drainSessionQueues(makeApi(), {
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({
+ queuedSessions: 1,
+ flushedSessions: 0,
+ rows: 0,
+ batches: 0,
+ });
+ });
+
+ it("marks session writes disabled on auth failures and preserves the queue", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-auth", 1), queueDir);
+
+ const api = makeApi(async () => {
+ throw new Error("Query failed: 403: Forbidden");
+ });
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-auth",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "disabled", rows: 0, batches: 0 });
+ expect(api.ensureSessionsTable).not.toHaveBeenCalled();
+ expect(isSessionWriteDisabled("sessions", queueDir)).toBe(true);
+ expect(existsSync(join(queueDir, "session-auth.jsonl"))).toBe(true);
+ });
+
+ it("skips flush attempts while session writes are locally disabled", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-skip", 1), queueDir);
+
+ const api = makeApi();
+ const first = await flushSessionQueue(api, {
+ sessionId: "session-skip",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+ expect(first.status).toBe("flushed");
+
+ appendQueuedSessionRow(makeRow("session-skip", 2), queueDir);
+ const failingApi = makeApi(async () => {
+ throw new Error("403 Forbidden");
+ });
+ const disabled = await flushSessionQueue(failingApi, {
+ sessionId: "session-skip",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+ expect(disabled.status).toBe("disabled");
+
+ const skipped = await flushSessionQueue(api, {
+ sessionId: "session-skip",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+ expect(skipped).toEqual({ status: "disabled", rows: 0, batches: 0 });
+ expect(api.query).toHaveBeenCalledTimes(1);
+
+ clearSessionWriteDisabled("sessions", queueDir);
+ });
+
+ it("returns empty when writes are disabled but no queue files remain", async () => {
+ const queueDir = makeQueueDir();
+ markSessionWriteDisabled("sessions", "403 Forbidden", queueDir);
+
+ const result = await flushSessionQueue(makeApi(), {
+ sessionId: "session-disabled-empty",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "empty", rows: 0, batches: 0 });
+ });
+
+ it("recovers stale inflight files during a direct flush when allowed", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-recover", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-recover.jsonl"),
+ join(queueDir, "session-recover.inflight"),
+ );
+ utimesSync(join(queueDir, "session-recover.inflight"), 0, 0);
+
+ const api = makeApi();
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-recover",
+ sessionsTable: "sessions",
+ queueDir,
+ allowStaleInflight: true,
+ staleInflightMs: 1,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 1, batches: 1 });
+ expect(api.query).toHaveBeenCalledTimes(1);
+ });
+
+ it("recovers stale inflight files after waiting on a busy session", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-wait-stale", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-wait-stale.jsonl"),
+ join(queueDir, "session-wait-stale.inflight"),
+ );
+ utimesSync(join(queueDir, "session-wait-stale.inflight"), 0, 0);
+
+ const api = makeApi();
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-wait-stale",
+ sessionsTable: "sessions",
+ queueDir,
+ allowStaleInflight: true,
+ staleInflightMs: 1,
+ waitIfBusyMs: 1,
+ });
+
+ expect(result).toEqual({ status: "flushed", rows: 1, batches: 1 });
+ expect(api.query).toHaveBeenCalledTimes(1);
+ });
+
+ it("ignores fresh inflight files during drain replay", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-fresh-inflight", 1), queueDir);
+ renameSync(
+ join(queueDir, "session-fresh-inflight.jsonl"),
+ join(queueDir, "session-fresh-inflight.inflight"),
+ );
+
+ const result = await drainSessionQueues(makeApi(), {
+ sessionsTable: "sessions",
+ queueDir,
+ staleInflightMs: 60_000,
+ });
+
+ expect(result).toEqual({
+ queuedSessions: 0,
+ flushedSessions: 0,
+ rows: 0,
+ batches: 0,
+ });
+ expect(existsSync(join(queueDir, "session-fresh-inflight.inflight"))).toBe(true);
+ });
+
+ it("removes expired and malformed disabled markers", () => {
+ const queueDir = makeQueueDir();
+ markSessionWriteDisabled("sessions", "403 Forbidden", queueDir);
+
+ expect(isSessionWriteDisabled("sessions", queueDir, 0)).toBe(false);
+
+ const disabledPath = join(queueDir, ".sessions.disabled.json");
+ writeFileSync(disabledPath, "{not-json");
+ expect(isSessionWriteDisabled("sessions", queueDir)).toBe(false);
+ expect(existsSync(disabledPath)).toBe(false);
+ });
+
+ it("marks writes disabled when ensureSessionsTable fails with auth", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-ensure-auth", 1), queueDir);
+
+ const api = makeApi(async () => {
+ throw new Error("table sessions does not exist");
+ });
+ api.ensureSessionsTable.mockRejectedValueOnce(new Error("403 Forbidden"));
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-ensure-auth",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "disabled", rows: 0, batches: 0 });
+ expect(isSessionWriteDisabled("sessions", queueDir)).toBe(true);
+ });
+
+ it("marks writes disabled when the retry after ensure fails with auth", async () => {
+ const queueDir = makeQueueDir();
+ appendQueuedSessionRow(makeRow("session-retry-auth", 1), queueDir);
+
+ let attempts = 0;
+ const api = makeApi(async () => {
+ attempts += 1;
+ if (attempts === 1) throw new Error("table sessions does not exist");
+ throw new Error("401 Unauthorized");
+ });
+
+ const result = await flushSessionQueue(api, {
+ sessionId: "session-retry-auth",
+ sessionsTable: "sessions",
+ queueDir,
+ });
+
+ expect(result).toEqual({ status: "disabled", rows: 0, batches: 0 });
+ expect(api.ensureSessionsTable).toHaveBeenCalledWith("sessions");
+ expect(isSessionWriteDisabled("sessions", queueDir)).toBe(true);
+ });
+
+ it("treats string auth errors as auth failures and ignores unrelated errors", () => {
+ expect(isSessionWriteAuthError("401 Unauthorized")).toBe(true);
+ expect(isSessionWriteAuthError("something else")).toBe(false);
+ });
+
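+  // Drain lock: tryAcquireSessionDrainLock creates a .<table>.drain.lock
+  // marker (here .sessions.drain.lock) and hands back a release()
+  // callback, or null while a fresh lock is held. Like inflight files,
+  // a lock older than the supplied max age is reclaimed.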
+ it("acquires, releases, and reclaims stale drain locks", () => {
+ const queueDir = makeQueueDir();
+
+ const release = tryAcquireSessionDrainLock("sessions", queueDir, 60_000);
+ expect(release).toBeTypeOf("function");
+ expect(existsSync(join(queueDir, ".sessions.drain.lock"))).toBe(true);
+
+ expect(tryAcquireSessionDrainLock("sessions", queueDir, 60_000)).toBeNull();
+
+ utimesSync(join(queueDir, ".sessions.drain.lock"), 0, 0);
+ const reclaimed = tryAcquireSessionDrainLock("sessions", queueDir, 1);
+ expect(reclaimed).toBeTypeOf("function");
+
+ reclaimed?.();
+ expect(existsSync(join(queueDir, ".sessions.drain.lock"))).toBe(false);
+ release?.();
+ });
+});
diff --git a/claude-code/tests/session-start-hook.test.ts b/claude-code/tests/session-start-hook.test.ts
deleted file mode 100644
index 27b15c8..0000000
--- a/claude-code/tests/session-start-hook.test.ts
+++ /dev/null
@@ -1,338 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { mkdtempSync, rmSync } from "node:fs";
-import { tmpdir } from "node:os";
-import { join } from "node:path";
-
-/**
- * Direct source-level tests for src/hooks/session-start.ts. The hook
- * orchestrates: credential load, userName backfill, table+placeholder
- * setup, version check + auto-update, and the additionalContext output.
- *
- * Mocks: readStdin, loadCredentials/saveCredentials, loadConfig,
- * DeeplakeApi, global.fetch, child_process.execSync, and the two
- * node:fs helpers used by the cache-cleanup path (readdirSync, rmSync).
- */
-
-const stdinMock = vi.fn();
-const loadCredsMock = vi.fn();
-const saveCredsMock = vi.fn();
-const loginMock = vi.fn();
-const loadConfigMock = vi.fn();
-const debugLogMock = vi.fn();
-const ensureTableMock = vi.fn();
-const ensureSessionsTableMock = vi.fn();
-const queryMock = vi.fn();
-const execSyncMock = vi.fn();
-const readdirSyncMock = vi.fn();
-const rmSyncMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/commands/auth.js", () => ({
- loadCredentials: (...a: any[]) => loadCredsMock(...a),
- saveCredentials: (...a: any[]) => saveCredsMock(...a),
- login: (...a: any[]) => loginMock(...a),
-}));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_t: string, msg: string) => debugLogMock(msg),
- utcTimestamp: () => "2026-04-17 00:00:00 UTC",
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class {
- ensureTable() { return ensureTableMock(); }
- ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); }
- query(sql: string) { return queryMock(sql); }
- },
-}));
-vi.mock("node:child_process", async () => {
- const actual = await vi.importActual("node:child_process");
- return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) };
-});
-vi.mock("node:fs", async () => {
- const actual = await vi.importActual("node:fs");
- return {
- ...actual,
- readdirSync: (...a: any[]) => readdirSyncMock(...a),
- rmSync: (...a: any[]) => rmSyncMock(...a),
- };
-});
-
-const originalFetch = global.fetch;
-const fetchMock = vi.fn();
-
-let stdoutLines: string[] = [];
-const stdoutSpy = vi.spyOn(process.stdout, "write");
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<string | null> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- delete process.env.HIVEMIND_CAPTURE;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- stdoutLines = [];
- stdoutSpy.mockImplementation((chunk: any) => { stdoutLines.push(String(chunk)); return true; });
- vi.resetModules();
- // @ts-expect-error
- global.fetch = fetchMock;
- // Intercept console.log which session-start.ts uses for the JSON emit
- const originalLog = console.log;
- const collected: string[] = [];
- console.log = (...args: any[]) => { collected.push(args.join(" ")); };
- try {
- await import("../../src/hooks/session-start.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
- return collected.join("\n") || null;
- } finally {
- console.log = originalLog;
- }
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- userName: "alice", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-let cacheTmp: string;
-
-beforeEach(() => {
- cacheTmp = mkdtempSync(join(tmpdir(), "session-start-test-"));
- stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/workspaces/proj" });
- loadCredsMock.mockReset().mockReturnValue({
- token: "tok", orgId: "o", orgName: "acme", userName: "alice", workspaceId: "default",
- });
- saveCredsMock.mockReset();
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- debugLogMock.mockReset();
- ensureTableMock.mockReset().mockResolvedValue(undefined);
- ensureSessionsTableMock.mockReset().mockResolvedValue(undefined);
- queryMock.mockReset().mockResolvedValue([]); // "no existing summary"
- execSyncMock.mockReset();
- readdirSyncMock.mockReset().mockReturnValue([]);
- rmSyncMock.mockReset();
- fetchMock.mockReset().mockResolvedValue({
- ok: true,
- json: async () => ({ version: "0.0.1" }), // older-or-equal → no update
- });
-});
-
-afterEach(() => {
- vi.restoreAllMocks();
- // @ts-expect-error
- global.fetch = originalFetch;
- try { rmSync(cacheTmp, { recursive: true, force: true }); } catch { /* ignore */ }
-});
-
-// ═══ Guard + credential branches ═══════════════════════════════════════════
-
-describe("session-start hook — guards", () => {
- it("returns immediately when HIVEMIND_WIKI_WORKER=1", async () => {
- const out = await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(stdinMock).not.toHaveBeenCalled();
- expect(out).toBeNull();
- });
-
- it("emits additionalContext with the not-logged-in warning when no creds", async () => {
- loadCredsMock.mockReturnValue(null);
- const out = await runHook();
- expect(out).not.toBeNull();
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("Not logged in to Deeplake");
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("no credentials found"),
- );
- });
-
- it("emits the logged-in context when creds are present", async () => {
- const out = await runHook();
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake as org: acme");
- expect(parsed.hookSpecificOutput.additionalContext).toContain("workspace: default");
- });
-
- it("falls back to orgId when orgName is missing", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "org-uuid", userName: "u", workspaceId: "default",
- });
- const out = await runHook();
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake as org: org-uuid");
- });
-
- it("backfills userName via node:os when credentials lack one", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- });
- await runHook();
- expect(saveCredsMock).toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringMatching(/^backfilled and persisted userName: /),
- );
- });
-});
-
-// ═══ Table setup + placeholder ═════════════════════════════════════════════
-
-describe("session-start hook — placeholder branching", () => {
- it("creates placeholder when summary does not exist (query returns [])", async () => {
- await runHook();
- expect(ensureTableMock).toHaveBeenCalled();
- expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions");
- // 1 SELECT (existing check) + 1 INSERT = 2 queries.
- expect(queryMock).toHaveBeenCalledTimes(2);
- expect(queryMock.mock.calls[0][0]).toMatch(/^SELECT path FROM/);
- expect(queryMock.mock.calls[1][0]).toMatch(/^INSERT INTO/);
- expect(debugLogMock).toHaveBeenCalledWith("placeholder created");
- });
-
- it("skips placeholder INSERT when summary already exists (resumed session)", async () => {
- queryMock.mockResolvedValueOnce([{ path: "/summaries/alice/sid-1.md" }]);
- await runHook();
- expect(queryMock).toHaveBeenCalledTimes(1); // only the SELECT
- });
-
- it("skips placeholder INSERT when HIVEMIND_CAPTURE=false but still ensures tables", async () => {
- await runHook({ HIVEMIND_CAPTURE: "false" });
- expect(ensureTableMock).toHaveBeenCalled();
- expect(ensureSessionsTableMock).toHaveBeenCalled();
- expect(queryMock).not.toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- "placeholder skipped (HIVEMIND_CAPTURE=false)",
- );
- });
-
- it("swallows placeholder errors and logs via both loggers", async () => {
- ensureTableMock.mockRejectedValue(new Error("table boom"));
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("placeholder failed: table boom"),
- );
- });
-
- it("skips setup when loadConfig returns null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-
- it("skips setup when session_id is empty", async () => {
- stdinMock.mockResolvedValue({ session_id: "", cwd: "/x" });
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-});
-
-// ═══ Version check + autoupdate ═════════════════════════════════════════════
-
-describe("session-start hook — version check", () => {
- it("runs execSync and cleans old cache entries when a newer version is available", async () => {
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0" }),
- });
- readdirSyncMock.mockReturnValue([
- { name: "0.0.1", isDirectory: () => true },
- { name: "999.0.0", isDirectory: () => true }, // latest, must NOT be removed
- ]);
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- const out = await runHook();
- expect(execSyncMock).toHaveBeenCalled();
- expect(rmSyncMock).toHaveBeenCalledTimes(1);
- expect(rmSyncMock.mock.calls[0][0]).toContain("0.0.1");
- expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining("auto-updated"));
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("auto-updated");
- });
-
- it("falls back to manual-upgrade message when autoupdate is disabled", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "o", orgName: "acme", userName: "u", workspaceId: "default",
- autoupdate: false,
- });
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("update available"),
- );
- });
-
- it("emits the 'auto-update failed' message when execSync throws", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) });
- execSyncMock.mockImplementation(() => { throw new Error("npm unreachable"); });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("Auto-update failed"),
- );
- });
-
- it("tolerates fetch failure (GitHub unreachable)", async () => {
- fetchMock.mockRejectedValue(new Error("offline"));
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("tolerates readdirSync throw during cache cleanup", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) });
- readdirSyncMock.mockImplementation(() => { throw new Error("readdir boom"); });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringContaining("cache cleanup failed: readdir boom"),
- );
- });
-
- it("emits 'up to date' context when latest == current", async () => {
-    // Real getInstalledVersion reads plugin.json from the real repo, so
-    // the test cannot pin the installed version at runtime. Instead
-    // fetchMock returns 0.0.1, which is never newer than the repo's
-    // real version, so isNewer is false and the "up to date" branch
-    // fires.
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "0.0.1" }) });
- const out = await runHook();
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("up to date");
- });
-});
-
-// ═══ Fatal catch ════════════════════════════════════════════════════════════
-
-describe("session-start hook — fatal catch", () => {
- it("catches a stdin throw and exits 0", async () => {
- stdinMock.mockRejectedValue(new Error("bad stdin"));
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: bad stdin");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-// Additional branch coverage
-describe("session-start hook — version helpers edge cases", () => {
- it("fetch ok:false short-circuits getLatestVersion (no autoupdate)", async () => {
- fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("GitHub response without a version field falls through to null", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("workspaceId missing on creds falls back to 'default' in context", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "o", orgName: "acme", userName: "alice",
- // workspaceId omitted
- });
- const out = await runHook();
- const parsed = JSON.parse(out!);
- expect(parsed.hookSpecificOutput.additionalContext).toContain("workspace: default");
- });
-});
diff --git a/claude-code/tests/session-start-setup-hook.test.ts b/claude-code/tests/session-start-setup-hook.test.ts
deleted file mode 100644
index e3c9ca6..0000000
--- a/claude-code/tests/session-start-setup-hook.test.ts
+++ /dev/null
@@ -1,262 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-
-/**
- * Source-level tests for src/hooks/session-start-setup.ts. This hook
- * handles three things on a fresh session: table setup, userName
- * backfill, and version check + auto-update. Mocks the boundaries:
- * readStdin, loadCredentials, saveCredentials, loadConfig, DeeplakeApi,
- * global fetch (for the GitHub version lookup), and execSync (for the
- * claude-plugin update call).
- */
-
-const stdinMock = vi.fn();
-const loadCredsMock = vi.fn();
-const saveCredsMock = vi.fn();
-const loadConfigMock = vi.fn();
-const debugLogMock = vi.fn();
-const ensureTableMock = vi.fn();
-const ensureSessionsTableMock = vi.fn();
-const execSyncMock = vi.fn();
-
-vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) }));
-vi.mock("../../src/commands/auth.js", () => ({
- loadCredentials: (...a: any[]) => loadCredsMock(...a),
- saveCredentials: (...a: any[]) => saveCredsMock(...a),
-}));
-vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) }));
-vi.mock("../../src/utils/debug.js", () => ({
- log: (_t: string, msg: string) => debugLogMock(msg),
- utcTimestamp: () => "2026-04-17 00:00:00 UTC",
-}));
-vi.mock("../../src/deeplake-api.js", () => ({
- DeeplakeApi: class {
- ensureTable() { return ensureTableMock(); }
- ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); }
- },
-}));
-vi.mock("node:child_process", async () => {
- const actual = await vi.importActual("node:child_process");
- return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) };
-});
-
-// We also need to control global.fetch for the GitHub version lookup.
-const originalFetch = global.fetch;
-const fetchMock = vi.fn();
-
-async function runHook(env: Record<string, string | undefined> = {}): Promise<void> {
- delete process.env.HIVEMIND_WIKI_WORKER;
- for (const [k, v] of Object.entries(env)) {
- if (v === undefined) delete process.env[k];
- else process.env[k] = v;
- }
- vi.resetModules();
- // @ts-expect-error: replace global fetch for the GitHub lookup
- global.fetch = fetchMock;
- await import("../../src/hooks/session-start-setup.js");
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-const validConfig = {
- token: "t", orgId: "o", orgName: "acme", workspaceId: "default",
- userName: "alice", apiUrl: "http://example", tableName: "memory",
- sessionsTableName: "sessions",
-};
-
-beforeEach(() => {
- stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/x" });
- loadCredsMock.mockReset().mockReturnValue({
- token: "tok", orgId: "o", orgName: "acme", userName: "alice",
- });
- saveCredsMock.mockReset();
- loadConfigMock.mockReset().mockReturnValue(validConfig);
- debugLogMock.mockReset();
- ensureTableMock.mockReset().mockResolvedValue(undefined);
- ensureSessionsTableMock.mockReset().mockResolvedValue(undefined);
- execSyncMock.mockReset();
- fetchMock.mockReset().mockResolvedValue({
- ok: true,
- json: async () => ({ version: "0.0.1" }), // same-as-current: no update
- });
-});
-
-afterEach(() => {
- vi.restoreAllMocks();
- // @ts-expect-error
- global.fetch = originalFetch;
-});
-
-describe("session-start-setup hook — guards", () => {
- it("returns without reading stdin when HIVEMIND_WIKI_WORKER=1", async () => {
- await runHook({ HIVEMIND_WIKI_WORKER: "1" });
- expect(stdinMock).not.toHaveBeenCalled();
- });
-
- it("returns when no credentials are loaded", async () => {
- loadCredsMock.mockReturnValue(null);
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("no credentials");
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-
- it("returns when credentials have no token", async () => {
- loadCredsMock.mockReturnValue({ token: "", userName: "alice" });
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("no credentials");
- });
-});
-
-describe("session-start-setup hook — userName backfill", () => {
- it("backfills userName via node:os when missing and saves creds", async () => {
- loadCredsMock.mockReturnValue({ token: "tok", orgId: "o", orgName: "acme" });
- await runHook();
- expect(saveCredsMock).toHaveBeenCalled();
- expect(debugLogMock).toHaveBeenCalledWith(
- expect.stringMatching(/^backfilled userName: /),
- );
- });
-
- it("does not call saveCredentials when userName already set", async () => {
- // Default creds in beforeEach have userName=alice.
- await runHook();
- expect(saveCredsMock).not.toHaveBeenCalled();
- });
-});
-
-describe("session-start-setup hook — table setup", () => {
- it("ensures both tables on the happy path", async () => {
- await runHook();
- expect(ensureTableMock).toHaveBeenCalled();
- expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions");
- expect(debugLogMock).toHaveBeenCalledWith("setup complete");
- });
-
- it("swallows setup errors and logs them", async () => {
- ensureTableMock.mockRejectedValue(new Error("table boom"));
- await runHook();
- expect(debugLogMock).toHaveBeenCalledWith("setup failed: table boom");
- });
-
- it("skips setup entirely when session_id is empty", async () => {
- stdinMock.mockResolvedValue({ session_id: "", cwd: "/x" });
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-
- it("skips setup when loadConfig returns null", async () => {
- loadConfigMock.mockReturnValue(null);
- await runHook();
- expect(ensureTableMock).not.toHaveBeenCalled();
- });
-});
-
-describe("session-start-setup hook — version check + autoupdate", () => {
- it("runs the autoupdate path when newer version is available", async () => {
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0" }), // clearly newer
- });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).toHaveBeenCalled();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("auto-updated"),
- );
- });
-
- it("emits a manual-upgrade message when autoupdate is disabled and newer exists", async () => {
- loadCredsMock.mockReturnValue({
- token: "t", orgId: "o", orgName: "acme", userName: "alice",
- autoupdate: false,
- });
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0" }),
- });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("update available"),
- );
- });
-
- it("emits the 'auto-update failed' message when execSync throws", async () => {
- fetchMock.mockResolvedValue({
- ok: true,
- json: async () => ({ version: "999.0.0" }),
- });
- execSyncMock.mockImplementation(() => { throw new Error("npm down"); });
- const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true);
- await runHook();
- expect(stderrSpy).toHaveBeenCalledWith(
- expect.stringContaining("Auto-update failed"),
- );
- });
-
- it("logs 'up to date' when installed version matches latest", async () => {
-    // fetchMock's beforeEach default returns 0.0.1, but getInstalledVersion
-    // reads plugin.json from the real filesystem (0.6.x), so the two can
-    // never be made equal from here. Return ok=false instead → latest=null
-    // → the version check falls through without updating.
- fetchMock.mockResolvedValue({ ok: false });
- await runHook();
- // The "version up to date" branch is reached when latest is non-null
- // but not newer. Hard to hit deterministically without also mocking
- // the file read; covering the fetch-error branch (ok=false → null)
- // at least keeps the outer try from throwing.
- // Assert we did not log an autoupdate:
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("tolerates a fetch error (GitHub unreachable)", async () => {
- fetchMock.mockRejectedValue(new Error("network down"));
- await runHook();
- // Inner try/catch in getLatestVersion swallows; no autoupdate triggers.
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-});
-
-describe("session-start-setup hook — fatal catch", () => {
- it("catches a stdin throw and exits 0", async () => {
- stdinMock.mockRejectedValue(new Error("stdin boom"));
- const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never);
- await runHook();
- await new Promise(r => setImmediate(r));
- expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom");
- expect(exitSpy).toHaveBeenCalledWith(0);
- });
-});
-
-// Extra branch coverage: getLatestVersion edge cases + version-compare chain
-describe("session-start-setup hook — version helpers edge cases", () => {
- it("treats fetch with ok:false as no-new-version (line 61 branch)", async () => {
- fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("treats a response missing the 'version' field as null (?? null fallback)", async () => {
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-
- it("treats latest == current as 'up to date' (isNewer false)", async () => {
- // Force current to be a version that fetchMock exactly matches.
- // We can't change what getInstalledVersion reads from disk, but we
- // can make fetch return the installed version. With equal strings,
- // isNewer returns false and the else-branch fires.
- const pkg = JSON.parse(
- require("node:fs").readFileSync(
- require("node:path").join(
- __dirname, "..", ".claude-plugin", "plugin.json",
- ),
- "utf-8",
- ),
- );
- fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: pkg.version }) });
- await runHook();
- expect(execSyncMock).not.toHaveBeenCalled();
- });
-});
diff --git a/claude-code/tests/session-start.test.ts b/claude-code/tests/session-start.test.ts
index 0d311cf..858f544 100644
--- a/claude-code/tests/session-start.test.ts
+++ b/claude-code/tests/session-start.test.ts
@@ -137,6 +137,17 @@ describe("claude-code integration: session-start.js (sync hook)", () => {
expect(ctx).toMatch(/Logged in to Deeplake|Not logged in to Deeplake/);
});
+ it("steers recall tasks toward index-first exact file reads", () => {
+ const raw = runHook("session-start.js", baseInput);
+ const parsed = JSON.parse(raw);
+ const ctx = parsed.hookSpecificOutput.additionalContext;
+ expect(ctx).toContain("Always read index.md first");
+ expect(ctx).toContain("read that exact summary or session file directly");
+ expect(ctx).toContain("Do NOT probe unrelated local paths");
+ expect(ctx).toContain("answer with the smallest exact phrase supported by memory");
+ expect(ctx).toContain("convert the final answer into an absolute month/date/year");
+ });
+
it("completes within 3s with no credentials (no server calls)", () => {
const start = Date.now();
runHook("session-start.js", baseInput);
diff --git a/claude-code/tests/sessions-table.test.ts b/claude-code/tests/sessions-table.test.ts
index 8c65aa8..40a254f 100644
--- a/claude-code/tests/sessions-table.test.ts
+++ b/claude-code/tests/sessions-table.test.ts
@@ -77,11 +77,11 @@ function makeClient(memoryRows: Row[] = [], sessionRows: Row[] = []) {
// ── Tests ───────────────────────────────────────────────────────────────────
describe("DeeplakeFs — sessions table multi-row read", () => {
- it("reads session file by concatenating rows ordered by creation_date", async () => {
+ it("reads session file by normalizing rows ordered by creation_date", async () => {
const sessionRows: Row[] = [
{ path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"user_message","content":"hello"}', size_bytes: 40, mime_type: "application/json", creation_date: "2026-01-01T00:00:01Z" },
- { path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"tool_call","tool_name":"Read"}', size_bytes: 38, mime_type: "application/json", creation_date: "2026-01-01T00:00:02Z" },
- { path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"assistant_message","content":"done"}', size_bytes: 44, mime_type: "application/json", creation_date: "2026-01-01T00:00:03Z" },
+ { path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"assistant_message","content":"done"}', size_bytes: 44, mime_type: "application/json", creation_date: "2026-01-01T00:00:02Z" },
+ { path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"user_message","content":"bye"}', size_bytes: 42, mime_type: "application/json", creation_date: "2026-01-01T00:00:03Z" },
];
const client = makeClient([], sessionRows);
@@ -90,9 +90,9 @@ describe("DeeplakeFs — sessions table multi-row read", () => {
const content = await fs.readFile("/sessions/alice/alice_org_default_s1.jsonl");
const lines = content.split("\n");
expect(lines).toHaveLength(3);
- expect(JSON.parse(lines[0]).type).toBe("user_message");
- expect(JSON.parse(lines[1]).type).toBe("tool_call");
- expect(JSON.parse(lines[2]).type).toBe("assistant_message");
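+    // readFile now renders each session row as a "[role] content" line
+    // instead of echoing the raw JSONL payload.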
+ expect(lines[0]).toBe("[user] hello");
+ expect(lines[1]).toBe("[assistant] done");
+ expect(lines[2]).toBe("[user] bye");
});
it("preserves creation_date ordering even if inserted out of order", async () => {
@@ -121,9 +121,7 @@ describe("DeeplakeFs — sessions table multi-row read", () => {
const fs = await DeeplakeFs.create(client as never, "memory", "/", "sessions");
const content = await fs.readFile("/sessions/u/s1.jsonl");
- const parsed = JSON.parse(content);
- expect(parsed.type).toBe("user_message");
- expect(parsed.content).toBe("hi");
+ expect(content).toBe("[user] hi");
});
it("lists session files in directory listing", async () => {
diff --git a/claude-code/tests/summary-state.test.ts b/claude-code/tests/summary-state.test.ts
deleted file mode 100644
index 0c32a5d..0000000
--- a/claude-code/tests/summary-state.test.ts
+++ /dev/null
@@ -1,434 +0,0 @@
-import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
-import { mkdtempSync, rmSync, writeFileSync, existsSync, mkdirSync, readFileSync } from "node:fs";
-import { tmpdir } from "node:os";
-import { join, dirname } from "node:path";
-import { spawn } from "node:child_process";
-
-/**
- * Functional tests for summary-state. The module computes STATE_DIR from
- * homedir() at module-load time, so we redirect $HOME to a tmp dir BEFORE
- * importing. Every test uses a unique session id so there is no cross-test
- * contamination.
- *
- * What these tests pin down:
- * - bumpTotalCount seeds fresh state and increments existing state
- * - shouldTrigger fires the first summary at 10 events, obeys msg/time
- * cadence, and guards time-cadence with msgsSince > 0
- * - tryAcquireLock is mutually exclusive, reclaims stale locks, and rejects
- * held locks
- * - finalizeSummary advances lastSummaryCount and preserves the highest
- * observed totalCount
- * - loadTriggerConfig respects env overrides and falls back to defaults
- */
-
-let tmpHome: string;
-let mod: typeof import("../../src/hooks/summary-state.js");
-
-beforeAll(async () => {
- tmpHome = mkdtempSync(join(tmpdir(), "summary-state-test-"));
- process.env.HOME = tmpHome;
- mod = await import("../../src/hooks/summary-state.js");
-});
-
-afterAll(() => {
- try { rmSync(tmpHome, { recursive: true, force: true }); } catch { /* ignore */ }
-});
-
-const newSessionId = () => `test-${crypto.randomUUID()}`;
-
-describe("bumpTotalCount", () => {
- it("seeds fresh state with totalCount=1 and lastSummaryCount=0", () => {
- const sid = newSessionId();
- const state = mod.bumpTotalCount(sid);
- expect(state.totalCount).toBe(1);
- expect(state.lastSummaryCount).toBe(0);
- expect(typeof state.lastSummaryAt).toBe("number");
- });
-
- it("increments existing totalCount and preserves lastSummaryAt/lastSummaryCount", () => {
- const sid = newSessionId();
- const first = mod.bumpTotalCount(sid);
- const second = mod.bumpTotalCount(sid);
- const third = mod.bumpTotalCount(sid);
- expect(second.totalCount).toBe(2);
- expect(third.totalCount).toBe(3);
- expect(second.lastSummaryAt).toBe(first.lastSummaryAt);
- expect(third.lastSummaryCount).toBe(0);
- });
-});
-
-describe("shouldTrigger", () => {
- const cfg = { everyNMessages: 50, everyHours: 2 };
-
- it("does NOT fire before 10 events on a fresh session", () => {
- const now = Date.now();
- for (let n = 1; n <= 9; n++) {
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 0, totalCount: n }, cfg, now,
- )).toBe(false);
- }
- });
-
- it("fires the first summary at exactly 10 events", () => {
- const now = Date.now();
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 0, totalCount: 10 }, cfg, now,
- )).toBe(true);
- });
-
- it("fires when msgsSince reaches everyNMessages", () => {
- const now = Date.now();
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 59 }, cfg, now,
- )).toBe(false);
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 60 }, cfg, now,
- )).toBe(true);
- });
-
- it("fires when enough time has elapsed and there is at least one new event", () => {
- const now = Date.now();
- const twoHoursAgo = now - 2 * 3600 * 1000;
- expect(mod.shouldTrigger(
- { lastSummaryAt: twoHoursAgo, lastSummaryCount: 10, totalCount: 11 }, cfg, now,
- )).toBe(true);
- });
-
- it("does NOT fire on time alone when no new events have arrived", () => {
- const now = Date.now();
- const twoHoursAgo = now - 2 * 3600 * 1000;
- expect(mod.shouldTrigger(
- { lastSummaryAt: twoHoursAgo, lastSummaryCount: 42, totalCount: 42 }, cfg, now,
- )).toBe(false);
- });
-
- it("does NOT fire when below both thresholds", () => {
- const now = Date.now();
- expect(mod.shouldTrigger(
- { lastSummaryAt: now - 30 * 60 * 1000, lastSummaryCount: 10, totalCount: 30 }, cfg, now,
- )).toBe(false);
- });
-});
-
-describe("tryAcquireLock", () => {
- it("succeeds on a fresh session and blocks a second acquire", () => {
- const sid = newSessionId();
- expect(mod.tryAcquireLock(sid)).toBe(true);
- expect(mod.tryAcquireLock(sid)).toBe(false);
- mod.releaseLock(sid);
- });
-
- it("reclaims a stale lock past maxAge", () => {
- const sid = newSessionId();
- // Seed a stale lock file directly: timestamp well in the past.
- const p = mod.lockPath(sid);
- mkdirSync(dirname(p), { recursive: true });
- writeFileSync(p, String(Date.now() - 11 * 60 * 1000));
- // 10-minute default maxAge: the stale lock must be reclaimed.
- expect(mod.tryAcquireLock(sid)).toBe(true);
- mod.releaseLock(sid);
- });
-
- it("honors a fresh lock younger than maxAge", () => {
- const sid = newSessionId();
- expect(mod.tryAcquireLock(sid)).toBe(true);
- // Second acquire must fail — lock timestamp is ~now, well inside maxAge.
- expect(mod.tryAcquireLock(sid)).toBe(false);
- mod.releaseLock(sid);
- });
-
- it("releaseLock on a non-existent lock is a no-op", () => {
- const sid = newSessionId();
- expect(() => mod.releaseLock(sid)).not.toThrow();
- });
-
- it("treats an unreadable lock (non-numeric contents) as stale", () => {
- const sid = newSessionId();
- const p = mod.lockPath(sid);
- mkdirSync(dirname(p), { recursive: true });
- writeFileSync(p, "garbage-not-a-number");
- expect(mod.tryAcquireLock(sid)).toBe(true);
- mod.releaseLock(sid);
- });
-});
-
-describe("finalizeSummary", () => {
- it("sets lastSummaryCount to the jsonl line count and advances lastSummaryAt", () => {
- const sid = newSessionId();
- mod.bumpTotalCount(sid);
- mod.bumpTotalCount(sid);
- const before = Date.now();
- mod.finalizeSummary(sid, 2);
- // Re-read: totalCount must be preserved (max of previous and jsonlLines)
- const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8"));
- expect(s.lastSummaryCount).toBe(2);
- expect(s.totalCount).toBe(2);
- expect(s.lastSummaryAt).toBeGreaterThanOrEqual(before);
- });
-
- it("preserves totalCount when jsonlLines is lower than totalCount", () => {
- const sid = newSessionId();
- for (let i = 0; i < 5; i++) mod.bumpTotalCount(sid);
- mod.finalizeSummary(sid, 3);
- const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8"));
- expect(s.lastSummaryCount).toBe(3);
- expect(s.totalCount).toBe(5);
- });
-
- it("handles missing prior state (no earlier bumpTotalCount)", () => {
- const sid = newSessionId();
- mod.finalizeSummary(sid, 4);
- const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8"));
- expect(s.lastSummaryCount).toBe(4);
- expect(s.totalCount).toBe(4);
- });
-});
-
-describe("loadTriggerConfig", () => {
- const origN = process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS;
- const origH = process.env.HIVEMIND_SUMMARY_EVERY_HOURS;
-
- beforeEach(() => {
- delete process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS;
- delete process.env.HIVEMIND_SUMMARY_EVERY_HOURS;
- });
-
- afterAll(() => {
- if (origN !== undefined) process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = origN;
- if (origH !== undefined) process.env.HIVEMIND_SUMMARY_EVERY_HOURS = origH;
- });
-
- it("falls back to defaults when env vars are unset", () => {
- const cfg = mod.loadTriggerConfig();
- expect(cfg.everyNMessages).toBe(50);
- expect(cfg.everyHours).toBe(2);
- });
-
- it("respects valid env overrides", () => {
- process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = "30";
- process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "1";
- const cfg = mod.loadTriggerConfig();
- expect(cfg.everyNMessages).toBe(30);
- expect(cfg.everyHours).toBe(1);
- });
-
- it("ignores invalid values and uses defaults", () => {
- process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = "not-a-number";
- process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "-5";
- const cfg = mod.loadTriggerConfig();
- expect(cfg.everyNMessages).toBe(50);
- expect(cfg.everyHours).toBe(2);
- });
-
- it("accepts fractional hours", () => {
- process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "0.5";
- const cfg = mod.loadTriggerConfig();
- expect(cfg.everyHours).toBe(0.5);
- });
-});
-
-describe("state files live under $HOME/.claude/hooks/summary-state/", () => {
- it("writeState creates the directory and writes JSON", () => {
- const sid = newSessionId();
- mod.bumpTotalCount(sid);
- const expected = join(tmpHome, ".claude", "hooks", "summary-state", `${sid}.json`);
- expect(existsSync(expected)).toBe(true);
- });
-});
-
-// ══════════════════════════════════════════════════════════════════════════════
-// Edge-case and integration tests — these pin down the full periodic-summary
-// state machine and the bounds that the capture hook relies on.
-// ══════════════════════════════════════════════════════════════════════════════
-
-describe("shouldTrigger — boundary conditions", () => {
- const cfg = { everyNMessages: 50, everyHours: 2 };
-
- it("first-summary rule only applies while lastSummaryCount is 0", () => {
- const now = Date.now();
- // lastSummaryCount > 0 means the first-summary path is no longer active:
- // totalCount=15 with lastSummaryCount=10 is 5 new messages, well below 50.
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 15 }, cfg, now,
- )).toBe(false);
- });
-
- it("time trigger fires exactly at the cadence boundary", () => {
- const now = Date.now();
- const twoHoursExact = now - 2 * 3600 * 1000;
- expect(mod.shouldTrigger(
- { lastSummaryAt: twoHoursExact, lastSummaryCount: 10, totalCount: 11 }, cfg, now,
- )).toBe(true);
- });
-
- it("time trigger does NOT fire just below the cadence boundary", () => {
- const now = Date.now();
- const justUnder = now - (2 * 3600 * 1000 - 1);
- expect(mod.shouldTrigger(
- { lastSummaryAt: justUnder, lastSummaryCount: 10, totalCount: 11 }, cfg, now,
- )).toBe(false);
- });
-
- it("msg trigger respects custom everyNMessages", () => {
- const now = Date.now();
- const tightCfg = { everyNMessages: 3, everyHours: 999 };
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 12 }, tightCfg, now,
- )).toBe(false);
- expect(mod.shouldTrigger(
- { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 13 }, tightCfg, now,
- )).toBe(true);
- });
-});
-
-describe("tryAcquireLock — age boundaries and custom maxAge", () => {
- it("honors a custom maxAgeMs (short TTL reclaims quickly)", async () => {
- const sid = newSessionId();
- expect(mod.tryAcquireLock(sid, 50)).toBe(true);
- // With a 50ms TTL, sleep past the window and try again as if from a new process.
- await new Promise(r => setTimeout(r, 80));
- // The existing lock must now look stale even though the current process
- // holds it — a separate caller (simulated here) would reclaim it.
- expect(mod.tryAcquireLock(sid, 50)).toBe(true);
- mod.releaseLock(sid);
- });
-
- it("a lock timestamp of exactly Date.now() is considered fresh", () => {
- const sid = newSessionId();
- const p = mod.lockPath(sid);
- mkdirSync(dirname(p), { recursive: true });
- writeFileSync(p, String(Date.now()));
- expect(mod.tryAcquireLock(sid)).toBe(false);
- try { rmSync(p); } catch { /* ignore */ }
- });
-
- it("a lock timestamp from the future (clock skew) is treated as fresh", () => {
- const sid = newSessionId();
- const p = mod.lockPath(sid);
- mkdirSync(dirname(p), { recursive: true });
- writeFileSync(p, String(Date.now() + 60_000));
- // ageMs is negative (< maxAgeMs), so the lock is held.
- expect(mod.tryAcquireLock(sid)).toBe(false);
- try { rmSync(p); } catch { /* ignore */ }
- });
-});
-
-describe("full periodic-summary cycle", () => {
- it("bump → trigger → acquire → finalize → next bump no longer triggers", () => {
- const sid = newSessionId();
- const cfg = { everyNMessages: 50, everyHours: 24 };
-
- // Bump 9 times — first-summary threshold is 10, so nothing yet.
- for (let i = 0; i < 9; i++) {
- const s = mod.bumpTotalCount(sid);
- expect(mod.shouldTrigger(s, cfg)).toBe(false);
- }
-
- // 10th bump crosses the first-summary threshold.
- const tenth = mod.bumpTotalCount(sid);
- expect(tenth.totalCount).toBe(10);
- expect(mod.shouldTrigger(tenth, cfg)).toBe(true);
-
- // Acquire the lock so the capture hook would spawn exactly one worker.
- expect(mod.tryAcquireLock(sid)).toBe(true);
- // A second capture within the same window cannot acquire — this is what
- // prevents duplicate workers when events arrive in quick succession.
- expect(mod.tryAcquireLock(sid)).toBe(false);
-
- // Worker finishes: finalize + release.
- mod.finalizeSummary(sid, 10);
- mod.releaseLock(sid);
-
- // Next bump: lastSummaryCount is now 10, msgsSince=1, well below 50.
- const eleventh = mod.bumpTotalCount(sid);
- expect(eleventh.lastSummaryCount).toBe(10);
- expect(eleventh.totalCount).toBe(11);
- expect(mod.shouldTrigger(eleventh, cfg)).toBe(false);
- });
-
- it("second summary fires after everyNMessages messages past lastSummaryCount", () => {
- const sid = newSessionId();
- const cfg = { everyNMessages: 50, everyHours: 24 };
-
- // Fast-forward state as if a first summary already landed at 10.
- for (let i = 0; i < 10; i++) mod.bumpTotalCount(sid);
- mod.finalizeSummary(sid, 10);
-
- // Bump 49 more times: msgsSince=49, still below 50.
- for (let i = 0; i < 49; i++) {
- const s = mod.bumpTotalCount(sid);
- expect(mod.shouldTrigger(s, cfg)).toBe(false);
- }
-
- // 50th bump past lastSummaryCount triggers.
- const trigger = mod.bumpTotalCount(sid);
- expect(trigger.totalCount).toBe(60);
- expect(mod.shouldTrigger(trigger, cfg)).toBe(true);
- });
-
- it("releaseLock is idempotent across calls", () => {
- const sid = newSessionId();
- mod.tryAcquireLock(sid);
- mod.releaseLock(sid);
- expect(() => mod.releaseLock(sid)).not.toThrow();
- expect(() => mod.releaseLock(sid)).not.toThrow();
- // After release, a fresh acquire must succeed again.
- expect(mod.tryAcquireLock(sid)).toBe(true);
- mod.releaseLock(sid);
- });
-});
-
-describe("cross-process concurrency", () => {
- // Each subprocess imports summary-state with the same $HOME + a sessionId
- // passed via env var. The file-based RMW lock is the ONLY thing preventing
- // lost updates (bumpTotalCount) and preventing multiple winners
- // (tryAcquireLock) across processes, so these tests are a real stress test
- // of the lock. Session id comes via env (TEST_SID) because tsx's `-e` flag
- // does not forward positional args reliably across node versions.
- const modPath = new URL("../../src/hooks/summary-state.ts", import.meta.url).pathname;
-
- const runParallel = async (code: string, N: number, sid: string): Promise<string[]> => {
- const runs = Array.from({ length: N }, () =>
- new Promise<string>((resolve, reject) => {
- const child = spawn("npx", ["tsx", "-e", code], {
- env: { ...process.env, HOME: tmpHome, TEST_SID: sid },
- stdio: ["ignore", "pipe", "pipe"],
- });
- let out = "";
- child.stdout.on("data", (d: Buffer) => { out += d.toString(); });
- child.on("exit", (c: number | null) => c === 0 ? resolve(out) : reject(new Error(`exit ${c}`)));
- child.on("error", reject);
- }),
- );
- return Promise.all(runs);
- };
-
- it("N parallel subprocesses each bump once and the total equals N", async () => {
- const sid = newSessionId();
- const N = 8;
- const code =
- `import("${modPath}").then(m => { ` +
- ` const s = m.bumpTotalCount(process.env.TEST_SID); ` +
- ` process.stdout.write(String(s.totalCount)); ` +
- `});`;
-
- await runParallel(code, N, sid);
-
- const finalState = JSON.parse(readFileSync(mod.statePath(sid), "utf-8"));
- expect(finalState.totalCount).toBe(N);
- }, 30_000);
-
- it("N parallel subprocesses racing on tryAcquireLock — exactly one wins", async () => {
- const sid = newSessionId();
- const N = 8;
- const code =
- `import("${modPath}").then(m => { ` +
- ` process.stdout.write(m.tryAcquireLock(process.env.TEST_SID) ? "1" : "0"); ` +
- `});`;
-
- const results = await runParallel(code, N, sid);
- const winners = results.filter(r => r === "1").length;
- expect(winners).toBe(1);
- mod.releaseLock(sid);
- }, 30_000);
-});
diff --git a/claude-code/tests/version-check.test.ts b/claude-code/tests/version-check.test.ts
new file mode 100644
index 0000000..4d01aad
--- /dev/null
+++ b/claude-code/tests/version-check.test.ts
@@ -0,0 +1,227 @@
+import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
+import { mkdirSync, rmSync, writeFileSync } from "node:fs";
+import { dirname, join } from "node:path";
+import { tmpdir } from "node:os";
+import {
+ getInstalledVersion,
+ getLatestVersionCached,
+ isNewer,
+ readFreshCachedLatestVersion,
+ readVersionCache,
+ writeVersionCache,
+} from "../../src/hooks/version-check.js";
+
+describe("version-check utilities", () => {
+ it("compares semantic versions", () => {
+ expect(isNewer("0.7.0", "0.6.37")).toBe(true);
+ expect(isNewer("0.6.37", "0.6.37")).toBe(false);
+ expect(isNewer("0.6.36", "0.6.37")).toBe(false);
+ });
+});
+
+describe("getInstalledVersion", () => {
+ let root: string;
+
+ beforeEach(() => {
+ root = join(tmpdir(), `hivemind-version-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ mkdirSync(root, { recursive: true });
+ });
+
+ afterEach(() => {
+ rmSync(root, { recursive: true, force: true });
+ });
+
+ it("prefers plugin manifest when present", () => {
+ const bundleDir = join(root, "claude-code", "bundle");
+ mkdirSync(join(root, "claude-code", ".claude-plugin"), { recursive: true });
+ mkdirSync(bundleDir, { recursive: true });
+ writeFileSync(join(root, "claude-code", ".claude-plugin", "plugin.json"), JSON.stringify({ version: "0.6.37" }));
+ writeFileSync(join(root, "package.json"), JSON.stringify({ name: "hivemind", version: "0.1.0" }));
+
+ expect(getInstalledVersion(bundleDir, ".claude-plugin")).toBe("0.6.37");
+ });
+
+ it("falls back to package.json when plugin manifest has no version", () => {
+ const bundleDir = join(root, "claude-code", "bundle");
+ mkdirSync(join(root, "claude-code", ".claude-plugin"), { recursive: true });
+ mkdirSync(bundleDir, { recursive: true });
+ writeFileSync(join(root, "claude-code", ".claude-plugin", "plugin.json"), JSON.stringify({ name: "hivemind" }));
+ writeFileSync(join(root, "package.json"), JSON.stringify({ name: "hivemind", version: "0.6.41" }));
+
+ expect(getInstalledVersion(bundleDir, ".claude-plugin")).toBe("0.6.41");
+ });
+
+ it("walks up to package.json when plugin manifest is absent", () => {
+ const bundleDir = join(root, "codex", "bundle");
+ mkdirSync(bundleDir, { recursive: true });
+ writeFileSync(join(root, "package.json"), JSON.stringify({ name: "hivemind-codex", version: "0.6.40" }));
+
+ expect(getInstalledVersion(bundleDir, ".codex-plugin")).toBe("0.6.40");
+ });
+
+ it("returns null when neither plugin.json nor a matching package.json exists", () => {
+ const bundleDir = join(root, "bundle");
+ mkdirSync(bundleDir, { recursive: true });
+ writeFileSync(join(root, "package.json"), JSON.stringify({ name: "other-package", version: "1.0.0" }));
+
+ expect(getInstalledVersion(bundleDir, ".claude-plugin")).toBeNull();
+ });
+
+ it("returns null when the plugin manifest is invalid json and no package matches", () => {
+ const bundleDir = join(root, "claude-code", "bundle");
+ mkdirSync(join(root, "claude-code", ".claude-plugin"), { recursive: true });
+ mkdirSync(bundleDir, { recursive: true });
+ writeFileSync(join(root, "claude-code", ".claude-plugin", "plugin.json"), "{bad-json");
+
+ expect(getInstalledVersion(bundleDir, ".claude-plugin")).toBeNull();
+ });
+});
+
+describe("version cache", () => {
+ let cachePath: string;
+
+ beforeEach(() => {
+ cachePath = join(tmpdir(), `hivemind-cache-${Date.now()}-${Math.random().toString(36).slice(2)}`, "version.json");
+ mkdirSync(dirname(cachePath), { recursive: true });
+ });
+
+ afterEach(() => {
+ rmSync(dirname(cachePath), { recursive: true, force: true });
+ vi.restoreAllMocks();
+ });
+
+ it("reads and writes cache entries", () => {
+ writeVersionCache({ checkedAt: 123, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
+ expect(readVersionCache(cachePath)).toEqual({
+ checkedAt: 123,
+ latest: "0.6.38",
+ url: "https://example.com/pkg.json",
+ });
+ });
+
+ it("returns fresh cached version within ttl", () => {
+ writeVersionCache({ checkedAt: 1_000, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
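+ // checkedAt=1_000 with ttl=500 stays fresh through nowMs=1_500 inclusive and expires at 1_600.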
+ expect(readFreshCachedLatestVersion("https://example.com/pkg.json", 500, cachePath, 1_400)).toBe("0.6.38");
+ expect(readFreshCachedLatestVersion("https://example.com/pkg.json", 500, cachePath, 1_500)).toBe("0.6.38");
+ expect(readFreshCachedLatestVersion("https://example.com/pkg.json", 500, cachePath, 1_600)).toBeUndefined();
+ });
+
+ it("returns null for invalid cache files and url mismatches", () => {
+ writeFileSync(cachePath, JSON.stringify({ checkedAt: "bad", latest: 42, url: 123 }));
+ expect(readVersionCache(cachePath)).toBeNull();
+ expect(readFreshCachedLatestVersion("https://other.example.com/pkg.json", 500, cachePath, 1_200)).toBeUndefined();
+ });
+
+ it("uses cached value without fetching when cache is fresh", async () => {
+ writeVersionCache({ checkedAt: 1_000, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
+ const fetchImpl = vi.fn();
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ ttlMs: 500,
+ cachePath,
+ nowMs: 1_400,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
+ expect(latest).toBe("0.6.38");
+ expect(fetchImpl).not.toHaveBeenCalled();
+ });
+
+ it("fetches and caches when cache is stale", async () => {
+ writeVersionCache({ checkedAt: 1_000, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
+ const fetchImpl = vi.fn(async () => ({
+ ok: true,
+ json: async () => ({ version: "0.6.40" }),
+ }));
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ ttlMs: 100,
+ cachePath,
+ nowMs: 2_000,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
+ expect(latest).toBe("0.6.40");
+ expect(fetchImpl).toHaveBeenCalledTimes(1);
+ expect(readVersionCache(cachePath)?.latest).toBe("0.6.40");
+ });
+
+ it("writes null when a successful fetch returns no version field", async () => {
+ const fetchImpl = vi.fn(async () => ({
+ ok: true,
+ json: async () => ({ name: "hivemind" }),
+ }));
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ cachePath,
+ nowMs: 2_000,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
+ expect(latest).toBeNull();
+ expect(readVersionCache(cachePath)?.latest).toBeNull();
+ });
+
+ it("falls back to stale cached value on non-ok fetch responses", async () => {
+ writeVersionCache({ checkedAt: 1_000, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
+ const fetchImpl = vi.fn(async () => ({
+ ok: false,
+ json: async () => ({ version: "0.6.40" }),
+ }));
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ ttlMs: 100,
+ cachePath,
+ nowMs: 2_000,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
+ expect(latest).toBe("0.6.38");
+ expect(readVersionCache(cachePath)?.latest).toBe("0.6.38");
+ });
+
+ it("reuses stale cached value on fetch failure and refreshes checkedAt", async () => {
+ writeVersionCache({ checkedAt: 1_000, latest: "0.6.38", url: "https://example.com/pkg.json" }, cachePath);
+ const fetchImpl = vi.fn(async () => { throw new Error("network down"); });
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ ttlMs: 100,
+ cachePath,
+ nowMs: 2_000,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
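+ // checkedAt is refreshed even on failure, so subsequent hooks back off for a full TTL before retrying.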
+ expect(latest).toBe("0.6.38");
+ expect(readVersionCache(cachePath)?.checkedAt).toBe(2_000);
+ });
+
+ it("returns null and still writes cache state when fetch fails without stale cache", async () => {
+ const fetchImpl = vi.fn(async () => { throw new Error("network down"); });
+
+ const latest = await getLatestVersionCached({
+ url: "https://example.com/pkg.json",
+ timeoutMs: 3000,
+ ttlMs: 100,
+ cachePath,
+ nowMs: 2_000,
+ fetchImpl: fetchImpl as unknown as typeof fetch,
+ });
+
+ expect(latest).toBeNull();
+ expect(readVersionCache(cachePath)).toEqual({
+ checkedAt: 2_000,
+ latest: null,
+ url: "https://example.com/pkg.json",
+ });
+ });
+});
diff --git a/claude-code/tests/virtual-table-query.test.ts b/claude-code/tests/virtual-table-query.test.ts
new file mode 100644
index 0000000..bcace78
--- /dev/null
+++ b/claude-code/tests/virtual-table-query.test.ts
@@ -0,0 +1,221 @@
+import { describe, expect, it, vi } from "vitest";
+import {
+ buildVirtualIndexContent,
+ findVirtualPaths,
+ listVirtualPathRowsForDirs,
+ listVirtualPathRows,
+ readVirtualPathContents,
+ readVirtualPathContent,
+} from "../../src/hooks/virtual-table-query.js";
+
+describe("virtual-table-query", () => {
+ it("builds a synthetic virtual index", () => {
+ const content = buildVirtualIndexContent([
+ {
+ path: "/summaries/alice/s1.md",
+ project: "repo",
+ description: "session summary",
+ creation_date: "2026-01-01T00:00:00.000Z",
+ },
+ ]);
+ expect(content).toContain("# Memory Index");
+ expect(content).toContain("/summaries/alice/s1.md");
+ });
+
+ it("builds index rows when project metadata is missing", () => {
+ const content = buildVirtualIndexContent([
+ {
+ path: "/summaries/alice/s2.md",
+ },
+ ]);
+ expect(content).toContain("/summaries/alice/s2.md");
+ expect(content).toContain("# Memory Index");
+ });
+
+ it("prefers a memory-table hit for exact path reads", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a.md", content: "summary body", source_order: 0 },
+ ]),
+ } as any;
+
+ const content = await readVirtualPathContent(api, "memory", "sessions", "/summaries/a.md");
+
+ expect(content).toBe("summary body");
+ expect(api.query).toHaveBeenCalledTimes(1);
+ });
+
+ it("returns an empty map when no virtual paths are requested", async () => {
+ const api = { query: vi.fn() } as any;
+
+ const content = await readVirtualPathContents(api, "memory", "sessions", []);
+
+ expect(content).toEqual(new Map());
+ expect(api.query).not.toHaveBeenCalled();
+ });
+
+ it("normalizes session rows for exact path reads", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/sessions/a.jsonl", content: "{\"type\":\"user_message\",\"content\":\"hello\"}", source_order: 1 },
+ { path: "/sessions/a.jsonl", content: "{\"type\":\"assistant_message\",\"content\":\"hi\"}", source_order: 1 },
+ ]),
+ } as any;
+
+ const content = await readVirtualPathContent(api, "memory", "sessions", "/sessions/a.jsonl");
+
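+ // Session JSONL events are rendered one per line as "[role] content".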
+ expect(content).toBe("[user] hello\n[assistant] hi");
+ });
+
+ it("reads multiple exact paths in a single query and synthesizes /index.md when needed", async () => {
+ const api = {
+ query: vi.fn()
+ .mockResolvedValueOnce([
+ { path: "/summaries/a.md", content: "summary body", source_order: 0 },
+ ])
+ .mockResolvedValueOnce([
+ {
+ path: "/summaries/alice/s1.md",
+ project: "repo",
+ description: "session summary",
+ creation_date: "2026-01-01T00:00:00.000Z",
+ },
+ ]),
+ } as any;
+
+ const content = await readVirtualPathContents(api, "memory", "sessions", ["/summaries/a.md", "/index.md"]);
+
+ expect(content.get("/summaries/a.md")).toBe("summary body");
+ expect(content.get("/index.md")).toContain("# Memory Index");
+ expect(api.query).toHaveBeenCalledTimes(2);
+ });
+
+ it("ignores invalid exact-read rows before merging content", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: 42, content: "bad", source_order: 0 },
+ { path: "/summaries/a.md", content: 7, source_order: 0 },
+ { path: "/summaries/a.md", content: "summary body", source_order: 0 },
+ ]),
+ } as any;
+
+ const content = await readVirtualPathContents(api, "memory", "sessions", ["/summaries/a.md"]);
+
+ expect(content.get("/summaries/a.md")).toBe("summary body");
+ });
+
+ it("merges and de-duplicates rows for directory listings", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a.md", size_bytes: 10, source_order: 0 },
+ { path: "/shared.md", size_bytes: 11, source_order: 0 },
+ { path: "/sessions/a.jsonl", size_bytes: 12, source_order: 1 },
+ { path: "/shared.md", size_bytes: 13, source_order: 1 },
+ ]),
+ } as any;
+
+ const rows = await listVirtualPathRows(api, "memory", "sessions", "/");
+
+ expect(rows).toEqual([
+ { path: "/summaries/a.md", size_bytes: 10 },
+ { path: "/shared.md", size_bytes: 11 },
+ { path: "/sessions/a.jsonl", size_bytes: 12 },
+ ]);
+ });
+
+ it("batches directory listing rows for multiple directories", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a/file1.md", size_bytes: 10, source_order: 0 },
+ { path: "/summaries/b/file2.md", size_bytes: 20, source_order: 0 },
+ ]),
+ } as any;
+
+ const rows = await listVirtualPathRowsForDirs(api, "memory", "sessions", ["/summaries/a", "/summaries/b"]);
+
+ expect(rows.get("/summaries/a")).toEqual([{ path: "/summaries/a/file1.md", size_bytes: 10 }]);
+ expect(rows.get("/summaries/b")).toEqual([{ path: "/summaries/b/file2.md", size_bytes: 20 }]);
+ expect(api.query).toHaveBeenCalledTimes(1);
+ });
+
+ it("lists root directories without adding a path filter and ignores invalid row paths", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a/file1.md", size_bytes: 10, source_order: 0 },
+ { path: 42, size_bytes: 20, source_order: 0 },
+ ]),
+ } as any;
+
+ const rows = await listVirtualPathRowsForDirs(api, "memory", "sessions", ["/"]);
+
+ expect(rows.get("/")).toEqual([{ path: "/summaries/a/file1.md", size_bytes: 10 }]);
+ expect((api.query.mock.calls[0]?.[0] as string) ?? "").not.toContain("WHERE path LIKE");
+ });
+
+ it("merges and de-duplicates path search results", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a.md", source_order: 0 },
+ { path: "/shared.md", source_order: 0 },
+ { path: "/sessions/a.jsonl", source_order: 1 },
+ { path: "/shared.md", source_order: 1 },
+ ]),
+ } as any;
+
+ const paths = await findVirtualPaths(api, "memory", "sessions", "/", "%.md");
+
+ expect(paths).toEqual(["/summaries/a.md", "/shared.md", "/sessions/a.jsonl"]);
+ });
+
+ it("falls back to per-table queries when the union query fails", async () => {
+ const api = {
+ query: vi.fn()
+ .mockRejectedValueOnce(new Error("bad union"))
+ .mockResolvedValueOnce([{ path: "/summaries/a.md", content: "summary body", source_order: 0 }])
+ .mockResolvedValueOnce([]),
+ } as any;
+
+ const content = await readVirtualPathContent(api, "memory", "sessions", "/summaries/a.md");
+
+ expect(content).toBe("summary body");
+ expect(api.query).toHaveBeenCalledTimes(3);
+ });
+
+ it("returns null when union and fallback queries all fail", async () => {
+ const api = {
+ query: vi.fn()
+ .mockRejectedValueOnce(new Error("bad union"))
+ .mockRejectedValueOnce(new Error("memory down"))
+ .mockRejectedValueOnce(new Error("sessions down")),
+ } as any;
+
+ const content = await readVirtualPathContent(api, "memory", "sessions", "/summaries/a.md");
+
+ expect(content).toBeNull();
+ expect(api.query).toHaveBeenCalledTimes(3);
+ });
+
+ it("filters invalid paths from find results", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([
+ { path: "/summaries/a.md", source_order: 0 },
+ { path: "", source_order: 0 },
+ { path: 123, source_order: 1 },
+ ]),
+ } as any;
+
+ const paths = await findVirtualPaths(api, "memory", "sessions", "/", "%.md");
+
+ expect(paths).toEqual(["/summaries/a.md"]);
+ });
+
+ it("normalizes non-root find directories before building the LIKE path", async () => {
+ const api = {
+ query: vi.fn().mockResolvedValueOnce([]),
+ } as any;
+
+ await findVirtualPaths(api, "memory", "sessions", "/summaries/a///", "%.md");
+
+ expect(String(api.query.mock.calls[0]?.[0])).toContain("path LIKE '/summaries/a/%'");
+ });
+});
diff --git a/claude-code/tests/wiki-worker.test.ts b/claude-code/tests/wiki-worker.test.ts
deleted file mode 100644
index f287cc1..0000000
--- a/claude-code/tests/wiki-worker.test.ts
+++ /dev/null
@@ -1,422 +0,0 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import { mkdtempSync, rmSync, writeFileSync, readFileSync, existsSync } from "node:fs";
-import { tmpdir } from "node:os";
-import { join } from "node:path";
-
-/**
- * Direct source-level tests for src/hooks/wiki-worker.ts. The module
- * reads its config JSON from process.argv[2] at module load, then
- * runs main() immediately. Each scenario writes a fresh config file
- * under a tmp dir, points process.argv[2] at it, wires the mocks, and
- * dynamically imports the worker.
- *
- * Mocks:
- * - global.fetch (the query() helper)
- * - child_process.execFileSync (the claude -p invocation)
- * - summary-state (finalizeSummary + releaseLock)
- * - upload-summary (uploadSummary)
- *
- * fs stays real: the worker writes the reconstructed JSONL and the
- * summary markdown to the tmp dir, and main() reads the summary back
- * after claude -p has "written" it. The execFileSync mock simulates
- * claude by writing the summary file directly, which is how the real
- * binary behaves from the worker's perspective.
- */
-
-const finalizeSummaryMock = vi.fn();
-const releaseLockMock = vi.fn();
-const uploadSummaryMock = vi.fn();
-const execFileSyncMock = vi.fn();
-
-vi.mock("../../src/hooks/summary-state.js", () => ({
- finalizeSummary: (...a: any[]) => finalizeSummaryMock(...a),
- releaseLock: (...a: any[]) => releaseLockMock(...a),
-}));
-vi.mock("../../src/hooks/upload-summary.js", () => ({
- uploadSummary: (...a: any[]) => uploadSummaryMock(...a),
-}));
-vi.mock("node:child_process", async () => {
- const actual = await vi.importActual("node:child_process");
- return { ...actual, execFileSync: (...a: any[]) => execFileSyncMock(...a) };
-});
-
-const originalFetch = global.fetch;
-const fetchMock = vi.fn();
-
-const originalArgv2 = process.argv[2];
-
-let rootDir: string; // shared parent — NOT removed by the worker
-let tmpDir: string; // worker's tmpDir, rmSync'd in cleanup()
-let hooksDir: string; // wiki.log lives here; must outlive tmpDir
-let configPath: string;
-
-const defaultConfig = () => ({
- apiUrl: "http://fake.local",
- token: "tok",
- orgId: "org",
- workspaceId: "default",
- memoryTable: "memory",
- sessionsTable: "sessions",
- sessionId: "sid-worker",
- userName: "alice",
- project: "proj",
- tmpDir,
- claudeBin: "/fake/claude",
- wikiLog: join(hooksDir, "wiki.log"),
- hooksDir,
- promptTemplate: "JSONL=__JSONL__ SUMMARY=__SUMMARY__ SID=__SESSION_ID__ PROJ=__PROJECT__ OFFSET=__PREV_OFFSET__ LINES=__JSONL_LINES__ SRC=__JSONL_SERVER_PATH__",
-});
-
-function writeConfig(overrides: Partial<ReturnType<typeof defaultConfig>> = {}): void {
- const cfg = { ...defaultConfig(), ...overrides };
- writeFileSync(configPath, JSON.stringify(cfg));
-}
-
-function jsonResp(body: unknown, ok = true, status = 200): Response {
- return {
- ok,
- status,
- json: async () => body,
- text: async () => typeof body === "string" ? body : JSON.stringify(body),
- } as Response;
-}
-
-async function runWorker(): Promise<void> {
- vi.resetModules();
- // @ts-expect-error
- global.fetch = fetchMock;
- await import("../../src/hooks/wiki-worker.js");
- // Let main() and all its awaits complete.
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
- await new Promise(r => setImmediate(r));
-}
-
-beforeEach(() => {
- rootDir = mkdtempSync(join(tmpdir(), "wiki-worker-test-"));
- tmpDir = join(rootDir, "tmp");
- hooksDir = join(rootDir, "hooks");
- // The worker will mkdir hooksDir lazily via wlog, but it needs tmpDir
- // to exist for writeFileSync(tmpJsonl, ...).
- require("node:fs").mkdirSync(tmpDir, { recursive: true });
- require("node:fs").mkdirSync(hooksDir, { recursive: true });
- configPath = join(rootDir, "config.json");
- writeConfig();
- process.argv[2] = configPath;
- fetchMock.mockReset();
- finalizeSummaryMock.mockReset();
- releaseLockMock.mockReset();
- uploadSummaryMock.mockReset().mockResolvedValue({ path: "insert", summaryLength: 100, descLength: 20, sql: "..." });
- execFileSyncMock.mockReset();
-});
-
-afterEach(() => {
- // @ts-expect-error
- global.fetch = originalFetch;
- process.argv[2] = originalArgv2;
- try { rmSync(rootDir, { recursive: true, force: true }); } catch { /* ignore */ }
- vi.restoreAllMocks();
-});
-
-// ═══ early exit: zero events ═══════════════════════════════════════════════
-
-describe("wiki-worker — no events", () => {
- it("exits early when the sessions table has no rows for this session", async () => {
- fetchMock.mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] }));
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("no session events found — exiting");
- expect(execFileSyncMock).not.toHaveBeenCalled();
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(finalizeSummaryMock).not.toHaveBeenCalled();
- // The finally block must still release the lock.
- expect(releaseLockMock).toHaveBeenCalledWith("sid-worker");
- });
-
- it("treats a response with null rows/columns as empty", async () => {
- fetchMock.mockResolvedValue(jsonResp({}));
- await runWorker();
- expect(execFileSyncMock).not.toHaveBeenCalled();
- expect(releaseLockMock).toHaveBeenCalled();
- });
-});
-
-// ═══ happy path: events + claude -p + upload ═══════════════════════════════
-
-describe("wiki-worker — happy path", () => {
- const eventRows = [
- { message: JSON.stringify({ type: "user_message", content: "hi" }), creation_date: "2026-04-20T00:00:00Z" },
- { message: JSON.stringify({ type: "assistant_message", content: "hello" }), creation_date: "2026-04-20T00:00:01Z" },
- ];
-
- const mkFetch = (eventsCol: string[] = ["message", "creation_date"], pathRows = 1, hasSummary = false) => {
- let call = 0;
- return fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message, creation_date")) {
- return jsonResp({ columns: eventsCol, rows: eventRows.map(r => [r.message, r.creation_date]) });
- }
- if (sql.startsWith("SELECT DISTINCT path")) {
- return jsonResp({
- columns: ["path"],
- rows: pathRows > 0 ? [["/sessions/alice/alice_org_default_sid-worker.jsonl"]] : [],
- });
- }
- if (sql.startsWith("SELECT summary FROM")) {
- if (hasSummary) {
- return jsonResp({ columns: ["summary"], rows: [["# Session X\n- **JSONL offset**: 12\n\n## What Happened\nprior"]] });
- }
- return jsonResp({ columns: ["summary"], rows: [] });
- }
- call++;
- throw new Error(`unexpected query (${call}): ${sql}`);
- });
- };
-
- it("fetches events, writes JSONL, runs claude -p, uploads, finalizes, releases", async () => {
- mkFetch();
- let capturedJsonl: string | null = null;
- // Simulate claude -p producing a summary file. We also snapshot the
- // reconstructed JSONL here because cleanup() will rmSync tmpDir
- // before the test can read it back from disk.
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const promptIdx = args.indexOf("-p") + 1;
- const prompt = args[promptIdx];
- const jsonlPath = prompt.match(/JSONL=(\S+)/)![1];
- capturedJsonl = readFileSync(jsonlPath, "utf-8");
- const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# Session sid-worker\n\n## What Happened\nStuff happened.\n");
- return Buffer.from("");
- });
- await runWorker();
-
- // JSONL was written with the two events joined (captured before cleanup)
- expect(capturedJsonl).not.toBeNull();
- expect(capturedJsonl!.split("\n")).toHaveLength(2);
-
- // claude -p was called with the prompt template expanded
- expect(execFileSyncMock).toHaveBeenCalledTimes(1);
- const calledArgs = execFileSyncMock.mock.calls[0][1] as string[];
- expect(calledArgs[0]).toBe("-p");
- expect(calledArgs).toContain("--no-session-persistence");
- expect(calledArgs).toContain("--model");
- expect(calledArgs).toContain("haiku");
- expect(calledArgs).toContain("--permission-mode");
- expect(calledArgs).toContain("bypassPermissions");
-
- // Prompt template was expanded with real values
- const prompt = calledArgs[1];
- expect(prompt).toContain("SID=sid-worker");
- expect(prompt).toContain("PROJ=proj");
- expect(prompt).toContain("LINES=2");
- expect(prompt).toContain("OFFSET=0");
- expect(prompt).toContain("SRC=/sessions/alice/alice_org_default_sid-worker.jsonl");
-
- // env flags on execFileSync to prevent runaway recursion
- const execOpts = execFileSyncMock.mock.calls[0][2];
- expect(execOpts.env.HIVEMIND_WIKI_WORKER).toBe("1");
- expect(execOpts.env.HIVEMIND_CAPTURE).toBe("false");
-
- // upload was called with the full summary
- expect(uploadSummaryMock).toHaveBeenCalledTimes(1);
- const uploadParams = uploadSummaryMock.mock.calls[0][1];
- expect(uploadParams.tableName).toBe("memory");
- expect(uploadParams.agent).toBe("claude_code");
- expect(uploadParams.text).toContain("## What Happened");
-
- // finalize + release
- expect(finalizeSummaryMock).toHaveBeenCalledWith("sid-worker", 2);
- expect(releaseLockMock).toHaveBeenCalledWith("sid-worker");
- });
-
- it("parses JSONL offset from an existing summary on a resumed session", async () => {
- mkFetch(undefined, 1, true);
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# Session sid-worker\n\n## What Happened\ndone.\n");
- return Buffer.from("");
- });
- await runWorker();
- const prompt = execFileSyncMock.mock.calls[0][1][1] as string;
- expect(prompt).toContain("OFFSET=12");
- // tmpSummary was pre-seeded with the existing summary so claude -p
- // can merge on top. Verify the worker did write it.
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("existing summary found, offset=12");
- });
-
- it("defaults to /sessions/unknown/ when the path SELECT returns no rows", async () => {
- mkFetch(undefined, 0);
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# Session\n\n## What Happened\nfallback.\n");
- return Buffer.from("");
- });
- await runWorker();
- const prompt = execFileSyncMock.mock.calls[0][1][1] as string;
- expect(prompt).toContain("SRC=/sessions/unknown/sid-worker.jsonl");
- });
-
- it("serializes event rows that arrive as objects (JSONB) instead of strings", async () => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message, creation_date")) {
- return jsonResp({
- columns: ["message", "creation_date"],
- rows: [
- [{ type: "user_message", content: "hi" }, "2026-04-20T00:00:00Z"],
- [{ type: "tool_call", tool_name: "Bash" }, "2026-04-20T00:00:01Z"],
- ],
- });
- }
- if (sql.startsWith("SELECT DISTINCT path")) {
- return jsonResp({ columns: ["path"], rows: [["/sessions/alice/x.jsonl"]] });
- }
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- let capturedJsonl: string | null = null;
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const jsonlPath = args[1].match(/JSONL=(\S+)/)![1];
- capturedJsonl = readFileSync(jsonlPath, "utf-8");
- const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "x");
- return Buffer.from("");
- });
- await runWorker();
- expect(capturedJsonl).toContain('"type":"user_message"');
- expect(capturedJsonl).toContain('"type":"tool_call"');
- });
-});
-
-// ═══ claude -p failure paths ═══════════════════════════════════════════════
-
-describe("wiki-worker — claude -p failure", () => {
- it("logs the claude exit code and skips the upload when no summary file lands", async () => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] });
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/sessions/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- const err: any = new Error("claude boom");
- err.status = 42;
- execFileSyncMock.mockImplementation(() => { throw err; });
- await runWorker();
-
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("claude -p failed: 42");
- expect(log).toContain("no summary file generated");
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(finalizeSummaryMock).not.toHaveBeenCalled();
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("falls back to err.message when err.status is absent", async () => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] });
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- execFileSyncMock.mockImplementation(() => { throw new Error("no status"); });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("claude -p failed: no status");
- });
-});
-
-// ═══ query retry logic ═════════════════════════════════════════════════════
-
-describe("wiki-worker — query retry logic", () => {
- beforeEach(() => {
- // Stub setTimeout so retries don't actually sleep.
- vi.spyOn(global, "setTimeout").mockImplementation(((cb: any) => {
- cb();
- return 0 as any;
- }) as any);
- });
-
- it("retries on 500 and eventually succeeds", async () => {
- const responses = [
- jsonResp("server error", false, 500),
- jsonResp("server error", false, 500),
- jsonResp({ columns: ["message", "creation_date"], rows: [] }),
- ];
- fetchMock.mockImplementation(async () => responses.shift()!);
- await runWorker();
- // First query to sessions table was retried 2 times before success.
- expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(3);
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("retries on 401/403/429/502/503 (CloudFlare rate-limit class)", async () => {
- for (const status of [401, 403, 429, 502, 503]) {
- fetchMock.mockReset();
- fetchMock
- .mockResolvedValueOnce(jsonResp("", false, status))
- .mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] }));
- await runWorker();
- expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2);
- }
- });
-
- it("throws (and main catches) on a non-retryable 400", async () => {
- fetchMock.mockResolvedValue(jsonResp("bad request", false, 400));
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toMatch(/fatal: API 400/);
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("gives up after exhausting retries on persistent 500", async () => {
- fetchMock.mockResolvedValue(jsonResp("still down", false, 500));
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toMatch(/fatal: API 500/);
- });
-});
-
-// ═══ finalize + release edge cases ═════════════════════════════════════════
-
-describe("wiki-worker — finalize + release edge cases", () => {
- beforeEach(() => {
- fetchMock.mockImplementation(async (_url: string, init: any) => {
- const sql = JSON.parse(init.body).query as string;
- if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] });
- if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] });
- return jsonResp({ columns: ["summary"], rows: [] });
- });
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, "# s\n## What Happened\nX\n");
- return Buffer.from("");
- });
- });
-
- it("logs sidecar update failure but still releases the lock", async () => {
- finalizeSummaryMock.mockImplementation(() => { throw new Error("sidecar boom"); });
- await runWorker();
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("sidecar update failed: sidecar boom");
- expect(releaseLockMock).toHaveBeenCalled();
- });
-
- it("keeps going when releaseLock throws — the finally swallows it", async () => {
- releaseLockMock.mockImplementation(() => { throw new Error("release boom"); });
- await runWorker();
- // Worker still completes; the failure is caught in the finally.
- const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8");
- expect(log).toContain("done");
- });
-
- it("does not upload when the summary file is present but empty", async () => {
- execFileSyncMock.mockImplementation((_bin: string, args: string[]) => {
- const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1];
- writeFileSync(summaryPath, " \n");
- return Buffer.from("");
- });
- await runWorker();
- expect(uploadSummaryMock).not.toHaveBeenCalled();
- expect(finalizeSummaryMock).not.toHaveBeenCalled();
- });
-});
diff --git a/codex/bundle/capture.js b/codex/bundle/capture.js
index b449e10..764460e 100755
--- a/codex/bundle/capture.js
+++ b/codex/bundle/capture.js
@@ -2,13 +2,13 @@
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -53,18 +53,12 @@ function loadConfig() {
};
}
-// dist/src/deeplake-api.js
-import { randomUUID } from "node:crypto";
-
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
import { join as join2 } from "node:path";
import { homedir as homedir2 } from "node:os";
var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
-function utcTimestamp(d = /* @__PURE__ */ new Date()) {
- return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
-}
function log(tag, msg) {
if (!DEBUG)
return;
@@ -72,247 +66,24 @@ function log(tag, msg) {
`);
}
-// dist/src/utils/sql.js
-function sqlStr(value) {
- return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
-}
-
-// dist/src/deeplake-api.js
-var log2 = (msg) => log("sdk", msg);
-var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-function summarizeSql(sql, maxLen = 220) {
- const compact = sql.replace(/\s+/g, " ").trim();
- return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact;
-}
-function traceSql(msg) {
- if (!TRACE_SQL)
- return;
- process.stderr.write(`[deeplake-sql] ${msg}
-`);
- if (DEBUG_FILE_LOG)
- log2(msg);
-}
-var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
-var MAX_RETRIES = 3;
-var BASE_DELAY_MS = 500;
-var MAX_CONCURRENCY = 5;
-function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
-}
-var Semaphore = class {
- max;
- waiting = [];
- active = 0;
- constructor(max) {
- this.max = max;
- }
- async acquire() {
- if (this.active < this.max) {
- this.active++;
- return;
- }
- await new Promise((resolve) => this.waiting.push(resolve));
- }
- release() {
- this.active--;
- const next = this.waiting.shift();
- if (next) {
- this.active++;
- next();
- }
- }
-};
-var DeeplakeApi = class {
- token;
- apiUrl;
- orgId;
- workspaceId;
- tableName;
- _pendingRows = [];
- _sem = new Semaphore(MAX_CONCURRENCY);
- constructor(token, apiUrl, orgId, workspaceId, tableName) {
- this.token = token;
- this.apiUrl = apiUrl;
- this.orgId = orgId;
- this.workspaceId = workspaceId;
- this.tableName = tableName;
- }
- /** Execute SQL with retry on transient errors and bounded concurrency. */
- async query(sql) {
- const startedAt = Date.now();
- const summary = summarizeSql(sql);
- traceSql(`query start: ${summary}`);
- await this._sem.acquire();
- try {
- const rows = await this._queryWithRetry(sql);
- traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`);
- return rows;
- } catch (e) {
- const message = e instanceof Error ? e.message : String(e);
- traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`);
- throw e;
- } finally {
- this._sem.release();
- }
- }
- async _queryWithRetry(sql) {
- let lastError;
- for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
- let resp;
- try {
- resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
- method: "POST",
- headers: {
- Authorization: `Bearer ${this.token}`,
- "Content-Type": "application/json",
- "X-Activeloop-Org-Id": this.orgId
- },
- body: JSON.stringify({ query: sql })
- });
- } catch (e) {
- lastError = e instanceof Error ? e : new Error(String(e));
- if (attempt < MAX_RETRIES) {
- const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
- log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`);
- await sleep(delay);
- continue;
- }
- throw lastError;
- }
- if (resp.ok) {
- const raw = await resp.json();
- if (!raw?.rows || !raw?.columns)
- return [];
- return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
- }
- const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
- const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
- log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
- await sleep(delay);
- continue;
- }
- throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`);
- }
- throw lastError ?? new Error("Query failed: max retries exceeded");
- }
- // ── Writes ──────────────────────────────────────────────────────────────────
- /** Queue rows for writing. Call commit() to flush. */
- appendRows(rows) {
- this._pendingRows.push(...rows);
- }
- /** Flush pending rows via SQL. */
- async commit() {
- if (this._pendingRows.length === 0)
- return;
- const rows = this._pendingRows;
- this._pendingRows = [];
- const CONCURRENCY = 10;
- for (let i = 0; i < rows.length; i += CONCURRENCY) {
- const chunk = rows.slice(i, i + CONCURRENCY);
- await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r)));
- }
- log2(`commit: ${rows.length} rows`);
- }
- async upsertRowSql(row) {
- const ts = (/* @__PURE__ */ new Date()).toISOString();
- const cd = row.creationDate ?? ts;
- const lud = row.lastUpdateDate ?? ts;
- const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`);
- if (exists.length > 0) {
- let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`;
- if (row.project !== void 0)
- setClauses += `, project = '${sqlStr(row.project)}'`;
- if (row.description !== void 0)
- setClauses += `, description = '${sqlStr(row.description)}'`;
- await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`);
- } else {
- const id = randomUUID();
- let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date";
- let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`;
- if (row.project !== void 0) {
- cols += ", project";
- vals += `, '${sqlStr(row.project)}'`;
- }
- if (row.description !== void 0) {
- cols += ", description";
- vals += `, '${sqlStr(row.description)}'`;
- }
- await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES (${vals})`);
- }
- }
- /** Update specific columns on a row by path. */
- async updateColumns(path, columns) {
- const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", ");
- await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`);
- }
- // ── Convenience ─────────────────────────────────────────────────────────────
- /** Create a BM25 search index on a column. */
- async createIndex(column) {
- await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
- }
- /** List all tables in the workspace (with retry). */
- async listTables() {
- for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
- try {
- const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
- headers: {
- Authorization: `Bearer ${this.token}`,
- "X-Activeloop-Org-Id": this.orgId
- }
- });
- if (resp.ok) {
- const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
- }
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
- await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
- continue;
- }
- return [];
- } catch {
- if (attempt < MAX_RETRIES) {
- await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
- continue;
- }
- return [];
- }
- }
- return [];
- }
- /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
- async ensureTable(name) {
- const tbl = name ?? this.tableName;
- const tables = await this.listTables();
- if (!tables.includes(tbl)) {
- log2(`table "${tbl}" not found, creating`);
- await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
- log2(`table "${tbl}" created`);
- }
- }
- /** Create the sessions table (uses JSONB for message since every row is a JSON event). */
- async ensureSessionsTable(name) {
- const tables = await this.listTables();
- if (!tables.includes(name)) {
- log2(`table "${name}" not found, creating`);
- await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
- log2(`table "${name}" created`);
- }
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
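+// True when this module is the Node entry script (process.argv[1]) rather than an import.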
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
}
-};
-
-// dist/src/utils/session-path.js
-function buildSessionPath(config, sessionId) {
- const workspace = config.workspaceId ?? "default";
- return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`;
}
// dist/src/hooks/summary-state.js
import { readFileSync as readFileSync2, writeFileSync, writeSync, mkdirSync, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs";
import { homedir as homedir3 } from "node:os";
import { join as join3 } from "node:path";
-var dlog = (msg) => log("summary-state", msg);
var STATE_DIR = join3(homedir3(), ".claude", "hooks", "summary-state");
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
function statePath(sessionId) {
@@ -350,11 +121,9 @@ function withRmwLock(sessionId, fn) {
if (e.code !== "EEXIST")
throw e;
if (Date.now() > deadline) {
- dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
continue;
}
@@ -367,8 +136,7 @@ function withRmwLock(sessionId, fn) {
closeSync(fd);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
}
}
@@ -408,13 +176,11 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10);
if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
return false;
- } catch (readErr) {
- dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
+ } catch {
}
try {
unlinkSync(p);
- } catch (unlinkErr) {
- dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
return false;
}
}
@@ -432,45 +198,15 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
throw e;
}
}
-function releaseLock(sessionId) {
- try {
- unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
- }
- }
-}
// dist/src/hooks/codex/spawn-wiki-worker.js
import { spawn, execSync } from "node:child_process";
-import { fileURLToPath } from "node:url";
-import { dirname, join as join5 } from "node:path";
-import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname, join as join4 } from "node:path";
+import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
import { homedir as homedir4, tmpdir } from "node:os";
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join4 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join4(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync2(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
- }
- };
-}
-
-// dist/src/hooks/codex/spawn-wiki-worker.js
var HOME = homedir4();
-var wikiLogger = makeWikiLogger(join5(HOME, ".codex", "hooks"));
-var WIKI_LOG = wikiLogger.path;
+var WIKI_LOG = join4(HOME, ".codex", "hooks", "deeplake-wiki.log");
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.
SESSION JSONL path: __JSONL__
@@ -520,7 +256,14 @@ Format: **entity** (type) \u2014 what was done with it, its current state>
IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
PRIVACY: Never include absolute filesystem paths in the summary.
LENGTH LIMIT: Keep the total summary under 4000 characters.`;
-var wikiLog = wikiLogger.log;
+function wikiLog(msg) {
+ try {
+ mkdirSync2(join4(HOME, ".codex", "hooks"), { recursive: true });
+ appendFileSync2(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg}
+`);
+ } catch {
+ }
+}
function findCodexBin() {
try {
return execSync("which codex 2>/dev/null", { encoding: "utf-8" }).trim();
@@ -531,9 +274,9 @@ function findCodexBin() {
function spawnCodexWikiWorker(opts) {
const { config, sessionId, cwd, bundleDir, reason } = opts;
const projectName = cwd.split("/").pop() || "unknown";
- const tmpDir = join5(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
- mkdirSync3(tmpDir, { recursive: true });
- const configFile = join5(tmpDir, "config.json");
+ const tmpDir = join4(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
+ mkdirSync2(tmpDir, { recursive: true });
+ const configFile = join4(tmpDir, "config.json");
writeFileSync2(configFile, JSON.stringify({
apiUrl: config.apiUrl,
token: config.token,
@@ -547,11 +290,11 @@ function spawnCodexWikiWorker(opts) {
tmpDir,
codexBin: findCodexBin(),
wikiLog: WIKI_LOG,
- hooksDir: join5(HOME, ".codex", "hooks"),
+ hooksDir: join4(HOME, ".codex", "hooks"),
promptTemplate: WIKI_PROMPT_TEMPLATE
}));
wikiLog(`${reason}: spawning summary worker for ${sessionId}`);
- const workerPath = join5(bundleDir, "wiki-worker.js");
+ const workerPath = join4(bundleDir, "wiki-worker.js");
spawn("nohup", ["node", workerPath, configFile], {
detached: true,
stdio: ["ignore", "ignore", "ignore"]
@@ -559,24 +302,72 @@ function spawnCodexWikiWorker(opts) {
wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
function bundleDirFromImportMeta(importMetaUrl) {
- return dirname(fileURLToPath(importMetaUrl));
+ return dirname(fileURLToPath2(importMetaUrl));
+}
+
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync3, closeSync as closeSync2, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync as openSync2, readFileSync as readFileSync3, readdirSync, renameSync as renameSync2, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
+import { dirname as dirname2, join as join5 } from "node:path";
+import { homedir as homedir5 } from "node:os";
+var DEFAULT_QUEUE_DIR = join5(homedir5(), ".deeplake", "queue");
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
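+// Virtual session-log path: /sessions/<user>/<user>_<org>_<workspace>_<session>.jsonl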
+function buildSessionPath(config, sessionId) {
+ return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`;
+}
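+// Shapes one captured event into a row mirroring the sessions-table columns;
+// rows are appended to a local JSONL queue (presumably drained by a separate
+// flush step) instead of being inserted over the network inside the hook.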
+function buildQueuedSessionRow(args) {
+ return {
+ id: crypto.randomUUID(),
+ path: args.sessionPath,
+ filename: args.sessionPath.split("/").pop() ?? "",
+ message: args.line,
+ author: args.userName,
+ sizeBytes: Buffer.byteLength(args.line, "utf-8"),
+ project: args.projectName,
+ description: args.description,
+ agent: args.agent,
+ creationDate: args.timestamp,
+ lastUpdateDate: args.timestamp
+ };
+}
+function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ const sessionId = extractSessionId(row.path);
+ const queuePath = getQueuePath(queueDir, sessionId);
+ appendFileSync3(queuePath, `${JSON.stringify(row)}
+`);
+ return queuePath;
+}
+function getQueuePath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.jsonl`);
+}
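+// The session id is the trailing _<id> segment of the .jsonl filename.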
+function extractSessionId(sessionPath) {
+ const filename = sessionPath.split("/").pop() ?? "";
+ return filename.replace(/\.jsonl$/, "").split("_").pop() ?? filename;
+}
+
+// dist/src/hooks/query-cache.js
+import { mkdirSync as mkdirSync4, readFileSync as readFileSync4, rmSync as rmSync2, writeFileSync as writeFileSync4 } from "node:fs";
+import { join as join6 } from "node:path";
+import { homedir as homedir6 } from "node:os";
+var log2 = (msg) => log("query-cache", msg);
+var DEFAULT_CACHE_ROOT = join6(homedir6(), ".deeplake", "query-cache");
+function getSessionQueryCacheDir(sessionId, deps = {}) {
+ const { cacheRoot = DEFAULT_CACHE_ROOT } = deps;
+ return join6(cacheRoot, sessionId);
+}
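+// Removes the per-session cache dir; force: true tolerates a missing dir, so
+// only unexpected failures reach the log.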
+function clearSessionQueryCache(sessionId, deps = {}) {
+ const { logFn = log2 } = deps;
+ try {
+ rmSync2(getSessionQueryCacheDir(sessionId, deps), { recursive: true, force: true });
+ } catch (e) {
+ logFn(`clear failed for session=${sessionId}: ${e.message}`);
+ }
}
// dist/src/hooks/codex/capture.js
var log3 = (msg) => log("codex-capture", msg);
-var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
-async function main() {
- if (!CAPTURE)
- return;
- const input = await readStdin();
- const config = loadConfig();
- if (!config) {
- log3("no config");
- return;
- }
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
- const ts = (/* @__PURE__ */ new Date()).toISOString();
+var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false";
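+// Maps a Codex hook event to a capture entry: UserPromptSubmit -> user_message,
+// PostToolUse -> tool_call, anything else -> null (skipped).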
+function buildCodexCaptureEntry(input, timestamp) {
const meta = {
session_id: input.session_id,
transcript_path: input.transcript_path,
@@ -584,20 +375,18 @@ async function main() {
hook_event_name: input.hook_event_name,
model: input.model,
turn_id: input.turn_id,
- timestamp: ts
+ timestamp
};
- let entry;
if (input.hook_event_name === "UserPromptSubmit" && input.prompt !== void 0) {
- log3(`user session=${input.session_id}`);
- entry = {
+ return {
id: crypto.randomUUID(),
...meta,
type: "user_message",
content: input.prompt
};
- } else if (input.hook_event_name === "PostToolUse" && input.tool_name !== void 0) {
- log3(`tool=${input.tool_name} session=${input.session_id}`);
- entry = {
+ }
+ if (input.hook_event_name === "PostToolUse" && input.tool_name !== void 0) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "tool_call",
@@ -606,66 +395,83 @@ async function main() {
tool_input: JSON.stringify(input.tool_input),
tool_response: JSON.stringify(input.tool_response)
};
- } else {
- log3(`unknown event: ${input.hook_event_name}, skipping`);
- return;
}
- const sessionPath = buildSessionPath(config, input.session_id);
- const line = JSON.stringify(entry);
- log3(`writing to ${sessionPath}`);
- const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
- const filename = sessionPath.split("/").pop() ?? "";
- const jsonForSql = sqlStr(line);
- const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'codex', '${ts}', '${ts}')`;
- try {
- await api.query(insertSql);
- } catch (e) {
- if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
- log3("table missing, creating and retrying");
- await api.ensureSessionsTable(sessionsTable);
- await api.query(insertSql);
- } else {
- throw e;
- }
- }
- log3("capture ok");
- maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config);
+ return null;
}
-function maybeTriggerPeriodicSummary(sessionId, cwd, config) {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
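+// Dependencies are injectable so tests can stub the counter, trigger config,
+// locking, and worker spawn; the defaults are the real module-level functions.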
+function maybeTriggerPeriodicSummary(sessionId, cwd, config, deps = {}) {
+ const { bundleDir = bundleDirFromImportMeta(import.meta.url), wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", logFn = log3, bumpTotalCountFn = bumpTotalCount, loadTriggerConfigFn = loadTriggerConfig, shouldTriggerFn = shouldTrigger, tryAcquireLockFn = tryAcquireLock, wikiLogFn = wikiLog, spawnCodexWikiWorkerFn = spawnCodexWikiWorker } = deps;
+ if (wikiWorker)
return;
try {
- const state = bumpTotalCount(sessionId);
- const cfg = loadTriggerConfig();
- if (!shouldTrigger(state, cfg))
+ const state = bumpTotalCountFn(sessionId);
+ const cfg = loadTriggerConfigFn();
+ if (!shouldTriggerFn(state, cfg))
return;
- if (!tryAcquireLock(sessionId)) {
- log3(`periodic trigger suppressed (lock held) session=${sessionId}`);
+ if (!tryAcquireLockFn(sessionId)) {
+ logFn(`periodic trigger suppressed (lock held) session=${sessionId}`);
return;
}
- wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
- try {
- spawnCodexWikiWorker({
- config,
- sessionId,
- cwd,
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "Periodic"
- });
- } catch (e) {
- log3(`periodic spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr) {
- log3(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`);
- }
- throw e;
- }
+ wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
+ spawnCodexWikiWorkerFn({
+ config,
+ sessionId,
+ cwd,
+ bundleDir,
+ reason: "Periodic"
+ });
} catch (e) {
- log3(`periodic trigger error: ${e.message}`);
+ logFn(`periodic trigger error: ${e.message}`);
}
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
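+// Orchestrates one capture: builds the entry, clears the query cache on a new
+// user prompt, appends the row to the local queue, and returns a status tag
+// ("disabled" | "no_config" | "ignored" | "queued") callers can assert on.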
+async function runCodexCaptureHook(input, deps = {}) {
+ const { captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, clearSessionQueryCacheFn = clearSessionQueryCache, maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, logFn = log3 } = deps;
+ if (!captureEnabled)
+ return { status: "disabled" };
+ if (!config) {
+ logFn("no config");
+ return { status: "no_config" };
+ }
+ const ts = now();
+ const entry = buildCodexCaptureEntry(input, ts);
+ if (!entry) {
+ logFn(`unknown event: ${input.hook_event_name}, skipping`);
+ return { status: "ignored" };
+ }
+ if (input.hook_event_name === "UserPromptSubmit")
+ logFn(`user session=${input.session_id}`);
+ else
+ logFn(`tool=${input.tool_name} session=${input.session_id}`);
+ if (input.hook_event_name === "UserPromptSubmit") {
+ clearSessionQueryCacheFn(input.session_id);
+ }
+ const sessionPath = buildSessionPath(config, input.session_id);
+ const line = JSON.stringify(entry);
+ const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
+ appendQueuedSessionRowFn(buildQueuedSessionRowFn({
+ sessionPath,
+ line,
+ userName: config.userName,
+ projectName,
+ description: input.hook_event_name ?? "",
+ agent: "codex",
+ timestamp: ts
+ }));
+ logFn(`queued ${input.hook_event_name} for ${sessionPath}`);
+ maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config);
+ return { status: "queued", entry };
+}
+async function main() {
+ const input = await readStdin();
+ await runCodexCaptureHook(input);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log3(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ buildCodexCaptureEntry,
+ maybeTriggerPeriodicSummary,
+ runCodexCaptureHook
+};
diff --git a/codex/bundle/commands/auth-login.js b/codex/bundle/commands/auth-login.js
index 6d4cb13..ff5e179 100755
--- a/codex/bundle/commands/auth-login.js
+++ b/codex/bundle/commands/auth-login.js
@@ -239,6 +239,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -278,9 +281,30 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
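+// Heuristic timeout check: AbortError (as thrown via AbortSignal.timeout) or
+// any error whose name or message mentions "timeout"/"timed out".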
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
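+// Recognizes the capture INSERT shape (id, path, filename, message, ...) so
+// only session inserts get the extra 401/403 retry treatment below.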
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -433,8 +465,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
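+ // Lookup-index bookkeeping: a TTL'd marker file records that CREATE INDEX was
+ // already issued, so hot hook paths skip the round-trip; a duplicate-index
+ // error (index already created elsewhere) also counts as success.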
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join4(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
+ writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -445,22 +530,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -470,6 +558,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -479,7 +569,10 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
diff --git a/codex/bundle/pre-tool-use.js b/codex/bundle/pre-tool-use.js
index 4f3873b..a31916a 100755
--- a/codex/bundle/pre-tool-use.js
+++ b/codex/bundle/pre-tool-use.js
@@ -1,22 +1,20 @@
#!/usr/bin/env node
// dist/src/hooks/codex/pre-tool-use.js
-import { existsSync as existsSync2 } from "node:fs";
import { execFileSync } from "node:child_process";
-import { join as join3 } from "node:path";
-import { homedir as homedir3 } from "node:os";
-import { fileURLToPath } from "node:url";
-import { dirname } from "node:path";
+import { existsSync as existsSync3 } from "node:fs";
+import { join as join6, dirname } from "node:path";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -63,6 +61,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join3 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -105,8 +106,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -139,6 +161,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -169,6 +192,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -176,9 +200,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -195,7 +224,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -260,8 +290,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join3(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync2(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -272,22 +355,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -297,6 +383,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -306,7 +394,10 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
@@ -519,29 +610,127 @@ function normalizeContent(path, raw) {
return raw;
return out;
}
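+// Path predicate for virtual paths: * and ? become LIKE wildcards, a basename
+// containing "." is matched exactly, anything else matches the path or its subtree.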
+function buildPathCondition(targetPath) {
+ if (!targetPath || targetPath === "/")
+ return "";
+ const clean = targetPath.replace(/\/+$/, "");
+ if (/[*?]/.test(clean)) {
+ const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_");
+ return `path LIKE '${likePattern}'`;
+ }
+ const base = clean.split("/").pop() ?? "";
+ if (base.includes(".")) {
+ return `path = '${sqlStr(clean)}'`;
+ }
+ return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+}
async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
- const { pathFilter, contentScanOnly, likeOp, escapedPattern } = opts;
+ const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts;
const limit = opts.limit ?? 100;
- const memFilter = contentScanOnly ? "" : ` AND summary::text ${likeOp} '%${escapedPattern}%'`;
- const sessFilter = contentScanOnly ? "" : ` AND message::text ${likeOp} '%${escapedPattern}%'`;
- const memQuery = `SELECT path, summary::text AS content FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
- const sessQuery = `SELECT path, message::text AS content FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
- const [memRows, sessRows] = await Promise.all([
- api.query(memQuery).catch(() => []),
- api.query(sessQuery).catch(() => [])
- ]);
- const rows = [];
- for (const r of memRows)
- rows.push({ path: String(r.path), content: String(r.content ?? "") });
- for (const r of sessRows)
- rows.push({ path: String(r.path), content: String(r.content ?? "") });
- return rows;
+ const filterPatterns = contentScanOnly ? prefilterPatterns && prefilterPatterns.length > 0 ? prefilterPatterns : prefilterPattern ? [prefilterPattern] : [] : [escapedPattern];
+ const memFilter = buildContentFilter("summary::text", likeOp, filterPatterns);
+ const sessFilter = buildContentFilter("message::text", likeOp, filterPatterns);
+ const memQuery = `SELECT path, summary::text AS content, 0 AS source_order, '' AS creation_date FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
+ const sessQuery = `SELECT path, message::text AS content, 1 AS source_order, COALESCE(creation_date::text, '') AS creation_date FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
+ const rows = await api.query(`SELECT path, content, source_order, creation_date FROM ((${memQuery}) UNION ALL (${sessQuery})) AS combined ORDER BY path, source_order, creation_date`);
+ return rows.map((row) => ({
+ path: String(row["path"]),
+ content: String(row["content"] ?? "")
+ }));
}
function buildPathFilter(targetPath) {
- if (!targetPath || targetPath === "/")
+ const condition = buildPathCondition(targetPath);
+ return condition ? ` AND ${condition}` : "";
+}
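+// Pulls the longest literal run out of a simple regex (only ".*" separators and
+// plain escapes allowed) to use as a SQL LIKE prefilter; null means no safe
+// literal, so the scan runs without a content prefilter.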
+function extractRegexLiteralPrefilter(pattern) {
+ if (!pattern)
+ return null;
+ const parts = [];
+ let current = "";
+ for (let i = 0; i < pattern.length; i++) {
+ const ch = pattern[i];
+ if (ch === "\\") {
+ const next = pattern[i + 1];
+ if (!next)
+ return null;
+ if (/[dDsSwWbBAZzGkKpP]/.test(next))
+ return null;
+ current += next;
+ i++;
+ continue;
+ }
+ if (ch === ".") {
+ if (pattern[i + 1] === "*") {
+ if (current)
+ parts.push(current);
+ current = "";
+ i++;
+ continue;
+ }
+ return null;
+ }
+ if ("|()[]{}+?^$".includes(ch) || ch === "*")
+ return null;
+ current += ch;
+ }
+ if (current)
+ parts.push(current);
+ const literal = parts.reduce((best, part) => part.length > best.length ? part : best, "");
+ return literal.length >= 2 ? literal : null;
+}
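+// For a top-level alternation with no groups/anchors, derives one literal per
+// branch; branches that yield no usable literal are simply dropped, and null
+// means alternation prefiltering does not apply.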
+function extractRegexAlternationPrefilters(pattern) {
+ if (!pattern.includes("|"))
+ return null;
+ const parts = [];
+ let current = "";
+ let escaped = false;
+ for (let i = 0; i < pattern.length; i++) {
+ const ch = pattern[i];
+ if (escaped) {
+ current += `\\${ch}`;
+ escaped = false;
+ continue;
+ }
+ if (ch === "\\") {
+ escaped = true;
+ continue;
+ }
+ if (ch === "|") {
+ if (!current)
+ return null;
+ parts.push(current);
+ current = "";
+ continue;
+ }
+ if ("()[]{}^$".includes(ch))
+ return null;
+ current += ch;
+ }
+ if (escaped || !current)
+ return null;
+ parts.push(current);
+ const literals = [...new Set(parts.map((part) => extractRegexLiteralPrefilter(part)).filter((part) => typeof part === "string" && part.length >= 2))];
+ return literals.length > 0 ? literals : null;
+}
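+// Regex metacharacters switch the search to content-scan mode (SQL only
+// prefilters; real matching happens client-side); plain literals filter in SQL.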
+function buildGrepSearchOptions(params, targetPath) {
+ const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
+ const literalPrefilter = hasRegexMeta ? extractRegexLiteralPrefilter(params.pattern) : null;
+ const alternationPrefilters = hasRegexMeta ? extractRegexAlternationPrefilters(params.pattern) : null;
+ return {
+ pathFilter: buildPathFilter(targetPath),
+ contentScanOnly: hasRegexMeta,
+ likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
+ escapedPattern: sqlLike(params.pattern),
+ prefilterPattern: literalPrefilter ? sqlLike(literalPrefilter) : void 0,
+ prefilterPatterns: alternationPrefilters?.map((literal) => sqlLike(literal))
+ };
+}
+function buildContentFilter(column, likeOp, patterns) {
+ if (patterns.length === 0)
return "";
- const clean = targetPath.replace(/\/+$/, "");
- return ` AND (path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+ if (patterns.length === 1)
+ return ` AND ${column} ${likeOp} '%${patterns[0]}%'`;
+ return ` AND (${patterns.map((pattern) => `${column} ${likeOp} '%${pattern}%'`).join(" OR ")})`;
}
function compileGrepRegex(params) {
let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
@@ -585,13 +774,7 @@ function refineGrepMatches(rows, params, forceMultiFilePrefix) {
return output;
}
async function grepBothTables(api, memoryTable, sessionsTable, params, targetPath) {
- const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
- const rows = await searchDeeplakeTables(api, memoryTable, sessionsTable, {
- pathFilter: buildPathFilter(targetPath),
- contentScanOnly: hasRegexMeta,
- likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
- escapedPattern: sqlLike(params.pattern)
- });
+ const rows = await searchDeeplakeTables(api, memoryTable, sessionsTable, buildGrepSearchOptions(params, targetPath));
const seen = /* @__PURE__ */ new Set();
const unique = rows.filter((r) => seen.has(r.path) ? false : (seen.add(r.path), true));
const normalized = unique.map((r) => ({ path: r.path, content: normalizeContent(r.path, r.content) }));
@@ -599,67 +782,157 @@ async function grepBothTables(api, memoryTable, sessionsTable, params, targetPat
}
// dist/src/hooks/grep-direct.js
-function parseBashGrep(cmd) {
- const first = cmd.trim().split(/\s*\|\s*/)[0];
- if (!/^(grep|egrep|fgrep)\b/.test(first))
- return null;
- const isFixed = first.startsWith("fgrep");
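+// Quote- and escape-aware: returns the command text before the first top-level
+// "|", the whole input when there is no pipe, or null on an unterminated quote.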
+function splitFirstPipelineStage(cmd) {
+ const input = cmd.trim();
+ let quote = null;
+ let escaped = false;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (escaped) {
+ escaped = false;
+ continue;
+ }
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ continue;
+ }
+ if (ch === "\\" && quote === '"') {
+ escaped = true;
+ }
+ continue;
+ }
+ if (ch === "\\") {
+ escaped = true;
+ continue;
+ }
+ if (ch === "'" || ch === '"') {
+ quote = ch;
+ continue;
+ }
+ if (ch === "|")
+ return input.slice(0, i).trim();
+ }
+ return quote ? null : input;
+}
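+// Shell-style word tokenizer: honors single/double quotes and backslash
+// escapes; returns null on unbalanced quotes.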
+function tokenizeGrepStage(input) {
const tokens = [];
- let pos = 0;
- while (pos < first.length) {
- if (first[pos] === " " || first[pos] === " ") {
- pos++;
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ } else if (ch === "\\" && quote === '"' && i + 1 < input.length) {
+ current += input[++i];
+ } else {
+ current += ch;
+ }
continue;
}
- if (first[pos] === "'" || first[pos] === '"') {
- const q = first[pos];
- let end = pos + 1;
- while (end < first.length && first[end] !== q)
- end++;
- tokens.push(first.slice(pos + 1, end));
- pos = end + 1;
- } else {
- let end = pos;
- while (end < first.length && first[end] !== " " && first[end] !== " ")
- end++;
- tokens.push(first.slice(pos, end));
- pos = end;
+ if (ch === "'" || ch === '"') {
+ quote = ch;
+ continue;
+ }
+ if (ch === "\\" && i + 1 < input.length) {
+ current += input[++i];
+ continue;
+ }
+ if (/\s/.test(ch)) {
+ if (current) {
+ tokens.push(current);
+ current = "";
+ }
+ continue;
}
+ current += ch;
}
+ if (quote)
+ return null;
+ if (current)
+ tokens.push(current);
+ return tokens;
+}
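+// Parses grep/egrep/fgrep into structured params: collects -e/--regexp patterns,
+// maps long and short flags, and consumes the arguments of -A/-B/-C/-m and
+// their long forms so they are not mistaken for the pattern or target path.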
+function parseBashGrep(cmd) {
+ const first = splitFirstPipelineStage(cmd);
+ if (!first)
+ return null;
+ if (!/^(grep|egrep|fgrep)\b/.test(first))
+ return null;
+ const isFixed = first.startsWith("fgrep");
+ const tokens = tokenizeGrepStage(first);
+ if (!tokens || tokens.length === 0)
+ return null;
let ignoreCase = false, wordMatch = false, filesOnly = false, countOnly = false, lineNumber = false, invertMatch = false, fixedString = isFixed;
+ const explicitPatterns = [];
let ti = 1;
- while (ti < tokens.length && tokens[ti].startsWith("-") && tokens[ti] !== "--") {
- const flag = tokens[ti];
- if (flag.startsWith("--")) {
+ while (ti < tokens.length) {
+ const token = tokens[ti];
+ if (token === "--") {
+ ti++;
+ break;
+ }
+ if (!token.startsWith("-") || token === "-")
+ break;
+ if (token.startsWith("--")) {
+ const [flag, inlineValue] = token.split("=", 2);
const handlers = {
"--ignore-case": () => {
ignoreCase = true;
+ return false;
},
"--word-regexp": () => {
wordMatch = true;
+ return false;
},
"--files-with-matches": () => {
filesOnly = true;
+ return false;
},
"--count": () => {
countOnly = true;
+ return false;
},
"--line-number": () => {
lineNumber = true;
+ return false;
},
"--invert-match": () => {
invertMatch = true;
+ return false;
},
"--fixed-strings": () => {
fixedString = true;
+ return false;
+ },
+ "--after-context": () => inlineValue === void 0,
+ "--before-context": () => inlineValue === void 0,
+ "--context": () => inlineValue === void 0,
+ "--max-count": () => inlineValue === void 0,
+ "--regexp": () => {
+ if (inlineValue !== void 0) {
+ explicitPatterns.push(inlineValue);
+ return false;
+ }
+ return true;
}
};
- handlers[flag]?.();
+ const consumeNext = handlers[flag]?.() ?? false;
+ if (consumeNext) {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ if (flag === "--regexp")
+ explicitPatterns.push(tokens[ti]);
+ }
ti++;
continue;
}
- for (const c of flag.slice(1)) {
- switch (c) {
+ const shortFlags = token.slice(1);
+ for (let i = 0; i < shortFlags.length; i++) {
+ const flag = shortFlags[i];
+ switch (flag) {
case "i":
ignoreCase = true;
break;
@@ -681,19 +954,48 @@ function parseBashGrep(cmd) {
case "F":
fixedString = true;
break;
+ case "r":
+ case "R":
+ case "E":
+ break;
+ case "A":
+ case "B":
+ case "C":
+ case "m":
+ if (i === shortFlags.length - 1) {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ }
+ i = shortFlags.length;
+ break;
+ case "e": {
+ const inlineValue = shortFlags.slice(i + 1);
+ if (inlineValue) {
+ explicitPatterns.push(inlineValue);
+ } else {
+ ti++;
+ if (ti >= tokens.length)
+ return null;
+ explicitPatterns.push(tokens[ti]);
+ }
+ i = shortFlags.length;
+ break;
+ }
+ default:
+ break;
}
}
ti++;
}
- if (ti < tokens.length && tokens[ti] === "--")
- ti++;
- if (ti >= tokens.length)
+ const pattern = explicitPatterns.length > 0 ? explicitPatterns[0] : tokens[ti];
+ if (!pattern)
return null;
- let target = tokens[ti + 1] ?? "/";
+ let target = explicitPatterns.length > 0 ? tokens[ti] ?? "/" : tokens[ti + 1] ?? "/";
if (target === "." || target === "./")
target = "/";
return {
- pattern: tokens[ti],
+ pattern,
targetPath: target,
ignoreCase,
wordMatch,
@@ -721,13 +1023,653 @@ async function handleGrepDirect(api, table, sessionsTable, params) {
return output.join("\n") || "(no matches)";
}
-// dist/src/hooks/codex/pre-tool-use.js
-var log3 = (msg) => log("codex-pre", msg);
-var MEMORY_PATH = join3(homedir3(), ".deeplake", "memory");
+// dist/src/hooks/virtual-table-query.js
+function normalizeSessionPart(path, content) {
+ return normalizeContent(path, content);
+}
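+// Synthesizes /index.md as a markdown listing, one bullet per session summary row.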
+function buildVirtualIndexContent(rows) {
+ const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""];
+ for (const row of rows) {
+ const path = row["path"];
+ const project = row["project"] || "";
+ const description = (row["description"] || "").slice(0, 120);
+ const date = (row["creation_date"] || "").slice(0, 10);
+ lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`);
+ }
+ return lines.join("\n");
+}
+function buildUnionQuery(memoryQuery, sessionsQuery) {
+ return `SELECT path, content, size_bytes, creation_date, source_order FROM ((${memoryQuery}) UNION ALL (${sessionsQuery})) AS combined ORDER BY path, source_order, creation_date`;
+}
+function buildInList(paths) {
+ return paths.map((path) => `'${sqlStr(path)}'`).join(", ");
+}
+function buildDirFilter(dirs) {
+ const cleaned = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))];
+ if (cleaned.length === 0 || cleaned.includes("/"))
+ return "";
+ const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%'`);
+ return ` WHERE ${clauses.join(" OR ")}`;
+}
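+// Prefers one UNION ALL round-trip; if the backend rejects it, falls back to two
+// independent queries and concatenates whatever succeeds.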
+async function queryUnionRows(api, memoryQuery, sessionsQuery) {
+ const unionQuery = buildUnionQuery(memoryQuery, sessionsQuery);
+ try {
+ return await api.query(unionQuery);
+ } catch {
+ const [memoryRows, sessionRows] = await Promise.all([
+ api.query(memoryQuery).catch(() => []),
+ api.query(sessionsQuery).catch(() => [])
+ ]);
+ return [...memoryRows, ...sessionRows];
+ }
+}
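+// Batch read across both tables: a memory-table summary wins per path; otherwise
+// session fragments are normalized and joined. A still-missing /index.md is
+// synthesized from the summaries listing as a last resort.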
+async function readVirtualPathContents(api, memoryTable, sessionsTable, virtualPaths) {
+ const uniquePaths = [...new Set(virtualPaths)];
+ const result = new Map(uniquePaths.map((path) => [path, null]));
+ if (uniquePaths.length === 0)
+ return result;
+ const inList = buildInList(uniquePaths);
+ const rows = await queryUnionRows(api, `SELECT path, summary::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path IN (${inList})`, `SELECT path, message::text AS content, NULL::bigint AS size_bytes, COALESCE(creation_date::text, '') AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path IN (${inList})`);
+ const memoryHits = /* @__PURE__ */ new Map();
+ const sessionHits = /* @__PURE__ */ new Map();
+ for (const row of rows) {
+ const path = row["path"];
+ const content = row["content"];
+ const sourceOrder = Number(row["source_order"] ?? 0);
+ if (typeof path !== "string" || typeof content !== "string")
+ continue;
+ if (sourceOrder === 0) {
+ memoryHits.set(path, content);
+ } else {
+ const current = sessionHits.get(path) ?? [];
+ current.push(normalizeSessionPart(path, content));
+ sessionHits.set(path, current);
+ }
+ }
+ for (const path of uniquePaths) {
+ if (memoryHits.has(path)) {
+ result.set(path, memoryHits.get(path) ?? null);
+ continue;
+ }
+ const sessionParts = sessionHits.get(path) ?? [];
+ if (sessionParts.length > 0) {
+ result.set(path, sessionParts.join("\n"));
+ }
+ }
+ if (result.get("/index.md") === null && uniquePaths.includes("/index.md")) {
+ const rows2 = await api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []);
+ result.set("/index.md", buildVirtualIndexContent(rows2));
+ }
+ return result;
+}
+async function listVirtualPathRowsForDirs(api, memoryTable, sessionsTable, dirs) {
+ const uniqueDirs = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))];
+ const filter = buildDirFilter(uniqueDirs);
+ const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}"${filter}`, `SELECT path, NULL::text AS content, size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}"${filter}`);
+ const deduped = dedupeRowsByPath(rows.map((row) => ({
+ path: row["path"],
+ size_bytes: row["size_bytes"]
+ })));
+ const byDir = /* @__PURE__ */ new Map();
+ for (const dir of uniqueDirs)
+ byDir.set(dir, []);
+ for (const row of deduped) {
+ const path = row["path"];
+ if (typeof path !== "string")
+ continue;
+ for (const dir of uniqueDirs) {
+ const prefix = dir === "/" ? "/" : `${dir}/`;
+ if (dir === "/" || path.startsWith(prefix)) {
+ byDir.get(dir)?.push(row);
+ }
+ }
+ }
+ return byDir;
+}
+async function readVirtualPathContent(api, memoryTable, sessionsTable, virtualPath) {
+ return (await readVirtualPathContents(api, memoryTable, sessionsTable, [virtualPath])).get(virtualPath) ?? null;
+}
+async function listVirtualPathRows(api, memoryTable, sessionsTable, dir) {
+ return (await listVirtualPathRowsForDirs(api, memoryTable, sessionsTable, [dir])).get(dir.replace(/\/+$/, "") || "/") ?? [];
+}
+async function findVirtualPaths(api, memoryTable, sessionsTable, dir, filenamePattern) {
+ const normalizedDir = dir.replace(/\/+$/, "") || "/";
+ const likePath = `${sqlLike(normalizedDir === "/" ? "" : normalizedDir)}/%`;
+ const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`);
+ return [...new Set(rows.map((row) => row["path"]).filter((value) => typeof value === "string" && value.length > 0))];
+}
+function dedupeRowsByPath(rows) {
+ const seen = /* @__PURE__ */ new Set();
+ const unique = [];
+ for (const row of rows) {
+ const path = typeof row["path"] === "string" ? row["path"] : "";
+ if (!path || seen.has(path))
+ continue;
+ seen.add(path);
+ unique.push(row);
+ }
+ return unique;
+}
+
+// dist/src/hooks/bash-command-compiler.js
+function isQuoted(ch) {
+ return ch === "'" || ch === '"';
+}
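+// Splits on top-level operators only (never inside quotes); null on unbalanced quotes.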
+function splitTopLevel(input, operators) {
+ const parts = [];
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote)
+ quote = null;
+ current += ch;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ current += ch;
+ continue;
+ }
+ const matched = operators.find((op) => input.startsWith(op, i));
+ if (matched) {
+ const trimmed2 = current.trim();
+ if (trimmed2)
+ parts.push(trimmed2);
+ current = "";
+ i += matched.length - 1;
+ continue;
+ }
+ current += ch;
+ }
+ if (quote)
+ return null;
+ const trimmed = current.trim();
+ if (trimmed)
+ parts.push(trimmed);
+ return parts;
+}
+function tokenizeShellWords(input) {
+ const tokens = [];
+ let current = "";
+ let quote = null;
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ } else if (ch === "\\" && quote === '"' && i + 1 < input.length) {
+ current += input[++i];
+ } else {
+ current += ch;
+ }
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+ if (/\s/.test(ch)) {
+ if (current) {
+ tokens.push(current);
+ current = "";
+ }
+ continue;
+ }
+ current += ch;
+ }
+ if (quote)
+ return null;
+ if (current)
+ tokens.push(current);
+ return tokens;
+}
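+// Recursive brace expansion, one group per pass: {a,b} lists and {1..3} numeric
+// ranges, e.g. "f{1..2}.md" -> ["f1.md", "f2.md"].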
+function expandBraceToken(token) {
+ const match = token.match(/\{([^{}]+)\}/);
+ if (!match)
+ return [token];
+ const [expr] = match;
+ const prefix = token.slice(0, match.index);
+ const suffix = token.slice((match.index ?? 0) + expr.length);
+ let variants = [];
+ const numericRange = match[1].match(/^(-?\d+)\.\.(-?\d+)$/);
+ if (numericRange) {
+ const start = Number(numericRange[1]);
+ const end = Number(numericRange[2]);
+ const step = start <= end ? 1 : -1;
+ for (let value = start; step > 0 ? value <= end : value >= end; value += step) {
+ variants.push(String(value));
+ }
+ } else {
+ variants = match[1].split(",");
+ }
+ return variants.flatMap((variant) => expandBraceToken(`${prefix}${variant}${suffix}`));
+}
+function stripAllowedModifiers(segment) {
+ const ignoreMissing = /\s2>\/dev\/null\s*$/.test(segment);
+ const clean = segment.replace(/\s2>\/dev\/null\s*$/g, "").replace(/\s2>&1\s*/g, " ").trim();
+ return { clean, ignoreMissing };
+}
+function hasUnsupportedRedirection(segment) {
+ let quote = null;
+ for (let i = 0; i < segment.length; i++) {
+ const ch = segment[i];
+ if (quote) {
+ if (ch === quote)
+ quote = null;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+ if (ch === ">" || ch === "<")
+ return true;
+ }
+ return false;
+}
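+// Parses a head/tail stage into a line window (-n N, -N, or a bare count);
+// no arguments, or a single non-numeric one, falls back to the 10-line default.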
+function parseHeadTailStage(stage) {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || tokens.length === 0)
+ return null;
+ const [cmd, ...rest] = tokens;
+ if (cmd !== "head" && cmd !== "tail")
+ return null;
+ if (rest.length === 0)
+ return { lineLimit: 10, fromEnd: cmd === "tail" };
+ if (rest.length === 1) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count)) {
+ return { lineLimit: 10, fromEnd: cmd === "tail" };
+ }
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && /^-\d+$/.test(rest[0])) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 3 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count))
+ return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ return null;
+}
+function isValidPipelineHeadTailStage(stage) {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || tokens[0] !== "head" && tokens[0] !== "tail")
+ return false;
+ if (tokens.length === 1)
+ return true;
+ if (tokens.length === 2)
+ return /^-\d+$/.test(tokens[1]);
+ if (tokens.length === 3)
+ return tokens[1] === "-n" && /^-?\d+$/.test(tokens[2]);
+ return false;
+}
+function parseFindNamePatterns(tokens) {
+ const patterns = [];
+ for (let i = 2; i < tokens.length; i++) {
+ const token = tokens[i];
+ if (token === "-type") {
+ i += 1;
+ continue;
+ }
+ if (token === "-o")
+ continue;
+ if (token === "-name") {
+ const pattern = tokens[i + 1];
+ if (!pattern)
+ return null;
+ patterns.push(pattern);
+ i += 1;
+ continue;
+ }
+ return null;
+ }
+ return patterns.length > 0 ? patterns : null;
+}
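+// Compiles one segment (echo, cat, head/tail, wc -l, ls, find, grep, plus limited
+// pipelines of these) into a plan node; any unsupported shape yields null so the
+// whole command can fall back to normal handling.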
+function parseCompiledSegment(segment) {
+ const { clean, ignoreMissing } = stripAllowedModifiers(segment);
+ if (hasUnsupportedRedirection(clean))
+ return null;
+ const pipeline = splitTopLevel(clean, ["|"]);
+ if (!pipeline || pipeline.length === 0)
+ return null;
+ const tokens = tokenizeShellWords(pipeline[0]);
+ if (!tokens || tokens.length === 0)
+ return null;
+ if (tokens[0] === "echo" && pipeline.length === 1) {
+ const text = tokens.slice(1).join(" ");
+ return { kind: "echo", text };
+ }
+ if (tokens[0] === "cat") {
+ const paths = tokens.slice(1).flatMap(expandBraceToken);
+ if (paths.length === 0)
+ return null;
+ let lineLimit = 0;
+ let fromEnd = false;
+ let countLines2 = false;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2)
+ return null;
+ const pipeStage = pipeline[1].trim();
+ if (/^wc\s+-l\s*$/.test(pipeStage)) {
+ if (paths.length !== 1)
+ return null;
+ countLines2 = true;
+ } else {
+ if (!isValidPipelineHeadTailStage(pipeStage))
+ return null;
+ const headTail = parseHeadTailStage(pipeStage);
+ if (!headTail)
+ return null;
+ lineLimit = headTail.lineLimit;
+ fromEnd = headTail.fromEnd;
+ }
+ }
+ return { kind: "cat", paths, lineLimit, fromEnd, countLines: countLines2, ignoreMissing };
+ }
+ if (tokens[0] === "head" || tokens[0] === "tail") {
+ if (pipeline.length !== 1)
+ return null;
+ const parsed = parseHeadTailStage(clean);
+ if (!parsed)
+ return null;
+ const headTokens = tokenizeShellWords(clean);
+ if (!headTokens)
+ return null;
+ if (headTokens[1] === "-n" && headTokens.length < 4 || /^-\d+$/.test(headTokens[1] ?? "") && headTokens.length < 3 || headTokens.length === 2 && /^-?\d+$/.test(headTokens[1] ?? ""))
+ return null;
+ const path = headTokens[headTokens.length - 1];
+ if (path === "head" || path === "tail" || path === "-n")
+ return null;
+ return {
+ kind: "cat",
+ paths: expandBraceToken(path),
+ lineLimit: parsed.lineLimit,
+ fromEnd: parsed.fromEnd,
+ countLines: false,
+ ignoreMissing
+ };
+ }
+ if (tokens[0] === "wc" && tokens[1] === "-l" && pipeline.length === 1 && tokens[2]) {
+ return {
+ kind: "cat",
+ paths: expandBraceToken(tokens[2]),
+ lineLimit: 0,
+ fromEnd: false,
+ countLines: true,
+ ignoreMissing
+ };
+ }
+ if (tokens[0] === "ls" && pipeline.length === 1) {
+ const dirs = tokens.slice(1).filter((token) => !token.startsWith("-")).flatMap(expandBraceToken);
+ const longFormat = tokens.some((token) => token.startsWith("-") && token.includes("l"));
+ return { kind: "ls", dirs: dirs.length > 0 ? dirs : ["/"], longFormat };
+ }
+ if (tokens[0] === "find") {
+ if (pipeline.length > 3)
+ return null;
+ const dir = tokens[1];
+ if (!dir)
+ return null;
+ const patterns = parseFindNamePatterns(tokens);
+ if (!patterns)
+ return null;
+ const countOnly = pipeline.length === 2 && /^wc\s+-l\s*$/.test(pipeline[1].trim());
+ if (countOnly) {
+ if (patterns.length !== 1)
+ return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+ if (pipeline.length >= 2) {
+ const xargsTokens = tokenizeShellWords(pipeline[1].trim());
+ if (!xargsTokens || xargsTokens[0] !== "xargs")
+ return null;
+ const xargsArgs = xargsTokens.slice(1);
+ while (xargsArgs[0] && xargsArgs[0].startsWith("-")) {
+ if (xargsArgs[0] === "-r") {
+ xargsArgs.shift();
+ continue;
+ }
+ return null;
+ }
+ const grepCmd = xargsArgs.join(" ");
+ const grepParams2 = parseBashGrep(grepCmd);
+ if (!grepParams2)
+ return null;
+ let lineLimit = 0;
+ if (pipeline.length === 3) {
+ const headStage = pipeline[2].trim();
+ if (!isValidPipelineHeadTailStage(headStage))
+ return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd)
+ return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "find_grep", dir, patterns, params: grepParams2, lineLimit };
+ }
+ if (patterns.length !== 1)
+ return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+ const grepParams = parseBashGrep(clean);
+ if (grepParams) {
+ let lineLimit = 0;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2)
+ return null;
+ const headStage = pipeline[1].trim();
+ if (!isValidPipelineHeadTailStage(headStage))
+ return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd)
+ return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "grep", params: grepParams, lineLimit };
+ }
+ return null;
+}
+function parseCompiledBashCommand(cmd) {
+ if (cmd.includes("||"))
+ return null;
+ const segments = splitTopLevel(cmd, ["&&", ";", "\n"]);
+ if (!segments || segments.length === 0)
+ return null;
+ const parsed = segments.map(parseCompiledSegment);
+ if (parsed.some((segment) => segment === null))
+ return null;
+ return parsed;
+}
+function applyLineWindow(content, lineLimit, fromEnd) {
+ if (lineLimit <= 0)
+ return content;
+ const lines = content.split("\n");
+ return (fromEnd ? lines.slice(-lineLimit) : lines.slice(0, lineLimit)).join("\n");
+}
+function countLines(content) {
+ return content === "" ? 0 : content.split("\n").length;
+}
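+// Emulates ls over virtual rows: the first path component under the dir becomes
+// the entry, directories get a trailing "/", and -l output uses fixed fake
+// permissions with real sizes only for files.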
+function renderDirectoryListing(dir, rows, longFormat) {
+ const entries = /* @__PURE__ */ new Map();
+ const prefix = dir === "/" ? "/" : `${dir}/`;
+ for (const row of rows) {
+ const path = row["path"];
+ if (!path.startsWith(prefix) && dir !== "/")
+ continue;
+ const rest = dir === "/" ? path.slice(1) : path.slice(prefix.length);
+ const slash = rest.indexOf("/");
+ const name = slash === -1 ? rest : rest.slice(0, slash);
+ if (!name)
+ continue;
+ const existing = entries.get(name);
+ if (slash !== -1) {
+ if (!existing)
+ entries.set(name, { isDir: true, size: 0 });
+ } else {
+ entries.set(name, { isDir: false, size: Number(row["size_bytes"] ?? 0) });
+ }
+ }
+ if (entries.size === 0)
+ return `ls: cannot access '${dir}': No such file or directory`;
+ const lines = [];
+ for (const [name, info] of [...entries].sort((a, b) => a[0].localeCompare(b[0]))) {
+ if (longFormat) {
+ const type = info.isDir ? "drwxr-xr-x" : "-rw-r--r--";
+ const size = String(info.isDir ? 0 : info.size).padStart(6);
+ lines.push(`${type} 1 user user ${size} ${name}${info.isDir ? "/" : ""}`);
+ } else {
+ lines.push(name + (info.isDir ? "/" : ""));
+ }
+ }
+ return lines.join("\n");
+}
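+// Executes a parsed plan against the virtual filesystem. cat targets and ls
+// directories are deduplicated and fetched in bulk up front; any segment
+// that cannot be served returns null so the caller can fall back.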
+async function executeCompiledBashCommand(api, memoryTable, sessionsTable, cmd, deps = {}) {
+ const { readVirtualPathContentsFn = readVirtualPathContents, listVirtualPathRowsForDirsFn = listVirtualPathRowsForDirs, findVirtualPathsFn = findVirtualPaths, handleGrepDirectFn = handleGrepDirect } = deps;
+ const plan = parseCompiledBashCommand(cmd);
+ if (!plan)
+ return null;
+ const readPaths = [...new Set(plan.flatMap((segment) => segment.kind === "cat" ? segment.paths : []))];
+ const listDirs = [...new Set(plan.flatMap((segment) => segment.kind === "ls" ? segment.dirs.map((dir) => dir.replace(/\/+$/, "") || "/") : []))];
+ const contentMap = readPaths.length > 0 ? await readVirtualPathContentsFn(api, memoryTable, sessionsTable, readPaths) : /* @__PURE__ */ new Map();
+ const dirRowsMap = listDirs.length > 0 ? await listVirtualPathRowsForDirsFn(api, memoryTable, sessionsTable, listDirs) : /* @__PURE__ */ new Map();
+ const outputs = [];
+ for (const segment of plan) {
+ if (segment.kind === "echo") {
+ outputs.push(segment.text);
+ continue;
+ }
+ if (segment.kind === "cat") {
+ const contents = [];
+ for (const path of segment.paths) {
+ const content = contentMap.get(path) ?? null;
+ if (content === null) {
+ if (segment.ignoreMissing)
+ continue;
+ return null;
+ }
+ contents.push(content);
+ }
+ const combined = contents.join("");
+ if (segment.countLines) {
+ outputs.push(`${countLines(combined)} ${segment.paths[0]}`);
+ } else {
+ outputs.push(applyLineWindow(combined, segment.lineLimit, segment.fromEnd));
+ }
+ continue;
+ }
+ if (segment.kind === "ls") {
+ for (const dir of segment.dirs) {
+ outputs.push(renderDirectoryListing(dir.replace(/\/+$/, "") || "/", dirRowsMap.get(dir.replace(/\/+$/, "") || "/") ?? [], segment.longFormat));
+ }
+ continue;
+ }
+ if (segment.kind === "find") {
+ const filenamePattern = sqlLike(segment.pattern).replace(/\*/g, "%").replace(/\?/g, "_");
+ const paths = await findVirtualPathsFn(api, memoryTable, sessionsTable, segment.dir.replace(/\/+$/, "") || "/", filenamePattern);
+ outputs.push(segment.countOnly ? String(paths.length) : paths.join("\n") || "(no matches)");
+ continue;
+ }
+ if (segment.kind === "find_grep") {
+ const dir = segment.dir.replace(/\/+$/, "") || "/";
+ const candidateBatches = await Promise.all(segment.patterns.map((pattern) => findVirtualPathsFn(api, memoryTable, sessionsTable, dir, sqlLike(pattern).replace(/\*/g, "%").replace(/\?/g, "_"))));
+ const candidatePaths = [...new Set(candidateBatches.flat())];
+ if (candidatePaths.length === 0) {
+ outputs.push("(no matches)");
+ continue;
+ }
+ const candidateContents = await readVirtualPathContentsFn(api, memoryTable, sessionsTable, candidatePaths);
+ const matched = refineGrepMatches(candidatePaths.flatMap((path) => {
+ const content = candidateContents.get(path);
+ if (content === null || content === void 0)
+ return [];
+ return [{ path, content: normalizeContent(path, content) }];
+ }), segment.params);
+ const limited = segment.lineLimit > 0 ? matched.slice(0, segment.lineLimit) : matched;
+ outputs.push(limited.join("\n") || "(no matches)");
+ continue;
+ }
+ if (segment.kind === "grep") {
+ const result = await handleGrepDirectFn(api, memoryTable, sessionsTable, segment.params);
+ if (result === null)
+ return null;
+ if (segment.lineLimit > 0) {
+ outputs.push(result.split("\n").slice(0, segment.lineLimit).join("\n"));
+ } else {
+ outputs.push(result);
+ }
+ continue;
+ }
+ }
+ return outputs.join("\n");
+}
+
+// dist/src/hooks/query-cache.js
+import { mkdirSync as mkdirSync2, readFileSync as readFileSync3, rmSync, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { homedir as homedir3 } from "node:os";
+var log3 = (msg) => log("query-cache", msg);
+var DEFAULT_CACHE_ROOT = join4(homedir3(), ".deeplake", "query-cache");
+var INDEX_CACHE_FILE = "index.md";
+function getSessionQueryCacheDir(sessionId, deps = {}) {
+ const { cacheRoot = DEFAULT_CACHE_ROOT } = deps;
+ return join4(cacheRoot, sessionId);
+}
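+// Reads the cached index.md for a session, or null. A missing cache file
+// (ENOENT) is expected and silent; other read errors are logged first.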
+function readCachedIndexContent(sessionId, deps = {}) {
+ const { logFn = log3 } = deps;
+ try {
+ return readFileSync3(join4(getSessionQueryCacheDir(sessionId, deps), INDEX_CACHE_FILE), "utf-8");
+ } catch (e) {
+ if (e?.code === "ENOENT")
+ return null;
+ logFn(`read failed for session=${sessionId}: ${e.message}`);
+ return null;
+ }
+}
+function writeCachedIndexContent(sessionId, content, deps = {}) {
+ const { logFn = log3 } = deps;
+ try {
+ const dir = getSessionQueryCacheDir(sessionId, deps);
+ mkdirSync2(dir, { recursive: true });
+ writeFileSync2(join4(dir, INDEX_CACHE_FILE), content, "utf-8");
+ } catch (e) {
+ logFn(`write failed for session=${sessionId}: ${e.message}`);
+ }
+}
+
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
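+// True when this module is the Node entrypoint (process.argv[1]), so the
+// bundle can export its functions without running main() when imported.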
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
+// dist/src/hooks/memory-path-utils.js
+import { homedir as homedir4 } from "node:os";
+import { join as join5 } from "node:path";
+var MEMORY_PATH = join5(homedir4(), ".deeplake", "memory");
var TILDE_PATH = "~/.deeplake/memory";
var HOME_VAR_PATH = "$HOME/.deeplake/memory";
-var __bundleDir = dirname(fileURLToPath(import.meta.url));
-var SHELL_BUNDLE = existsSync2(join3(__bundleDir, "shell", "deeplake-shell.js")) ? join3(__bundleDir, "shell", "deeplake-shell.js") : join3(__bundleDir, "..", "shell", "deeplake-shell.js");
var SAFE_BUILTINS = /* @__PURE__ */ new Set([
"cat",
"ls",
@@ -833,146 +1775,173 @@ function isSafe(cmd) {
}
return true;
}
-function touchesMemory(cmd) {
- return cmd.includes(MEMORY_PATH) || cmd.includes(TILDE_PATH) || cmd.includes(HOME_VAR_PATH);
+function touchesMemory(p) {
+ return p.includes(MEMORY_PATH) || p.includes(TILDE_PATH) || p.includes(HOME_VAR_PATH);
}
function rewritePaths(cmd) {
return cmd.replace(new RegExp(MEMORY_PATH.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + "/?", "g"), "/").replace(/~\/.deeplake\/memory\/?/g, "/").replace(/\$HOME\/.deeplake\/memory\/?/g, "/").replace(/"\$HOME\/.deeplake\/memory\/?"/g, '"/"');
}
-function blockWithContent(content) {
- process.stderr.write(content);
- process.exit(2);
+
+// dist/src/hooks/codex/pre-tool-use.js
+var log4 = (msg) => log("codex-pre", msg);
+var __bundleDir = dirname(fileURLToPath2(import.meta.url));
+var SHELL_BUNDLE = existsSync3(join6(__bundleDir, "shell", "deeplake-shell.js")) ? join6(__bundleDir, "shell", "deeplake-shell.js") : join6(__bundleDir, "..", "shell", "deeplake-shell.js");
+function buildUnsupportedGuidance() {
+ return "This command is not supported for ~/.deeplake/memory/ operations. Only bash builtins are available: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find, etc. Do NOT use python, python3, node, curl, or other interpreters. Rewrite your command using only bash tools and retry.";
}
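+// Runs a rewritten command inside the bundled virtual shell (10s timeout);
+// any failure is logged and collapses to an empty string for the caller.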
-function runVirtualShell(cmd) {
+function runVirtualShell(cmd, shellBundle = SHELL_BUNDLE, logFn = log4) {
try {
- return execFileSync("node", [SHELL_BUNDLE, "-c", cmd], {
+ return execFileSync("node", [shellBundle, "-c", cmd], {
encoding: "utf-8",
timeout: 1e4,
env: { ...process.env },
stdio: ["pipe", "pipe", "pipe"]
- // capture stderr instead of inheriting
}).trim();
} catch (e) {
- log3(`virtual shell failed: ${e.message}`);
+ logFn(`virtual shell failed: ${e.message}`);
return "";
}
}
-async function main() {
- const input = await readStdin();
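+// Formats /index.md from summary rows: one bullet per session with its date,
+// optional [project] tag, and a description truncated to 120 characters.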
+function buildIndexContent(rows) {
+ const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""];
+ for (const row of rows) {
+ const path = row["path"];
+ const project = row["project"] || "";
+ const description = (row["description"] || "").slice(0, 120);
+ const date = (row["creation_date"] || "").slice(0, 10);
+ lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`);
+ }
+ return lines.join("\n");
+}
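+// Pre-tool-use decision core. Returns { action: "pass" | "guide" | "block" };
+// main() maps pass to a no-op, guide to stdout + exit 0, and block to
+// stderr + exit 2.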
+async function processCodexPreToolUse(input, deps = {}) {
+ const { config = loadConfig(), createApi = (table, activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, table), executeCompiledBashCommandFn = executeCompiledBashCommand, readVirtualPathContentsFn = readVirtualPathContents, readVirtualPathContentFn = readVirtualPathContent, listVirtualPathRowsFn = listVirtualPathRows, findVirtualPathsFn = findVirtualPaths, handleGrepDirectFn = handleGrepDirect, readCachedIndexContentFn = readCachedIndexContent, writeCachedIndexContentFn = writeCachedIndexContent, runVirtualShellFn = runVirtualShell, shellBundle = SHELL_BUNDLE, logFn = log4 } = deps;
const cmd = input.tool_input?.command ?? "";
- log3(`hook fired: cmd=${cmd}`);
+ logFn(`hook fired: cmd=${cmd}`);
if (!touchesMemory(cmd))
- return;
+ return { action: "pass" };
const rewritten = rewritePaths(cmd);
if (!isSafe(rewritten)) {
- const guidance = "This command is not supported for ~/.deeplake/memory/ operations. Only bash builtins are available: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find, etc. Do NOT use python, python3, node, curl, or other interpreters. Rewrite your command using only bash tools and retry.";
- log3(`unsupported command, returning guidance: ${rewritten}`);
- process.stdout.write(guidance);
- process.exit(0);
+ const guidance = buildUnsupportedGuidance();
+ logFn(`unsupported command, returning guidance: ${rewritten}`);
+ return {
+ action: "guide",
+ output: guidance,
+ rewrittenCommand: rewritten
+ };
}
- const config = loadConfig();
if (config) {
const table = process.env["HIVEMIND_TABLE"] ?? "memory";
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table);
+ const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
+ const api = createApi(table, config);
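+    // Wraps readVirtualPathContents so /index.md is served from the
+    // per-session disk cache when present and re-cached after a fresh fetch.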
+ const readVirtualPathContentsWithCache = async (cachePaths) => {
+ const uniquePaths = [...new Set(cachePaths)];
+ const result2 = new Map(uniquePaths.map((path) => [path, null]));
+ const cachedIndex = uniquePaths.includes("/index.md") ? readCachedIndexContentFn(input.session_id) : null;
+ const remainingPaths = cachedIndex === null ? uniquePaths : uniquePaths.filter((path) => path !== "/index.md");
+ if (cachedIndex !== null) {
+ result2.set("/index.md", cachedIndex);
+ }
+ if (remainingPaths.length > 0) {
+ const fetched = await readVirtualPathContentsFn(api, table, sessionsTable, remainingPaths);
+ for (const [path, content] of fetched)
+ result2.set(path, content);
+ }
+ const fetchedIndex = result2.get("/index.md");
+ if (typeof fetchedIndex === "string") {
+ writeCachedIndexContentFn(input.session_id, fetchedIndex);
+ }
+ return result2;
+ };
try {
- {
- let virtualPath = null;
- let lineLimit = 0;
- let fromEnd = false;
- const catCmd = rewritten.replace(/\s+2>\S+/g, "").trim();
- const catPipeHead = catCmd.match(/^cat\s+(\S+?)\s*(?:\|[^|]*)*\|\s*head\s+(?:-n?\s*)?(-?\d+)\s*$/);
- if (catPipeHead) {
- virtualPath = catPipeHead[1];
- lineLimit = Math.abs(parseInt(catPipeHead[2], 10));
- }
- if (!virtualPath) {
- const catMatch = catCmd.match(/^cat\s+(\S+)\s*$/);
- if (catMatch)
- virtualPath = catMatch[1];
- }
- if (!virtualPath) {
- const headMatch = rewritten.match(/^head\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? rewritten.match(/^head\s+(\S+)\s*$/);
- if (headMatch) {
- if (headMatch[2]) {
- virtualPath = headMatch[2];
- lineLimit = Math.abs(parseInt(headMatch[1], 10));
- } else {
- virtualPath = headMatch[1];
- lineLimit = 10;
- }
+ const compiled = await executeCompiledBashCommandFn(api, table, sessionsTable, rewritten, {
+ readVirtualPathContentsFn: async (_api, _memoryTable, _sessionsTable, cachePaths) => readVirtualPathContentsWithCache(cachePaths)
+ });
+ if (compiled !== null) {
+ return { action: "block", output: compiled, rewrittenCommand: rewritten };
+ }
+ let virtualPath = null;
+ let lineLimit = 0;
+ let fromEnd = false;
+ const catCmd = rewritten.replace(/\s+2>\S+/g, "").trim();
+ const catPipeHead = catCmd.match(/^cat\s+(\S+?)\s*(?:\|[^|]*)*\|\s*head\s+(?:-n?\s*)?(-?\d+)\s*$/);
+ if (catPipeHead) {
+ virtualPath = catPipeHead[1];
+ lineLimit = Math.abs(parseInt(catPipeHead[2], 10));
+ }
+ if (!virtualPath) {
+ const catMatch = catCmd.match(/^cat\s+(\S+)\s*$/);
+ if (catMatch)
+ virtualPath = catMatch[1];
+ }
+ if (!virtualPath) {
+ const headMatch = rewritten.match(/^head\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? rewritten.match(/^head\s+(\S+)\s*$/);
+ if (headMatch) {
+ if (headMatch[2]) {
+ virtualPath = headMatch[2];
+ lineLimit = Math.abs(parseInt(headMatch[1], 10));
+ } else {
+ virtualPath = headMatch[1];
+ lineLimit = 10;
}
}
- if (!virtualPath) {
- const tailMatch = rewritten.match(/^tail\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? rewritten.match(/^tail\s+(\S+)\s*$/);
- if (tailMatch) {
- fromEnd = true;
- if (tailMatch[2]) {
- virtualPath = tailMatch[2];
- lineLimit = Math.abs(parseInt(tailMatch[1], 10));
- } else {
- virtualPath = tailMatch[1];
- lineLimit = 10;
- }
+ }
+ if (!virtualPath) {
+ const tailMatch = rewritten.match(/^tail\s+(?:-n\s*)?(-?\d+)\s+(\S+)\s*$/) ?? rewritten.match(/^tail\s+(\S+)\s*$/);
+ if (tailMatch) {
+ fromEnd = true;
+ if (tailMatch[2]) {
+ virtualPath = tailMatch[2];
+ lineLimit = Math.abs(parseInt(tailMatch[1], 10));
+ } else {
+ virtualPath = tailMatch[1];
+ lineLimit = 10;
}
}
- if (!virtualPath) {
- const wcMatch = rewritten.match(/^wc\s+-l\s+(\S+)\s*$/);
- if (wcMatch) {
- virtualPath = wcMatch[1];
- lineLimit = -1;
- }
+ }
+ if (!virtualPath) {
+ const wcMatch = rewritten.match(/^wc\s+-l\s+(\S+)\s*$/);
+ if (wcMatch) {
+ virtualPath = wcMatch[1];
+ lineLimit = -1;
}
- if (virtualPath && !virtualPath.endsWith("/")) {
- const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
- const isSession = virtualPath.startsWith("/sessions/");
- log3(`direct read: ${virtualPath}`);
- let content = null;
- if (isSession) {
- const rows = await api.query(`SELECT message::text AS content FROM "${sessionsTable}" WHERE path = '${sqlStr(virtualPath)}' LIMIT 1`);
- if (rows.length > 0 && rows[0]["content"])
- content = rows[0]["content"];
- } else {
- const rows = await api.query(`SELECT summary FROM "${table}" WHERE path = '${sqlStr(virtualPath)}' LIMIT 1`);
- if (rows.length > 0 && rows[0]["summary"]) {
- content = rows[0]["summary"];
- } else if (virtualPath === "/index.md") {
- const idxRows = await api.query(`SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`);
- const lines = ["# Memory Index", "", `${idxRows.length} sessions:`, ""];
- for (const r of idxRows) {
- const p = r["path"];
- const proj = r["project"] || "";
- const desc = (r["description"] || "").slice(0, 120);
- const date = (r["creation_date"] || "").slice(0, 10);
- lines.push(`- [${p}](${p}) ${date} ${proj ? `[${proj}]` : ""} ${desc}`);
- }
- content = lines.join("\n");
- }
+ }
+ if (virtualPath && !virtualPath.endsWith("/")) {
+ logFn(`direct read: ${virtualPath}`);
+ let content = virtualPath === "/index.md" ? readCachedIndexContentFn(input.session_id) : null;
+ if (content === null) {
+ content = await readVirtualPathContentFn(api, table, sessionsTable, virtualPath);
+ }
+ if (content === null && virtualPath === "/index.md") {
+ const idxRows = await api.query(`SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`);
+ content = buildIndexContent(idxRows);
+ }
+ if (content !== null) {
+ if (virtualPath === "/index.md") {
+ writeCachedIndexContentFn(input.session_id, content);
}
- if (content !== null) {
- if (lineLimit === -1) {
- blockWithContent(`${content.split("\n").length} ${virtualPath}`);
- }
- if (lineLimit > 0) {
- const lines = content.split("\n");
- content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n");
- }
- blockWithContent(content);
+ if (lineLimit === -1) {
+ return { action: "block", output: `${content.split("\n").length} ${virtualPath}`, rewrittenCommand: rewritten };
}
+ if (lineLimit > 0) {
+ const lines = content.split("\n");
+ content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n");
+ }
+ return { action: "block", output: content, rewrittenCommand: rewritten };
}
}
const lsMatch = rewritten.match(/^ls\s+(?:-[a-zA-Z]+\s+)*(\S+)?\s*$/);
if (lsMatch) {
const dir = (lsMatch[1] ?? "/").replace(/\/+$/, "") || "/";
const isLong = /\s-[a-zA-Z]*l/.test(rewritten);
- log3(`direct ls: ${dir}`);
- const rows = await api.query(`SELECT path, size_bytes FROM "${table}" WHERE path LIKE '${sqlLike(dir === "/" ? "" : dir)}/%' ORDER BY path`);
+ logFn(`direct ls: ${dir}`);
+ const rows = await listVirtualPathRowsFn(api, table, sessionsTable, dir);
const entries = /* @__PURE__ */ new Map();
- const prefix = dir === "/" ? "/" : dir + "/";
+ const prefix = dir === "/" ? "/" : `${dir}/`;
for (const row of rows) {
- const p = row["path"];
- if (!p.startsWith(prefix) && dir !== "/")
+ const path = row["path"];
+ if (!path.startsWith(prefix) && dir !== "/")
continue;
- const rest = dir === "/" ? p.slice(1) : p.slice(prefix.length);
+ const rest = dir === "/" ? path.slice(1) : path.slice(prefix.length);
const slash = rest.indexOf("/");
const name = slash === -1 ? rest : rest.slice(0, slash);
if (!name)
@@ -996,50 +1965,74 @@ async function main() {
lines.push(name + (info.isDir ? "/" : ""));
}
}
- blockWithContent(lines.join("\n"));
- } else {
- blockWithContent(`ls: cannot access '${dir}': No such file or directory`);
+ return { action: "block", output: lines.join("\n"), rewrittenCommand: rewritten };
}
+ return {
+ action: "block",
+ output: `ls: cannot access '${dir}': No such file or directory`,
+ rewrittenCommand: rewritten
+ };
}
- {
- const findMatch = rewritten.match(/^find\s+(\S+)\s+(?:-type\s+\S+\s+)?-name\s+'([^']+)'/);
- if (findMatch) {
- const dir = findMatch[1].replace(/\/+$/, "") || "/";
- const namePattern = sqlLike(findMatch[2]).replace(/\*/g, "%").replace(/\?/g, "_");
- const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
- const isSessionDir = dir === "/sessions" || dir.startsWith("/sessions/");
- const findTable = isSessionDir ? sessionsTable : table;
- log3(`direct find: ${dir} -name '${findMatch[2]}'`);
- const rows = await api.query(`SELECT path FROM "${findTable}" WHERE path LIKE '${sqlLike(dir === "/" ? "" : dir)}/%' AND filename LIKE '${namePattern}' ORDER BY path`);
- let result2 = rows.map((r) => r["path"]).join("\n") || "";
- if (/\|\s*wc\s+-l\s*$/.test(rewritten)) {
- result2 = String(rows.length);
- }
- blockWithContent(result2 || "(no matches)");
- }
+ const findMatch = rewritten.match(/^find\s+(\S+)\s+(?:-type\s+\S+\s+)?-name\s+'([^']+)'/);
+ if (findMatch) {
+ const dir = findMatch[1].replace(/\/+$/, "") || "/";
+ const namePattern = sqlLike(findMatch[2]).replace(/\*/g, "%").replace(/\?/g, "_");
+ logFn(`direct find: ${dir} -name '${findMatch[2]}'`);
+ const paths = await findVirtualPathsFn(api, table, sessionsTable, dir, namePattern);
+ let result2 = paths.join("\n") || "";
+ if (/\|\s*wc\s+-l\s*$/.test(rewritten))
+ result2 = String(paths.length);
+ return {
+ action: "block",
+ output: result2 || "(no matches)",
+ rewrittenCommand: rewritten
+ };
}
const grepParams = parseBashGrep(rewritten);
if (grepParams) {
- const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
- log3(`direct grep: pattern=${grepParams.pattern} path=${grepParams.targetPath}`);
- const result2 = await handleGrepDirect(api, table, sessionsTable, grepParams);
+ logFn(`direct grep: pattern=${grepParams.pattern} path=${grepParams.targetPath}`);
+ const result2 = await handleGrepDirectFn(api, table, sessionsTable, grepParams);
if (result2 !== null) {
- blockWithContent(result2);
+ return { action: "block", output: result2, rewrittenCommand: rewritten };
}
}
} catch (e) {
- log3(`direct query failed, falling back to shell: ${e.message}`);
+ logFn(`direct query failed, falling back to shell: ${e.message}`);
}
}
- log3(`intercepted \u2192 running via virtual shell: ${rewritten}`);
- const result = runVirtualShell(rewritten);
- if (result) {
- blockWithContent(result);
- } else {
- blockWithContent("[Deeplake Memory] Command returned empty or the file does not exist in cloud storage.");
+ logFn(`intercepted \u2192 running via virtual shell: ${rewritten}`);
+ const result = runVirtualShellFn(rewritten, shellBundle, logFn);
+ return {
+ action: "block",
+ output: result || "[Deeplake Memory] Command returned empty or the file does not exist in cloud storage.",
+ rewrittenCommand: rewritten
+ };
+}
+async function main() {
+ const input = await readStdin();
+ const decision = await processCodexPreToolUse(input);
+ if (decision.action === "pass")
+ return;
+ if (decision.action === "guide") {
+ if (decision.output)
+ process.stdout.write(decision.output);
+ process.exit(0);
}
+ if (decision.output)
+ process.stderr.write(decision.output);
+ process.exit(2);
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log4(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ buildUnsupportedGuidance,
+ isSafe,
+ processCodexPreToolUse,
+ rewritePaths,
+ runVirtualShell,
+ touchesMemory
+};
diff --git a/codex/bundle/session-start-setup.js b/codex/bundle/session-start-setup.js
index 02be970..e13a5e2 100755
--- a/codex/bundle/session-start-setup.js
+++ b/codex/bundle/session-start-setup.js
@@ -1,10 +1,11 @@
#!/usr/bin/env node
// dist/src/hooks/codex/session-start-setup.js
-import { fileURLToPath } from "node:url";
-import { dirname as dirname2, join as join6 } from "node:path";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname as dirname3, join as join7 } from "node:path";
+import { mkdirSync as mkdirSync5, appendFileSync as appendFileSync3 } from "node:fs";
import { execSync as execSync2 } from "node:child_process";
-import { homedir as homedir4 } from "node:os";
+import { homedir as homedir6 } from "node:os";
// dist/src/commands/auth.js
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs";
@@ -66,6 +67,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs";
+import { join as join4 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -73,9 +77,6 @@ import { join as join3 } from "node:path";
import { homedir as homedir3 } from "node:os";
var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
var LOG = join3(homedir3(), ".deeplake", "hook-debug.log");
-function utcTimestamp(d = /* @__PURE__ */ new Date()) {
- return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
-}
function log(tag, msg) {
if (!DEBUG)
return;
@@ -87,6 +88,12 @@ function log(tag, msg) {
function sqlStr(value) {
return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
+function sqlIdent(name) {
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+ }
+ return name;
+}
// dist/src/deeplake-api.js
var log2 = (msg) => log("sdk", msg);
@@ -108,8 +115,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -142,6 +170,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -172,6 +201,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -179,9 +209,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -198,7 +233,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -263,8 +299,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join4(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync3(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync2(getIndexMarkerDir(), { recursive: true });
+ writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -275,22 +364,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -300,6 +392,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -309,19 +403,22 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -330,74 +427,410 @@ function readStdin() {
});
}
-// dist/src/utils/version-check.js
-import { readFileSync as readFileSync3 } from "node:fs";
-import { dirname, join as join4 } from "node:path";
-var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync2, closeSync, existsSync as existsSync4, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync4, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
+import { dirname, join as join5 } from "node:path";
+import { homedir as homedir4 } from "node:os";
+var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue");
+var DEFAULT_MAX_BATCH_ROWS = 50;
+var DEFAULT_STALE_INFLIGHT_MS = 6e4;
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
+var DEFAULT_DRAIN_LOCK_STALE_MS = 3e4;
+var BUSY_WAIT_STEP_MS = 100;
+var SessionWriteDisabledError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "SessionWriteDisabledError";
+ }
+};
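+// Builds one multi-row INSERT for queued session events: the table name goes
+// through sqlIdent, every value through sqlStr, and message payloads are
+// cast to ::jsonb.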
+function buildSessionInsertSql(sessionsTable, rows) {
+ if (rows.length === 0)
+ throw new Error("buildSessionInsertSql: rows must not be empty");
+ const table = sqlIdent(sessionsTable);
+ const values = rows.map((row) => {
+ const jsonForSql = sqlStr(coerceJsonbPayload(row.message));
+ return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`;
+ }).join(", ");
+ return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`;
+}
+function coerceJsonbPayload(message) {
+ try {
+ return JSON.stringify(JSON.parse(message));
+ } catch {
+ return JSON.stringify({
+ type: "raw_message",
+ content: message
+ });
+ }
+}
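+// Flushes one session's queue. The queue file is claimed by renaming it to
+// an .inflight file (atomic within the queue dir); on failure the inflight
+// rows are re-appended to the queue so nothing is lost.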
+async function flushSessionQueue(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS;
+ const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS;
+ const waitIfBusyMs = opts.waitIfBusyMs ?? 0;
+ const drainAll = opts.drainAll ?? false;
+ mkdirSync3(queueDir, { recursive: true });
+ const queuePath = getQueuePath(queueDir, opts.sessionId);
+ const inflightPath = getInflightPath(queueDir, opts.sessionId);
+ if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) {
+ return existsSync4(queuePath) || existsSync4(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 };
+ }
+ let totalRows = 0;
+ let totalBatches = 0;
+ let flushedAny = false;
+ while (true) {
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ if (existsSync4(inflightPath)) {
+ if (waitIfBusyMs > 0) {
+ await waitForInflightToClear(inflightPath, waitIfBusyMs);
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ }
+ if (existsSync4(inflightPath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 };
+ }
+ }
+ if (!existsSync4(queuePath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ try {
+ renameSync(queuePath, inflightPath);
+ } catch (e) {
+ if (e?.code === "ENOENT") {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ throw e;
+ }
+ try {
+ const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows);
+ totalRows += rows;
+ totalBatches += batches;
+ flushedAny = flushedAny || rows > 0;
+ } catch (e) {
+ requeueInflight(queuePath, inflightPath);
+ if (e instanceof SessionWriteDisabledError) {
+ return { status: "disabled", rows: totalRows, batches: totalBatches };
+ }
+ throw e;
+ }
+ if (!drainAll) {
+ return { status: "flushed", rows: totalRows, batches: totalBatches };
+ }
+ }
+}
+async function drainSessionQueues(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ mkdirSync3(queueDir, { recursive: true });
+ const sessionIds = listQueuedSessionIds(queueDir, opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS);
+ let flushedSessions = 0;
+ let rows = 0;
+ let batches = 0;
+ for (const sessionId of sessionIds) {
+ const result = await flushSessionQueue(api, {
+ sessionId,
+ sessionsTable: opts.sessionsTable,
+ queueDir,
+ maxBatchRows: opts.maxBatchRows,
+ allowStaleInflight: true,
+ staleInflightMs: opts.staleInflightMs,
+ drainAll: true
+ });
+ if (result.status === "flushed") {
+ flushedSessions += 1;
+ rows += result.rows;
+ batches += result.batches;
+ }
+ }
+ return {
+ queuedSessions: sessionIds.length,
+ flushedSessions,
+ rows,
+ batches
+ };
+}
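+// Best-effort cross-process drain lock via an exclusive ("wx") lock file.
+// A lock older than staleMs is treated as stale and reclaimed once. Returns
+// a release callback, or null when another drain already holds the lock.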
+function tryAcquireSessionDrainLock(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, staleMs = DEFAULT_DRAIN_LOCK_STALE_MS) {
+ mkdirSync3(queueDir, { recursive: true });
+ const lockPath = getSessionDrainLockPath(queueDir, sessionsTable);
+ for (let attempt = 0; attempt < 2; attempt++) {
+ try {
+ const fd = openSync(lockPath, "wx");
+ closeSync(fd);
+ return () => rmSync(lockPath, { force: true });
+ } catch (e) {
+ if (e?.code !== "EEXIST")
+ throw e;
+ if (existsSync4(lockPath) && isStale(lockPath, staleMs)) {
+ rmSync(lockPath, { force: true });
+ continue;
+ }
+ return null;
+ }
+ }
+ return null;
+}
+function getQueuePath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.jsonl`);
+}
+function getInflightPath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.inflight`);
+}
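+// Inserts inflight rows in batches of maxBatchRows. Auth failures write a
+// disabled marker and raise SessionWriteDisabledError; a missing-table error
+// triggers a single ensureSessionsTable + retry per file.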
+async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) {
+ const rows = readQueuedRows(inflightPath);
+ if (rows.length === 0) {
+ rmSync(inflightPath, { force: true });
+ return { rows: 0, batches: 0 };
+ }
+ let ensured = false;
+ let batches = 0;
+ const queueDir = dirname(inflightPath);
+ for (let i = 0; i < rows.length; i += maxBatchRows) {
+ const chunk = rows.slice(i, i + maxBatchRows);
+ const sql = buildSessionInsertSql(sessionsTable, chunk);
+ try {
+ await api.query(sql);
+ } catch (e) {
+ if (isSessionWriteAuthError(e)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(e));
+ }
+ if (!ensured && isEnsureSessionsTableRetryable(e)) {
+ try {
+ await api.ensureSessionsTable(sessionsTable);
+ } catch (ensureError) {
+ if (isSessionWriteAuthError(ensureError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(ensureError));
+ }
+ throw ensureError;
+ }
+ ensured = true;
+ try {
+ await api.query(sql);
+ } catch (retryError) {
+ if (isSessionWriteAuthError(retryError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(retryError));
+ }
+ throw retryError;
+ }
+ } else {
+ throw e;
+ }
+ }
+ batches += 1;
+ }
+ clearSessionWriteDisabled(sessionsTable, queueDir);
+ rmSync(inflightPath, { force: true });
+ return { rows: rows.length, batches };
+}
+function readQueuedRows(path) {
+ const raw = readFileSync4(path, "utf-8");
+ return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
+}
+function requeueInflight(queuePath, inflightPath) {
+ if (!existsSync4(inflightPath))
+ return;
+ const inflight = readFileSync4(inflightPath, "utf-8");
+ appendFileSync2(queuePath, inflight);
+ rmSync(inflightPath, { force: true });
+}
+function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) {
+ if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs))
+ return;
+ requeueInflight(queuePath, inflightPath);
+}
+function isStale(path, staleInflightMs) {
+ return Date.now() - statSync(path).mtimeMs >= staleInflightMs;
+}
+function listQueuedSessionIds(queueDir, staleInflightMs) {
+ const sessionIds = /* @__PURE__ */ new Set();
+ for (const name of readdirSync(queueDir)) {
+ if (name.endsWith(".jsonl")) {
+ sessionIds.add(name.slice(0, -".jsonl".length));
+ } else if (name.endsWith(".inflight")) {
+ const path = join5(queueDir, name);
+ if (isStale(path, staleInflightMs)) {
+ sessionIds.add(name.slice(0, -".inflight".length));
+ }
+ }
+ }
+ return [...sessionIds].sort();
+}
+function isEnsureSessionsTableRetryable(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found");
+}
+function isSessionWriteAuthError(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized");
+}
+function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({
+ disabledAt: (/* @__PURE__ */ new Date()).toISOString(),
+ reason,
+ sessionsTable
+ }));
+}
+function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) {
+ rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true });
+}
+function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) {
+ const path = getSessionWriteDisabledPath(queueDir, sessionsTable);
+ if (!existsSync4(path))
+ return false;
+ try {
+ const raw = readFileSync4(path, "utf-8");
+ const state = JSON.parse(raw);
+ const ageMs = Date.now() - new Date(state.disabledAt).getTime();
+ if (Number.isNaN(ageMs) || ageMs >= ttlMs) {
+ rmSync(path, { force: true });
+ return false;
+ }
+ return true;
+ } catch {
+ rmSync(path, { force: true });
+ return false;
+ }
+}
+function getSessionWriteDisabledPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.disabled.json`);
+}
+function getSessionDrainLockPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.drain.lock`);
+}
+function errorMessage(error) {
+ return error instanceof Error ? error.message : String(error);
+}
+async function waitForInflightToClear(inflightPath, waitIfBusyMs) {
+ const startedAt = Date.now();
+ while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) {
+ await sleep2(BUSY_WAIT_STEP_MS);
+ }
+}
+function sleep2(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+
+// dist/src/hooks/version-check.js
+import { existsSync as existsSync5, mkdirSync as mkdirSync4, readFileSync as readFileSync5, writeFileSync as writeFileSync4 } from "node:fs";
+import { dirname as dirname2, join as join6 } from "node:path";
+import { homedir as homedir5 } from "node:os";
+var DEFAULT_VERSION_CACHE_PATH = join6(homedir5(), ".deeplake", ".version-check.json");
+var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3;
function getInstalledVersion(bundleDir, pluginManifestDir) {
try {
- const pluginJson = join4(bundleDir, "..", pluginManifestDir, "plugin.json");
- const plugin = JSON.parse(readFileSync3(pluginJson, "utf-8"));
+ const pluginJson = join6(bundleDir, "..", pluginManifestDir, "plugin.json");
+ const plugin = JSON.parse(readFileSync5(pluginJson, "utf-8"));
if (plugin.version)
return plugin.version;
} catch {
}
let dir = bundleDir;
for (let i = 0; i < 5; i++) {
- const candidate = join4(dir, "package.json");
+ const candidate = join6(dir, "package.json");
try {
- const pkg = JSON.parse(readFileSync3(candidate, "utf-8"));
+ const pkg = JSON.parse(readFileSync5(candidate, "utf-8"));
if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version)
return pkg.version;
} catch {
}
- const parent = dirname(dir);
+ const parent = dirname2(dir);
if (parent === dir)
break;
dir = parent;
}
return null;
}
-async function getLatestVersion(timeoutMs = 3e3) {
- try {
- const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) });
- if (!res.ok)
- return null;
- const pkg = await res.json();
- return pkg.version ?? null;
- } catch {
- return null;
- }
-}
function isNewer(latest, current) {
- const parse = (v) => v.split(".").map(Number);
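+  // Drop any pre-release suffix ("1.2.3-beta") before the numeric compare.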
+ const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number);
const [la, lb, lc] = parse(latest);
const [ca, cb, cc] = parse(current);
return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc;
}
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join5 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join5(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync2(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
+function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) {
+ if (!existsSync5(cachePath))
+ return null;
+ try {
+ const parsed = JSON.parse(readFileSync5(cachePath, "utf-8"));
+ if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) {
+ return parsed;
}
- };
+ } catch {
+ }
+ return null;
+}
+function writeVersionCache(entry, cachePath = DEFAULT_VERSION_CACHE_PATH) {
+ mkdirSync4(dirname2(cachePath), { recursive: true });
+ writeFileSync4(cachePath, JSON.stringify(entry));
+}
+function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) {
+ const cached = readVersionCache(cachePath);
+ if (!cached || cached.url !== url)
+ return void 0;
+ if (nowMs - cached.checkedAt > ttlMs)
+ return void 0;
+ return cached.latest;
+}
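+// Resolves the latest published version through a TTL disk cache. On a
+// failed or non-OK fetch it falls back to the last cached value (even if
+// stale) and still refreshes checkedAt, so failures are not retried each run.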
+async function getLatestVersionCached(opts) {
+ const ttlMs = opts.ttlMs ?? DEFAULT_VERSION_CACHE_TTL_MS;
+ const cachePath = opts.cachePath ?? DEFAULT_VERSION_CACHE_PATH;
+ const nowMs = opts.nowMs ?? Date.now();
+ const fetchImpl = opts.fetchImpl ?? fetch;
+ const fresh = readFreshCachedLatestVersion(opts.url, ttlMs, cachePath, nowMs);
+ if (fresh !== void 0)
+ return fresh;
+ const stale = readVersionCache(cachePath);
+ try {
+ const res = await fetchImpl(opts.url, { signal: AbortSignal.timeout(opts.timeoutMs) });
+ const latest = res.ok ? (await res.json()).version ?? null : stale?.latest ?? null;
+ writeVersionCache({
+ checkedAt: nowMs,
+ latest,
+ url: opts.url
+ }, cachePath);
+ return latest;
+ } catch {
+ const latest = stale?.latest ?? null;
+ writeVersionCache({
+ checkedAt: nowMs,
+ latest,
+ url: opts.url
+ }, cachePath);
+ return latest;
+ }
}
// dist/src/hooks/codex/session-start-setup.js
var log3 = (msg) => log("codex-session-setup", msg);
-var __bundleDir = dirname2(fileURLToPath(import.meta.url));
-var { log: wikiLog } = makeWikiLogger(join6(homedir4(), ".codex", "hooks"));
+var __bundleDir = dirname3(fileURLToPath2(import.meta.url));
+var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json";
+var VERSION_CHECK_TIMEOUT = 3e3;
+var HOME = homedir6();
+var WIKI_LOG = join7(HOME, ".codex", "hooks", "deeplake-wiki.log");
+function wikiLog(msg) {
+ try {
+ mkdirSync5(join7(HOME, ".codex", "hooks"), { recursive: true });
+ appendFileSync3(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg}
+`);
+ } catch {
+ }
+}
async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) {
const summaryPath = `/summaries/${userName}/${sessionId}.md`;
const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`);
@@ -406,7 +839,7 @@ async function createPlaceholder(api, table, sessionId, cwd, userName, orgName,
return;
}
const now = (/* @__PURE__ */ new Date()).toISOString();
- const projectName = cwd.split("/").pop() ?? "unknown";
+ const projectName = cwd.split("/").pop() || "unknown";
const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`;
const content = [
`# Session ${sessionId}`,
@@ -420,78 +853,114 @@ async function createPlaceholder(api, table, sessionId, cwd, userName, orgName,
await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'codex', '${now}', '${now}')`);
wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`);
}
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
- return;
- const input = await readStdin();
- const creds = loadCredentials();
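+// Session-start orchestration: ensure tables, drain queued session rows
+// under the drain lock, create the summary placeholder, then run the cached
+// version check / autoupdate. Every side effect is injectable through deps.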
+async function runCodexSessionStartSetup(input, deps = {}) {
+ const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.tableName), captureEnabled = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false", drainSessionQueuesFn = drainSessionQueues, isSessionWriteDisabledFn = isSessionWriteDisabled, isSessionWriteAuthErrorFn = isSessionWriteAuthError, markSessionWriteDisabledFn = markSessionWriteDisabled, tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, createPlaceholderFn = createPlaceholder, getInstalledVersionFn = getInstalledVersion, getLatestVersionCachedFn = getLatestVersionCached, isNewerFn = isNewer, execSyncFn = execSync2, logFn = log3, wikiLogFn = wikiLog } = deps;
+ if (wikiWorker)
+ return { status: "skipped" };
if (!creds?.token) {
- log3("no credentials");
- return;
+ logFn("no credentials");
+ return { status: "no_credentials" };
}
if (!creds.userName) {
try {
const { userInfo: userInfo2 } = await import("node:os");
creds.userName = userInfo2().username ?? "unknown";
- saveCredentials(creds);
- log3(`backfilled userName: ${creds.userName}`);
+ saveCredentialsFn(creds);
+ logFn(`backfilled userName: ${creds.userName}`);
} catch {
}
}
- const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
- if (input.session_id) {
+ if (input.session_id && config) {
try {
- const config = loadConfig();
- if (config) {
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName);
- await api.ensureTable();
- await api.ensureSessionsTable(config.sessionsTableName);
- if (captureEnabled) {
- await createPlaceholder(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId);
+ const api = createApi(config);
+ await api.ensureTable();
+ if (captureEnabled) {
+ if (isSessionWriteDisabledFn(config.sessionsTableName)) {
+ logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`);
+ } else {
+ const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName);
+ if (!releaseDrainLock) {
+ logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`);
+ } else {
+ try {
+ await api.ensureSessionsTable(config.sessionsTableName);
+ const drain = await drainSessionQueuesFn(api, {
+ sessionsTable: config.sessionsTableName
+ });
+ if (drain.flushedSessions > 0) {
+ logFn(`drained ${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`);
+ }
+ } catch (e) {
+ if (isSessionWriteAuthErrorFn(e)) {
+ markSessionWriteDisabledFn(config.sessionsTableName, e.message);
+ logFn(`sessions table unavailable, skipping setup: ${e.message}`);
+ } else {
+ throw e;
+ }
+ } finally {
+ releaseDrainLock();
+ }
+ }
}
- log3("setup complete");
+ await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId);
}
+ logFn("setup complete");
} catch (e) {
- log3(`setup failed: ${e.message}`);
- wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`);
+ logFn(`setup failed: ${e.message}`);
+ wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`);
}
}
const autoupdate = creds.autoupdate !== false;
try {
- const current = getInstalledVersion(__bundleDir, ".codex-plugin");
+ const current = getInstalledVersionFn(__bundleDir, ".codex-plugin");
if (current) {
- const latest = await getLatestVersion();
- if (latest && isNewer(latest, current)) {
+ const latest = await getLatestVersionCachedFn({
+ url: GITHUB_RAW_PKG,
+ timeoutMs: VERSION_CHECK_TIMEOUT
+ });
+ if (latest && isNewerFn(latest, current)) {
if (autoupdate) {
- log3(`autoupdate: updating ${current} \u2192 ${latest}`);
+ logFn(`autoupdate: updating ${current} \u2192 ${latest}`);
try {
const tag = `v${latest}`;
if (!/^v\d+\.\d+\.\d+$/.test(tag))
throw new Error(`unsafe version tag: ${tag}`);
const findCmd = `INSTALL_DIR=""; CACHE_DIR=$(find ~/.codex/plugins/cache -maxdepth 3 -name "hivemind" -type d 2>/dev/null | head -1); if [ -n "$CACHE_DIR" ]; then INSTALL_DIR=$(ls -1d "$CACHE_DIR"/*/ 2>/dev/null | tail -1); elif [ -d ~/.codex/hivemind ]; then INSTALL_DIR=~/.codex/hivemind; fi; if [ -n "$INSTALL_DIR" ]; then TMPDIR=$(mktemp -d); git clone --depth 1 --branch ${tag} -q https://github.com/activeloopai/hivemind.git "$TMPDIR/hivemind" 2>/dev/null && cp -r "$TMPDIR/hivemind/codex/"* "$INSTALL_DIR/" 2>/dev/null; rm -rf "$TMPDIR"; fi`;
- execSync2(findCmd, { stdio: "ignore", timeout: 6e4 });
+ execSyncFn(findCmd, { stdio: "ignore", timeout: 6e4 });
process.stderr.write(`Hivemind auto-updated: ${current} \u2192 ${latest}. Restart Codex to apply.
`);
- log3(`autoupdate succeeded: ${current} \u2192 ${latest} (tag: ${tag})`);
+ logFn(`autoupdate succeeded: ${current} \u2192 ${latest} (tag: ${tag})`);
} catch (e) {
process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed.
`);
- log3(`autoupdate failed: ${e.message}`);
+ logFn(`autoupdate failed: ${e.message}`);
}
} else {
process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}.
`);
- log3(`update available (autoupdate off): ${current} \u2192 ${latest}`);
+ logFn(`update available (autoupdate off): ${current} \u2192 ${latest}`);
}
} else {
- log3(`version up to date: ${current}`);
+ logFn(`version up to date: ${current}`);
}
}
} catch (e) {
- log3(`version check failed: ${e.message}`);
+ logFn(`version check failed: ${e.message}`);
}
+ return { status: "complete" };
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+async function main() {
+ const input = await readStdin();
+ await runCodexSessionStartSetup(input);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log3(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ createPlaceholder,
+ runCodexSessionStartSetup,
+ wikiLog
+};
diff --git a/codex/bundle/session-start.js b/codex/bundle/session-start.js
index fe5cfe1..bb3ebd0 100755
--- a/codex/bundle/session-start.js
+++ b/codex/bundle/session-start.js
@@ -2,7 +2,7 @@
// dist/src/hooks/codex/session-start.js
import { spawn } from "node:child_process";
-import { fileURLToPath } from "node:url";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
import { dirname as dirname2, join as join4 } from "node:path";
// dist/src/commands/auth.js
@@ -24,13 +24,13 @@ function loadCredentials() {
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -52,9 +52,26 @@ function log(tag, msg) {
`);
}
-// dist/src/utils/version-check.js
-import { readFileSync as readFileSync2 } from "node:fs";
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
+}
+
+// dist/src/hooks/version-check.js
+import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "node:fs";
import { dirname, join as join3 } from "node:path";
+import { homedir as homedir3 } from "node:os";
+var DEFAULT_VERSION_CACHE_PATH = join3(homedir3(), ".deeplake", ".version-check.json");
+var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3;
function getInstalledVersion(bundleDir, pluginManifestDir) {
try {
const pluginJson = join3(bundleDir, "..", pluginManifestDir, "plugin.json");
@@ -82,27 +99,41 @@ function getInstalledVersion(bundleDir, pluginManifestDir) {
// dist/src/hooks/codex/session-start.js
var log2 = (msg) => log("codex-session-start", msg);
-var __bundleDir = dirname2(fileURLToPath(import.meta.url));
+var __bundleDir = dirname2(fileURLToPath2(import.meta.url));
var AUTH_CMD = join4(__bundleDir, "commands", "auth-login.js");
-var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
+var CODEX_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents.
-Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/*.jsonl (last resort). Do NOT jump straight to JSONL.
+Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/{author}/* (last resort). Do NOT jump straight to raw session files.
+When index.md identifies likely candidate files, open those exact summary or session paths directly before trying broader synonym greps or wide exploratory scans.
+Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots for Deeplake recall tasks.
+TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date.
+TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding.
+ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging.
+NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files.
+NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent.
+SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases.
+RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found."
Search: grep -r "keyword" ~/.deeplake/memory/
IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, sed, awk, etc.) to interact with ~/.deeplake/memory/. Do NOT use python, python3, node, curl, or other interpreters \u2014 they are not available in the memory filesystem.
Do NOT spawn subagents to read deeplake memory.`;
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
- return;
- const input = await readStdin();
- const creds = loadCredentials();
- if (!creds?.token) {
- log2("no credentials found \u2014 run auth login to authenticate");
- } else {
- log2(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
- }
+function buildCodexSessionStartContext(args) {
+ const versionNotice = args.currentVersion ? `
+Hivemind v${args.currentVersion}` : "";
+ return args.creds?.token ? `${CODEX_SESSION_START_CONTEXT}
+Logged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${versionNotice}` : `${CODEX_SESSION_START_CONTEXT}
+Not logged in to Deeplake. Run: node "${args.authCommand}" login${versionNotice}`;
+}
+async function runCodexSessionStartHook(input, deps = {}) {
+  const {
+    wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1",
+    creds = loadCredentials(),
+    spawnFn = spawn,
+    currentVersion = getInstalledVersion(__bundleDir, ".codex-plugin"),
+    authCommand = AUTH_CMD,
+    setupScript = join4(__bundleDir, "session-start-setup.js"),
+    logFn = log2
+  } = deps;
+ if (wikiWorker)
+ return null;
+ if (!creds?.token)
+ logFn("no credentials found \u2014 run auth login to authenticate");
+ else
+ logFn(`credentials loaded: org=${creds.orgName ?? creds.orgId}`);
if (creds?.token) {
- const setupScript = join4(__bundleDir, "session-start-setup.js");
- const child = spawn("node", [setupScript], {
+ const child = spawnFn("node", [setupScript], {
detached: true,
stdio: ["pipe", "ignore", "ignore"],
env: { ...process.env }
@@ -110,20 +141,28 @@ async function main() {
child.stdin?.write(JSON.stringify(input));
child.stdin?.end();
child.unref();
- log2("spawned async setup process");
+ logFn("spawned async setup process");
}
- let versionNotice = "";
- const current = getInstalledVersion(__bundleDir, ".codex-plugin");
- if (current) {
- versionNotice = `
-Hivemind v${current}`;
- }
- const additionalContext = creds?.token ? `${context}
-Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${versionNotice}` : `${context}
-Not logged in to Deeplake. Run: node "${AUTH_CMD}" login${versionNotice}`;
- console.log(additionalContext);
+ return buildCodexSessionStartContext({
+ creds,
+ currentVersion,
+ authCommand
+ });
+}
+async function main() {
+ const input = await readStdin();
+ const output = await runCodexSessionStartHook(input);
+ if (output)
+ console.log(output);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log2(`fatal: ${e.message}`);
+ process.exit(0);
+ });
}
-main().catch((e) => {
- log2(`fatal: ${e.message}`);
- process.exit(0);
-});
+export {
+ CODEX_SESSION_START_CONTEXT,
+ buildCodexSessionStartContext,
+ runCodexSessionStartHook
+};
diff --git a/codex/bundle/shell/deeplake-shell.js b/codex/bundle/shell/deeplake-shell.js
index 2d0b237..5872059 100755
--- a/codex/bundle/shell/deeplake-shell.js
+++ b/codex/bundle/shell/deeplake-shell.js
@@ -46081,14 +46081,14 @@ var require_turndown_cjs = __commonJS({
} else if (node.nodeType === 1) {
replacement = replacementForNode.call(self2, node);
}
- return join6(output, replacement);
+ return join7(output, replacement);
}, "");
}
function postProcess(output) {
var self2 = this;
this.rules.forEach(function(rule) {
if (typeof rule.append === "function") {
- output = join6(output, rule.append(self2.options));
+ output = join7(output, rule.append(self2.options));
}
});
return output.replace(/^[\t\r\n]+/, "").replace(/[\t\r\n\s]+$/, "");
@@ -46100,7 +46100,7 @@ var require_turndown_cjs = __commonJS({
if (whitespace.leading || whitespace.trailing) content = content.trim();
return whitespace.leading + rule.replacement(content, node, this.options) + whitespace.trailing;
}
- function join6(output, replacement) {
+ function join7(output, replacement) {
var s12 = trimTrailingNewlines(output);
var s22 = trimLeadingNewlines(replacement);
var nls = Math.max(output.length - s12.length, replacement.length - s22.length);
@@ -66758,6 +66758,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync3, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join6 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -66800,9 +66803,30 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
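+// Defaults: 10s per-query timeout (env-overridable) and a 6-hour freshness
+// window for on-disk lookup-index markers.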
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms3) {
return new Promise((resolve5) => setTimeout(resolve5, ms3));
}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" Object.fromEntries(raw.columns.map((col, i11) => [col, row[i11]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
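+      // Session-event INSERTs occasionally fail with a transient 401, or a
+      // 403 whose body is empty or an HTML error page; retry those alongside
+      // the standard retryable status codes.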
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -66955,8 +66987,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
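+  // Lookup-index creation is memoized on disk: a marker file keyed by
+  // (workspace, org, table, suffix) with a TTL avoids re-issuing
+  // CREATE INDEX on every session start.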
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join6(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync3(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e6) {
+ if (isDuplicateIndexError(e6)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e6.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
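+  // Only successful fetches populate the cache; failures return
+  // cacheable: false so the next call retries the network.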
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -66967,22 +67052,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t6) => t6.table_name);
+ return {
+ tables: (data.tables ?? []).map((t6) => t6.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -66992,6 +67080,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -67001,673 +67091,1090 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
// dist/src/shell/deeplake-fs.js
import { basename as basename4, posix } from "node:path";
import { randomUUID as randomUUID2 } from "node:crypto";
-var BATCH_SIZE = 10;
-var FLUSH_DEBOUNCE_MS = 200;
-function normPath(p22) {
- const r10 = posix.normalize(p22.startsWith("/") ? p22 : "/" + p22);
- return r10 === "/" ? r10 : r10.replace(/\/$/, "");
+
+// dist/src/shell/grep-core.js
+var TOOL_INPUT_FIELDS = [
+ "command",
+ "file_path",
+ "path",
+ "pattern",
+ "prompt",
+ "subagent_type",
+ "query",
+ "url",
+ "notebook_path",
+ "old_string",
+ "new_string",
+ "content",
+ "skill",
+ "args",
+ "taskId",
+ "status",
+ "subject",
+ "description",
+ "to",
+ "message",
+ "summary",
+ "max_results"
+];
+var TOOL_RESPONSE_DROP = /* @__PURE__ */ new Set([
+ // Note: `stderr` is intentionally NOT in this set. The `stdout` high-signal
+ // branch below already de-dupes it for the common case (appends as suffix
+ // when non-empty). If a tool response has ONLY `stderr` and no `stdout`
+ // (hard-failure on some tools), the generic cleanup preserves it so the
+ // error message reaches Claude instead of collapsing to `[ok]`.
+ "interrupted",
+ "isImage",
+ "noOutputExpected",
+ "type",
+ "structuredPatch",
+ "userModified",
+ "originalFile",
+ "replaceAll",
+ "totalDurationMs",
+ "totalTokens",
+ "totalToolUseCount",
+ "usage",
+ "toolStats",
+ "durationMs",
+ "durationSeconds",
+ "bytes",
+ "code",
+ "codeText",
+ "agentId",
+ "agentType",
+ "verificationNudgeNeeded",
+ "numLines",
+ "numFiles",
+ "truncated",
+ "statusChange",
+ "updatedFields",
+ "isAgent",
+ "success"
+]);
+function maybeParseJson(v27) {
+ if (typeof v27 !== "string")
+ return v27;
+ const s10 = v27.trim();
+ if (s10[0] !== "{" && s10[0] !== "[")
+ return v27;
+ try {
+ return JSON.parse(s10);
+ } catch {
+ return v27;
+ }
}
-function parentOf(p22) {
- const i11 = p22.lastIndexOf("/");
- return i11 <= 0 ? "/" : p22.slice(0, i11);
+function snakeCase(k17) {
+ return k17.replace(/([A-Z])/g, "_$1").toLowerCase();
}
-function guessMime(filename) {
- const ext2 = filename.split(".").pop()?.toLowerCase() ?? "";
- return {
- json: "application/json",
- md: "text/markdown",
- txt: "text/plain",
- js: "text/javascript",
- ts: "text/typescript",
- html: "text/html",
- css: "text/css"
- }[ext2] ?? "text/plain";
+function camelCase(k17) {
+ return k17.replace(/_([a-z])/g, (_16, c15) => c15.toUpperCase());
}
-function fsErr(code, msg, path2) {
- return Object.assign(new Error(`${code}: ${msg}, '${path2}'`), { code });
+function formatToolInput(raw) {
+ const p22 = maybeParseJson(raw);
+ if (typeof p22 !== "object" || p22 === null)
+ return String(p22 ?? "");
+ const parts = [];
+ for (const k17 of TOOL_INPUT_FIELDS) {
+ if (p22[k17] === void 0)
+ continue;
+ const v27 = p22[k17];
+ parts.push(`${k17}: ${typeof v27 === "string" ? v27 : JSON.stringify(v27)}`);
+ }
+ for (const k17 of ["glob", "output_mode", "limit", "offset"]) {
+ if (p22[k17] !== void 0)
+ parts.push(`${k17}: ${p22[k17]}`);
+ }
+ return parts.length ? parts.join("\n") : JSON.stringify(p22);
}
-var DeeplakeFs = class _DeeplakeFs {
- client;
- table;
- mountPoint;
- // path → Buffer (content) or null (exists but not fetched yet)
- files = /* @__PURE__ */ new Map();
- meta = /* @__PURE__ */ new Map();
- // dir path → Set of immediate child names
- dirs = /* @__PURE__ */ new Map();
- // batched writes pending SQL flush
- pending = /* @__PURE__ */ new Map();
- // paths that have been flushed (INSERT) at least once — subsequent flushes use UPDATE
- flushed = /* @__PURE__ */ new Set();
- /** Number of files loaded from the server during bootstrap. */
- get fileCount() {
- return this.files.size;
+function formatToolResponse(raw, inp, toolName) {
+ const r10 = maybeParseJson(raw);
+ if (typeof r10 !== "object" || r10 === null)
+ return String(r10 ?? "");
+ if (toolName === "Edit" || toolName === "Write" || toolName === "MultiEdit") {
+ return r10.filePath ? `[wrote ${r10.filePath}]` : "[ok]";
}
- flushTimer = null;
- // serialize flushes
- flushChain = Promise.resolve();
- // Paths that live in the sessions table (multi-row, read by concatenation)
- sessionPaths = /* @__PURE__ */ new Set();
- sessionsTable = null;
- constructor(client, table, mountPoint) {
- this.client = client;
- this.table = table;
- this.mountPoint = mountPoint;
- this.dirs.set(mountPoint, /* @__PURE__ */ new Set());
- if (mountPoint !== "/")
- this.dirs.set("/", /* @__PURE__ */ new Set([mountPoint.slice(1)]));
+ if (typeof r10.stdout === "string") {
+ const stderr = r10.stderr;
+ return r10.stdout + (stderr ? `
+stderr: ${stderr}` : "");
}
- static async create(client, table, mount = "/memory", sessionsTable) {
- const fs3 = new _DeeplakeFs(client, table, mount);
- fs3.sessionsTable = sessionsTable ?? null;
- await client.ensureTable();
- let sessionSyncOk = true;
- const memoryBootstrap = (async () => {
- const sql = `SELECT path, size_bytes, mime_type FROM "${table}" ORDER BY path`;
- try {
- const rows = await client.query(sql);
- for (const row of rows) {
- const p22 = row["path"];
- fs3.files.set(p22, null);
- fs3.meta.set(p22, {
- size: Number(row["size_bytes"] ?? 0),
- mime: row["mime_type"] ?? "application/octet-stream",
- mtime: /* @__PURE__ */ new Date()
- });
- fs3.addToTree(p22);
- fs3.flushed.add(p22);
- }
- } catch {
- }
- })();
- const sessionsBootstrap = sessionsTable && sessionSyncOk ? (async () => {
- try {
- const sessionRows = await client.query(`SELECT path, SUM(size_bytes) as total_size FROM "${sessionsTable}" GROUP BY path ORDER BY path`);
- for (const row of sessionRows) {
- const p22 = row["path"];
- if (!fs3.files.has(p22)) {
- fs3.files.set(p22, null);
- fs3.meta.set(p22, {
- size: Number(row["total_size"] ?? 0),
- mime: "application/x-ndjson",
- mtime: /* @__PURE__ */ new Date()
- });
- fs3.addToTree(p22);
- }
- fs3.sessionPaths.add(p22);
- }
- } catch {
- }
- })() : Promise.resolve();
- await Promise.all([memoryBootstrap, sessionsBootstrap]);
- return fs3;
+ if (typeof r10.content === "string")
+ return r10.content;
+ if (r10.file && typeof r10.file === "object") {
+ const f11 = r10.file;
+ if (typeof f11.content === "string")
+ return `[${f11.filePath ?? ""}]
+${f11.content}`;
+ if (typeof f11.base64 === "string")
+ return `[binary ${f11.filePath ?? ""}: ${f11.base64.length} base64 chars]`;
}
- // ── tree management ───────────────────────────────────────────────────────
- addToTree(filePath) {
- const segs = filePath.split("/").filter(Boolean);
- for (let d15 = 0; d15 < segs.length; d15++) {
- const dir = d15 === 0 ? "/" : "/" + segs.slice(0, d15).join("/");
- if (!this.dirs.has(dir))
- this.dirs.set(dir, /* @__PURE__ */ new Set());
- this.dirs.get(dir).add(segs[d15]);
- }
+ if (Array.isArray(r10.filenames))
+ return r10.filenames.join("\n");
+ if (Array.isArray(r10.matches)) {
+ return r10.matches.map((m26) => typeof m26 === "string" ? m26 : JSON.stringify(m26)).join("\n");
}
- removeFromTree(filePath) {
- this.files.delete(filePath);
- this.meta.delete(filePath);
- this.pending.delete(filePath);
- this.flushed.delete(filePath);
- const parent = parentOf(filePath);
- this.dirs.get(parent)?.delete(basename4(filePath));
+ if (Array.isArray(r10.results)) {
+ return r10.results.map((x28) => typeof x28 === "string" ? x28 : x28?.title ?? x28?.url ?? JSON.stringify(x28)).join("\n");
}
- // ── flush / write batching ────────────────────────────────────────────────
- scheduleFlush() {
- if (this.flushTimer !== null)
- return;
- this.flushTimer = setTimeout(() => {
- this.flush().catch(() => {
- });
- }, FLUSH_DEBOUNCE_MS);
+ const inpObj = maybeParseJson(inp);
+ const kept = {};
+ for (const [k17, v27] of Object.entries(r10)) {
+ if (TOOL_RESPONSE_DROP.has(k17))
+ continue;
+ if (v27 === "" || v27 === false || v27 == null)
+ continue;
+ if (typeof inpObj === "object" && inpObj) {
+ const inObj = inpObj;
+ if (k17 in inObj && JSON.stringify(inObj[k17]) === JSON.stringify(v27))
+ continue;
+ const snake = snakeCase(k17);
+ if (snake in inObj && JSON.stringify(inObj[snake]) === JSON.stringify(v27))
+ continue;
+ const camel = camelCase(k17);
+ if (camel in inObj && JSON.stringify(inObj[camel]) === JSON.stringify(v27))
+ continue;
+ }
+ kept[k17] = v27;
}
- async flush() {
- this.flushChain = this.flushChain.then(() => this._doFlush());
- return this.flushChain;
+ return Object.keys(kept).length ? JSON.stringify(kept) : "[ok]";
+}
+function formatToolCall(obj) {
+ return `[tool:${obj?.tool_name ?? "?"}]
+input: ${formatToolInput(obj?.tool_input)}
+response: ${formatToolResponse(obj?.tool_response, obj?.tool_input, obj?.tool_name)}`;
+}
+function normalizeContent(path2, raw) {
+ if (!path2.includes("/sessions/"))
+ return raw;
+ if (!raw || raw[0] !== "{")
+ return raw;
+ let obj;
+ try {
+ obj = JSON.parse(raw);
+ } catch {
+ return raw;
}
- async _doFlush() {
- if (this.pending.size === 0)
- return;
- if (this.flushTimer !== null) {
- clearTimeout(this.flushTimer);
- this.flushTimer = null;
- }
- const rows = [...this.pending.values()];
- this.pending.clear();
- const results = await Promise.allSettled(rows.map((r10) => this.upsertRow(r10)));
- let failures = 0;
- for (let i11 = 0; i11 < results.length; i11++) {
- if (results[i11].status === "rejected") {
- if (!this.pending.has(rows[i11].path)) {
- this.pending.set(rows[i11].path, rows[i11]);
- }
- failures++;
- }
- }
- if (failures > 0) {
- throw new Error(`flush: ${failures}/${rows.length} writes failed and were re-queued`);
+ if (Array.isArray(obj.turns)) {
+ const header = [];
+ if (obj.date_time)
+ header.push(`date: ${obj.date_time}`);
+ if (obj.speakers) {
+ const s10 = obj.speakers;
+ const names = [s10.speaker_a, s10.speaker_b].filter(Boolean).join(", ");
+ if (names)
+ header.push(`speakers: ${names}`);
}
+ const lines = obj.turns.map((t6) => {
+ const sp = String(t6?.speaker ?? t6?.name ?? "?").trim();
+ const tx = String(t6?.text ?? t6?.content ?? "").replace(/\s+/g, " ").trim();
+ const tag = t6?.dia_id ? `[${t6.dia_id}] ` : "";
+ return `${tag}${sp}: ${tx}`;
+ });
+ const out2 = [...header, ...lines].join("\n");
+ return out2.trim() ? out2 : raw;
}
- async upsertRow(r10) {
- const text = sqlStr(r10.contentText);
- const p22 = sqlStr(r10.path);
- const fname = sqlStr(r10.filename);
- const mime = sqlStr(r10.mimeType);
- const ts3 = (/* @__PURE__ */ new Date()).toISOString();
- const cd = r10.creationDate ?? ts3;
- const lud = r10.lastUpdateDate ?? ts3;
- if (this.flushed.has(r10.path)) {
- let setClauses = `filename = '${fname}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`;
- if (r10.project !== void 0)
- setClauses += `, project = '${sqlStr(r10.project)}'`;
- if (r10.description !== void 0)
- setClauses += `, description = '${sqlStr(r10.description)}'`;
- await this.client.query(`UPDATE "${this.table}" SET ${setClauses} WHERE path = '${p22}'`);
- } else {
- const id = randomUUID2();
- const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : "");
- const vals = `'${id}', '${p22}', '${fname}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? `, '${sqlStr(r10.description)}'` : "");
- await this.client.query(`INSERT INTO "${this.table}" (${cols}) VALUES (${vals})`);
- this.flushed.add(r10.path);
- }
+  const stripRecalled = (t6) => {
+    // NOTE: the open/close marker strings were lost to extraction (they look
+    // like HTML tags); "<recalled>" / "</recalled>" are assumed placeholders.
+    const OPEN = "<recalled>";
+    const CLOSE = "</recalled>";
+    const i11 = t6.indexOf(OPEN);
+    if (i11 === -1)
+      return t6;
+    const j14 = t6.lastIndexOf(CLOSE);
+    if (j14 === -1 || j14 < i11)
+      return t6;
+    const head = t6.slice(0, i11);
+    const tail = t6.slice(j14 + CLOSE.length);
+    return (head + tail).replace(/^\s+/, "").replace(/\n{3,}/g, "\n\n");
+  };
+ let out = null;
+ if (obj.type === "user_message") {
+ out = `[user] ${stripRecalled(String(obj.content ?? ""))}`;
+ } else if (obj.type === "assistant_message") {
+ const agent = obj.agent_type ? ` (agent=${obj.agent_type})` : "";
+ out = `[assistant${agent}] ${stripRecalled(String(obj.content ?? ""))}`;
+ } else if (obj.type === "tool_call") {
+ out = formatToolCall(obj);
}
- // ── Virtual index.md generation ────────────────────────────────────────────
- async generateVirtualIndex() {
- const rows = await this.client.query(`SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" WHERE path LIKE '${sqlStr("/summaries/")}%' ORDER BY last_update_date DESC`);
- const sessionPathsByKey = /* @__PURE__ */ new Map();
- for (const sp of this.sessionPaths) {
- const hivemind = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/);
- if (hivemind) {
- sessionPathsByKey.set(hivemind[1], sp.slice(1));
- } else {
- const fname = sp.split("/").pop() ?? "";
- const stem = fname.replace(/\.[^.]+$/, "");
- if (stem)
- sessionPathsByKey.set(stem, sp.slice(1));
- }
+ if (out === null)
+ return raw;
+ const trimmed = out.trim();
+ if (!trimmed || trimmed === "[user]" || trimmed === "[assistant]" || /^\[tool:[^\]]*\]\s+input:\s+\{\}\s+response:\s+\{\}$/.test(trimmed))
+ return raw;
+ return out;
+}
+function buildPathCondition(targetPath) {
+ if (!targetPath || targetPath === "/")
+ return "";
+ const clean = targetPath.replace(/\/+$/, "");
+ if (/[*?]/.test(clean)) {
+ const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_");
+ return `path LIKE '${likePattern}'`;
+ }
+ const base = clean.split("/").pop() ?? "";
+ if (base.includes(".")) {
+ return `path = '${sqlStr(clean)}'`;
+ }
+ return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+}
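+// One round trip for grep: UNION ALL the summaries and sessions tables with
+// SQL LIKE/ILIKE prefilters so the in-process regex pass only sees
+// candidate rows.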
+async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
+ const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts;
+ const limit = opts.limit ?? 100;
+  const filterPatterns = contentScanOnly
+    ? (prefilterPatterns && prefilterPatterns.length > 0
+        ? prefilterPatterns
+        : prefilterPattern ? [prefilterPattern] : [])
+    : [escapedPattern];
+ const memFilter = buildContentFilter("summary::text", likeOp, filterPatterns);
+ const sessFilter = buildContentFilter("message::text", likeOp, filterPatterns);
+ const memQuery = `SELECT path, summary::text AS content, 0 AS source_order, '' AS creation_date FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
+ const sessQuery = `SELECT path, message::text AS content, 1 AS source_order, COALESCE(creation_date::text, '') AS creation_date FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
+ const rows = await api.query(`SELECT path, content, source_order, creation_date FROM ((${memQuery}) UNION ALL (${sessQuery})) AS combined ORDER BY path, source_order, creation_date`);
+ return rows.map((row) => ({
+ path: String(row["path"]),
+ content: String(row["content"] ?? "")
+ }));
+}
+function buildPathFilter(targetPath) {
+ const condition = buildPathCondition(targetPath);
+ return condition ? ` AND ${condition}` : "";
+}
+function buildPathFilterForTargets(targetPaths) {
+ if (targetPaths.some((targetPath) => !targetPath || targetPath === "/"))
+ return "";
+ const conditions = [...new Set(targetPaths.map((targetPath) => buildPathCondition(targetPath)).filter((condition) => condition.length > 0))];
+ if (conditions.length === 0)
+ return "";
+ if (conditions.length === 1)
+ return ` AND ${conditions[0]}`;
+ return ` AND (${conditions.join(" OR ")})`;
+}
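+// Pull the longest literal run out of a regex for use as a SQL LIKE
+// prefilter; returns null for escapes, classes, groups, or quantifiers that
+// rule out a safe plain-substring filter.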
+function extractRegexLiteralPrefilter(pattern) {
+ if (!pattern)
+ return null;
+ const parts = [];
+ let current = "";
+ for (let i11 = 0; i11 < pattern.length; i11++) {
+ const ch = pattern[i11];
+ if (ch === "\\") {
+ const next = pattern[i11 + 1];
+ if (!next)
+ return null;
+ if (/[dDsSwWbBAZzGkKpP]/.test(next))
+ return null;
+ current += next;
+ i11++;
+ continue;
}
- const lines = [
- "# Session Index",
- "",
- "List of all Claude Code sessions with summaries.",
- "",
- "| Session | Conversation | Created | Last Updated | Project | Description |",
- "|---------|-------------|---------|--------------|---------|-------------|"
- ];
- for (const row of rows) {
- const p22 = row["path"];
- const match2 = p22.match(/\/summaries\/([^/]+)\/([^/]+)\.md$/);
- if (!match2)
+ if (ch === ".") {
+ if (pattern[i11 + 1] === "*") {
+ if (current)
+ parts.push(current);
+ current = "";
+ i11++;
continue;
- const summaryUser = match2[1];
- const sessionId = match2[2];
- const relPath = `summaries/${summaryUser}/${sessionId}.md`;
- const baseName = sessionId.replace(/_summary$/, "");
- const convPath = sessionPathsByKey.get(sessionId) ?? sessionPathsByKey.get(baseName);
- const convLink = convPath ? `[messages](${convPath})` : "";
- const project = row["project"] || "";
- const description = row["description"] || "";
- const creationDate = row["creation_date"] || "";
- const lastUpdateDate = row["last_update_date"] || "";
- lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`);
+ }
+ return null;
}
- lines.push("");
- return lines.join("\n");
+ if ("|()[]{}+?^$".includes(ch) || ch === "*")
+ return null;
+ current += ch;
}
- // ── batch prefetch ────────────────────────────────────────────────────────
- /**
- * Prefetch multiple files into the content cache with a single SQL query.
- * Skips paths that are already cached, pending, or session-backed.
- * After this call, subsequent readFile() calls for these paths hit cache.
- */
- async prefetch(paths) {
- const uncached = [];
- for (const raw of paths) {
- const p22 = normPath(raw);
- if (this.files.get(p22) !== null && this.files.get(p22) !== void 0)
- continue;
- if (this.pending.has(p22))
- continue;
- if (this.sessionPaths.has(p22))
- continue;
- if (!this.files.has(p22))
- continue;
- uncached.push(p22);
- }
- if (uncached.length === 0)
- return;
- const inList = uncached.map((p22) => `'${sqlStr(p22)}'`).join(", ");
- const rows = await this.client.query(`SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})`);
- for (const row of rows) {
- const p22 = row["path"];
- const text = row["summary"] ?? "";
- this.files.set(p22, Buffer.from(text, "utf-8"));
+ if (current)
+ parts.push(current);
+ const literal = parts.reduce((best, part) => part.length > best.length ? part : best, "");
+ return literal.length >= 2 ? literal : null;
+}
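+// For a simple top-level alternation (foo|bar), return one literal
+// prefilter per branch; null when any branch is empty or uses grouping or
+// anchors.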
+function extractRegexAlternationPrefilters(pattern) {
+ if (!pattern.includes("|"))
+ return null;
+ const parts = [];
+ let current = "";
+ let escaped = false;
+ for (let i11 = 0; i11 < pattern.length; i11++) {
+ const ch = pattern[i11];
+ if (escaped) {
+ current += `\\${ch}`;
+ escaped = false;
+ continue;
}
- }
- // ── IFileSystem: reads ────────────────────────────────────────────────────
- async readFileBuffer(path2) {
- const p22 = normPath(path2);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- if (!this.files.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- const cached = this.files.get(p22);
- if (cached !== null && cached !== void 0)
- return cached;
- const pend = this.pending.get(p22);
- if (pend) {
- const buf2 = Buffer.from(pend.contentText, "utf-8");
- this.files.set(p22, buf2);
- return buf2;
+ if (ch === "\\") {
+ escaped = true;
+ continue;
}
- if (this.sessionPaths.has(p22) && this.sessionsTable) {
- const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
- if (rows2.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text = rows2.map((r10) => typeof r10["message"] === "string" ? r10["message"] : JSON.stringify(r10["message"])).join("\n");
- const buf2 = Buffer.from(text, "utf-8");
- this.files.set(p22, buf2);
- return buf2;
+ if (ch === "|") {
+ if (!current)
+ return null;
+ parts.push(current);
+ current = "";
+ continue;
}
- const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
- if (rows.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const buf = Buffer.from(rows[0]["summary"] ?? "", "utf-8");
- this.files.set(p22, buf);
- return buf;
+ if ("()[]{}^$".includes(ch))
+ return null;
+ current += ch;
}
- async readFile(path2, _opts) {
- const p22 = normPath(path2);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- if (p22 === "/index.md" && !this.files.has(p22)) {
- const realRows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr("/index.md")}' LIMIT 1`);
- if (realRows.length > 0 && realRows[0]["summary"]) {
- const text2 = realRows[0]["summary"];
- const buf2 = Buffer.from(text2, "utf-8");
- this.files.set(p22, buf2);
- return text2;
+ if (escaped || !current)
+ return null;
+ parts.push(current);
+ const literals = [...new Set(parts.map((part) => extractRegexLiteralPrefilter(part)).filter((part) => typeof part === "string" && part.length >= 2))];
+ return literals.length > 0 ? literals : null;
+}
+function buildGrepSearchOptions(params, targetPath) {
+ const hasRegexMeta = !params.fixedString && /[.*+?^${}()|[\]\\]/.test(params.pattern);
+ const literalPrefilter = hasRegexMeta ? extractRegexLiteralPrefilter(params.pattern) : null;
+ const alternationPrefilters = hasRegexMeta ? extractRegexAlternationPrefilters(params.pattern) : null;
+ return {
+ pathFilter: buildPathFilter(targetPath),
+ contentScanOnly: hasRegexMeta,
+ likeOp: params.ignoreCase ? "ILIKE" : "LIKE",
+ escapedPattern: sqlLike(params.pattern),
+ prefilterPattern: literalPrefilter ? sqlLike(literalPrefilter) : void 0,
+ prefilterPatterns: alternationPrefilters?.map((literal) => sqlLike(literal))
+ };
+}
+function buildContentFilter(column, likeOp, patterns) {
+ if (patterns.length === 0)
+ return "";
+ if (patterns.length === 1)
+ return ` AND ${column} ${likeOp} '%${patterns[0]}%'`;
+ return ` AND (${patterns.map((pattern) => `${column} ${likeOp} '%${pattern}%'`).join(" OR ")})`;
+}
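+// Compile the grep pattern (escaped first for fixed-string mode, wrapped in
+// \b for word match); an invalid regex falls back to a literal, escaped
+// match.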
+function compileGrepRegex(params) {
+ let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
+ if (params.wordMatch)
+ reStr = `\\b${reStr}\\b`;
+ try {
+ return new RegExp(reStr, params.ignoreCase ? "i" : "");
+ } catch {
+ return new RegExp(params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), params.ignoreCase ? "i" : "");
+ }
+}
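+// Second pass over the SQL candidates: run the compiled regex line by line,
+// honouring grep-style flags (filesOnly, countOnly, lineNumber, invertMatch)
+// and the multi-file "path:" prefix convention.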
+function refineGrepMatches(rows, params, forceMultiFilePrefix) {
+ const re9 = compileGrepRegex(params);
+ const multi = forceMultiFilePrefix ?? rows.length > 1;
+ const output = [];
+ for (const row of rows) {
+ if (!row.content)
+ continue;
+ const lines = row.content.split("\n");
+ const matched = [];
+ for (let i11 = 0; i11 < lines.length; i11++) {
+ const hit = re9.test(lines[i11]);
+ if (hit !== !!params.invertMatch) {
+ if (params.filesOnly) {
+ output.push(row.path);
+ break;
+ }
+ const prefix = multi ? `${row.path}:` : "";
+ const ln3 = params.lineNumber ? `${i11 + 1}:` : "";
+ matched.push(`${prefix}${ln3}${lines[i11]}`);
}
- return this.generateVirtualIndex();
}
- if (!this.files.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- const cached = this.files.get(p22);
- if (cached !== null && cached !== void 0)
- return cached.toString("utf-8");
- const pend = this.pending.get(p22);
- if (pend)
- return pend.contentText;
- if (this.sessionPaths.has(p22) && this.sessionsTable) {
- const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
- if (rows2.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text2 = rows2.map((r10) => typeof r10["message"] === "string" ? r10["message"] : JSON.stringify(r10["message"])).join("\n");
- const buf2 = Buffer.from(text2, "utf-8");
- this.files.set(p22, buf2);
- return text2;
+ if (!params.filesOnly) {
+ if (params.countOnly) {
+ output.push(`${multi ? `${row.path}:` : ""}${matched.length}`);
+ } else {
+ output.push(...matched);
+ }
}
- const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
- if (rows.length === 0)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const text = rows[0]["summary"] ?? "";
- const buf = Buffer.from(text, "utf-8");
- this.files.set(p22, buf);
- return text;
}
- // ── IFileSystem: writes ───────────────────────────────────────────────────
- /** Write a file with optional row-level metadata (project, description, dates). */
- async writeFileWithMeta(path2, content, meta) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- const buf = Buffer.from(text, "utf-8");
- const mime = guessMime(basename4(p22));
- this.files.set(p22, buf);
- this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
- this.addToTree(p22);
- this.pending.set(p22, {
- path: p22,
- filename: basename4(p22),
- contentText: text,
- mimeType: mime,
- sizeBytes: buf.length,
- ...meta
- });
- if (this.pending.size >= BATCH_SIZE)
- await this.flush();
- else
- this.scheduleFlush();
+ return output;
+}
+
+// dist/src/shell/deeplake-fs.js
+var BATCH_SIZE = 10;
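+// Chunk prefetch IN-lists so one large recall becomes several bounded
+// queries instead of a single giant one.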
+var PREFETCH_BATCH_SIZE = 50;
+var FLUSH_DEBOUNCE_MS = 200;
+function normPath(p22) {
+ const r10 = posix.normalize(p22.startsWith("/") ? p22 : "/" + p22);
+ return r10 === "/" ? r10 : r10.replace(/\/$/, "");
+}
+function parentOf(p22) {
+ const i11 = p22.lastIndexOf("/");
+ return i11 <= 0 ? "/" : p22.slice(0, i11);
+}
+function guessMime(filename) {
+ const ext2 = filename.split(".").pop()?.toLowerCase() ?? "";
+ return {
+ json: "application/json",
+ md: "text/markdown",
+ txt: "text/plain",
+ js: "text/javascript",
+ ts: "text/typescript",
+ html: "text/html",
+ css: "text/css"
+ }[ext2] ?? "text/plain";
+}
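+// Session rows are raw JSONL events; render each through grep-core's
+// normalizeContent so cached session files read as compact transcripts.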
+function normalizeSessionMessage(path2, message) {
+ const raw = typeof message === "string" ? message : JSON.stringify(message);
+ return normalizeContent(path2, raw);
+}
+function joinSessionMessages(path2, messages) {
+ return messages.map((message) => normalizeSessionMessage(path2, message)).join("\n");
+}
+function fsErr(code, msg, path2) {
+ return Object.assign(new Error(`${code}: ${msg}, '${path2}'`), { code });
+}
+var DeeplakeFs = class _DeeplakeFs {
+ client;
+ table;
+ mountPoint;
+ // path → Buffer (content) or null (exists but not fetched yet)
+ files = /* @__PURE__ */ new Map();
+ meta = /* @__PURE__ */ new Map();
+ // dir path → Set of immediate child names
+ dirs = /* @__PURE__ */ new Map();
+ // batched writes pending SQL flush
+ pending = /* @__PURE__ */ new Map();
+ // paths that have been flushed (INSERT) at least once — subsequent flushes use UPDATE
+ flushed = /* @__PURE__ */ new Set();
+ /** Number of files loaded from the server during bootstrap. */
+ get fileCount() {
+ return this.files.size;
}
- async writeFile(path2, content, _opts) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.dirs.has(p22) && !this.files.has(p22))
- throw fsErr("EISDIR", "illegal operation on a directory", p22);
- const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- const buf = Buffer.from(text, "utf-8");
- const mime = guessMime(basename4(p22));
- this.files.set(p22, buf);
- this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
- this.addToTree(p22);
- this.pending.set(p22, {
- path: p22,
- filename: basename4(p22),
- contentText: text,
- mimeType: mime,
- sizeBytes: buf.length
- });
- if (this.pending.size >= BATCH_SIZE)
- await this.flush();
- else
- this.scheduleFlush();
+ flushTimer = null;
+ // serialize flushes
+ flushChain = Promise.resolve();
+ // Paths that live in the sessions table (multi-row, read by concatenation)
+ sessionPaths = /* @__PURE__ */ new Set();
+ sessionsTable = null;
+ constructor(client, table, mountPoint) {
+ this.client = client;
+ this.table = table;
+ this.mountPoint = mountPoint;
+ this.dirs.set(mountPoint, /* @__PURE__ */ new Set());
+ if (mountPoint !== "/")
+ this.dirs.set("/", /* @__PURE__ */ new Set([mountPoint.slice(1)]));
}
- async appendFile(path2, content, opts) {
- const p22 = normPath(path2);
- const add = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (this.files.has(p22) || await this.exists(p22).catch(() => false)) {
- const ts3 = (/* @__PURE__ */ new Date()).toISOString();
- await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`);
- this.files.set(p22, null);
- const m26 = this.meta.get(p22);
- if (m26) {
- m26.size += Buffer.byteLength(add, "utf-8");
- m26.mtime = new Date(ts3);
+ static async create(client, table, mount = "/memory", sessionsTable) {
+ const fs3 = new _DeeplakeFs(client, table, mount);
+ fs3.sessionsTable = sessionsTable ?? null;
+ await client.ensureTable();
+ let sessionSyncOk = true;
+ const memoryBootstrap = (async () => {
+ const sql = `SELECT path, size_bytes, mime_type FROM "${table}" ORDER BY path`;
+ try {
+ const rows = await client.query(sql);
+ for (const row of rows) {
+ const p22 = row["path"];
+ fs3.files.set(p22, null);
+ fs3.meta.set(p22, {
+ size: Number(row["size_bytes"] ?? 0),
+ mime: row["mime_type"] ?? "application/octet-stream",
+ mtime: /* @__PURE__ */ new Date()
+ });
+ fs3.addToTree(p22);
+ fs3.flushed.add(p22);
+ }
+ } catch {
}
- } else {
- await this.writeFile(p22, content, opts);
- await this.flush();
- }
+ })();
+ const sessionsBootstrap = sessionsTable && sessionSyncOk ? (async () => {
+ try {
+ const sessionRows = await client.query(`SELECT path, SUM(size_bytes) as total_size FROM "${sessionsTable}" GROUP BY path ORDER BY path`);
+ for (const row of sessionRows) {
+ const p22 = row["path"];
+ if (!fs3.files.has(p22)) {
+ fs3.files.set(p22, null);
+ fs3.meta.set(p22, {
+ size: Number(row["total_size"] ?? 0),
+ mime: "application/x-ndjson",
+ mtime: /* @__PURE__ */ new Date()
+ });
+ fs3.addToTree(p22);
+ }
+ fs3.sessionPaths.add(p22);
+ }
+ } catch {
+ }
+ })() : Promise.resolve();
+ await Promise.all([memoryBootstrap, sessionsBootstrap]);
+ return fs3;
}
- // ── IFileSystem: metadata ─────────────────────────────────────────────────
- async exists(path2) {
- const p22 = normPath(path2);
- if (p22 === "/index.md")
- return true;
- return this.files.has(p22) || this.dirs.has(p22);
+ // ── tree management ───────────────────────────────────────────────────────
+ addToTree(filePath) {
+ const segs = filePath.split("/").filter(Boolean);
+ for (let d15 = 0; d15 < segs.length; d15++) {
+ const dir = d15 === 0 ? "/" : "/" + segs.slice(0, d15).join("/");
+ if (!this.dirs.has(dir))
+ this.dirs.set(dir, /* @__PURE__ */ new Set());
+ this.dirs.get(dir).add(segs[d15]);
+ }
}
- async stat(path2) {
- const p22 = normPath(path2);
- const isFile = this.files.has(p22);
- const isDir = this.dirs.has(p22);
- if (p22 === "/index.md" && !isFile && !isDir) {
- return {
- isFile: true,
- isDirectory: false,
- isSymbolicLink: false,
- mode: 420,
- size: 0,
- mtime: /* @__PURE__ */ new Date()
- };
- }
- if (!isFile && !isDir)
- throw fsErr("ENOENT", "no such file or directory", p22);
- const m26 = this.meta.get(p22);
- return {
- isFile: isFile && !isDir,
- isDirectory: isDir,
- isSymbolicLink: false,
- mode: isDir ? 493 : 420,
- size: m26?.size ?? 0,
- mtime: m26?.mtime ?? /* @__PURE__ */ new Date()
- };
- }
- async lstat(path2) {
- return this.stat(path2);
- }
- async chmod(_path, _mode) {
- }
- async utimes(_path, _atime, _mtime) {
- }
- async symlink(_target, linkPath) {
- throw fsErr("EPERM", "operation not permitted", linkPath);
- }
- async link(_src, destPath) {
- throw fsErr("EPERM", "operation not permitted", destPath);
+ removeFromTree(filePath) {
+ this.files.delete(filePath);
+ this.meta.delete(filePath);
+ this.pending.delete(filePath);
+ this.flushed.delete(filePath);
+ const parent = parentOf(filePath);
+ this.dirs.get(parent)?.delete(basename4(filePath));
}
- async readlink(path2) {
- throw fsErr("EINVAL", "invalid argument", path2);
+ // ── flush / write batching ────────────────────────────────────────────────
+ scheduleFlush() {
+ if (this.flushTimer !== null)
+ return;
+ this.flushTimer = setTimeout(() => {
+ this.flush().catch(() => {
+ });
+ }, FLUSH_DEBOUNCE_MS);
}
- async realpath(path2) {
- const p22 = normPath(path2);
- if (p22 === "/index.md")
- return p22;
- if (!this.files.has(p22) && !this.dirs.has(p22))
- throw fsErr("ENOENT", "no such file or directory", p22);
- return p22;
+ async flush() {
+ this.flushChain = this.flushChain.then(() => this._doFlush());
+ return this.flushChain;
}
- // ── IFileSystem: directories ──────────────────────────────────────────────
- async mkdir(path2, opts) {
- const p22 = normPath(path2);
- if (this.files.has(p22))
- throw fsErr("EEXIST", "file exists", p22);
- if (this.dirs.has(p22)) {
- if (!opts?.recursive)
- throw fsErr("EEXIST", "file exists", p22);
+ async _doFlush() {
+ if (this.pending.size === 0)
return;
+ if (this.flushTimer !== null) {
+ clearTimeout(this.flushTimer);
+ this.flushTimer = null;
}
- if (!opts?.recursive) {
- const parent2 = parentOf(p22);
- if (!this.dirs.has(parent2))
- throw fsErr("ENOENT", "no such file or directory", parent2);
+ const rows = [...this.pending.values()];
+ this.pending.clear();
+ const results = await Promise.allSettled(rows.map((r10) => this.upsertRow(r10)));
+ let failures = 0;
+ for (let i11 = 0; i11 < results.length; i11++) {
+ if (results[i11].status === "rejected") {
+ if (!this.pending.has(rows[i11].path)) {
+ this.pending.set(rows[i11].path, rows[i11]);
+ }
+ failures++;
+ }
}
- this.dirs.set(p22, /* @__PURE__ */ new Set());
- const parent = parentOf(p22);
- if (!this.dirs.has(parent))
- this.dirs.set(parent, /* @__PURE__ */ new Set());
- this.dirs.get(parent).add(basename4(p22));
- }
- async readdir(path2) {
- const p22 = normPath(path2);
- if (!this.dirs.has(p22))
- throw fsErr("ENOTDIR", "not a directory", p22);
- const entries = [...this.dirs.get(p22) ?? []];
- if (p22 === "/" && !entries.includes("index.md")) {
- entries.push("index.md");
+ if (failures > 0) {
+ throw new Error(`flush: ${failures}/${rows.length} writes failed and were re-queued`);
}
- return entries;
}
- async readdirWithFileTypes(path2) {
- const names = await this.readdir(path2);
- const p22 = normPath(path2);
- return names.map((name) => {
- const child = p22 === "/" ? `/${name}` : `${p22}/${name}`;
- return {
- name,
- isFile: (this.files.has(child) || child === "/index.md") && !this.dirs.has(child),
- isDirectory: this.dirs.has(child),
- isSymbolicLink: false
- };
- });
+ async upsertRow(r10) {
+ const text = sqlStr(r10.contentText);
+ const p22 = sqlStr(r10.path);
+ const fname = sqlStr(r10.filename);
+ const mime = sqlStr(r10.mimeType);
+ const ts3 = (/* @__PURE__ */ new Date()).toISOString();
+ const cd = r10.creationDate ?? ts3;
+ const lud = r10.lastUpdateDate ?? ts3;
+ if (this.flushed.has(r10.path)) {
+ let setClauses = `filename = '${fname}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`;
+ if (r10.project !== void 0)
+ setClauses += `, project = '${sqlStr(r10.project)}'`;
+ if (r10.description !== void 0)
+ setClauses += `, description = '${sqlStr(r10.description)}'`;
+ await this.client.query(`UPDATE "${this.table}" SET ${setClauses} WHERE path = '${p22}'`);
+ } else {
+ const id = randomUUID2();
+ const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : "");
+ const vals = `'${id}', '${p22}', '${fname}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? `, '${sqlStr(r10.description)}'` : "");
+ await this.client.query(`INSERT INTO "${this.table}" (${cols}) VALUES (${vals})`);
+ this.flushed.add(r10.path);
+ }
}
- // ── IFileSystem: structural mutations ─────────────────────────────────────
- async rm(path2, opts) {
- const p22 = normPath(path2);
- if (this.sessionPaths.has(p22))
- throw fsErr("EPERM", "session files are read-only", p22);
- if (!this.files.has(p22) && !this.dirs.has(p22)) {
- if (opts?.force)
- return;
- throw fsErr("ENOENT", "no such file or directory", p22);
+ // ── Virtual index.md generation ────────────────────────────────────────────
+ async generateVirtualIndex() {
+ const rows = await this.client.query(`SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" WHERE path LIKE '${sqlStr("/summaries/")}%' ORDER BY last_update_date DESC`);
+ const sessionPathsByKey = /* @__PURE__ */ new Map();
+ for (const sp of this.sessionPaths) {
+ const hivemind = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/);
+ if (hivemind) {
+ sessionPathsByKey.set(hivemind[1], sp.slice(1));
+ } else {
+ const fname = sp.split("/").pop() ?? "";
+ const stem = fname.replace(/\.[^.]+$/, "");
+ if (stem)
+ sessionPathsByKey.set(stem, sp.slice(1));
+ }
}
- if (this.dirs.has(p22)) {
- const children = this.dirs.get(p22) ?? /* @__PURE__ */ new Set();
- if (children.size > 0 && !opts?.recursive)
- throw fsErr("ENOTEMPTY", "directory not empty", p22);
- const toDelete = [];
- const stack = [p22];
- while (stack.length) {
- const cur = stack.pop();
- for (const child of [...this.dirs.get(cur) ?? []]) {
- const childPath = cur === "/" ? `/${child}` : `${cur}/${child}`;
- if (this.files.has(childPath))
- toDelete.push(childPath);
- if (this.dirs.has(childPath))
- stack.push(childPath);
- }
+ const lines = [
+ "# Session Index",
+ "",
+ "List of all Claude Code sessions with summaries.",
+ "",
+ "| Session | Conversation | Created | Last Updated | Project | Description |",
+ "|---------|-------------|---------|--------------|---------|-------------|"
+ ];
+ for (const row of rows) {
+ const p22 = row["path"];
+ const match2 = p22.match(/\/summaries\/([^/]+)\/([^/]+)\.md$/);
+ if (!match2)
+ continue;
+ const summaryUser = match2[1];
+ const sessionId = match2[2];
+ const relPath = `summaries/${summaryUser}/${sessionId}.md`;
+ const baseName = sessionId.replace(/_summary$/, "");
+ const convPath = sessionPathsByKey.get(sessionId) ?? sessionPathsByKey.get(baseName);
+ const convLink = convPath ? `[messages](${convPath})` : "";
+ const project = row["project"] || "";
+ const description = row["description"] || "";
+ const creationDate = row["creation_date"] || "";
+ const lastUpdateDate = row["last_update_date"] || "";
+ lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`);
+ }
+ lines.push("");
+ return lines.join("\n");
+ }
+ // ── batch prefetch ────────────────────────────────────────────────────────
+ /**
+   * Prefetch multiple files into the content cache with chunked SQL queries
+   * (up to PREFETCH_BATCH_SIZE paths per IN list). Skips paths that are
+   * already cached or pending; session-backed paths are fetched from the
+   * sessions table and concatenated in creation-date order.
+   * After this call, subsequent readFile() calls for these paths hit cache.
+ */
+ async prefetch(paths) {
+ const uncached = [];
+ const uncachedSessions = [];
+ for (const raw of paths) {
+ const p22 = normPath(raw);
+ if (this.files.get(p22) !== null && this.files.get(p22) !== void 0)
+ continue;
+ if (this.pending.has(p22))
+ continue;
+ if (!this.files.has(p22))
+ continue;
+ if (this.sessionPaths.has(p22)) {
+ uncachedSessions.push(p22);
+ } else {
+ uncached.push(p22);
}
- const safeToDelete = toDelete.filter((fp) => !this.sessionPaths.has(fp));
- for (const fp of safeToDelete)
- this.removeFromTree(fp);
- this.dirs.delete(p22);
- this.dirs.get(parentOf(p22))?.delete(basename4(p22));
- if (safeToDelete.length > 0) {
- const inList = safeToDelete.map((fp) => `'${sqlStr(fp)}'`).join(", ");
- await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`);
+ }
+ for (let i11 = 0; i11 < uncached.length; i11 += PREFETCH_BATCH_SIZE) {
+ const chunk = uncached.slice(i11, i11 + PREFETCH_BATCH_SIZE);
+ const inList = chunk.map((p22) => `'${sqlStr(p22)}'`).join(", ");
+ const rows = await this.client.query(`SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})`);
+ for (const row of rows) {
+ const p22 = row["path"];
+ const text = row["summary"] ?? "";
+ this.files.set(p22, Buffer.from(text, "utf-8"));
}
- } else {
- await this.client.query(`DELETE FROM "${this.table}" WHERE path = '${sqlStr(p22)}'`);
- this.removeFromTree(p22);
}
- }
- async cp(src, dest, opts) {
- const s10 = normPath(src), d15 = normPath(dest);
- if (this.sessionPaths.has(d15))
- throw fsErr("EPERM", "session files are read-only", d15);
- if (this.dirs.has(s10) && !this.files.has(s10)) {
- if (!opts?.recursive)
- throw fsErr("EISDIR", "is a directory", s10);
- for (const fp of [...this.files.keys()].filter((k17) => k17 === s10 || k17.startsWith(s10 + "/"))) {
- await this.writeFile(d15 + fp.slice(s10.length), await this.readFileBuffer(fp));
+ if (!this.sessionsTable)
+ return;
+ for (let i11 = 0; i11 < uncachedSessions.length; i11 += PREFETCH_BATCH_SIZE) {
+ const chunk = uncachedSessions.slice(i11, i11 + PREFETCH_BATCH_SIZE);
+ const inList = chunk.map((p22) => `'${sqlStr(p22)}'`).join(", ");
+ const rows = await this.client.query(`SELECT path, message, creation_date FROM "${this.sessionsTable}" WHERE path IN (${inList}) ORDER BY path, creation_date ASC`);
+ const grouped = /* @__PURE__ */ new Map();
+ for (const row of rows) {
+ const p22 = row["path"];
+ const current = grouped.get(p22) ?? [];
+ current.push(normalizeSessionMessage(p22, row["message"]));
+ grouped.set(p22, current);
+ }
+ for (const [p22, parts] of grouped) {
+ this.files.set(p22, Buffer.from(parts.join("\n"), "utf-8"));
}
- } else {
- await this.writeFile(d15, await this.readFileBuffer(s10));
}
}
- async mv(src, dest) {
- const s10 = normPath(src), d15 = normPath(dest);
- if (this.sessionPaths.has(s10))
- throw fsErr("EPERM", "session files are read-only", s10);
- if (this.sessionPaths.has(d15))
- throw fsErr("EPERM", "session files are read-only", d15);
- await this.cp(src, dest, { recursive: true });
- await this.rm(src, { recursive: true, force: true });
+ // ── IFileSystem: reads ────────────────────────────────────────────────────
+ async readFileBuffer(path2) {
+ const p22 = normPath(path2);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ if (!this.files.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const cached = this.files.get(p22);
+ if (cached !== null && cached !== void 0)
+ return cached;
+ const pend = this.pending.get(p22);
+ if (pend) {
+ const buf2 = Buffer.from(pend.contentText, "utf-8");
+ this.files.set(p22, buf2);
+ return buf2;
+ }
+ if (this.sessionPaths.has(p22) && this.sessionsTable) {
+ const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
+ if (rows2.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text = joinSessionMessages(p22, rows2.map((row) => row["message"]));
+ const buf2 = Buffer.from(text, "utf-8");
+ this.files.set(p22, buf2);
+ return buf2;
+ }
+ const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
+ if (rows.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const buf = Buffer.from(rows[0]["summary"] ?? "", "utf-8");
+ this.files.set(p22, buf);
+ return buf;
}
- resolvePath(base, path2) {
- if (path2.startsWith("/"))
- return normPath(path2);
- return normPath(posix.join(base, path2));
+ async readFile(path2, _opts) {
+ const p22 = normPath(path2);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ if (p22 === "/index.md" && !this.files.has(p22)) {
+ const realRows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr("/index.md")}' LIMIT 1`);
+ if (realRows.length > 0 && realRows[0]["summary"]) {
+ const text2 = realRows[0]["summary"];
+ const buf2 = Buffer.from(text2, "utf-8");
+ this.files.set(p22, buf2);
+ return text2;
+ }
+ return this.generateVirtualIndex();
+ }
+ if (!this.files.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const cached = this.files.get(p22);
+ if (cached !== null && cached !== void 0)
+ return cached.toString("utf-8");
+ const pend = this.pending.get(p22);
+ if (pend)
+ return pend.contentText;
+ if (this.sessionPaths.has(p22) && this.sessionsTable) {
+ const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`);
+ if (rows2.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text2 = joinSessionMessages(p22, rows2.map((row) => row["message"]));
+ const buf2 = Buffer.from(text2, "utf-8");
+ this.files.set(p22, buf2);
+ return text2;
+ }
+ const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`);
+ if (rows.length === 0)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const text = rows[0]["summary"] ?? "";
+ const buf = Buffer.from(text, "utf-8");
+ this.files.set(p22, buf);
+ return text;
}
- getAllPaths() {
- return [.../* @__PURE__ */ new Set([...this.files.keys(), ...this.dirs.keys()])];
+ // ── IFileSystem: writes ───────────────────────────────────────────────────
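+  // Writes land in the in-memory cache immediately and are queued in
+  // `pending`; the queue flushes once BATCH_SIZE rows accumulate, otherwise
+  // a flush is scheduled.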
+ /** Write a file with optional row-level metadata (project, description, dates). */
+ async writeFileWithMeta(path2, content, meta) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ const buf = Buffer.from(text, "utf-8");
+ const mime = guessMime(basename4(p22));
+ this.files.set(p22, buf);
+ this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
+ this.addToTree(p22);
+ this.pending.set(p22, {
+ path: p22,
+ filename: basename4(p22),
+ contentText: text,
+ mimeType: mime,
+ sizeBytes: buf.length,
+ ...meta
+ });
+ if (this.pending.size >= BATCH_SIZE)
+ await this.flush();
+ else
+ this.scheduleFlush();
}
-};
-
-// node_modules/yargs-parser/build/lib/index.js
-import { format } from "util";
-import { normalize, resolve as resolve4 } from "path";
-
-// node_modules/yargs-parser/build/lib/string-utils.js
-function camelCase(str) {
- const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase();
- if (!isCamelCase) {
- str = str.toLowerCase();
+ async writeFile(path2, content, _opts) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.dirs.has(p22) && !this.files.has(p22))
+ throw fsErr("EISDIR", "illegal operation on a directory", p22);
+ const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ const buf = Buffer.from(text, "utf-8");
+ const mime = guessMime(basename4(p22));
+ this.files.set(p22, buf);
+ this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() });
+ this.addToTree(p22);
+ this.pending.set(p22, {
+ path: p22,
+ filename: basename4(p22),
+ contentText: text,
+ mimeType: mime,
+ sizeBytes: buf.length
+ });
+ if (this.pending.size >= BATCH_SIZE)
+ await this.flush();
+ else
+ this.scheduleFlush();
}
- if (str.indexOf("-") === -1 && str.indexOf("_") === -1) {
- return str;
- } else {
- let camelcase = "";
- let nextChrUpper = false;
- const leadingHyphens = str.match(/^-+/);
- for (let i11 = leadingHyphens ? leadingHyphens[0].length : 0; i11 < str.length; i11++) {
- let chr = str.charAt(i11);
- if (nextChrUpper) {
- nextChrUpper = false;
- chr = chr.toUpperCase();
- }
- if (i11 !== 0 && (chr === "-" || chr === "_")) {
- nextChrUpper = true;
- } else if (chr !== "-" && chr !== "_") {
- camelcase += chr;
+ async appendFile(path2, content, opts) {
+ const p22 = normPath(path2);
+ const add = typeof content === "string" ? content : Buffer.from(content).toString("utf-8");
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (this.files.has(p22) || await this.exists(p22).catch(() => false)) {
+ const ts3 = (/* @__PURE__ */ new Date()).toISOString();
+ await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`);
+ this.files.set(p22, null);
+ const m26 = this.meta.get(p22);
+ if (m26) {
+ m26.size += Buffer.byteLength(add, "utf-8");
+ m26.mtime = new Date(ts3);
}
- }
- return camelcase;
- }
-}
-function decamelize(str, joinString) {
- const lowercase = str.toLowerCase();
- joinString = joinString || "-";
- let notCamelcase = "";
- for (let i11 = 0; i11 < str.length; i11++) {
- const chrLower = lowercase.charAt(i11);
- const chrString = str.charAt(i11);
- if (chrLower !== chrString && i11 > 0) {
- notCamelcase += `${joinString}${lowercase.charAt(i11)}`;
} else {
- notCamelcase += chrString;
+ await this.writeFile(p22, content, opts);
+ await this.flush();
}
}
- return notCamelcase;
-}
-function looksLikeNumber(x28) {
- if (x28 === null || x28 === void 0)
- return false;
- if (typeof x28 === "number")
- return true;
- if (/^0x[0-9a-f]+$/i.test(x28))
- return true;
- if (/^0[^.]/.test(x28))
- return false;
- return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x28);
-}
-
-// node_modules/yargs-parser/build/lib/tokenize-arg-string.js
-function tokenizeArgString(argString) {
- if (Array.isArray(argString)) {
- return argString.map((e6) => typeof e6 !== "string" ? e6 + "" : e6);
+ // ── IFileSystem: metadata ─────────────────────────────────────────────────
+ async exists(path2) {
+ const p22 = normPath(path2);
+ if (p22 === "/index.md")
+ return true;
+ return this.files.has(p22) || this.dirs.has(p22);
}
- argString = argString.trim();
- let i11 = 0;
- let prevC = null;
- let c15 = null;
- let opening = null;
- const args = [];
- for (let ii2 = 0; ii2 < argString.length; ii2++) {
- prevC = c15;
- c15 = argString.charAt(ii2);
- if (c15 === " " && !opening) {
- if (!(prevC === " ")) {
- i11++;
- }
- continue;
- }
- if (c15 === opening) {
- opening = null;
- } else if ((c15 === "'" || c15 === '"') && !opening) {
- opening = c15;
+ async stat(path2) {
+ const p22 = normPath(path2);
+ const isFile = this.files.has(p22);
+ const isDir = this.dirs.has(p22);
+ if (p22 === "/index.md" && !isFile && !isDir) {
+ return {
+ isFile: true,
+ isDirectory: false,
+ isSymbolicLink: false,
+ mode: 420,
+ size: 0,
+ mtime: /* @__PURE__ */ new Date()
+ };
}
- if (!args[i11])
- args[i11] = "";
- args[i11] += c15;
+ if (!isFile && !isDir)
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ const m26 = this.meta.get(p22);
+ return {
+ isFile: isFile && !isDir,
+ isDirectory: isDir,
+ isSymbolicLink: false,
+ mode: isDir ? 493 : 420,
+ size: m26?.size ?? 0,
+ mtime: m26?.mtime ?? /* @__PURE__ */ new Date()
+ };
}
- return args;
-}
-
-// node_modules/yargs-parser/build/lib/yargs-parser-types.js
-var DefaultValuesForTypeKey;
-(function(DefaultValuesForTypeKey2) {
- DefaultValuesForTypeKey2["BOOLEAN"] = "boolean";
+ async lstat(path2) {
+ return this.stat(path2);
+ }
+ async chmod(_path, _mode) {
+ }
+ async utimes(_path, _atime, _mtime) {
+ }
+ async symlink(_target, linkPath) {
+ throw fsErr("EPERM", "operation not permitted", linkPath);
+ }
+ async link(_src, destPath) {
+ throw fsErr("EPERM", "operation not permitted", destPath);
+ }
+ async readlink(path2) {
+ throw fsErr("EINVAL", "invalid argument", path2);
+ }
+ async realpath(path2) {
+ const p22 = normPath(path2);
+ if (p22 === "/index.md")
+ return p22;
+ if (!this.files.has(p22) && !this.dirs.has(p22))
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ return p22;
+ }
+ // ── IFileSystem: directories ──────────────────────────────────────────────
+ async mkdir(path2, opts) {
+ const p22 = normPath(path2);
+ if (this.files.has(p22))
+ throw fsErr("EEXIST", "file exists", p22);
+ if (this.dirs.has(p22)) {
+ if (!opts?.recursive)
+ throw fsErr("EEXIST", "file exists", p22);
+ return;
+ }
+ if (!opts?.recursive) {
+ const parent2 = parentOf(p22);
+ if (!this.dirs.has(parent2))
+ throw fsErr("ENOENT", "no such file or directory", parent2);
+ }
+ this.dirs.set(p22, /* @__PURE__ */ new Set());
+ const parent = parentOf(p22);
+ if (!this.dirs.has(parent))
+ this.dirs.set(parent, /* @__PURE__ */ new Set());
+ this.dirs.get(parent).add(basename4(p22));
+ }
+ async readdir(path2) {
+ const p22 = normPath(path2);
+ if (!this.dirs.has(p22))
+ throw fsErr("ENOTDIR", "not a directory", p22);
+ const entries = [...this.dirs.get(p22) ?? []];
+ if (p22 === "/" && !entries.includes("index.md")) {
+ entries.push("index.md");
+ }
+ return entries;
+ }
+ async readdirWithFileTypes(path2) {
+ const names = await this.readdir(path2);
+ const p22 = normPath(path2);
+ return names.map((name) => {
+ const child = p22 === "/" ? `/${name}` : `${p22}/${name}`;
+ return {
+ name,
+ isFile: (this.files.has(child) || child === "/index.md") && !this.dirs.has(child),
+ isDirectory: this.dirs.has(child),
+ isSymbolicLink: false
+ };
+ });
+ }
+ // ── IFileSystem: structural mutations ─────────────────────────────────────
+ async rm(path2, opts) {
+ const p22 = normPath(path2);
+ if (this.sessionPaths.has(p22))
+ throw fsErr("EPERM", "session files are read-only", p22);
+ if (!this.files.has(p22) && !this.dirs.has(p22)) {
+ if (opts?.force)
+ return;
+ throw fsErr("ENOENT", "no such file or directory", p22);
+ }
+ if (this.dirs.has(p22)) {
+ const children = this.dirs.get(p22) ?? /* @__PURE__ */ new Set();
+ if (children.size > 0 && !opts?.recursive)
+ throw fsErr("ENOTEMPTY", "directory not empty", p22);
+ const toDelete = [];
+ const stack = [p22];
+ while (stack.length) {
+ const cur = stack.pop();
+ for (const child of [...this.dirs.get(cur) ?? []]) {
+ const childPath = cur === "/" ? `/${child}` : `${cur}/${child}`;
+ if (this.files.has(childPath))
+ toDelete.push(childPath);
+ if (this.dirs.has(childPath))
+ stack.push(childPath);
+ }
+ }
+ const safeToDelete = toDelete.filter((fp) => !this.sessionPaths.has(fp));
+ for (const fp of safeToDelete)
+ this.removeFromTree(fp);
+ this.dirs.delete(p22);
+ this.dirs.get(parentOf(p22))?.delete(basename4(p22));
+ if (safeToDelete.length > 0) {
+ const inList = safeToDelete.map((fp) => `'${sqlStr(fp)}'`).join(", ");
+ await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`);
+ }
+ } else {
+ await this.client.query(`DELETE FROM "${this.table}" WHERE path = '${sqlStr(p22)}'`);
+ this.removeFromTree(p22);
+ }
+ }
+ async cp(src, dest, opts) {
+ const s10 = normPath(src), d15 = normPath(dest);
+ if (this.sessionPaths.has(d15))
+ throw fsErr("EPERM", "session files are read-only", d15);
+ if (this.dirs.has(s10) && !this.files.has(s10)) {
+ if (!opts?.recursive)
+ throw fsErr("EISDIR", "is a directory", s10);
+ for (const fp of [...this.files.keys()].filter((k17) => k17 === s10 || k17.startsWith(s10 + "/"))) {
+ await this.writeFile(d15 + fp.slice(s10.length), await this.readFileBuffer(fp));
+ }
+ } else {
+ await this.writeFile(d15, await this.readFileBuffer(s10));
+ }
+ }
+ async mv(src, dest) {
+ const s10 = normPath(src), d15 = normPath(dest);
+ if (this.sessionPaths.has(s10))
+ throw fsErr("EPERM", "session files are read-only", s10);
+ if (this.sessionPaths.has(d15))
+ throw fsErr("EPERM", "session files are read-only", d15);
+ await this.cp(src, dest, { recursive: true });
+ await this.rm(src, { recursive: true, force: true });
+ }
+ resolvePath(base, path2) {
+ if (path2.startsWith("/"))
+ return normPath(path2);
+ return normPath(posix.join(base, path2));
+ }
+ getAllPaths() {
+ return [.../* @__PURE__ */ new Set([...this.files.keys(), ...this.dirs.keys()])];
+ }
+};
+
+// node_modules/yargs-parser/build/lib/index.js
+import { format } from "util";
+import { normalize, resolve as resolve4 } from "path";
+
+// node_modules/yargs-parser/build/lib/string-utils.js
+function camelCase2(str) {
+ const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase();
+ if (!isCamelCase) {
+ str = str.toLowerCase();
+ }
+ if (str.indexOf("-") === -1 && str.indexOf("_") === -1) {
+ return str;
+ } else {
+ let camelcase = "";
+ let nextChrUpper = false;
+ const leadingHyphens = str.match(/^-+/);
+ for (let i11 = leadingHyphens ? leadingHyphens[0].length : 0; i11 < str.length; i11++) {
+ let chr = str.charAt(i11);
+ if (nextChrUpper) {
+ nextChrUpper = false;
+ chr = chr.toUpperCase();
+ }
+ if (i11 !== 0 && (chr === "-" || chr === "_")) {
+ nextChrUpper = true;
+ } else if (chr !== "-" && chr !== "_") {
+ camelcase += chr;
+ }
+ }
+ return camelcase;
+ }
+}
+function decamelize(str, joinString) {
+ const lowercase = str.toLowerCase();
+ joinString = joinString || "-";
+ let notCamelcase = "";
+ for (let i11 = 0; i11 < str.length; i11++) {
+ const chrLower = lowercase.charAt(i11);
+ const chrString = str.charAt(i11);
+ if (chrLower !== chrString && i11 > 0) {
+ notCamelcase += `${joinString}${lowercase.charAt(i11)}`;
+ } else {
+ notCamelcase += chrString;
+ }
+ }
+ return notCamelcase;
+}
+function looksLikeNumber(x28) {
+ if (x28 === null || x28 === void 0)
+ return false;
+ if (typeof x28 === "number")
+ return true;
+ if (/^0x[0-9a-f]+$/i.test(x28))
+ return true;
+ if (/^0[^.]/.test(x28))
+ return false;
+ return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x28);
+}
+
+// node_modules/yargs-parser/build/lib/tokenize-arg-string.js
+function tokenizeArgString(argString) {
+ if (Array.isArray(argString)) {
+ return argString.map((e6) => typeof e6 !== "string" ? e6 + "" : e6);
+ }
+ argString = argString.trim();
+ let i11 = 0;
+ let prevC = null;
+ let c15 = null;
+ let opening = null;
+ const args = [];
+ for (let ii2 = 0; ii2 < argString.length; ii2++) {
+ prevC = c15;
+ c15 = argString.charAt(ii2);
+ if (c15 === " " && !opening) {
+ if (!(prevC === " ")) {
+ i11++;
+ }
+ continue;
+ }
+ if (c15 === opening) {
+ opening = null;
+ } else if ((c15 === "'" || c15 === '"') && !opening) {
+ opening = c15;
+ }
+ if (!args[i11])
+ args[i11] = "";
+ args[i11] += c15;
+ }
+ return args;
+}
+
+// node_modules/yargs-parser/build/lib/yargs-parser-types.js
+var DefaultValuesForTypeKey;
+(function(DefaultValuesForTypeKey2) {
+ DefaultValuesForTypeKey2["BOOLEAN"] = "boolean";
DefaultValuesForTypeKey2["STRING"] = "string";
DefaultValuesForTypeKey2["NUMBER"] = "number";
DefaultValuesForTypeKey2["ARRAY"] = "array";
@@ -67983,7 +68490,7 @@ var YargsParser = class {
;
[].concat(...Object.keys(aliases).map((k17) => aliases[k17])).forEach((alias) => {
if (configuration["camel-case-expansion"] && alias.includes("-")) {
- delete argv[alias.split(".").map((prop) => camelCase(prop)).join(".")];
+ delete argv[alias.split(".").map((prop) => camelCase2(prop)).join(".")];
}
delete argv[alias];
});
@@ -68065,7 +68572,7 @@ var YargsParser = class {
function setArg(key, val, shouldStripQuotes = inputIsString) {
if (/-/.test(key) && configuration["camel-case-expansion"]) {
const alias = key.split(".").map(function(prop) {
- return camelCase(prop);
+ return camelCase2(prop);
}).join(".");
addNewAlias(key, alias);
}
@@ -68213,7 +68720,7 @@ var YargsParser = class {
if (i11 === 0) {
key = key.substring(prefix.length);
}
- return camelCase(key);
+ return camelCase2(key);
});
if ((configOnly && flags.configs[keys.join(".")] || !configOnly) && !hasKey(argv2, keys)) {
setArg(keys.join("."), env2[envVar]);
@@ -68333,7 +68840,7 @@ var YargsParser = class {
flags.aliases[key] = [].concat(aliases[key] || []);
flags.aliases[key].concat(key).forEach(function(x28) {
if (/-/.test(x28) && configuration["camel-case-expansion"]) {
- const c15 = camelCase(x28);
+ const c15 = camelCase2(x28);
if (c15 !== key && flags.aliases[key].indexOf(c15) === -1) {
flags.aliases[key].push(c15);
newAliases[c15] = true;
@@ -68394,445 +68901,170 @@ var YargsParser = class {
break;
}
}
- return hasAllFlags;
- }
- function isUnknownOptionAsArg(arg) {
- return configuration["unknown-options-as-args"] && isUnknownOption(arg);
- }
- function isUnknownOption(arg) {
- arg = arg.replace(/^-{3,}/, "--");
- if (arg.match(negative)) {
- return false;
- }
- if (hasAllShortFlags(arg)) {
- return false;
- }
- const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/;
- const normalFlag = /^-+([^=]+?)$/;
- const flagEndingInHyphen = /^-+([^=]+?)-$/;
- const flagEndingInDigits = /^-+([^=]+?\d+)$/;
- const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/;
- return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters);
- }
- function defaultValue(key) {
- if (!checkAllAliases(key, flags.bools) && !checkAllAliases(key, flags.counts) && `${key}` in defaults2) {
- return defaults2[key];
- } else {
- return defaultForType(guessType(key));
- }
- }
- function defaultForType(type) {
- const def = {
- [DefaultValuesForTypeKey.BOOLEAN]: true,
- [DefaultValuesForTypeKey.STRING]: "",
- [DefaultValuesForTypeKey.NUMBER]: void 0,
- [DefaultValuesForTypeKey.ARRAY]: []
- };
- return def[type];
- }
- function guessType(key) {
- let type = DefaultValuesForTypeKey.BOOLEAN;
- if (checkAllAliases(key, flags.strings))
- type = DefaultValuesForTypeKey.STRING;
- else if (checkAllAliases(key, flags.numbers))
- type = DefaultValuesForTypeKey.NUMBER;
- else if (checkAllAliases(key, flags.bools))
- type = DefaultValuesForTypeKey.BOOLEAN;
- else if (checkAllAliases(key, flags.arrays))
- type = DefaultValuesForTypeKey.ARRAY;
- return type;
- }
- function isUndefined(num) {
- return num === void 0;
- }
- function checkConfiguration() {
- Object.keys(flags.counts).find((key) => {
- if (checkAllAliases(key, flags.arrays)) {
- error = Error(__("Invalid configuration: %s, opts.count excludes opts.array.", key));
- return true;
- } else if (checkAllAliases(key, flags.nargs)) {
- error = Error(__("Invalid configuration: %s, opts.count excludes opts.narg.", key));
- return true;
- }
- return false;
- });
- }
- return {
- aliases: Object.assign({}, flags.aliases),
- argv: Object.assign(argvReturn, argv),
- configuration,
- defaulted: Object.assign({}, defaulted),
- error,
- newAliases: Object.assign({}, newAliases)
- };
- }
-};
-function combineAliases(aliases) {
- const aliasArrays = [];
- const combined = /* @__PURE__ */ Object.create(null);
- let change = true;
- Object.keys(aliases).forEach(function(key) {
- aliasArrays.push([].concat(aliases[key], key));
- });
- while (change) {
- change = false;
- for (let i11 = 0; i11 < aliasArrays.length; i11++) {
- for (let ii2 = i11 + 1; ii2 < aliasArrays.length; ii2++) {
- const intersect = aliasArrays[i11].filter(function(v27) {
- return aliasArrays[ii2].indexOf(v27) !== -1;
- });
- if (intersect.length) {
- aliasArrays[i11] = aliasArrays[i11].concat(aliasArrays[ii2]);
- aliasArrays.splice(ii2, 1);
- change = true;
- break;
- }
- }
- }
- }
- aliasArrays.forEach(function(aliasArray) {
- aliasArray = aliasArray.filter(function(v27, i11, self2) {
- return self2.indexOf(v27) === i11;
- });
- const lastAlias = aliasArray.pop();
- if (lastAlias !== void 0 && typeof lastAlias === "string") {
- combined[lastAlias] = aliasArray;
- }
- });
- return combined;
-}
-function increment(orig) {
- return orig !== void 0 ? orig + 1 : 1;
-}
-function sanitizeKey(key) {
- if (key === "__proto__")
- return "___proto___";
- return key;
-}
-function stripQuotes(val) {
- return typeof val === "string" && (val[0] === "'" || val[0] === '"') && val[val.length - 1] === val[0] ? val.substring(1, val.length - 1) : val;
-}
-
-// node_modules/yargs-parser/build/lib/index.js
-import { readFileSync as readFileSync2 } from "fs";
-import { createRequire } from "node:module";
-var _a3;
-var _b;
-var _c;
-var minNodeVersion = process && process.env && process.env.YARGS_MIN_NODE_VERSION ? Number(process.env.YARGS_MIN_NODE_VERSION) : 20;
-var nodeVersion = (_b = (_a3 = process === null || process === void 0 ? void 0 : process.versions) === null || _a3 === void 0 ? void 0 : _a3.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1);
-if (nodeVersion) {
- const major = Number(nodeVersion.match(/^([^.]+)/)[1]);
- if (major < minNodeVersion) {
- throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`);
- }
-}
-var env = process ? process.env : {};
-var require2 = createRequire ? createRequire(import.meta.url) : void 0;
-var parser = new YargsParser({
- cwd: process.cwd,
- env: () => {
- return env;
- },
- format,
- normalize,
- resolve: resolve4,
- require: (path2) => {
- if (typeof require2 !== "undefined") {
- return require2(path2);
- } else if (path2.match(/\.json$/)) {
- return JSON.parse(readFileSync2(path2, "utf8"));
- } else {
- throw Error("only .json config files are supported in ESM");
- }
- }
-});
-var yargsParser = function Parser(args, opts) {
- const result = parser.parse(args.slice(), opts);
- return result.argv;
-};
-yargsParser.detailed = function(args, opts) {
- return parser.parse(args.slice(), opts);
-};
-yargsParser.camelCase = camelCase;
-yargsParser.decamelize = decamelize;
-yargsParser.looksLikeNumber = looksLikeNumber;
-var lib_default = yargsParser;
-
-// dist/src/shell/grep-core.js
-var TOOL_INPUT_FIELDS = [
- "command",
- "file_path",
- "path",
- "pattern",
- "prompt",
- "subagent_type",
- "query",
- "url",
- "notebook_path",
- "old_string",
- "new_string",
- "content",
- "skill",
- "args",
- "taskId",
- "status",
- "subject",
- "description",
- "to",
- "message",
- "summary",
- "max_results"
-];
-var TOOL_RESPONSE_DROP = /* @__PURE__ */ new Set([
- // Note: `stderr` is intentionally NOT in this set. The `stdout` high-signal
- // branch below already de-dupes it for the common case (appends as suffix
- // when non-empty). If a tool response has ONLY `stderr` and no `stdout`
- // (hard-failure on some tools), the generic cleanup preserves it so the
- // error message reaches Claude instead of collapsing to `[ok]`.
- "interrupted",
- "isImage",
- "noOutputExpected",
- "type",
- "structuredPatch",
- "userModified",
- "originalFile",
- "replaceAll",
- "totalDurationMs",
- "totalTokens",
- "totalToolUseCount",
- "usage",
- "toolStats",
- "durationMs",
- "durationSeconds",
- "bytes",
- "code",
- "codeText",
- "agentId",
- "agentType",
- "verificationNudgeNeeded",
- "numLines",
- "numFiles",
- "truncated",
- "statusChange",
- "updatedFields",
- "isAgent",
- "success"
-]);
-function maybeParseJson(v27) {
- if (typeof v27 !== "string")
- return v27;
- const s10 = v27.trim();
- if (s10[0] !== "{" && s10[0] !== "[")
- return v27;
- try {
- return JSON.parse(s10);
- } catch {
- return v27;
- }
-}
-function snakeCase(k17) {
- return k17.replace(/([A-Z])/g, "_$1").toLowerCase();
-}
-function camelCase2(k17) {
- return k17.replace(/_([a-z])/g, (_16, c15) => c15.toUpperCase());
-}
-function formatToolInput(raw) {
- const p22 = maybeParseJson(raw);
- if (typeof p22 !== "object" || p22 === null)
- return String(p22 ?? "");
- const parts = [];
- for (const k17 of TOOL_INPUT_FIELDS) {
- if (p22[k17] === void 0)
- continue;
- const v27 = p22[k17];
- parts.push(`${k17}: ${typeof v27 === "string" ? v27 : JSON.stringify(v27)}`);
- }
- for (const k17 of ["glob", "output_mode", "limit", "offset"]) {
- if (p22[k17] !== void 0)
- parts.push(`${k17}: ${p22[k17]}`);
- }
- return parts.length ? parts.join("\n") : JSON.stringify(p22);
-}
-function formatToolResponse(raw, inp, toolName) {
- const r10 = maybeParseJson(raw);
- if (typeof r10 !== "object" || r10 === null)
- return String(r10 ?? "");
- if (toolName === "Edit" || toolName === "Write" || toolName === "MultiEdit") {
- return r10.filePath ? `[wrote ${r10.filePath}]` : "[ok]";
- }
- if (typeof r10.stdout === "string") {
- const stderr = r10.stderr;
- return r10.stdout + (stderr ? `
-stderr: ${stderr}` : "");
- }
- if (typeof r10.content === "string")
- return r10.content;
- if (r10.file && typeof r10.file === "object") {
- const f11 = r10.file;
- if (typeof f11.content === "string")
- return `[${f11.filePath ?? ""}]
-${f11.content}`;
- if (typeof f11.base64 === "string")
- return `[binary ${f11.filePath ?? ""}: ${f11.base64.length} base64 chars]`;
- }
- if (Array.isArray(r10.filenames))
- return r10.filenames.join("\n");
- if (Array.isArray(r10.matches)) {
- return r10.matches.map((m26) => typeof m26 === "string" ? m26 : JSON.stringify(m26)).join("\n");
- }
- if (Array.isArray(r10.results)) {
- return r10.results.map((x28) => typeof x28 === "string" ? x28 : x28?.title ?? x28?.url ?? JSON.stringify(x28)).join("\n");
- }
- const inpObj = maybeParseJson(inp);
- const kept = {};
- for (const [k17, v27] of Object.entries(r10)) {
- if (TOOL_RESPONSE_DROP.has(k17))
- continue;
- if (v27 === "" || v27 === false || v27 == null)
- continue;
- if (typeof inpObj === "object" && inpObj) {
- const inObj = inpObj;
- if (k17 in inObj && JSON.stringify(inObj[k17]) === JSON.stringify(v27))
- continue;
- const snake = snakeCase(k17);
- if (snake in inObj && JSON.stringify(inObj[snake]) === JSON.stringify(v27))
- continue;
- const camel = camelCase2(k17);
- if (camel in inObj && JSON.stringify(inObj[camel]) === JSON.stringify(v27))
- continue;
+ return hasAllFlags;
}
- kept[k17] = v27;
- }
- return Object.keys(kept).length ? JSON.stringify(kept) : "[ok]";
-}
-function formatToolCall(obj) {
- return `[tool:${obj?.tool_name ?? "?"}]
-input: ${formatToolInput(obj?.tool_input)}
-response: ${formatToolResponse(obj?.tool_response, obj?.tool_input, obj?.tool_name)}`;
-}
-function normalizeContent(path2, raw) {
- if (!path2.includes("/sessions/"))
- return raw;
- if (!raw || raw[0] !== "{")
- return raw;
- let obj;
- try {
- obj = JSON.parse(raw);
- } catch {
- return raw;
- }
- if (Array.isArray(obj.turns)) {
- const header = [];
- if (obj.date_time)
- header.push(`date: ${obj.date_time}`);
- if (obj.speakers) {
- const s10 = obj.speakers;
- const names = [s10.speaker_a, s10.speaker_b].filter(Boolean).join(", ");
- if (names)
- header.push(`speakers: ${names}`);
+ function isUnknownOptionAsArg(arg) {
+ return configuration["unknown-options-as-args"] && isUnknownOption(arg);
}
- const lines = obj.turns.map((t6) => {
- const sp = String(t6?.speaker ?? t6?.name ?? "?").trim();
- const tx = String(t6?.text ?? t6?.content ?? "").replace(/\s+/g, " ").trim();
- const tag = t6?.dia_id ? `[${t6.dia_id}] ` : "";
- return `${tag}${sp}: ${tx}`;
- });
- const out2 = [...header, ...lines].join("\n");
- return out2.trim() ? out2 : raw;
+ function isUnknownOption(arg) {
+ arg = arg.replace(/^-{3,}/, "--");
+ if (arg.match(negative)) {
+ return false;
+ }
+ if (hasAllShortFlags(arg)) {
+ return false;
+ }
+ const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/;
+ const normalFlag = /^-+([^=]+?)$/;
+ const flagEndingInHyphen = /^-+([^=]+?)-$/;
+ const flagEndingInDigits = /^-+([^=]+?\d+)$/;
+ const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/;
+ return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters);
+ }
+ function defaultValue(key) {
+ if (!checkAllAliases(key, flags.bools) && !checkAllAliases(key, flags.counts) && `${key}` in defaults2) {
+ return defaults2[key];
+ } else {
+ return defaultForType(guessType(key));
+ }
+ }
+ function defaultForType(type) {
+ const def = {
+ [DefaultValuesForTypeKey.BOOLEAN]: true,
+ [DefaultValuesForTypeKey.STRING]: "",
+ [DefaultValuesForTypeKey.NUMBER]: void 0,
+ [DefaultValuesForTypeKey.ARRAY]: []
+ };
+ return def[type];
+ }
+ function guessType(key) {
+ let type = DefaultValuesForTypeKey.BOOLEAN;
+ if (checkAllAliases(key, flags.strings))
+ type = DefaultValuesForTypeKey.STRING;
+ else if (checkAllAliases(key, flags.numbers))
+ type = DefaultValuesForTypeKey.NUMBER;
+ else if (checkAllAliases(key, flags.bools))
+ type = DefaultValuesForTypeKey.BOOLEAN;
+ else if (checkAllAliases(key, flags.arrays))
+ type = DefaultValuesForTypeKey.ARRAY;
+ return type;
+ }
+ function isUndefined(num) {
+ return num === void 0;
+ }
+ function checkConfiguration() {
+ Object.keys(flags.counts).find((key) => {
+ if (checkAllAliases(key, flags.arrays)) {
+ error = Error(__("Invalid configuration: %s, opts.count excludes opts.array.", key));
+ return true;
+ } else if (checkAllAliases(key, flags.nargs)) {
+ error = Error(__("Invalid configuration: %s, opts.count excludes opts.narg.", key));
+ return true;
+ }
+ return false;
+ });
+ }
+ return {
+ aliases: Object.assign({}, flags.aliases),
+ argv: Object.assign(argvReturn, argv),
+ configuration,
+ defaulted: Object.assign({}, defaulted),
+ error,
+ newAliases: Object.assign({}, newAliases)
+ };
}
- const stripRecalled = (t6) => {
-    const i11 = t6.indexOf("<recalled-memories>");
-    if (i11 === -1)
-      return t6;
-    const j14 = t6.lastIndexOf("</recalled-memories>");
-    if (j14 === -1 || j14 < i11)
-      return t6;
-    const head = t6.slice(0, i11);
-    const tail = t6.slice(j14 + "</recalled-memories>".length);
- return (head + tail).replace(/^\s+/, "").replace(/\n{3,}/g, "\n\n");
- };
- let out = null;
- if (obj.type === "user_message") {
- out = `[user] ${stripRecalled(String(obj.content ?? ""))}`;
- } else if (obj.type === "assistant_message") {
- const agent = obj.agent_type ? ` (agent=${obj.agent_type})` : "";
- out = `[assistant${agent}] ${stripRecalled(String(obj.content ?? ""))}`;
- } else if (obj.type === "tool_call") {
- out = formatToolCall(obj);
+};
+function combineAliases(aliases) {
+ const aliasArrays = [];
+ const combined = /* @__PURE__ */ Object.create(null);
+ let change = true;
+ Object.keys(aliases).forEach(function(key) {
+ aliasArrays.push([].concat(aliases[key], key));
+ });
+ while (change) {
+ change = false;
+ for (let i11 = 0; i11 < aliasArrays.length; i11++) {
+ for (let ii2 = i11 + 1; ii2 < aliasArrays.length; ii2++) {
+ const intersect = aliasArrays[i11].filter(function(v27) {
+ return aliasArrays[ii2].indexOf(v27) !== -1;
+ });
+ if (intersect.length) {
+ aliasArrays[i11] = aliasArrays[i11].concat(aliasArrays[ii2]);
+ aliasArrays.splice(ii2, 1);
+ change = true;
+ break;
+ }
+ }
+ }
}
- if (out === null)
- return raw;
- const trimmed = out.trim();
- if (!trimmed || trimmed === "[user]" || trimmed === "[assistant]" || /^\[tool:[^\]]*\]\s+input:\s+\{\}\s+response:\s+\{\}$/.test(trimmed))
- return raw;
- return out;
+ aliasArrays.forEach(function(aliasArray) {
+ aliasArray = aliasArray.filter(function(v27, i11, self2) {
+ return self2.indexOf(v27) === i11;
+ });
+ const lastAlias = aliasArray.pop();
+ if (lastAlias !== void 0 && typeof lastAlias === "string") {
+ combined[lastAlias] = aliasArray;
+ }
+ });
+ return combined;
}
-async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) {
- const { pathFilter, contentScanOnly, likeOp, escapedPattern } = opts;
- const limit = opts.limit ?? 100;
- const memFilter = contentScanOnly ? "" : ` AND summary::text ${likeOp} '%${escapedPattern}%'`;
- const sessFilter = contentScanOnly ? "" : ` AND message::text ${likeOp} '%${escapedPattern}%'`;
- const memQuery = `SELECT path, summary::text AS content FROM "${memoryTable}" WHERE 1=1${pathFilter}${memFilter} LIMIT ${limit}`;
- const sessQuery = `SELECT path, message::text AS content FROM "${sessionsTable}" WHERE 1=1${pathFilter}${sessFilter} LIMIT ${limit}`;
- const [memRows, sessRows] = await Promise.all([
- api.query(memQuery).catch(() => []),
- api.query(sessQuery).catch(() => [])
- ]);
- const rows = [];
- for (const r10 of memRows)
- rows.push({ path: String(r10.path), content: String(r10.content ?? "") });
- for (const r10 of sessRows)
- rows.push({ path: String(r10.path), content: String(r10.content ?? "") });
- return rows;
+function increment(orig) {
+ return orig !== void 0 ? orig + 1 : 1;
}
-function buildPathFilter(targetPath) {
- if (!targetPath || targetPath === "/")
- return "";
- const clean = targetPath.replace(/\/+$/, "");
- return ` AND (path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`;
+function sanitizeKey(key) {
+ if (key === "__proto__")
+ return "___proto___";
+ return key;
}
-function compileGrepRegex(params) {
- let reStr = params.fixedString ? params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") : params.pattern;
- if (params.wordMatch)
- reStr = `\\b${reStr}\\b`;
- try {
- return new RegExp(reStr, params.ignoreCase ? "i" : "");
- } catch {
- return new RegExp(params.pattern.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), params.ignoreCase ? "i" : "");
+function stripQuotes(val) {
+ return typeof val === "string" && (val[0] === "'" || val[0] === '"') && val[val.length - 1] === val[0] ? val.substring(1, val.length - 1) : val;
+}
+
+// node_modules/yargs-parser/build/lib/index.js
+import { readFileSync as readFileSync3 } from "fs";
+import { createRequire } from "node:module";
+var _a3;
+var _b;
+var _c;
+var minNodeVersion = process && process.env && process.env.YARGS_MIN_NODE_VERSION ? Number(process.env.YARGS_MIN_NODE_VERSION) : 20;
+var nodeVersion = (_b = (_a3 = process === null || process === void 0 ? void 0 : process.versions) === null || _a3 === void 0 ? void 0 : _a3.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1);
+if (nodeVersion) {
+ const major = Number(nodeVersion.match(/^([^.]+)/)[1]);
+ if (major < minNodeVersion) {
+ throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`);
}
}
-function refineGrepMatches(rows, params, forceMultiFilePrefix) {
- const re9 = compileGrepRegex(params);
- const multi = forceMultiFilePrefix ?? rows.length > 1;
- const output = [];
- for (const row of rows) {
- if (!row.content)
- continue;
- const lines = row.content.split("\n");
- const matched = [];
- for (let i11 = 0; i11 < lines.length; i11++) {
- const hit = re9.test(lines[i11]);
- if (hit !== !!params.invertMatch) {
- if (params.filesOnly) {
- output.push(row.path);
- break;
- }
- const prefix = multi ? `${row.path}:` : "";
- const ln3 = params.lineNumber ? `${i11 + 1}:` : "";
- matched.push(`${prefix}${ln3}${lines[i11]}`);
- }
- }
- if (!params.filesOnly) {
- if (params.countOnly) {
- output.push(`${multi ? `${row.path}:` : ""}${matched.length}`);
- } else {
- output.push(...matched);
- }
+var env = process ? process.env : {};
+var require2 = createRequire ? createRequire(import.meta.url) : void 0;
+var parser = new YargsParser({
+ cwd: process.cwd,
+ env: () => {
+ return env;
+ },
+ format,
+ normalize,
+ resolve: resolve4,
+ require: (path2) => {
+ if (typeof require2 !== "undefined") {
+ return require2(path2);
+ } else if (path2.match(/\.json$/)) {
+ return JSON.parse(readFileSync3(path2, "utf8"));
+ } else {
+ throw Error("only .json config files are supported in ESM");
}
}
- return output;
-}
+});
+var yargsParser = function Parser(args, opts) {
+ const result = parser.parse(args.slice(), opts);
+ return result.argv;
+};
+yargsParser.detailed = function(args, opts) {
+ return parser.parse(args.slice(), opts);
+};
+yargsParser.camelCase = camelCase2;
+yargsParser.decamelize = decamelize;
+yargsParser.looksLikeNumber = looksLikeNumber;
+var lib_default = yargsParser;
// dist/src/shell/grep-interceptor.js
var MAX_FALLBACK_CANDIDATES = 500;
@@ -68876,23 +69108,18 @@ function createGrepCommand(client, fs3, table, sessionsTable) {
filesOnly: Boolean(parsed.l || parsed["files-with-matches"]),
countOnly: Boolean(parsed.c || parsed["count"])
};
- const likeOp = matchParams.ignoreCase ? "ILIKE" : "LIKE";
- const hasRegexMeta = !matchParams.fixedString && /[.*+?^${}()|[\]\\]/.test(pattern);
- const escapedPattern = sqlLike(pattern);
let rows = [];
try {
- const perTarget = await Promise.race([
- Promise.all(targets.map((t6) => searchDeeplakeTables(client, table, sessionsTable ?? "sessions", {
- pathFilter: buildPathFilter(t6),
- contentScanOnly: hasRegexMeta,
- likeOp,
- escapedPattern,
- limit: 100
- }))),
+ const searchOptions = {
+ ...buildGrepSearchOptions(matchParams, targets[0] ?? ctx.cwd),
+ pathFilter: buildPathFilterForTargets(targets),
+ limit: 100
+ };
+ const queryRows = await Promise.race([
+ searchDeeplakeTables(client, table, sessionsTable ?? "sessions", searchOptions),
new Promise((_16, reject) => setTimeout(() => reject(new Error("timeout")), 3e3))
]);
- for (const batch of perTarget)
- rows.push(...batch);
+ rows.push(...queryRows);
} catch {
rows = [];
}
diff --git a/codex/bundle/stop.js b/codex/bundle/stop.js
index 2de7118..b2da8a8 100755
--- a/codex/bundle/stop.js
+++ b/codex/bundle/stop.js
@@ -1,17 +1,17 @@
#!/usr/bin/env node
// dist/src/hooks/codex/stop.js
-import { readFileSync as readFileSync3, existsSync as existsSync3 } from "node:fs";
+import { readFileSync as readFileSync4, existsSync as existsSync4 } from "node:fs";
// dist/src/utils/stdin.js
function readStdin() {
- return new Promise((resolve, reject) => {
+ return new Promise((resolve2, reject) => {
let data = "";
process.stdin.setEncoding("utf-8");
process.stdin.on("data", (chunk) => data += chunk);
process.stdin.on("end", () => {
try {
- resolve(JSON.parse(data));
+ resolve2(JSON.parse(data));
} catch (err) {
reject(new Error(`Failed to parse hook input: ${err}`));
}
@@ -58,6 +58,9 @@ function loadConfig() {
// dist/src/deeplake-api.js
import { randomUUID } from "node:crypto";
+import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs";
+import { join as join3 } from "node:path";
+import { tmpdir } from "node:os";
// dist/src/utils/debug.js
import { appendFileSync } from "node:fs";
@@ -65,9 +68,6 @@ import { join as join2 } from "node:path";
import { homedir as homedir2 } from "node:os";
var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
var LOG = join2(homedir2(), ".deeplake", "hook-debug.log");
-function utcTimestamp(d = /* @__PURE__ */ new Date()) {
- return d.toISOString().replace("T", " ").slice(0, 19) + " UTC";
-}
function log(tag, msg) {
if (!DEBUG)
return;
@@ -79,6 +79,12 @@ function log(tag, msg) {
function sqlStr(value) {
return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, "");
}
+function sqlIdent(name) {
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
+ throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`);
+ }
+ return name;
+}
// dist/src/deeplake-api.js
var log2 = (msg) => log("sdk", msg);
@@ -100,8 +106,29 @@ var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
var MAX_RETRIES = 3;
var BASE_DELAY_MS = 500;
var MAX_CONCURRENCY = 5;
+var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4);
+var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4);
function sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
+}
+function isTimeoutError(error) {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out");
+}
+function isDuplicateIndexError(error) {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists");
+}
+function isSessionInsertQuery(sql) {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
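+// Widens the retry rules below: 401/403 responses that look like transient
+// proxy or auth hiccups are retried only for session-event INSERTs.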
+function isTransientHtml403(text) {
+ const body = text.toLowerCase();
+ return body.includes(" this.waiting.push(resolve));
+ await new Promise((resolve2) => this.waiting.push(resolve2));
}
release() {
this.active--;
@@ -134,6 +161,7 @@ var DeeplakeApi = class {
tableName;
_pendingRows = [];
_sem = new Semaphore(MAX_CONCURRENCY);
+ _tablesCache = null;
constructor(token, apiUrl, orgId, workspaceId, tableName) {
this.token = token;
this.apiUrl = apiUrl;
@@ -164,6 +192,7 @@ var DeeplakeApi = class {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -171,9 +200,14 @@ var DeeplakeApi = class {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId
},
+ signal,
body: JSON.stringify({ query: sql })
});
} catch (e) {
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -190,7 +224,8 @@ var DeeplakeApi = class {
return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]])));
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text)));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -255,8 +290,61 @@ var DeeplakeApi = class {
async createIndex(column) {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ buildLookupIndexName(table, suffix) {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+ getLookupIndexMarkerPath(table, suffix) {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join3(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+ hasFreshLookupIndexMarker(table, suffix) {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync2(markerPath))
+ return false;
+ try {
+ const raw = JSON.parse(readFileSync2(markerPath, "utf-8"));
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS)
+ return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ markLookupIndexReady(table, suffix) {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8");
+ }
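+  /**
+   * Ensure a lookup index exists, at most once per TTL window: a marker file
+   * records the last successful CREATE INDEX so repeated hook runs skip the
+   * round-trip, and a duplicate-index error is treated as already built.
+   */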
+ async ensureLookupIndex(table, suffix, columnsSql) {
+ if (this.hasFreshLookupIndexMarker(table, suffix))
+ return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log2(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
/** List all tables in the workspace (with retry). */
- async listTables() {
+ async listTables(forceRefresh = false) {
+ if (!forceRefresh && this._tablesCache)
+ return [...this._tablesCache];
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable)
+ this._tablesCache = [...tables];
+ return tables;
+ }
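+  // Only successful fetches are cached (`cacheable: true`), so a transient
+  // network failure never pins an empty table list.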
+ async _fetchTables() {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -267,22 +355,25 @@ var DeeplakeApi = class {
});
if (resp.ok) {
const data = await resp.json();
- return (data.tables ?? []).map((t) => t.table_name);
+ return {
+ tables: (data.tables ?? []).map((t) => t.table_name),
+ cacheable: true
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
async ensureTable(name) {
@@ -292,6 +383,8 @@ var DeeplakeApi = class {
log2(`table "${tbl}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${tbl}" created`);
+ if (!tables.includes(tbl))
+ this._tablesCache = [...tables, tbl];
}
}
/** Create the sessions table (uses JSONB for message since every row is a JSON event). */
@@ -301,39 +394,35 @@ var DeeplakeApi = class {
log2(`table "${name}" not found, creating`);
await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`);
log2(`table "${name}" created`);
+ if (!tables.includes(name))
+ this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
};
-// dist/src/hooks/codex/spawn-wiki-worker.js
-import { spawn, execSync } from "node:child_process";
+// dist/src/utils/direct-run.js
+import { resolve } from "node:path";
import { fileURLToPath } from "node:url";
-import { dirname, join as join4 } from "node:path";
-import { writeFileSync, mkdirSync as mkdirSync2 } from "node:fs";
-import { homedir as homedir3, tmpdir } from "node:os";
-
-// dist/src/utils/wiki-log.js
-import { mkdirSync, appendFileSync as appendFileSync2 } from "node:fs";
-import { join as join3 } from "node:path";
-function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") {
- const path = join3(hooksDir, filename);
- return {
- path,
- log(msg) {
- try {
- mkdirSync(hooksDir, { recursive: true });
- appendFileSync2(path, `[${utcTimestamp()}] ${msg}
-`);
- } catch {
- }
- }
- };
+function isDirectRun(metaUrl) {
+ const entry = process.argv[1];
+ if (!entry)
+ return false;
+ try {
+ return resolve(fileURLToPath(metaUrl)) === resolve(entry);
+ } catch {
+ return false;
+ }
}
// dist/src/hooks/codex/spawn-wiki-worker.js
+import { spawn, execSync } from "node:child_process";
+import { fileURLToPath as fileURLToPath2 } from "node:url";
+import { dirname, join as join4 } from "node:path";
+import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs";
+import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os";
var HOME = homedir3();
-var wikiLogger = makeWikiLogger(join4(HOME, ".codex", "hooks"));
-var WIKI_LOG = wikiLogger.path;
+var WIKI_LOG = join4(HOME, ".codex", "hooks", "deeplake-wiki.log");
var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry.
SESSION JSONL path: __JSONL__
@@ -383,7 +472,14 @@ Format: **entity** (type) \u2014 what was done with it, its current state>
IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact.
PRIVACY: Never include absolute filesystem paths in the summary.
LENGTH LIMIT: Keep the total summary under 4000 characters.`;
-var wikiLog = wikiLogger.log;
+function wikiLog(msg) {
+ try {
+ mkdirSync2(join4(HOME, ".codex", "hooks"), { recursive: true });
+ appendFileSync2(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg}
+`);
+ } catch {
+ }
+}
function findCodexBin() {
try {
return execSync("which codex 2>/dev/null", { encoding: "utf-8" }).trim();
@@ -394,10 +490,10 @@ function findCodexBin() {
function spawnCodexWikiWorker(opts) {
const { config, sessionId, cwd, bundleDir, reason } = opts;
const projectName = cwd.split("/").pop() || "unknown";
- const tmpDir = join4(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`);
+ const tmpDir = join4(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`);
mkdirSync2(tmpDir, { recursive: true });
const configFile = join4(tmpDir, "config.json");
- writeFileSync(configFile, JSON.stringify({
+ writeFileSync2(configFile, JSON.stringify({
apiUrl: config.apiUrl,
token: config.token,
orgId: config.orgId,
@@ -422,164 +518,363 @@ function spawnCodexWikiWorker(opts) {
wikiLog(`${reason}: spawned summary worker for ${sessionId}`);
}
function bundleDirFromImportMeta(importMetaUrl) {
- return dirname(fileURLToPath(importMetaUrl));
+ return dirname(fileURLToPath2(importMetaUrl));
}
-// dist/src/hooks/summary-state.js
-import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync3, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs";
+// dist/src/hooks/session-queue.js
+import { appendFileSync as appendFileSync3, closeSync, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync3, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs";
+import { dirname as dirname2, join as join5 } from "node:path";
import { homedir as homedir4 } from "node:os";
-import { join as join5 } from "node:path";
-var dlog = (msg) => log("summary-state", msg);
-var STATE_DIR = join5(homedir4(), ".claude", "hooks", "summary-state");
-var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
-function lockPath(sessionId) {
- return join5(STATE_DIR, `${sessionId}.lock`);
-}
-function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) {
- mkdirSync3(STATE_DIR, { recursive: true });
- const p = lockPath(sessionId);
- if (existsSync2(p)) {
+var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue");
+var DEFAULT_MAX_BATCH_ROWS = 50;
+var DEFAULT_STALE_INFLIGHT_MS = 6e4;
+var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4;
+var BUSY_WAIT_STEP_MS = 100;
+var SessionWriteDisabledError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "SessionWriteDisabledError";
+ }
+};
+function buildSessionPath(config, sessionId) {
+ return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`;
+}
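+// e.g. buildSessionPath({ userName: "alice", orgName: "acme", workspaceId: "ws1" }, "abc")
+//   -> "/sessions/alice/alice_acme_ws1_abc.jsonl" (illustrative values)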
+function buildQueuedSessionRow(args) {
+ return {
+ id: crypto.randomUUID(),
+ path: args.sessionPath,
+ filename: args.sessionPath.split("/").pop() ?? "",
+ message: args.line,
+ author: args.userName,
+ sizeBytes: Buffer.byteLength(args.line, "utf-8"),
+ project: args.projectName,
+ description: args.description,
+ agent: args.agent,
+ creationDate: args.timestamp,
+ lastUpdateDate: args.timestamp
+ };
+}
+function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ const sessionId = extractSessionId(row.path);
+ const queuePath = getQueuePath(queueDir, sessionId);
+ appendFileSync3(queuePath, `${JSON.stringify(row)}
+`);
+ return queuePath;
+}
+function buildSessionInsertSql(sessionsTable, rows) {
+ if (rows.length === 0)
+ throw new Error("buildSessionInsertSql: rows must not be empty");
+ const table = sqlIdent(sessionsTable);
+ const values = rows.map((row) => {
+ const jsonForSql = sqlStr(coerceJsonbPayload(row.message));
+ return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`;
+ }).join(", ");
+ return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`;
+}
+function coerceJsonbPayload(message) {
+ try {
+ return JSON.stringify(JSON.parse(message));
+ } catch {
+ return JSON.stringify({
+ type: "raw_message",
+ content: message
+ });
+ }
+}
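+/**
+ * Drain the on-disk queue for one session: atomically rename `<id>.jsonl` to
+ * `<id>.inflight`, insert its rows in batches of maxBatchRows, then remove
+ * the inflight file; on failure the rows are requeued via requeueInflight so
+ * they are retried later. An existing inflight file means another flusher is
+ * active ("busy") unless it is stale and allowStaleInflight permits recovery.
+ */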
+async function flushSessionQueue(api, opts) {
+ const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR;
+ const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS;
+ const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS;
+ const waitIfBusyMs = opts.waitIfBusyMs ?? 0;
+ const drainAll = opts.drainAll ?? false;
+ mkdirSync3(queueDir, { recursive: true });
+ const queuePath = getQueuePath(queueDir, opts.sessionId);
+ const inflightPath = getInflightPath(queueDir, opts.sessionId);
+ if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) {
+ return existsSync3(queuePath) || existsSync3(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 };
+ }
+ let totalRows = 0;
+ let totalBatches = 0;
+ let flushedAny = false;
+ while (true) {
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ if (existsSync3(inflightPath)) {
+ if (waitIfBusyMs > 0) {
+ await waitForInflightToClear(inflightPath, waitIfBusyMs);
+ if (opts.allowStaleInflight)
+ recoverStaleInflight(queuePath, inflightPath, staleInflightMs);
+ }
+ if (existsSync3(inflightPath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 };
+ }
+ }
+ if (!existsSync3(queuePath)) {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
try {
- const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10);
- if (Number.isFinite(ageMs) && ageMs < maxAgeMs)
- return false;
- } catch (readErr) {
- dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`);
+ renameSync(queuePath, inflightPath);
+ } catch (e) {
+ if (e?.code === "ENOENT") {
+ return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 };
+ }
+ throw e;
}
try {
- unlinkSync(p);
- } catch (unlinkErr) {
- dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`);
- return false;
+ const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows);
+ totalRows += rows;
+ totalBatches += batches;
+ flushedAny = flushedAny || rows > 0;
+ } catch (e) {
+ requeueInflight(queuePath, inflightPath);
+ if (e instanceof SessionWriteDisabledError) {
+ return { status: "disabled", rows: totalRows, batches: totalBatches };
+ }
+ throw e;
+ }
+ if (!drainAll) {
+ return { status: "flushed", rows: totalRows, batches: totalBatches };
}
}
- try {
- const fd = openSync(p, "wx");
+}
+function getQueuePath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.jsonl`);
+}
+function getInflightPath(queueDir, sessionId) {
+ return join5(queueDir, `${sessionId}.inflight`);
+}
+function extractSessionId(sessionPath) {
+ const filename = sessionPath.split("/").pop() ?? "";
+ return filename.replace(/\.jsonl$/, "").split("_").pop() ?? filename;
+}
+async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) {
+ const rows = readQueuedRows(inflightPath);
+ if (rows.length === 0) {
+ rmSync(inflightPath, { force: true });
+ return { rows: 0, batches: 0 };
+ }
+ let ensured = false;
+ let batches = 0;
+ const queueDir = dirname2(inflightPath);
+ for (let i = 0; i < rows.length; i += maxBatchRows) {
+ const chunk = rows.slice(i, i + maxBatchRows);
+ const sql = buildSessionInsertSql(sessionsTable, chunk);
try {
- writeSync(fd, String(Date.now()));
- } finally {
- closeSync(fd);
+ await api.query(sql);
+ } catch (e) {
+ if (isSessionWriteAuthError(e)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(e));
+ }
+ if (!ensured && isEnsureSessionsTableRetryable(e)) {
+ try {
+ await api.ensureSessionsTable(sessionsTable);
+ } catch (ensureError) {
+ if (isSessionWriteAuthError(ensureError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(ensureError));
+ }
+ throw ensureError;
+ }
+ ensured = true;
+ try {
+ await api.query(sql);
+ } catch (retryError) {
+ if (isSessionWriteAuthError(retryError)) {
+ markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir);
+ throw new SessionWriteDisabledError(errorMessage(retryError));
+ }
+ throw retryError;
+ }
+ } else {
+ throw e;
+ }
}
- return true;
- } catch (e) {
- if (e.code === "EEXIST")
- return false;
- throw e;
+ batches += 1;
}
+ clearSessionWriteDisabled(sessionsTable, queueDir);
+ rmSync(inflightPath, { force: true });
+ return { rows: rows.length, batches };
+}
+function readQueuedRows(path) {
+ const raw = readFileSync3(path, "utf-8");
+ return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
+}
+function requeueInflight(queuePath, inflightPath) {
+ if (!existsSync3(inflightPath))
+ return;
+ const inflight = readFileSync3(inflightPath, "utf-8");
+ appendFileSync3(queuePath, inflight);
+ rmSync(inflightPath, { force: true });
+}
+function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) {
+ if (!existsSync3(inflightPath) || !isStale(inflightPath, staleInflightMs))
+ return;
+ requeueInflight(queuePath, inflightPath);
+}
+function isStale(path, staleInflightMs) {
+ return Date.now() - statSync(path).mtimeMs >= staleInflightMs;
+}
+function isEnsureSessionsTableRetryable(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found");
}
-function releaseLock(sessionId) {
+function isSessionWriteAuthError(error) {
+ const message = errorMessage(error).toLowerCase();
+ return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized");
+}
+function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) {
+ mkdirSync3(queueDir, { recursive: true });
+ writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({
+ disabledAt: (/* @__PURE__ */ new Date()).toISOString(),
+ reason,
+ sessionsTable
+ }));
+}
+function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) {
+ rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true });
+}
+function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) {
+ const path = getSessionWriteDisabledPath(queueDir, sessionsTable);
+ if (!existsSync3(path))
+ return false;
try {
- unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
+ const raw = readFileSync3(path, "utf-8");
+ const state = JSON.parse(raw);
+ const ageMs = Date.now() - new Date(state.disabledAt).getTime();
+ if (Number.isNaN(ageMs) || ageMs >= ttlMs) {
+ rmSync(path, { force: true });
+ return false;
}
+ return true;
+ } catch {
+ rmSync(path, { force: true });
+ return false;
}
}
-
-// dist/src/utils/session-path.js
-function buildSessionPath(config, sessionId) {
- const workspace = config.workspaceId ?? "default";
- return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`;
+function getSessionWriteDisabledPath(queueDir, sessionsTable) {
+ return join5(queueDir, `.${sessionsTable}.disabled.json`);
+}
+function errorMessage(error) {
+ return error instanceof Error ? error.message : String(error);
+}
+async function waitForInflightToClear(inflightPath, waitIfBusyMs) {
+ const startedAt = Date.now();
+ while (existsSync3(inflightPath) && Date.now() - startedAt < waitIfBusyMs) {
+ await sleep2(BUSY_WAIT_STEP_MS);
+ }
+}
+function sleep2(ms) {
+ return new Promise((resolve2) => setTimeout(resolve2, ms));
}
// dist/src/hooks/codex/stop.js
var log3 = (msg) => log("codex-stop", msg);
-var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
-async function main() {
- if (process.env.HIVEMIND_WIKI_WORKER === "1")
- return;
- const input = await readStdin();
- const sessionId = input.session_id;
- if (!sessionId)
- return;
- const config = loadConfig();
+var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false";
+function extractLastAssistantMessage(transcript) {
+ const lines = transcript.trim().split("\n").reverse();
+ for (const line of lines) {
+ try {
+ const entry = JSON.parse(line);
+ const msg = entry.payload ?? entry;
+ if (msg.role === "assistant" && msg.content) {
+ const content = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.filter((b) => b.type === "output_text" || b.type === "text").map((b) => b.text).join("\n") : "";
+ if (content)
+ return content.slice(0, 4e3);
+ }
+ } catch {
+ }
+ }
+ return "";
+}
+function buildCodexStopEntry(input, timestamp, lastAssistantMessage) {
+ return {
+ id: crypto.randomUUID(),
+ session_id: input.session_id,
+ transcript_path: input.transcript_path,
+ cwd: input.cwd,
+ hook_event_name: input.hook_event_name,
+ model: input.model,
+ timestamp,
+ type: lastAssistantMessage ? "assistant_message" : "assistant_stop",
+ content: lastAssistantMessage
+ };
+}
+async function runCodexStopHook(input, deps = {}) {
+ const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), transcriptExists = existsSync4, readTranscript = (path) => readFileSync4(path, "utf-8"), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, flushSessionQueueFn = flushSessionQueue, spawnCodexWikiWorkerFn = spawnCodexWikiWorker, wikiLogFn = wikiLog, bundleDir = bundleDirFromImportMeta(import.meta.url), logFn = log3 } = deps;
+ if (wikiWorker || !input.session_id)
+ return { status: "skipped" };
if (!config) {
- log3("no config");
- return;
+ logFn("no config");
+ return { status: "no_config" };
}
- if (CAPTURE) {
+ let entry;
+ let flushStatus;
+ if (captureEnabled) {
try {
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
- const ts = (/* @__PURE__ */ new Date()).toISOString();
+ const ts = now();
let lastAssistantMessage = "";
if (input.transcript_path) {
try {
- const transcriptPath = input.transcript_path;
- if (existsSync3(transcriptPath)) {
- const transcript = readFileSync3(transcriptPath, "utf-8");
- const lines = transcript.trim().split("\n").reverse();
- for (const line2 of lines) {
- try {
- const entry2 = JSON.parse(line2);
- const msg = entry2.payload ?? entry2;
- if (msg.role === "assistant" && msg.content) {
- const content = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.filter((b) => b.type === "output_text" || b.type === "text").map((b) => b.text).join("\n") : "";
- if (content) {
- lastAssistantMessage = content.slice(0, 4e3);
- break;
- }
- }
- } catch {
- }
+ if (transcriptExists(input.transcript_path)) {
+ lastAssistantMessage = extractLastAssistantMessage(readTranscript(input.transcript_path));
+ if (lastAssistantMessage) {
+ logFn(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`);
}
- if (lastAssistantMessage)
- log3(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`);
}
} catch (e) {
- log3(`transcript read failed: ${e.message}`);
+ logFn(`transcript read failed: ${e.message}`);
}
}
- const entry = {
- id: crypto.randomUUID(),
- session_id: sessionId,
- transcript_path: input.transcript_path,
- cwd: input.cwd,
- hook_event_name: input.hook_event_name,
- model: input.model,
- timestamp: ts,
- type: lastAssistantMessage ? "assistant_message" : "assistant_stop",
- content: lastAssistantMessage
- };
+ entry = buildCodexStopEntry(input, ts, lastAssistantMessage);
const line = JSON.stringify(entry);
- const sessionPath = buildSessionPath(config, sessionId);
+ const sessionPath = buildSessionPath(config, input.session_id);
const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
- const filename = sessionPath.split("/").pop() ?? "";
- const jsonForSql = sqlStr(line);
- const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', 'Stop', 'codex', '${ts}', '${ts}')`;
- await api.query(insertSql);
- log3("stop event captured");
+ appendQueuedSessionRowFn(buildQueuedSessionRowFn({
+ sessionPath,
+ line,
+ userName: config.userName,
+ projectName,
+ description: "Stop",
+ agent: "codex",
+ timestamp: ts
+ }));
+ const flush = await flushSessionQueueFn(createApi(config), {
+ sessionId: input.session_id,
+ sessionsTable: config.sessionsTableName,
+ drainAll: true
+ });
+ flushStatus = flush.status;
+ logFn(`stop flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`);
} catch (e) {
- log3(`capture failed: ${e.message}`);
- }
- }
- if (!CAPTURE)
- return;
- if (!tryAcquireLock(sessionId)) {
- wikiLog(`Stop: periodic worker already running for ${sessionId}, skipping`);
- return;
- }
- wikiLog(`Stop: triggering summary for ${sessionId}`);
- try {
- spawnCodexWikiWorker({
- config,
- sessionId,
- cwd: input.cwd ?? "",
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "Stop"
- });
- } catch (e) {
- log3(`spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr) {
- log3(`releaseLock after spawn failure also failed: ${releaseErr.message}`);
+ logFn(`capture failed: ${e.message}`);
}
- throw e;
}
+ if (!captureEnabled)
+ return { status: "complete", entry };
+ wikiLogFn(`Stop: triggering summary for ${input.session_id}`);
+ spawnCodexWikiWorkerFn({
+ config,
+ sessionId: input.session_id,
+ cwd: input.cwd ?? "",
+ bundleDir,
+ reason: "Stop"
+ });
+ return { status: "complete", flushStatus, entry };
}
-main().catch((e) => {
- log3(`fatal: ${e.message}`);
- process.exit(0);
-});
+async function main() {
+ const input = await readStdin();
+ await runCodexStopHook(input);
+}
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => {
+ log3(`fatal: ${e.message}`);
+ process.exit(0);
+ });
+}
+export {
+ buildCodexStopEntry,
+ extractLastAssistantMessage,
+ runCodexStopHook
+};
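For orientation before the next file: the queue module added above follows an append-then-rename protocol. Rows accumulate in ~/.deeplake/queue/<sessionId>.jsonl, a flush renames that file to <sessionId>.inflight, batches of up to 50 rows are inserted, and on failure the inflight content is appended back onto the queue. Below is a minimal caller sketch in TypeScript reusing the helpers defined in this bundle; the session id, prompt text, and project name are illustrative assumptions, and since the bundle keeps these helpers module-local this is a sketch of the protocol rather than a supported API.

// Sketch only: these helpers are module-local in the shipped bundle, so a real
// caller cannot import them; names and signatures match the code above.
async function captureAndDrain(config: Config) {        // Config as in src/config.js
  const ts = new Date().toISOString();
  const sessionPath = buildSessionPath(config, "session-123");     // illustrative id
  appendQueuedSessionRow(buildQueuedSessionRow({
    sessionPath,
    line: JSON.stringify({ type: "user_message", content: "hello" }),
    userName: config.userName,
    projectName: "repo",                                           // illustrative
    description: "UserPromptSubmit",
    agent: "codex",
    timestamp: ts,
  }));
  // Same DeeplakeApi constructor arity the stop hook uses above.
  const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId,
    config.workspaceId, config.sessionsTableName);
  const flush = await flushSessionQueue(api, {
    sessionId: "session-123",
    sessionsTable: config.sessionsTableName,
    drainAll: true,          // loop until the queue file for this session is gone
  });
  return flush;              // status: "flushed" | "empty" | "busy" | "disabled"
}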
diff --git a/codex/bundle/wiki-worker.js b/codex/bundle/wiki-worker.js
index 913c279..1b596aa 100755
--- a/codex/bundle/wiki-worker.js
+++ b/codex/bundle/wiki-worker.js
@@ -1,37 +1,21 @@
#!/usr/bin/env node
// dist/src/hooks/codex/wiki-worker.js
-import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, appendFileSync as appendFileSync2, mkdirSync as mkdirSync2, rmSync } from "node:fs";
+import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, appendFileSync, mkdirSync as mkdirSync2, rmSync } from "node:fs";
import { execFileSync } from "node:child_process";
-import { join as join3 } from "node:path";
+import { join as join2 } from "node:path";
// dist/src/hooks/summary-state.js
import { readFileSync, writeFileSync, writeSync, mkdirSync, renameSync, existsSync, unlinkSync, openSync, closeSync } from "node:fs";
-import { homedir as homedir2 } from "node:os";
-import { join as join2 } from "node:path";
-
-// dist/src/utils/debug.js
-import { appendFileSync } from "node:fs";
-import { join } from "node:path";
import { homedir } from "node:os";
-var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1";
-var LOG = join(homedir(), ".deeplake", "hook-debug.log");
-function log(tag, msg) {
- if (!DEBUG)
- return;
- appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg}
-`);
-}
-
-// dist/src/hooks/summary-state.js
-var dlog = (msg) => log("summary-state", msg);
-var STATE_DIR = join2(homedir2(), ".claude", "hooks", "summary-state");
+import { join } from "node:path";
+var STATE_DIR = join(homedir(), ".claude", "hooks", "summary-state");
var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4));
function statePath(sessionId) {
- return join2(STATE_DIR, `${sessionId}.json`);
+ return join(STATE_DIR, `${sessionId}.json`);
}
function lockPath(sessionId) {
- return join2(STATE_DIR, `${sessionId}.lock`);
+ return join(STATE_DIR, `${sessionId}.lock`);
}
function readState(sessionId) {
const p = statePath(sessionId);
@@ -62,11 +46,9 @@ function withRmwLock(sessionId, fn) {
if (e.code !== "EEXIST")
throw e;
if (Date.now() > deadline) {
- dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
continue;
}
@@ -79,8 +61,7 @@ function withRmwLock(sessionId, fn) {
closeSync(fd);
try {
unlinkSync(rmwLock);
- } catch (unlinkErr) {
- dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`);
+ } catch {
}
}
}
@@ -97,10 +78,7 @@ function finalizeSummary(sessionId, jsonlLines) {
function releaseLock(sessionId) {
try {
unlinkSync(lockPath(sessionId));
- } catch (e) {
- if (e?.code !== "ENOENT") {
- dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`);
- }
+ } catch {
}
}
@@ -130,15 +108,14 @@ async function uploadSummary(query2, params) {
}
// dist/src/hooks/codex/wiki-worker.js
-var dlog2 = (msg) => log("codex-wiki-worker", msg);
var cfg = JSON.parse(readFileSync2(process.argv[2], "utf-8"));
var tmpDir = cfg.tmpDir;
-var tmpJsonl = join3(tmpDir, "session.jsonl");
-var tmpSummary = join3(tmpDir, "summary.md");
+var tmpJsonl = join2(tmpDir, "session.jsonl");
+var tmpSummary = join2(tmpDir, "summary.md");
function wlog(msg) {
try {
mkdirSync2(cfg.hooksDir, { recursive: true });
- appendFileSync2(cfg.wikiLog, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] wiki-worker(${cfg.sessionId}): ${msg}
+ appendFileSync(cfg.wikiLog, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] wiki-worker(${cfg.sessionId}): ${msg}
`);
} catch {
}
@@ -178,8 +155,7 @@ async function query(sql, retries = 4) {
function cleanup() {
try {
rmSync(tmpDir, { recursive: true, force: true });
- } catch (cleanupErr) {
- dlog2(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`);
+ } catch {
}
}
async function main() {
@@ -258,8 +234,7 @@ async function main() {
cleanup();
try {
releaseLock(cfg.sessionId);
- } catch (releaseErr) {
- dlog2(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`);
+ } catch {
}
}
}
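The lock helpers this wiki-worker diff trims logging from implement a plain O_EXCL file mutex: openSync(path, "wx") either creates the lock file atomically or throws EEXIST, the owner's timestamp is written into it, and callers reclaim a lock whose holder appears dead after a deadline. A self-contained sketch of the same acquire/release pattern, with an illustrative lock path:

import { closeSync, openSync, unlinkSync, writeSync } from "node:fs";

// Returns true if we won the lock; mirrors tryAcquireLock/withRmwLock above.
function tryLock(path: string): boolean {
  try {
    const fd = openSync(path, "wx");     // atomic create-or-fail
    try { writeSync(fd, String(Date.now())); } finally { closeSync(fd); }
    return true;
  } catch (e) {
    if ((e as NodeJS.ErrnoException).code === "EEXIST") return false;
    throw e;
  }
}

function unlock(path: string): void {
  try { unlinkSync(path); } catch { /* already gone */ }
}

// Usage: if (tryLock("/tmp/s1.lock")) { try { /* work */ } finally { unlock("/tmp/s1.lock"); } }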
diff --git a/codex/package.json b/codex/package.json
index 0a42990..377c98a 100644
--- a/codex/package.json
+++ b/codex/package.json
@@ -1,6 +1,6 @@
{
"name": "hivemind-codex",
- "version": "0.6.38",
+ "version": "0.6.37",
"description": "Cloud-backed persistent shared memory for OpenAI Codex CLI powered by Deeplake",
"type": "module"
}
diff --git a/codex/tests/codex-integration.test.ts b/codex/tests/codex-integration.test.ts
index d399a9d..44b41dd 100644
--- a/codex/tests/codex-integration.test.ts
+++ b/codex/tests/codex-integration.test.ts
@@ -106,14 +106,27 @@ describe("codex integration: session-start", () => {
expect(raw).toContain("Do NOT spawn subagents");
});
- it("context includes JSONL warning", () => {
+ it("context includes raw session file warning", () => {
const raw = runHook("session-start.js", {
session_id: "test-session-004",
cwd: "/tmp",
hook_event_name: "SessionStart",
model: "gpt-5.2",
});
- expect(raw).toContain("Do NOT jump straight to JSONL");
+ expect(raw).toContain("Do NOT jump straight to raw session files");
+ });
+
+ it("context steers recall tasks to index-first exact file reads", () => {
+ const raw = runHook("session-start.js", {
+ session_id: "test-session-004b",
+ cwd: "/tmp",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ });
+ expect(raw).toContain("read that exact summary or session path directly");
+ expect(raw).toContain("Do NOT probe unrelated local paths");
+ expect(raw).toContain("answer with the smallest exact phrase supported by memory");
+ expect(raw).toContain("convert the final answer into an absolute month/date/year");
});
});
diff --git a/codex/tests/codex-source-hooks.test.ts b/codex/tests/codex-source-hooks.test.ts
new file mode 100644
index 0000000..263a473
--- /dev/null
+++ b/codex/tests/codex-source-hooks.test.ts
@@ -0,0 +1,1126 @@
+import { afterEach, describe, expect, it, vi } from "vitest";
+import type { Config } from "../../src/config.js";
+import type { Credentials } from "../../src/commands/auth.js";
+import {
+ buildCodexCaptureEntry,
+ maybeTriggerPeriodicSummary,
+ runCodexCaptureHook,
+} from "../../src/hooks/codex/capture.js";
+import {
+ buildUnsupportedGuidance,
+ isSafe,
+ processCodexPreToolUse,
+ rewritePaths,
+ touchesMemory,
+} from "../../src/hooks/codex/pre-tool-use.js";
+import {
+ buildCodexSessionStartContext,
+ runCodexSessionStartHook,
+} from "../../src/hooks/codex/session-start.js";
+import {
+ createPlaceholder,
+ runCodexSessionStartSetup,
+} from "../../src/hooks/codex/session-start-setup.js";
+import {
+ buildCodexStopEntry,
+ extractLastAssistantMessage,
+ runCodexStopHook,
+} from "../../src/hooks/codex/stop.js";
+
+const baseConfig: Config = {
+ token: "token",
+ orgId: "org-1",
+ orgName: "Acme",
+ userName: "alice",
+ workspaceId: "default",
+ apiUrl: "https://api.example.com",
+ tableName: "memory",
+ sessionsTableName: "sessions",
+ memoryPath: "/tmp/.deeplake/memory",
+};
+
+const baseCreds: Credentials = {
+ token: "token",
+ orgId: "org-1",
+ orgName: "Acme",
+ userName: "alice",
+ workspaceId: "default",
+ apiUrl: "https://api.example.com",
+ savedAt: "2026-01-01T00:00:00.000Z",
+};
+
+afterEach(() => {
+ vi.restoreAllMocks();
+});
+
+describe("codex capture source", () => {
+ it("builds user/tool entries and ignores unsupported events", () => {
+ const user = buildCodexCaptureEntry({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "UserPromptSubmit",
+ model: "gpt-5.2",
+ prompt: "hello",
+ }, "2026-01-01T00:00:00.000Z");
+ const tool = buildCodexCaptureEntry({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "PostToolUse",
+ model: "gpt-5.2",
+ tool_name: "Bash",
+ tool_use_id: "tu-1",
+ tool_input: { command: "ls" },
+ tool_response: { stdout: "ok" },
+ }, "2026-01-01T00:00:01.000Z");
+
+ expect(user?.type).toBe("user_message");
+ expect(tool?.type).toBe("tool_call");
+ expect(buildCodexCaptureEntry({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, "2026-01-01T00:00:02.000Z")).toBeNull();
+ });
+
+ it("triggers periodic summaries and queues capture rows", async () => {
+ const spawn = vi.fn();
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 })) as any,
+ loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any,
+ shouldTriggerFn: vi.fn(() => true) as any,
+ tryAcquireLockFn: vi.fn(() => true) as any,
+ spawnCodexWikiWorkerFn: spawn as any,
+ wikiLogFn: vi.fn() as any,
+ bundleDir: "/tmp/bundle",
+ });
+ expect(spawn).toHaveBeenCalledTimes(1);
+
+ const append = vi.fn();
+ const clear = vi.fn();
+ const queued = await runCodexCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "PostToolUse",
+ model: "gpt-5.2",
+ tool_name: "Bash",
+ tool_use_id: "tu-1",
+ tool_input: { command: "ls" },
+ tool_response: { stdout: "ok" },
+ }, {
+ config: baseConfig,
+ appendQueuedSessionRowFn: append as any,
+ clearSessionQueryCacheFn: clear as any,
+ });
+ expect(queued.status).toBe("queued");
+ expect(append).toHaveBeenCalledTimes(1);
+ expect(clear).not.toHaveBeenCalled();
+
+ await runCodexCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "UserPromptSubmit",
+ model: "gpt-5.2",
+ prompt: "hi",
+ }, {
+ config: baseConfig,
+ appendQueuedSessionRowFn: vi.fn() as any,
+ clearSessionQueryCacheFn: clear as any,
+ });
+ expect(clear).toHaveBeenCalledWith("s1");
+ });
+
+ it("returns disabled, no_config, and ignored states", async () => {
+ expect(await runCodexCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "UserPromptSubmit",
+ model: "gpt-5.2",
+ prompt: "hi",
+ }, {
+ captureEnabled: false,
+ config: baseConfig,
+ })).toEqual({ status: "disabled" });
+
+ expect(await runCodexCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "UserPromptSubmit",
+ model: "gpt-5.2",
+ prompt: "hi",
+ }, {
+ config: null,
+ })).toEqual({ status: "no_config" });
+
+ expect(await runCodexCaptureHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Unknown",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ })).toEqual({ status: "ignored" });
+ });
+
+ it("suppresses periodic summaries when skipped or when the helper throws", () => {
+ const spawn = vi.fn();
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ wikiWorker: true,
+ spawnCodexWikiWorkerFn: spawn as any,
+ });
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => { throw new Error("boom"); }) as any,
+ spawnCodexWikiWorkerFn: spawn as any,
+ logFn: vi.fn(),
+ });
+ maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, {
+ bumpTotalCountFn: vi.fn(() => ({ totalCount: 1, lastSummaryCount: 1 })) as any,
+ loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any,
+ shouldTriggerFn: vi.fn(() => false) as any,
+ spawnCodexWikiWorkerFn: spawn as any,
+ });
+ expect(spawn).not.toHaveBeenCalled();
+ });
+});
+
+describe("codex pre-tool source", () => {
+ it("detects, rewrites, and validates memory commands", () => {
+ expect(touchesMemory("cat ~/.deeplake/memory/index.md")).toBe(true);
+ expect(rewritePaths("cat $HOME/.deeplake/memory/index.md")).toBe("cat /index.md");
+ expect(isSafe("grep -r needle /")).toBe(true);
+ expect(isSafe("node -e '1' /")).toBe(false);
+ expect(isSafe("echo $(uname)")).toBe(false);
+ expect(buildUnsupportedGuidance()).toContain("Do NOT use python");
+ });
+
+ it("passes through non-memory commands and guides unsafe ones", async () => {
+ expect(await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-1",
+ tool_input: { command: "ls -la /tmp" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ })).toEqual({ action: "pass" });
+
+ const guidance = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-2",
+ tool_input: { command: "python3 -c 'print(1)' ~/.deeplake/memory" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ });
+ expect(guidance.action).toBe("guide");
+ expect(guidance.output).toContain("Only bash builtins");
+ });
+
+ it("uses direct read, direct grep, and shell fallback", async () => {
+ const api = {
+ query: vi.fn(async () => [
+ {
+ path: "/summaries/alice/s1.md",
+ project: "repo",
+ description: "session summary",
+ creation_date: "2026-01-01T00:00:00.000Z",
+ },
+ ]),
+ };
+ const readDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-1",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md | head -20" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ createApi: vi.fn(() => api as any),
+ readVirtualPathContentFn: vi.fn(async () => null) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(readDecision.action).toBe("block");
+ expect(readDecision.output).toContain("# Memory Index");
+
+ const grepDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-2",
+ tool_input: { command: "grep -r needle ~/.deeplake/memory/" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ handleGrepDirectFn: vi.fn(async () => "/index.md:needle") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(grepDecision.output).toContain("/index.md:needle");
+
+ const fallback = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-3",
+ tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: null,
+ runVirtualShellFn: vi.fn(() => "ok") as any,
+ });
+ expect(fallback).toEqual({
+ action: "block",
+ output: "ok",
+ rewrittenCommand: "echo hi > /test.md",
+ });
+ });
+
+ it("supports head, tail, wc -l, find counts, missing ls paths, and default empty-shell output", async () => {
+ const contentReader = vi.fn(async () => "line1\nline2\nline3");
+
+ const headDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-4",
+ tool_input: { command: "head -2 ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(headDecision.output).toBe("line1\nline2");
+
+ const tailDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-5",
+ tool_input: { command: "tail -2 ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(tailDecision.output).toBe("line2\nline3");
+
+ const wcDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-6",
+ tool_input: { command: "wc -l ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: contentReader as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(wcDecision.output).toBe("3 /index.md");
+
+ const findDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-7",
+ tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md' | wc -l" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ findVirtualPathsFn: vi.fn(async () => ["/summaries/alice/s1.md", "/summaries/alice/s2.md"]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(findDecision.output).toBe("2");
+
+ const missingLs = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-8",
+ tool_input: { command: "ls ~/.deeplake/memory/missing" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => []) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(missingLs.output).toContain("No such file or directory");
+
+ const emptyShell = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-9",
+ tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ runVirtualShellFn: vi.fn(() => "") as any,
+ });
+ expect(emptyShell.output).toContain("Command returned empty");
+ });
+
+ it("returns compiled output when the bash compiler can satisfy the command directly", async () => {
+ const decision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-10",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ executeCompiledBashCommandFn: vi.fn(async () => "compiled output") as any,
+ });
+
+ expect(decision).toEqual({
+ action: "block",
+ output: "compiled output",
+ rewrittenCommand: "cat /index.md && ls /summaries",
+ });
+ });
+
+ it("reuses cached /index.md content for direct and compiled reads within a session", async () => {
+ const readVirtualPathContentFn = vi.fn(async () => "fresh index");
+ const readVirtualPathContentsFn = vi.fn(async (_api, _memory, _sessions, paths: string[]) => new Map(
+ paths.map((path) => [path, path === "/index.md" ? "fresh index" : null]),
+ )) as any;
+ const readCachedIndexContentFn = vi.fn(() => "cached index");
+ const writeCachedIndexContentFn = vi.fn();
+
+ const directDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-cache-1",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: readCachedIndexContentFn as any,
+ writeCachedIndexContentFn: writeCachedIndexContentFn as any,
+ readVirtualPathContentFn: readVirtualPathContentFn as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(directDecision.output).toBe("cached index");
+ expect(readVirtualPathContentFn).not.toHaveBeenCalled();
+ expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s1", "cached index");
+
+ const compiledDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-cache-2",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: readCachedIndexContentFn as any,
+ writeCachedIndexContentFn: writeCachedIndexContentFn as any,
+ readVirtualPathContentsFn,
+ executeCompiledBashCommandFn: vi.fn(async (_api, _table, _sessions, _cmd, deps) => {
+ const map = await deps.readVirtualPathContentsFn(_api, _table, _sessions, ["/index.md"]);
+ return map.get("/index.md") ?? null;
+ }) as any,
+ });
+ expect(compiledDecision.output).toBe("cached index");
+ expect(readVirtualPathContentsFn).not.toHaveBeenCalled();
+ });
+
+ it("covers plain cat, directory listings, non-count find, grep fallback, and direct-query exceptions", async () => {
+ const plainCat = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-11",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: vi.fn(async () => "line1\nline2") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(plainCat).toEqual({
+ action: "block",
+ output: "line1\nline2",
+ rewrittenCommand: "cat /index.md",
+ });
+
+ const listed = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-12",
+ tool_input: { command: "ls ~/.deeplake/memory/summaries" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => [
+ { path: "/other/place.md", size_bytes: 1 },
+ { path: "/summaries/", size_bytes: 0 },
+ { path: "/summaries/alice/s1.md", size_bytes: 10 },
+ { path: "/summaries/bob/nested/file.md", size_bytes: 20 },
+ ]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(listed.output).toContain("alice/");
+ expect(listed.output).toContain("bob/");
+ expect(listed.output).not.toContain("other");
+
+ const rootLs = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-13",
+ tool_input: { command: "ls ~/.deeplake/memory" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => [
+ { path: "/", size_bytes: 0 },
+ { path: "/root.md", size_bytes: 5 },
+ { path: "/summaries/alice/s1.md", size_bytes: 10 },
+ ]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(rootLs.output).toContain("root.md");
+ expect(rootLs.output).toContain("summaries/");
+
+ const findNoMatches = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-14",
+ tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md'" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ findVirtualPathsFn: vi.fn(async () => []) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(findNoMatches.output).toBe("(no matches)");
+
+ const findRoot = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-14b",
+ tool_input: { command: "find ~/.deeplake/memory -name '*.md'" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ findVirtualPathsFn: vi.fn(async () => ["/summaries/a.md", "/notes.md"]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(findRoot.output).toContain("/summaries/a.md");
+ expect(findRoot.output).toContain("/notes.md");
+
+ const grepFallback = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-15",
+ tool_input: { command: "grep needle ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ handleGrepDirectFn: vi.fn(async () => null) as any,
+ runVirtualShellFn: vi.fn(() => "shell fallback") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(grepFallback.output).toBe("shell fallback");
+
+ const errorFallback = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-16",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ executeCompiledBashCommandFn: vi.fn(async () => { throw new Error("boom"); }) as any,
+ runVirtualShellFn: vi.fn(() => "fallback after error") as any,
+ });
+ expect(errorFallback.output).toBe("fallback after error");
+ });
+
+ it("covers default head/tail forms, synthetic index rows, and long ls formatting", async () => {
+ const headDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-17",
+ tool_input: { command: "head ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: vi.fn(async () => "a\nb\nc") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(headDecision.output).toBe("a\nb\nc");
+
+ const tailDecision = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-18",
+ tool_input: { command: "tail ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ readCachedIndexContentFn: vi.fn(() => null) as any,
+ writeCachedIndexContentFn: vi.fn() as any,
+ readVirtualPathContentFn: vi.fn(async () => "a\nb\nc") as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(tailDecision.output).toBe("a\nb\nc");
+
+ const api = {
+ query: vi.fn(async () => [{ path: "/summaries/alice/s1.md" }]),
+ };
+ const syntheticIndex = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-19",
+ tool_input: { command: "cat ~/.deeplake/memory/index.md" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ createApi: vi.fn(() => api as any),
+ readVirtualPathContentFn: vi.fn(async () => null) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(syntheticIndex.output).toContain("# Memory Index");
+
+ const longLs = await processCodexPreToolUse({
+ session_id: "s1",
+ tool_name: "Bash",
+ tool_use_id: "tu-20",
+ tool_input: { command: "ls -l ~/.deeplake/memory/summaries" },
+ cwd: "/repo",
+ hook_event_name: "PreToolUse",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ listVirtualPathRowsFn: vi.fn(async () => [
+ { path: "/summaries/alice/file.md" },
+ { path: "/summaries/alice/another.md", size_bytes: 3 },
+ { path: "/summaries/team/nested/file.md", size_bytes: 5 },
+ ]) as any,
+ executeCompiledBashCommandFn: vi.fn(async () => null) as any,
+ });
+ expect(longLs.output).toContain("alice/");
+ expect(longLs.output).toContain("team/");
+ expect(longLs.output).toContain("drwxr-xr-x");
+ });
+});
+
+describe("codex session start source", () => {
+ it("builds logged-in and logged-out context", () => {
+ const loggedIn = buildCodexSessionStartContext({
+ creds: baseCreds,
+ currentVersion: "0.6.0",
+ authCommand: "/tmp/auth-login.js",
+ });
+ const loggedOut = buildCodexSessionStartContext({
+ creds: null,
+ currentVersion: "0.6.0",
+ authCommand: "/tmp/auth-login.js",
+ });
+
+ expect(loggedIn).toContain("Logged in to Deeplake");
+ expect(loggedIn).toContain("Hivemind v0.6.0");
+ expect(loggedIn).toContain("resolve it against that session's own date/date_time metadata");
+ expect(loggedIn).toContain("convert the final answer into an absolute month/date/year");
+ expect(loggedIn).toContain("answer with the smallest exact phrase supported by memory");
+ expect(loggedIn).toContain('Do NOT answer "not found"');
+ expect(loggedOut).toContain('Run: node "/tmp/auth-login.js" login');
+ });
+
+ it("skips in wiki-worker mode and spawns async setup when authenticated", async () => {
+ expect(await runCodexSessionStartHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ wikiWorker: true,
+ })).toBeNull();
+
+ const write = vi.fn();
+ const end = vi.fn();
+ const unref = vi.fn();
+ const spawnFn = vi.fn(() => ({
+ stdin: { write, end },
+ unref,
+ }) as any);
+ const result = await runCodexSessionStartHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ currentVersion: "0.6.0",
+ spawnFn: spawnFn as any,
+ setupScript: "/tmp/session-start-setup.js",
+ authCommand: "/tmp/auth-login.js",
+ });
+
+ expect(result).toContain("Logged in to Deeplake");
+ expect(spawnFn).toHaveBeenCalledTimes(1);
+ expect(write).toHaveBeenCalled();
+ expect(end).toHaveBeenCalled();
+ expect(unref).toHaveBeenCalled();
+ });
+
+ it("returns logged-out context without spawning setup when unauthenticated", async () => {
+ const spawnFn = vi.fn();
+ const result = await runCodexSessionStartHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: null,
+ spawnFn: spawnFn as any,
+ currentVersion: null,
+ authCommand: "/tmp/auth-login.js",
+ });
+
+ expect(result).toContain("Not logged in to Deeplake");
+ expect(spawnFn).not.toHaveBeenCalled();
+ });
+
+ it("falls back to org id and default workspace when names are missing", () => {
+ const context = buildCodexSessionStartContext({
+ creds: { ...baseCreds, orgName: undefined, workspaceId: undefined } as any,
+ currentVersion: null,
+ authCommand: "/tmp/auth-login.js",
+ });
+ expect(context).toContain("org-1");
+ expect(context).toContain("workspace: default");
+ expect(context).not.toContain("Hivemind v");
+ });
+});
+
+describe("codex session start setup source", () => {
+ it("creates placeholders only when summaries do not already exist", async () => {
+ const query = vi.fn(async () => []);
+ const api = { query } as any;
+ await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default");
+ expect(query).toHaveBeenCalledTimes(2);
+ expect(String(query.mock.calls[1]?.[0])).toContain('INSERT INTO "memory"');
+
+ query.mockReset();
+ query.mockResolvedValueOnce([{ path: "/summaries/alice/s1.md" }]);
+ await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default");
+ expect(query).toHaveBeenCalledTimes(1);
+ });
+
+ it("handles no credentials, disabled session writes, and update notices", async () => {
+ expect(await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: null,
+ })).toEqual({ status: "no_credentials" });
+
+ const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any);
+ const placeholder = vi.fn(async () => undefined);
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: { ...baseCreds, autoupdate: false },
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => undefined),
+ query: vi.fn(async () => []),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => true) as any,
+ createPlaceholderFn: placeholder as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any,
+ });
+ expect(placeholder).toHaveBeenCalledTimes(1);
+ expect(stderr).toHaveBeenCalledWith(expect.stringContaining("update available"));
+ });
+
+ it("skips in wiki-worker mode and logs setup/version failures", async () => {
+ expect(await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ wikiWorker: true,
+ })).toEqual({ status: "skipped" });
+
+ const logFn = vi.fn();
+ const wikiLogFn = vi.fn();
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => { throw new Error("boom"); }),
+ }) as any),
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => { throw new Error("offline"); }) as any,
+ logFn,
+ wikiLogFn,
+ });
+
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("setup failed: boom"));
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("version check failed: offline"));
+ expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom"));
+ });
+
+ it("handles capture-disabled and successful autoupdate flows", async () => {
+ const placeholder = vi.fn();
+ const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any);
+ const execSyncFn = vi.fn();
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ config: baseConfig,
+ captureEnabled: false,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ }) as any),
+ createPlaceholderFn: placeholder as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any,
+ execSyncFn: execSyncFn as any,
+ });
+ expect(placeholder).not.toHaveBeenCalled();
+ expect(execSyncFn).toHaveBeenCalledTimes(1);
+ expect(stderr).toHaveBeenCalledWith(expect.stringContaining("auto-updated"));
+ });
+
+ it("handles non-auth setup errors and skips setup when session metadata is absent", async () => {
+ const wikiLogFn = vi.fn();
+ const createPlaceholderFn = vi.fn();
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => { throw new Error("boom"); }),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ isSessionWriteAuthErrorFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ wikiLogFn,
+ });
+ expect(createPlaceholderFn).not.toHaveBeenCalled();
+ expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom"));
+
+ await expect(runCodexSessionStartSetup({
+ session_id: "",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ config: baseConfig,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ })).resolves.toEqual({ status: "complete" });
+ });
+
+ it("backfills missing usernames, handles auth-disabled session writes, and treats missing cwd as unknown", async () => {
+ const save = vi.fn();
+ const placeholder = vi.fn(async () => undefined);
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: undefined as any,
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: { ...baseCreds, userName: undefined },
+ saveCredentialsFn: save as any,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable: vi.fn(async () => { throw new Error("403 Forbidden"); }),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ isSessionWriteAuthErrorFn: vi.fn(() => true) as any,
+ markSessionWriteDisabledFn: vi.fn() as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any,
+ createPlaceholderFn: placeholder as any,
+ getInstalledVersionFn: vi.fn(() => "0.6.0") as any,
+ getLatestVersionCachedFn: vi.fn(async () => "0.6.0") as any,
+ });
+ expect(save).toHaveBeenCalledTimes(1);
+ expect(placeholder).toHaveBeenCalledWith(expect.anything(), "memory", "s1", "", "alice", "Acme", "default");
+
+ const query = vi.fn(async () => []);
+ await createPlaceholder({ query } as any, "memory", "s2", "", "alice", "Acme", "default");
+ expect(String(query.mock.calls[1]?.[0])).toContain("'unknown'");
+ });
+
+ it("skips duplicate queue drains while another codex session-start setup is already handling sessions", async () => {
+ const logFn = vi.fn();
+ const createPlaceholderFn = vi.fn(async () => undefined);
+ const ensureSessionsTable = vi.fn(async () => undefined);
+ const drainSessionQueuesFn = vi.fn(async () => ({
+ queuedSessions: 1,
+ flushedSessions: 1,
+ rows: 1,
+ batches: 1,
+ }));
+
+ await runCodexSessionStartSetup({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "SessionStart",
+ model: "gpt-5.2",
+ }, {
+ creds: baseCreds,
+ config: baseConfig,
+ createApi: vi.fn(() => ({
+ ensureTable: vi.fn(async () => undefined),
+ ensureSessionsTable,
+ query: vi.fn(async () => []),
+ }) as any),
+ isSessionWriteDisabledFn: vi.fn(() => false) as any,
+ tryAcquireSessionDrainLockFn: vi.fn(() => null) as any,
+ drainSessionQueuesFn: drainSessionQueuesFn as any,
+ createPlaceholderFn: createPlaceholderFn as any,
+ getInstalledVersionFn: vi.fn(() => null) as any,
+ logFn,
+ });
+
+ expect(ensureSessionsTable).not.toHaveBeenCalled();
+ expect(drainSessionQueuesFn).not.toHaveBeenCalled();
+ expect(createPlaceholderFn).toHaveBeenCalledTimes(1);
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("sessions drain already in progress"));
+ });
+});
+
+describe("codex stop source", () => {
+ it("extracts assistant messages from string and block transcripts", () => {
+ expect(extractLastAssistantMessage([
+ '{"role":"assistant","content":"done"}',
+ ].join("\n"))).toBe("done");
+
+ expect(extractLastAssistantMessage([
+ '{"payload":{"role":"assistant","content":[{"type":"output_text","text":"first"},{"type":"text","text":"second"}]}}',
+ ].join("\n"))).toBe("first\nsecond");
+
+ expect(extractLastAssistantMessage("not json")).toBe("");
+ });
+
+ it("builds stop entries for assistant messages and assistant stops", () => {
+ const message = buildCodexStopEntry({
+ session_id: "s1",
+ transcript_path: "/tmp/t.jsonl",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, "2026-01-01T00:00:00.000Z", "done");
+ const stop = buildCodexStopEntry({
+ session_id: "s1",
+ transcript_path: null,
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, "2026-01-01T00:00:01.000Z", "");
+
+ expect(message.type).toBe("assistant_message");
+ expect(stop.type).toBe("assistant_stop");
+ });
+
+ it("skips, returns no_config, and flushes plus spawns summaries", async () => {
+ expect(await runCodexStopHook({
+ session_id: "",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ })).toEqual({ status: "skipped" });
+
+ expect(await runCodexStopHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: null,
+ })).toEqual({ status: "no_config" });
+
+ const flush = vi.fn(async () => ({ status: "flushed", rows: 2, batches: 1 }));
+ const spawn = vi.fn();
+ const result = await runCodexStopHook({
+ session_id: "s1",
+ transcript_path: "/tmp/t.jsonl",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ transcriptExists: vi.fn(() => true) as any,
+ readTranscript: vi.fn(() => '{"role":"assistant","content":"done"}') as any,
+ appendQueuedSessionRowFn: vi.fn() as any,
+ flushSessionQueueFn: flush as any,
+ spawnCodexWikiWorkerFn: spawn as any,
+ wikiLogFn: vi.fn() as any,
+ bundleDir: "/tmp/bundle",
+ });
+
+ expect(result).toMatchObject({ status: "complete", flushStatus: "flushed" });
+ expect(flush).toHaveBeenCalledTimes(1);
+ expect(spawn).toHaveBeenCalledWith({
+ config: baseConfig,
+ sessionId: "s1",
+ cwd: "/repo",
+ bundleDir: "/tmp/bundle",
+ reason: "Stop",
+ });
+
+ const noCapture = await runCodexStopHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ captureEnabled: false,
+ });
+ expect(noCapture).toEqual({ status: "complete", entry: undefined });
+ });
+
+ it("continues when transcript reads fail and when wiki-worker mode is active", async () => {
+ expect(await runCodexStopHook({
+ session_id: "s1",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ wikiWorker: true,
+ config: baseConfig,
+ })).toEqual({ status: "skipped" });
+
+ const flush = vi.fn(async () => ({ status: "flushed", rows: 1, batches: 1 }));
+ const result = await runCodexStopHook({
+ session_id: "s1",
+ transcript_path: "/tmp/t.jsonl",
+ cwd: "/repo",
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ transcriptExists: vi.fn(() => true) as any,
+ readTranscript: vi.fn(() => { throw new Error("boom"); }) as any,
+ appendQueuedSessionRowFn: vi.fn() as any,
+ flushSessionQueueFn: flush as any,
+ spawnCodexWikiWorkerFn: vi.fn() as any,
+ wikiLogFn: vi.fn() as any,
+ bundleDir: "/tmp/bundle",
+ });
+
+ expect(result.flushStatus).toBe("flushed");
+ expect(flush).toHaveBeenCalledTimes(1);
+ });
+
+ it("returns empty when assistant blocks have no text and keeps going after capture failures", async () => {
+ expect(extractLastAssistantMessage([
+ "{\"role\":\"assistant\",\"content\":[{\"type\":\"image\",\"url\":\"x\"}]}",
+ "{\"role\":\"user\",\"content\":\"hi\"}",
+ ].join("\n"))).toBe("");
+
+ const spawn = vi.fn();
+ const logFn = vi.fn();
+ const result = await runCodexStopHook({
+ session_id: "s1",
+ transcript_path: "/tmp/missing.jsonl",
+ cwd: undefined as any,
+ hook_event_name: "Stop",
+ model: "gpt-5.2",
+ }, {
+ config: baseConfig,
+ transcriptExists: vi.fn(() => false) as any,
+ appendQueuedSessionRowFn: vi.fn() as any,
+ flushSessionQueueFn: vi.fn(async () => { throw new Error("flush boom"); }) as any,
+ spawnCodexWikiWorkerFn: spawn as any,
+ wikiLogFn: vi.fn() as any,
+ logFn,
+ bundleDir: "/tmp/bundle",
+ });
+
+ expect(result).toMatchObject({
+ status: "complete",
+ entry: expect.objectContaining({ type: "assistant_stop" }),
+ });
+ expect(logFn).toHaveBeenCalledWith(expect.stringContaining("capture failed: flush boom"));
+ expect(spawn).toHaveBeenCalledWith({
+ config: baseConfig,
+ sessionId: "s1",
+ cwd: "",
+ bundleDir: "/tmp/bundle",
+ reason: "Stop",
+ });
+ });
+});
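One behavior the session-start-setup tests above stub through isSessionWriteDisabledFn and markSessionWriteDisabledFn is the auth-failure circuit breaker in the stop bundle: a 401/403 during an insert writes a .{sessionsTable}.disabled.json marker into the queue directory, flushes report status "disabled" while it is fresh, and the marker expires after the five-minute default TTL. A sketch of that timeline, again treating the bundle-local helpers as importable purely for illustration:

// Hypothetical standalone usage; the table name and queue directory are illustrative.
markSessionWriteDisabled("sessions", "403 Forbidden", "/tmp/queue");
isSessionWriteDisabled("sessions", "/tmp/queue");      // true: marker is fresh
isSessionWriteDisabled("sessions", "/tmp/queue", 0);   // false: a zero TTL expires and deletes the marker
// With the marker gone, the next flushSessionQueue call attempts inserts again,
// and a fully successful flush clears any marker via clearSessionWriteDisabled.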
diff --git a/openclaw/openclaw.plugin.json b/openclaw/openclaw.plugin.json
index 04cdf6c..485df8d 100644
--- a/openclaw/openclaw.plugin.json
+++ b/openclaw/openclaw.plugin.json
@@ -23,5 +23,5 @@
}
}
},
- "version": "0.6.38"
+ "version": "0.6.37"
}
diff --git a/openclaw/package.json b/openclaw/package.json
index 31161cb..712bffd 100644
--- a/openclaw/package.json
+++ b/openclaw/package.json
@@ -1,6 +1,6 @@
{
"name": "hivemind",
- "version": "0.6.38",
+ "version": "0.6.37",
"type": "module",
"description": "Hivemind — cloud-backed persistent shared memory for AI agents, powered by DeepLake",
"license": "Apache-2.0",
diff --git a/package-lock.json b/package-lock.json
index f0ebfcc..7ec599d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "hivemind",
- "version": "0.6.38",
+ "version": "0.6.37",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "hivemind",
- "version": "0.6.38",
+ "version": "0.6.37",
"dependencies": {
"deeplake": "^0.3.30",
"just-bash": "^2.14.0",
@@ -21,7 +21,6 @@
"@vitest/coverage-v8": "^4.1.3",
"esbuild": "^0.28.0",
"husky": "^9.1.7",
- "jscpd": "^4.0.9",
"lint-staged": "^16.4.0",
"tsx": "^4.7.0",
"typescript": "^6.0.0",
@@ -1058,17 +1057,6 @@
"url": "https://github.com/sponsors/Borewit"
}
},
- "node_modules/@colors/colors": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
- "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==",
- "dev": true,
- "license": "MIT",
- "optional": true,
- "engines": {
- "node": ">=0.1.90"
- }
- },
"node_modules/@emnapi/core": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz",
@@ -1092,16 +1080,6 @@
"tslib": "^2.4.0"
}
},
- "node_modules/@emnapi/runtime": {
- "version": "1.10.0",
- "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz",
- "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "tslib": "^2.4.0"
- }
- },
"node_modules/@esbuild/aix-ppc64": {
"version": "0.28.0",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.28.0.tgz",
@@ -2080,71 +2058,6 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
- "node_modules/@jscpd/badge-reporter": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/@jscpd/badge-reporter/-/badge-reporter-4.0.5.tgz",
- "integrity": "sha512-SLVhP00R9lkQ//Ivaanfm7k0L9sewpBven670kk1uGec2SWUOa7MVQcuad/TV59KEZ73UIC1lXvi6O9hAnbpUw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "badgen": "^3.2.3",
- "colors": "^1.4.0",
- "fs-extra": "^11.2.0"
- }
- },
- "node_modules/@jscpd/core": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/@jscpd/core/-/core-4.0.5.tgz",
- "integrity": "sha512-Udvym21nWzxjYRVXwwpYNBqZ6b50QV2zHN3fFNzOPPg4cfQVYOZerILB7xNDUsXHC1PCr/N52Tq3q7AElvjWWA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "eventemitter3": "^5.0.1"
- }
- },
- "node_modules/@jscpd/finder": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/@jscpd/finder/-/finder-4.0.5.tgz",
- "integrity": "sha512-/2VkRoVrrfya+51sitZo5I9MdwsRaPKB8X3L3khAYoHFXk4L/mUuG81RmGazDHjUIGg22ItlkQtwzorNZ2+aPw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jscpd/core": "4.0.5",
- "@jscpd/tokenizer": "4.0.5",
- "blamer": "^1.0.6",
- "bytes": "^3.1.2",
- "cli-table3": "^0.6.5",
- "colors": "^1.4.0",
- "fast-glob": "^3.3.2",
- "fs-extra": "^11.2.0",
- "markdown-table": "^2.0.0",
- "pug": "^3.0.3"
- }
- },
- "node_modules/@jscpd/html-reporter": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/@jscpd/html-reporter/-/html-reporter-4.0.5.tgz",
- "integrity": "sha512-drK2J8KyPIW9wvaElSIobZFp4dBO9GA++JW4gx3oihvLdDSp8qSo/CNqH47Dw0XkjQTxND3j/+Wz5JWvYRBgFQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "colors": "1.4.0",
- "fs-extra": "^11.2.0",
- "pug": "^3.0.3"
- }
- },
- "node_modules/@jscpd/tokenizer": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/@jscpd/tokenizer/-/tokenizer-4.0.5.tgz",
- "integrity": "sha512-WzRujQtN5WedxZVDKuoanxmKAFrxcLrHpcA6kaM4z8AhGtWXZ325yseqgL5TZ8OK7Auwu7kQLlqhfk05fGYG7A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jscpd/core": "4.0.5",
- "reprism": "^0.0.11",
- "spark-md5": "^3.0.2"
- }
- },
"node_modules/@mixmark-io/domino": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@mixmark-io/domino/-/domino-2.2.0.tgz",
@@ -2185,44 +2098,6 @@
"@emnapi/runtime": "^1.7.1"
}
},
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
- "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
- "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
- "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
"node_modules/@oxc-project/types": {
"version": "0.123.0",
"resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.123.0.tgz",
@@ -3348,17 +3223,11 @@
"integrity": "sha512-tO4ZIRKNC+MDWV4qKVZe3Ql/woTnmHDr5JD8UI5hn2pwBrHEwOEMZK7WlNb5RKB6EoJ02gwmQS9OrjuFnZYdpg==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"undici-types": "~7.18.0"
}
},
- "node_modules/@types/sarif": {
- "version": "2.1.7",
- "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
- "integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/@types/yargs-parser": {
"version": "21.0.3",
"resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
@@ -3372,6 +3241,7 @@
"integrity": "sha512-/MBdrkA8t6hbdCWFKs09dPik774xvs4Z6L4bycdCxYNLHM8oZuRyosumQMG19LUlBsB6GeVpL1q4kFFazvyKGA==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@bcoe/v8-coverage": "^1.0.2",
"@vitest/utils": "4.1.3",
@@ -3510,19 +3380,6 @@
"url": "https://opencollective.com/vitest"
}
},
- "node_modules/acorn": {
- "version": "7.4.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
- "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
"node_modules/amdefine": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz",
@@ -3574,20 +3431,6 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
- "node_modules/asap": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
- "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/assert-never": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.4.0.tgz",
- "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/assertion-error": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
@@ -3610,26 +3453,6 @@
"js-tokens": "^10.0.0"
}
},
- "node_modules/babel-walk": {
- "version": "3.0.0-canary-5",
- "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz",
- "integrity": "sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/types": "^7.9.6"
- },
- "engines": {
- "node": ">= 10.0.0"
- }
- },
- "node_modules/badgen": {
- "version": "3.2.3",
- "resolved": "https://registry.npmjs.org/badgen/-/badgen-3.2.3.tgz",
- "integrity": "sha512-svDuwkc63E/z0ky3drpUppB83s/nlgDciH9m+STwwQoWyq7yCgew1qEfJ+9axkKdNq7MskByptWUN9j1PGMwFA==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/balanced-match": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
@@ -3672,20 +3495,6 @@
"readable-stream": "^3.4.0"
}
},
- "node_modules/blamer": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/blamer/-/blamer-1.0.7.tgz",
- "integrity": "sha512-GbBStl/EVlSWkiJQBZps3H1iARBrC7vt++Jb/TTmCNu/jZ04VW7tSN1nScbFXBUy1AN+jzeL7Zep9sbQxLhXKA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "execa": "^4.0.0",
- "which": "^2.0.2"
- },
- "engines": {
- "node": ">=8.9"
- }
- },
"node_modules/bowser": {
"version": "2.14.1",
"resolved": "https://registry.npmjs.org/bowser/-/bowser-2.14.1.tgz",
@@ -3705,19 +3514,6 @@
"node": "18 || 20 || >=22"
}
},
- "node_modules/braces": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
- "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
@@ -3743,47 +3539,6 @@
"ieee754": "^1.1.13"
}
},
- "node_modules/bytes": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
- "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/call-bind-apply-helpers": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
- "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/call-bound": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
- "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "call-bind-apply-helpers": "^1.0.2",
- "get-intrinsic": "^1.3.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
"node_modules/chai": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
@@ -3794,16 +3549,6 @@
"node": ">=18"
}
},
- "node_modules/character-parser": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz",
- "integrity": "sha512-+UqJQjFEFaTAs3bNsF2j2kEN1baG/zghZbdqoYEDxGZtJo9LBzl1A+m0D4n3qKx8N2FNv8/Xp6yV9mQmBuptaw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-regex": "^1.0.3"
- }
- },
"node_modules/chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
@@ -3827,77 +3572,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/cli-table3": {
- "version": "0.6.5",
- "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz",
- "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "string-width": "^4.2.0"
- },
- "engines": {
- "node": "10.* || >= 12.*"
- },
- "optionalDependencies": {
- "@colors/colors": "1.5.0"
- }
- },
- "node_modules/cli-table3/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/cli-table3/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/cli-table3/node_modules/is-fullwidth-code-point": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
- "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/cli-table3/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/cli-table3/node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/cli-truncate": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.2.0.tgz",
@@ -3922,16 +3596,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/colors": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz",
- "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.1.90"
- }
- },
"node_modules/commander": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz",
@@ -3957,17 +3621,6 @@
"compressjs": "bin/compressjs"
}
},
- "node_modules/constantinople": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz",
- "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.6.0",
- "@babel/types": "^7.6.1"
- }
- },
"node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
@@ -3975,21 +3628,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/cross-spawn": {
- "version": "7.0.6",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
- "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
"node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
@@ -4067,28 +3705,6 @@
"node": ">=0.3.1"
}
},
- "node_modules/doctypes": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz",
- "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/dunder-proto": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
- "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "call-bind-apply-helpers": "^1.0.1",
- "es-errors": "^1.3.0",
- "gopd": "^1.2.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
"node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
@@ -4100,8 +3716,8 @@
"version": "1.4.5",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz",
"integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==",
- "devOptional": true,
"license": "MIT",
+ "optional": true,
"dependencies": {
"once": "^1.4.0"
}
@@ -4119,26 +3735,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/es-define-property": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
- "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-errors": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
"node_modules/es-module-lexer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz",
@@ -4146,19 +3742,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/es-object-atoms": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
- "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
"node_modules/esbuild": {
"version": "0.28.0",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.28.0.tgz",
@@ -4166,6 +3749,7 @@
"dev": true,
"hasInstallScript": true,
"license": "MIT",
+ "peer": true,
"bin": {
"esbuild": "bin/esbuild"
},
@@ -4218,53 +3802,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/execa": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz",
- "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "cross-spawn": "^7.0.0",
- "get-stream": "^5.0.0",
- "human-signals": "^1.1.1",
- "is-stream": "^2.0.0",
- "merge-stream": "^2.0.0",
- "npm-run-path": "^4.0.0",
- "onetime": "^5.1.0",
- "signal-exit": "^3.0.2",
- "strip-final-newline": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sindresorhus/execa?sponsor=1"
- }
- },
- "node_modules/execa/node_modules/onetime": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
- "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "mimic-fn": "^2.1.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/execa/node_modules/signal-exit": {
- "version": "3.0.7",
- "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
- "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
- "dev": true,
- "license": "ISC"
- },
"node_modules/expand-template": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
@@ -4285,23 +3822,6 @@
"node": ">=12.0.0"
}
},
- "node_modules/fast-glob": {
- "version": "3.3.3",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz",
- "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.8"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
"node_modules/fast-xml-builder": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz",
@@ -4337,16 +3857,6 @@
"fxparser": "src/cli/cli.js"
}
},
- "node_modules/fastq": {
- "version": "1.20.1",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz",
- "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
"node_modules/fdir": {
"version": "6.5.0",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
@@ -4383,19 +3893,6 @@
"url": "https://github.com/sindresorhus/file-type?sponsor=1"
}
},
- "node_modules/fill-range": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
- "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
@@ -4403,21 +3900,6 @@
"license": "MIT",
"optional": true
},
- "node_modules/fs-extra": {
- "version": "11.3.4",
- "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz",
- "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "graceful-fs": "^4.2.0",
- "jsonfile": "^6.0.1",
- "universalify": "^2.0.0"
- },
- "engines": {
- "node": ">=14.14"
- }
- },
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
@@ -4433,16 +3915,6 @@
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
- "node_modules/function-bind": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
- "dev": true,
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
"node_modules/get-east-asian-width": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.5.0.tgz",
@@ -4456,61 +3928,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/get-intrinsic": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
- "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "call-bind-apply-helpers": "^1.0.2",
- "es-define-property": "^1.0.1",
- "es-errors": "^1.3.0",
- "es-object-atoms": "^1.1.1",
- "function-bind": "^1.1.2",
- "get-proto": "^1.0.1",
- "gopd": "^1.2.0",
- "has-symbols": "^1.1.0",
- "hasown": "^2.0.2",
- "math-intrinsics": "^1.1.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-proto": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
- "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "dunder-proto": "^1.0.1",
- "es-object-atoms": "^1.0.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/get-stream": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz",
- "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pump": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/get-tsconfig": {
"version": "4.13.7",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz",
@@ -4531,39 +3948,6 @@
"license": "MIT",
"optional": true
},
- "node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/gopd": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
- "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/graceful-fs": {
- "version": "4.2.11",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
- "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
- "dev": true,
- "license": "ISC"
- },
"node_modules/graceful-readlink": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz",
@@ -4580,48 +3964,6 @@
"node": ">=8"
}
},
- "node_modules/has-symbols": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
- "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-tostringtag": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
- "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "has-symbols": "^1.0.3"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hasown": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.3.tgz",
- "integrity": "sha512-ej4AhfhfL2Q2zpMmLo7U1Uv9+PyhIZpgQLGT1F9miIGmiCJIoCgSmczFdrc97mWT4kVY72KA+WnnhJ5pghSvSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
"node_modules/html-escaper": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
@@ -4629,16 +3971,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/human-signals": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz",
- "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=8.12.0"
- }
- },
"node_modules/husky": {
"version": "9.1.7",
"resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz",
@@ -4691,43 +4023,6 @@
"node": "^20.17.0 || >=22.9.0"
}
},
- "node_modules/is-core-module": {
- "version": "2.16.1",
- "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
- "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "hasown": "^2.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/is-expression": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz",
- "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "acorn": "^7.1.1",
- "object-assign": "^4.1.1"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
"node_modules/is-fullwidth-code-point": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz",
@@ -4744,75 +4039,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/is-glob": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
- "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
- "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/is-promise": {
- "version": "2.2.2",
- "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz",
- "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/is-regex": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
- "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "call-bound": "^1.0.2",
- "gopd": "^1.2.0",
- "has-tostringtag": "^1.0.2",
- "hasown": "^2.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/is-stream": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
- "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true,
- "license": "ISC"
- },
"node_modules/istanbul-lib-coverage": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
@@ -4852,13 +4078,6 @@
"node": ">=8"
}
},
- "node_modules/js-stringify": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz",
- "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/js-tokens": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-10.0.0.tgz",
@@ -4866,73 +4085,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/jscpd": {
- "version": "4.0.9",
- "resolved": "https://registry.npmjs.org/jscpd/-/jscpd-4.0.9.tgz",
- "integrity": "sha512-fp6Sh42W3mIPoQgZmgYmKDLQzEDnnX2vaGlTN4haILkB2vsi+ewcCHEtWR/2CR/QbsBvAvsNo8U5Sa+p9aHiGw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jscpd/badge-reporter": "4.0.5",
- "@jscpd/core": "4.0.5",
- "@jscpd/finder": "4.0.5",
- "@jscpd/html-reporter": "4.0.5",
- "@jscpd/tokenizer": "4.0.5",
- "colors": "^1.4.0",
- "commander": "^5.0.0",
- "fs-extra": "^11.2.0",
- "jscpd-sarif-reporter": "4.0.7"
- },
- "bin": {
- "jscpd": "bin/jscpd"
- }
- },
- "node_modules/jscpd-sarif-reporter": {
- "version": "4.0.7",
- "resolved": "https://registry.npmjs.org/jscpd-sarif-reporter/-/jscpd-sarif-reporter-4.0.7.tgz",
- "integrity": "sha512-Q/VlfTI/Nbjc8dZ/2pDVIf1aRi2bM2CTYujcAoeYr7brRnS4o5ZeW86W8q7MM7cQu40gezlNckl+E9wKFSMFiA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "colors": "^1.4.0",
- "fs-extra": "^11.2.0",
- "node-sarif-builder": "^3.4.0"
- }
- },
- "node_modules/jscpd/node_modules/commander": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
- "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/jsonfile": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.1.tgz",
- "integrity": "sha512-zwOTdL3rFQ/lRdBnntKVOX6k5cKJwEc1HdilT71BWEu7J41gXIB2MRp+vxduPSwZJPWBxEzv4yH1wYLJGUHX4Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "universalify": "^2.0.0"
- },
- "optionalDependencies": {
- "graceful-fs": "^4.1.6"
- }
- },
- "node_modules/jstransformer": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz",
- "integrity": "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-promise": "^2.0.0",
- "promise": "^7.0.1"
- }
- },
"node_modules/just-bash": {
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/just-bash/-/just-bash-2.14.0.tgz",
@@ -5352,84 +4504,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/markdown-table": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz",
- "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "repeat-string": "^1.0.0"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/wooorm"
- }
- },
- "node_modules/math-intrinsics": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
- "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/merge-stream": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
- "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
- "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/micromatch/node_modules/picomatch": {
- "version": "2.3.2",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz",
- "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/mimic-fn": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
- "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/mimic-function": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
@@ -5575,52 +4649,15 @@
"node-addon-api": "^8.5.0",
"node-gyp-build": "^4.8.4"
},
- "bin": {
- "nxz": "lib/cli/nxz.js"
- },
- "engines": {
- "node": ">=16.0.0"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/oorabona"
- }
- },
- "node_modules/node-sarif-builder": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/node-sarif-builder/-/node-sarif-builder-3.4.0.tgz",
- "integrity": "sha512-tGnJW6OKRii9u/b2WiUViTJS+h7Apxx17qsMUjsUeNDiMMX5ZFf8F8Fcz7PAQ6omvOxHZtvDTmOYKJQwmfpjeg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@types/sarif": "^2.1.7",
- "fs-extra": "^11.1.1"
- },
- "engines": {
- "node": ">=20"
- }
- },
- "node_modules/npm-run-path": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
- "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/object-assign": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
- "dev": true,
- "license": "MIT",
+ "bin": {
+ "nxz": "lib/cli/nxz.js"
+ },
"engines": {
- "node": ">=0.10.0"
+ "node": ">=16.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/oorabona"
}
},
"node_modules/obug": {
@@ -5638,8 +4675,8 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "devOptional": true,
"license": "ISC",
+ "optional": true,
"dependencies": {
"wrappy": "1"
}
@@ -5681,23 +4718,6 @@
"node": ">=14.0.0"
}
},
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-parse": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
- "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/pathe": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
@@ -5705,34 +4725,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/pg": {
- "version": "8.20.0",
- "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz",
- "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==",
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "pg-connection-string": "^2.12.0",
- "pg-pool": "^3.13.0",
- "pg-protocol": "^1.13.0",
- "pg-types": "2.2.0",
- "pgpass": "1.0.5"
- },
- "engines": {
- "node": ">= 16.0.0"
- },
- "optionalDependencies": {
- "pg-cloudflare": "^1.3.0"
- },
- "peerDependencies": {
- "pg-native": ">=3.0.1"
- },
- "peerDependenciesMeta": {
- "pg-native": {
- "optional": true
- }
- }
- },
"node_modules/pg-cloudflare": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
@@ -5921,184 +4913,17 @@
"node": ">=10"
}
},
- "node_modules/promise": {
- "version": "7.3.1",
- "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
- "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "asap": "~2.0.3"
- }
- },
- "node_modules/pug": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.4.tgz",
- "integrity": "sha512-kFfq5mMzrS7+wrl5pLJzZEzemx34OQ0w4SARfhy/3yxTlhbstsudDwJzhf1hP02yHzbjoVMSXUj/Sz6RNfMyXg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pug-code-gen": "^3.0.4",
- "pug-filters": "^4.0.0",
- "pug-lexer": "^5.0.1",
- "pug-linker": "^4.0.0",
- "pug-load": "^3.0.0",
- "pug-parser": "^6.0.0",
- "pug-runtime": "^3.0.1",
- "pug-strip-comments": "^2.0.0"
- }
- },
- "node_modules/pug-attrs": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz",
- "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "constantinople": "^4.0.1",
- "js-stringify": "^1.0.2",
- "pug-runtime": "^3.0.0"
- }
- },
- "node_modules/pug-code-gen": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.4.tgz",
- "integrity": "sha512-6okWYIKdasTyXICyEtvobmTZAVX57JkzgzIi4iRJlin8kmhG+Xry2dsus+Mun/nGCn6F2U49haHI5mkELXB14g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "constantinople": "^4.0.1",
- "doctypes": "^1.1.0",
- "js-stringify": "^1.0.2",
- "pug-attrs": "^3.0.0",
- "pug-error": "^2.1.0",
- "pug-runtime": "^3.0.1",
- "void-elements": "^3.1.0",
- "with": "^7.0.0"
- }
- },
- "node_modules/pug-error": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz",
- "integrity": "sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/pug-filters": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz",
- "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "constantinople": "^4.0.1",
- "jstransformer": "1.0.0",
- "pug-error": "^2.0.0",
- "pug-walk": "^2.0.0",
- "resolve": "^1.15.1"
- }
- },
- "node_modules/pug-lexer": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz",
- "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "character-parser": "^2.2.0",
- "is-expression": "^4.0.0",
- "pug-error": "^2.0.0"
- }
- },
- "node_modules/pug-linker": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz",
- "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pug-error": "^2.0.0",
- "pug-walk": "^2.0.0"
- }
- },
- "node_modules/pug-load": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz",
- "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "object-assign": "^4.1.1",
- "pug-walk": "^2.0.0"
- }
- },
- "node_modules/pug-parser": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz",
- "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pug-error": "^2.0.0",
- "token-stream": "1.0.0"
- }
- },
- "node_modules/pug-runtime": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz",
- "integrity": "sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/pug-strip-comments": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz",
- "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pug-error": "^2.0.0"
- }
- },
- "node_modules/pug-walk": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz",
- "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/pump": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz",
"integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==",
- "devOptional": true,
"license": "MIT",
+ "optional": true,
"dependencies": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
- "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
- },
"node_modules/quickjs-emscripten": {
"version": "0.32.0",
"resolved": "https://registry.npmjs.org/quickjs-emscripten/-/quickjs-emscripten-0.32.0.tgz",
@@ -6168,45 +4993,6 @@
"node": ">= 6"
}
},
- "node_modules/repeat-string": {
- "version": "1.6.1",
- "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz",
- "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/reprism": {
- "version": "0.0.11",
- "resolved": "https://registry.npmjs.org/reprism/-/reprism-0.0.11.tgz",
- "integrity": "sha512-VsxDR5QxZo08M/3nRypNlScw5r3rKeSOPdU/QhDmu3Ai3BJxHn/qgfXGWQp/tAxUtzwYNo9W6997JZR0tPLZsA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/resolve": {
- "version": "1.22.12",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.12.tgz",
- "integrity": "sha512-TyeJ1zif53BPfHootBGwPRYT1RUt6oGWsaQr8UyZW/eAm9bKoijtvruSDEmZHm92CwS9nj7/fWttqPCgzep8CA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0",
- "is-core-module": "^2.16.1",
- "path-parse": "^1.0.7",
- "supports-preserve-symlinks-flag": "^1.0.0"
- },
- "bin": {
- "resolve": "bin/resolve"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
@@ -6234,17 +5020,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
- "node_modules/reusify": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
- "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
"node_modules/rfdc": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
@@ -6286,30 +5061,6 @@
"@rolldown/binding-win32-x64-msvc": "1.0.0-rc.13"
}
},
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
- "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -6389,29 +5140,6 @@
"@img/sharp-win32-x64": "0.34.5"
}
},
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/siginfo": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
@@ -6518,13 +5246,6 @@
"node": ">=0.10.0"
}
},
- "node_modules/spark-md5": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz",
- "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==",
- "dev": true,
- "license": "(WTFPL OR MIT)"
- },
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
@@ -6614,16 +5335,6 @@
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
- "node_modules/strip-final-newline": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
- "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
"node_modules/strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
@@ -6675,19 +5386,6 @@
"node": ">=8"
}
},
- "node_modules/supports-preserve-symlinks-flag": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
- "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
"node_modules/tar-fs": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz",
@@ -6762,26 +5460,6 @@
"node": ">=14.0.0"
}
},
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
- "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/token-stream": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz",
- "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==",
- "dev": true,
- "license": "MIT"
- },
"node_modules/token-types": {
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.2.tgz",
@@ -7370,16 +6048,6 @@
"dev": true,
"license": "MIT"
},
- "node_modules/universalify": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
- "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 10.0.0"
- }
- },
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -7393,6 +6061,7 @@
"integrity": "sha512-P1PbweD+2/udplnThz3btF4cf6AgPky7kk23RtHUkJIU5BIxwPprhRGmOAHs6FTI7UiGbTNrgNP6jSYD6JaRnw==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"lightningcss": "^1.32.0",
"picomatch": "^4.0.4",
@@ -7471,6 +6140,7 @@
"integrity": "sha512-DBc4Tx0MPNsqb9isoyOq00lHftVx/KIU44QOm2q59npZyLUkENn8TMFsuzuO+4U2FUa9rgbbPt3udrP25GcjXw==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@vitest/expect": "4.1.3",
"@vitest/mocker": "4.1.3",
@@ -7555,32 +6225,6 @@
}
}
},
- "node_modules/void-elements": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz",
- "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
"node_modules/why-is-node-running": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
@@ -7598,22 +6242,6 @@
"node": ">=8"
}
},
- "node_modules/with": {
- "version": "7.0.2",
- "resolved": "https://registry.npmjs.org/with/-/with-7.0.2.tgz",
- "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.9.6",
- "@babel/types": "^7.9.6",
- "assert-never": "^1.2.1",
- "babel-walk": "3.0.0-canary-5"
- },
- "engines": {
- "node": ">= 10.0.0"
- }
- },
"node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
@@ -7654,8 +6282,8 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
- "devOptional": true,
- "license": "ISC"
+ "license": "ISC",
+ "optional": true
},
"node_modules/xtend": {
"version": "4.0.2",
diff --git a/package.json b/package.json
index c503dd2..81f2b6c 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "hivemind",
- "version": "0.6.38",
+ "version": "0.6.37",
"description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake",
"type": "module",
"bin": {
@@ -13,8 +13,7 @@
"shell": "tsx src/shell/deeplake-shell.ts",
"test": "vitest run",
"typecheck": "tsc --noEmit",
- "dup": "jscpd src",
- "ci": "npm run typecheck && npm run dup && npm test",
+ "ci": "npm run typecheck && npm test",
"prepare": "husky"
},
"lint-staged": {
@@ -34,7 +33,6 @@
"@vitest/coverage-v8": "^4.1.3",
"esbuild": "^0.28.0",
"husky": "^9.1.7",
- "jscpd": "^4.0.9",
"lint-staged": "^16.4.0",
"tsx": "^4.7.0",
"typescript": "^6.0.0",
diff --git a/src/deeplake-api.ts b/src/deeplake-api.ts
index ad3cb4d..4b1dfed 100644
--- a/src/deeplake-api.ts
+++ b/src/deeplake-api.ts
@@ -1,4 +1,7 @@
import { randomUUID } from "node:crypto";
+import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
import { log as _log } from "./utils/debug.js";
import { sqlStr } from "./utils/sql.js";
@@ -23,11 +26,45 @@ const RETRYABLE_CODES = new Set([429, 500, 502, 503, 504]);
const MAX_RETRIES = 3;
const BASE_DELAY_MS = 500;
const MAX_CONCURRENCY = 5;
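+// Env-tunable limits: query timeout defaults to 10s (HIVEMIND_ var overriding DEEPLAKE_), index-marker TTL to 6 hours.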
+const QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 10_000);
+const INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 60_000);
function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
+function isTimeoutError(error: unknown): boolean {
+ const name = error instanceof Error ? error.name.toLowerCase() : "";
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return name.includes("timeout") ||
+ name === "aborterror" ||
+ message.includes("timeout") ||
+ message.includes("timed out");
+}
+
+function isDuplicateIndexError(error: unknown): boolean {
+ const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
+ return message.includes("duplicate key value violates unique constraint") ||
+ message.includes("pg_class_relname_nsp_index") ||
+ message.includes("already exists");
+}
+
+function isSessionInsertQuery(sql: string): boolean {
+ return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql);
+}
+
+function isTransientHtml403(text: string): boolean {
+ const body = text.toLowerCase();
+ return body.includes("<html") || body.includes("<!doctype");
+}
+
+function getIndexMarkerDir(): string {
+ return join(tmpdir(), "hivemind-index-markers");
+}
+
class Semaphore {
private queue: (() => void)[] = [];
private active = 0;
@@ -62,6 +99,7 @@ export interface WriteRow {
export class DeeplakeApi {
private _pendingRows: WriteRow[] = [];
private _sem = new Semaphore(MAX_CONCURRENCY);
+ private _tablesCache: string[] | null = null;
constructor(
private token: string,
@@ -95,6 +133,7 @@ export class DeeplakeApi {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
let resp: Response;
try {
+ const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, {
method: "POST",
headers: {
@@ -102,10 +141,15 @@ export class DeeplakeApi {
"Content-Type": "application/json",
"X-Activeloop-Org-Id": this.orgId,
},
+ signal,
body: JSON.stringify({ query: sql }),
});
} catch (e: unknown) {
// Network-level failure (DNS, TCP reset, timeout, etc.)
+ if (isTimeoutError(e)) {
+ lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`);
+ throw lastError;
+ }
lastError = e instanceof Error ? e : new Error(String(e));
if (attempt < MAX_RETRIES) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
@@ -123,7 +167,10 @@ export class DeeplakeApi {
);
}
const text = await resp.text().catch(() => "");
- if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
+ const retryable403 =
+ isSessionInsertQuery(sql) &&
+ (resp.status === 401 || (resp.status === 403 && (text.length === 0 || isTransientHtml403(text))));
+ if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) {
const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200;
log(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`);
await sleep(delay);
@@ -199,8 +246,67 @@ export class DeeplakeApi {
await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`);
}
+ private buildLookupIndexName(table: string, suffix: string): string {
+ return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_");
+ }
+
+ private getLookupIndexMarkerPath(table: string, suffix: string): string {
+ const markerKey = [
+ this.workspaceId,
+ this.orgId,
+ table,
+ suffix,
+ ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_");
+ return join(getIndexMarkerDir(), `${markerKey}.json`);
+ }
+
+ private hasFreshLookupIndexMarker(table: string, suffix: string): boolean {
+ const markerPath = this.getLookupIndexMarkerPath(table, suffix);
+ if (!existsSync(markerPath)) return false;
+ try {
+ const raw = JSON.parse(readFileSync(markerPath, "utf-8")) as { updatedAt?: string };
+ const updatedAt = raw.updatedAt ? new Date(raw.updatedAt).getTime() : NaN;
+ if (!Number.isFinite(updatedAt) || (Date.now() - updatedAt) > INDEX_MARKER_TTL_MS) return false;
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ private markLookupIndexReady(table: string, suffix: string): void {
+ mkdirSync(getIndexMarkerDir(), { recursive: true });
+ writeFileSync(
+ this.getLookupIndexMarkerPath(table, suffix),
+ JSON.stringify({ updatedAt: new Date().toISOString() }),
+ "utf-8",
+ );
+ }
+
+ private async ensureLookupIndex(table: string, suffix: string, columnsSql: string): Promise<void> {
+ if (this.hasFreshLookupIndexMarker(table, suffix)) return;
+ const indexName = this.buildLookupIndexName(table, suffix);
+ try {
+ await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`);
+ this.markLookupIndexReady(table, suffix);
+ } catch (e: any) {
+ if (isDuplicateIndexError(e)) {
+ this.markLookupIndexReady(table, suffix);
+ return;
+ }
+ log(`index "${indexName}" skipped: ${e.message}`);
+ }
+ }
+
/** List all tables in the workspace (with retry). */
- async listTables(): Promise<string[]> {
+ async listTables(forceRefresh = false): Promise<string[]> {
+ if (!forceRefresh && this._tablesCache) return [...this._tablesCache];
+
+ const { tables, cacheable } = await this._fetchTables();
+ if (cacheable) this._tablesCache = [...tables];
+ return tables;
+ }
+
+ private async _fetchTables(): Promise<{ tables: string[]; cacheable: boolean }> {
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
try {
const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, {
@@ -211,22 +317,25 @@ export class DeeplakeApi {
});
if (resp.ok) {
const data = await resp.json() as { tables?: { table_name: string }[] };
- return (data.tables ?? []).map(t => t.table_name);
+ return {
+ tables: (data.tables ?? []).map(t => t.table_name),
+ cacheable: true,
+ };
}
if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200);
continue;
}
- return [];
+ return { tables: [], cacheable: false };
} catch {
if (attempt < MAX_RETRIES) {
await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
continue;
}
- return [];
+ return { tables: [], cacheable: false };
}
}
- return [];
+ return { tables: [], cacheable: false };
}
/** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */
@@ -252,6 +361,7 @@ export class DeeplakeApi {
`) USING deeplake`,
);
log(`table "${tbl}" created`);
+ if (!tables.includes(tbl)) this._tablesCache = [...tables, tbl];
}
// BM25 index disabled — CREATE INDEX causes intermittent oid errors on fresh tables.
// See bm25-oid-bug.sh for reproduction. Re-enable once Deeplake fixes the oid invalidation.
@@ -284,6 +394,8 @@ export class DeeplakeApi {
`) USING deeplake`,
);
log(`table "${name}" created`);
+ if (!tables.includes(name)) this._tablesCache = [...tables, name];
}
+ await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`);
}
}
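
The query path above now races every request against a deadline. A minimal sketch of that pattern in isolation, assuming Node 18+ (global fetch plus AbortSignal.timeout); the endpoint URL and header set below are illustrative, not the full request the class builds:

const QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? 10_000);

async function timedQuery(url: string, token: string, sql: string): Promise<Response> {
  // AbortSignal.timeout() aborts the fetch once the deadline passes; the
  // resulting TimeoutError/AbortError is what isTimeoutError() classifies,
  // so timeouts fail fast instead of consuming the retry budget.
  const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS);
  return fetch(url, {
    method: "POST",
    headers: { "Authorization": `Bearer ${token}`, "Content-Type": "application/json" },
    signal,
    body: JSON.stringify({ query: sql }),
  });
}
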
diff --git a/src/hooks/bash-command-compiler.ts b/src/hooks/bash-command-compiler.ts
new file mode 100644
index 0000000..4bf6ce0
--- /dev/null
+++ b/src/hooks/bash-command-compiler.ts
@@ -0,0 +1,524 @@
+import type { DeeplakeApi } from "../deeplake-api.js";
+import { sqlLike } from "../utils/sql.js";
+import { type GrepParams, handleGrepDirect, parseBashGrep } from "./grep-direct.js";
+import { normalizeContent, refineGrepMatches } from "../shell/grep-core.js";
+import {
+ listVirtualPathRowsForDirs,
+ readVirtualPathContents,
+ findVirtualPaths,
+} from "./virtual-table-query.js";
+
+type VirtualRow = Record<string, unknown>;
+
+export type CompiledSegment =
+ | { kind: "echo"; text: string }
+ | { kind: "cat"; paths: string[]; lineLimit: number; fromEnd: boolean; countLines: boolean; ignoreMissing: boolean }
+ | { kind: "ls"; dirs: string[]; longFormat: boolean }
+ | { kind: "find"; dir: string; pattern: string; countOnly: boolean }
+ | { kind: "find_grep"; dir: string; patterns: string[]; params: GrepParams; lineLimit: number }
+ | { kind: "grep"; params: GrepParams; lineLimit: number };
+
+interface ParsedModifier {
+ clean: string;
+ ignoreMissing: boolean;
+}
+
+function isQuoted(ch: string): boolean {
+ return ch === "'" || ch === "\"";
+}
+
+export function splitTopLevel(input: string, operators: string[]): string[] | null {
+ const parts: string[] = [];
+ let current = "";
+ let quote: string | null = null;
+
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) quote = null;
+ current += ch;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ current += ch;
+ continue;
+ }
+
+ const matched = operators.find((op) => input.startsWith(op, i));
+ if (matched) {
+ const trimmed = current.trim();
+ if (trimmed) parts.push(trimmed);
+ current = "";
+ i += matched.length - 1;
+ continue;
+ }
+
+ current += ch;
+ }
+
+ if (quote) return null;
+ const trimmed = current.trim();
+ if (trimmed) parts.push(trimmed);
+ return parts;
+}
+
+export function tokenizeShellWords(input: string): string[] | null {
+ const tokens: string[] = [];
+ let current = "";
+ let quote: string | null = null;
+
+ for (let i = 0; i < input.length; i++) {
+ const ch = input[i];
+ if (quote) {
+ if (ch === quote) {
+ quote = null;
+ } else if (ch === "\\" && quote === "\"" && i + 1 < input.length) {
+ current += input[++i];
+ } else {
+ current += ch;
+ }
+ continue;
+ }
+
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+
+ if (/\s/.test(ch)) {
+ if (current) {
+ tokens.push(current);
+ current = "";
+ }
+ continue;
+ }
+
+ current += ch;
+ }
+
+ if (quote) return null;
+ if (current) tokens.push(current);
+ return tokens;
+}
+
+export function expandBraceToken(token: string): string[] {
+ const match = token.match(/\{([^{}]+)\}/);
+ if (!match) return [token];
+
+ const [expr] = match;
+ const prefix = token.slice(0, match.index);
+ const suffix = token.slice((match.index ?? 0) + expr.length);
+
+ let variants: string[] = [];
+ const numericRange = match[1].match(/^(-?\d+)\.\.(-?\d+)$/);
+ if (numericRange) {
+ const start = Number(numericRange[1]);
+ const end = Number(numericRange[2]);
+ const step = start <= end ? 1 : -1;
+ for (let value = start; step > 0 ? value <= end : value >= end; value += step) {
+ variants.push(String(value));
+ }
+ } else {
+ variants = match[1].split(",");
+ }
+
+ return variants.flatMap((variant) => expandBraceToken(`${prefix}${variant}${suffix}`));
+}
+
+export function stripAllowedModifiers(segment: string): ParsedModifier {
+ const ignoreMissing = /\s2>\/dev\/null\s*$/.test(segment);
+ const clean = segment
+ .replace(/\s2>\/dev\/null\s*$/g, "")
+ .replace(/\s2>&1\s*/g, " ")
+ .trim();
+ return { clean, ignoreMissing };
+}
+
+export function hasUnsupportedRedirection(segment: string): boolean {
+ let quote: string | null = null;
+ for (let i = 0; i < segment.length; i++) {
+ const ch = segment[i];
+ if (quote) {
+ if (ch === quote) quote = null;
+ continue;
+ }
+ if (isQuoted(ch)) {
+ quote = ch;
+ continue;
+ }
+ if (ch === ">" || ch === "<") return true;
+ }
+ return false;
+}
+
+function parseHeadTailStage(stage: string): { lineLimit: number; fromEnd: boolean } | null {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || tokens.length === 0) return null;
+ const [cmd, ...rest] = tokens;
+ if (cmd !== "head" && cmd !== "tail") return null;
+ if (rest.length === 0) return { lineLimit: 10, fromEnd: cmd === "tail" };
+ if (rest.length === 1) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count)) {
+ return { lineLimit: 10, fromEnd: cmd === "tail" };
+ }
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && /^-\d+$/.test(rest[0])) {
+ const count = Number(rest[0]);
+ if (!Number.isFinite(count)) return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 2 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count)) return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ if (rest.length === 3 && rest[0] === "-n") {
+ const count = Number(rest[1]);
+ if (!Number.isFinite(count)) return null;
+ return { lineLimit: Math.abs(count), fromEnd: cmd === "tail" };
+ }
+ return null;
+}
+
+function isValidPipelineHeadTailStage(stage: string): boolean {
+ const tokens = tokenizeShellWords(stage);
+ if (!tokens || (tokens[0] !== "head" && tokens[0] !== "tail")) return false;
+ if (tokens.length === 1) return true;
+ if (tokens.length === 2) return /^-\d+$/.test(tokens[1]);
+ if (tokens.length === 3) return tokens[1] === "-n" && /^-?\d+$/.test(tokens[2]);
+ return false;
+}
+
+function parseFindNamePatterns(tokens: string[]): string[] | null {
+ const patterns: string[] = [];
+ for (let i = 2; i < tokens.length; i++) {
+ const token = tokens[i];
+ if (token === "-type") {
+ i += 1;
+ continue;
+ }
+ if (token === "-o") continue;
+ if (token === "-name") {
+ const pattern = tokens[i + 1];
+ if (!pattern) return null;
+ patterns.push(pattern);
+ i += 1;
+ continue;
+ }
+ return null;
+ }
+ return patterns.length > 0 ? patterns : null;
+}
+
+export function parseCompiledSegment(segment: string): CompiledSegment | null {
+ const { clean, ignoreMissing } = stripAllowedModifiers(segment);
+ if (hasUnsupportedRedirection(clean)) return null;
+ const pipeline = splitTopLevel(clean, ["|"]);
+ if (!pipeline || pipeline.length === 0) return null;
+
+ const tokens = tokenizeShellWords(pipeline[0]);
+ if (!tokens || tokens.length === 0) return null;
+
+ if (tokens[0] === "echo" && pipeline.length === 1) {
+ const text = tokens.slice(1).join(" ");
+ return { kind: "echo", text };
+ }
+
+ if (tokens[0] === "cat") {
+ const paths = tokens.slice(1).flatMap(expandBraceToken);
+ if (paths.length === 0) return null;
+ let lineLimit = 0;
+ let fromEnd = false;
+ let countLines = false;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2) return null;
+ const pipeStage = pipeline[1].trim();
+ if (/^wc\s+-l\s*$/.test(pipeStage)) {
+ if (paths.length !== 1) return null;
+ countLines = true;
+ } else {
+ if (!isValidPipelineHeadTailStage(pipeStage)) return null;
+ const headTail = parseHeadTailStage(pipeStage);
+ if (!headTail) return null;
+ lineLimit = headTail.lineLimit;
+ fromEnd = headTail.fromEnd;
+ }
+ }
+ return { kind: "cat", paths, lineLimit, fromEnd, countLines, ignoreMissing };
+ }
+
+ if (tokens[0] === "head" || tokens[0] === "tail") {
+ if (pipeline.length !== 1) return null;
+ const parsed = parseHeadTailStage(clean);
+ if (!parsed) return null;
+ const headTokens = tokenizeShellWords(clean);
+ if (!headTokens) return null;
+ if (
+ (headTokens[1] === "-n" && headTokens.length < 4) ||
+ (/^-\d+$/.test(headTokens[1] ?? "") && headTokens.length < 3) ||
+ (headTokens.length === 2 && /^-?\d+$/.test(headTokens[1] ?? ""))
+ ) return null;
+ const path = headTokens[headTokens.length - 1];
+ if (path === "head" || path === "tail" || path === "-n") return null;
+ return {
+ kind: "cat",
+ paths: expandBraceToken(path),
+ lineLimit: parsed.lineLimit,
+ fromEnd: parsed.fromEnd,
+ countLines: false,
+ ignoreMissing,
+ };
+ }
+
+ if (tokens[0] === "wc" && tokens[1] === "-l" && pipeline.length === 1 && tokens[2]) {
+ return {
+ kind: "cat",
+ paths: expandBraceToken(tokens[2]),
+ lineLimit: 0,
+ fromEnd: false,
+ countLines: true,
+ ignoreMissing,
+ };
+ }
+
+ if (tokens[0] === "ls" && pipeline.length === 1) {
+ const dirs = tokens
+ .slice(1)
+ .filter(token => !token.startsWith("-"))
+ .flatMap(expandBraceToken);
+ const longFormat = tokens.some(token => token.startsWith("-") && token.includes("l"));
+ return { kind: "ls", dirs: dirs.length > 0 ? dirs : ["/"], longFormat };
+ }
+
+ if (tokens[0] === "find") {
+ if (pipeline.length > 3) return null;
+ const dir = tokens[1];
+ if (!dir) return null;
+ const patterns = parseFindNamePatterns(tokens);
+ if (!patterns) return null;
+ const countOnly = pipeline.length === 2 && /^wc\s+-l\s*$/.test(pipeline[1].trim());
+ if (countOnly) {
+ if (patterns.length !== 1) return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+
+ if (pipeline.length >= 2) {
+ const xargsTokens = tokenizeShellWords(pipeline[1].trim());
+ if (!xargsTokens || xargsTokens[0] !== "xargs") return null;
+ const xargsArgs = xargsTokens.slice(1);
+ while (xargsArgs[0] && xargsArgs[0].startsWith("-")) {
+ if (xargsArgs[0] === "-r") {
+ xargsArgs.shift();
+ continue;
+ }
+ return null;
+ }
+ const grepCmd = xargsArgs.join(" ");
+ const grepParams = parseBashGrep(grepCmd);
+ if (!grepParams) return null;
+ let lineLimit = 0;
+ if (pipeline.length === 3) {
+ const headStage = pipeline[2].trim();
+ if (!isValidPipelineHeadTailStage(headStage)) return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd) return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "find_grep", dir, patterns, params: grepParams, lineLimit };
+ }
+
+ if (patterns.length !== 1) return null;
+ return { kind: "find", dir, pattern: patterns[0], countOnly };
+ }
+
+ const grepParams = parseBashGrep(clean);
+ if (grepParams) {
+ let lineLimit = 0;
+ if (pipeline.length > 1) {
+ if (pipeline.length !== 2) return null;
+ const headStage = pipeline[1].trim();
+ if (!isValidPipelineHeadTailStage(headStage)) return null;
+ const headTail = parseHeadTailStage(headStage);
+ if (!headTail || headTail.fromEnd) return null;
+ lineLimit = headTail.lineLimit;
+ }
+ return { kind: "grep", params: grepParams, lineLimit };
+ }
+
+ return null;
+}
+
+export function parseCompiledBashCommand(cmd: string): CompiledSegment[] | null {
+ if (cmd.includes("||")) return null;
+ const segments = splitTopLevel(cmd, ["&&", ";", "\n"]);
+ if (!segments || segments.length === 0) return null;
+ const parsed = segments.map(parseCompiledSegment);
+ if (parsed.some((segment) => segment === null)) return null;
+ return parsed as CompiledSegment[];
+}
+
+function applyLineWindow(content: string, lineLimit: number, fromEnd: boolean): string {
+ if (lineLimit <= 0) return content;
+ const lines = content.split("\n");
+ return (fromEnd ? lines.slice(-lineLimit) : lines.slice(0, lineLimit)).join("\n");
+}
+
+function countLines(content: string): number {
+ return content === "" ? 0 : content.split("\n").length;
+}
+
+function renderDirectoryListing(dir: string, rows: VirtualRow[], longFormat: boolean): string {
+ const entries = new Map<string, { isDir: boolean; size: number }>();
+ const prefix = dir === "/" ? "/" : `${dir}/`;
+ for (const row of rows) {
+ const path = row["path"] as string;
+ if (!path.startsWith(prefix) && dir !== "/") continue;
+ const rest = dir === "/" ? path.slice(1) : path.slice(prefix.length);
+ const slash = rest.indexOf("/");
+ const name = slash === -1 ? rest : rest.slice(0, slash);
+ if (!name) continue;
+ const existing = entries.get(name);
+ if (slash !== -1) {
+ if (!existing) entries.set(name, { isDir: true, size: 0 });
+ } else {
+ entries.set(name, { isDir: false, size: Number(row["size_bytes"] ?? 0) });
+ }
+ }
+ if (entries.size === 0) return `ls: cannot access '${dir}': No such file or directory`;
+
+ const lines: string[] = [];
+ for (const [name, info] of [...entries].sort((a, b) => a[0].localeCompare(b[0]))) {
+ if (longFormat) {
+ const type = info.isDir ? "drwxr-xr-x" : "-rw-r--r--";
+ const size = String(info.isDir ? 0 : info.size).padStart(6);
+ lines.push(`${type} 1 user user ${size} ${name}${info.isDir ? "/" : ""}`);
+ } else {
+ lines.push(name + (info.isDir ? "/" : ""));
+ }
+ }
+ return lines.join("\n");
+}
+
+interface ExecuteCompiledBashDeps {
+ readVirtualPathContentsFn?: typeof readVirtualPathContents;
+ listVirtualPathRowsForDirsFn?: typeof listVirtualPathRowsForDirs;
+ findVirtualPathsFn?: typeof findVirtualPaths;
+ handleGrepDirectFn?: typeof handleGrepDirect;
+}
+
+export async function executeCompiledBashCommand(
+ api: DeeplakeApi,
+ memoryTable: string,
+ sessionsTable: string,
+ cmd: string,
+ deps: ExecuteCompiledBashDeps = {},
+): Promise<string | null> {
+ const {
+ readVirtualPathContentsFn = readVirtualPathContents,
+ listVirtualPathRowsForDirsFn = listVirtualPathRowsForDirs,
+ findVirtualPathsFn = findVirtualPaths,
+ handleGrepDirectFn = handleGrepDirect,
+ } = deps;
+
+ const plan = parseCompiledBashCommand(cmd);
+ if (!plan) return null;
+
+ const readPaths = [...new Set(plan.flatMap((segment) => segment.kind === "cat" ? segment.paths : []))];
+ const listDirs = [...new Set(plan.flatMap((segment) => segment.kind === "ls" ? segment.dirs.map(dir => dir.replace(/\/+$/, "") || "/") : []))];
+
+ const contentMap = readPaths.length > 0
+ ? await readVirtualPathContentsFn(api, memoryTable, sessionsTable, readPaths)
+ : new Map<string, string | null>();
+ const dirRowsMap = listDirs.length > 0
+ ? await listVirtualPathRowsForDirsFn(api, memoryTable, sessionsTable, listDirs)
+ : new Map<string, VirtualRow[]>();
+
+ const outputs: string[] = [];
+ for (const segment of plan) {
+ if (segment.kind === "echo") {
+ outputs.push(segment.text);
+ continue;
+ }
+
+ if (segment.kind === "cat") {
+ const contents: string[] = [];
+ for (const path of segment.paths) {
+ const content = contentMap.get(path) ?? null;
+ if (content === null) {
+ if (segment.ignoreMissing) continue;
+ return null;
+ }
+ contents.push(content);
+ }
+ const combined = contents.join("");
+ if (segment.countLines) {
+ outputs.push(`${countLines(combined)} ${segment.paths[0]}`);
+ } else {
+ outputs.push(applyLineWindow(combined, segment.lineLimit, segment.fromEnd));
+ }
+ continue;
+ }
+
+ if (segment.kind === "ls") {
+ for (const dir of segment.dirs) {
+ outputs.push(renderDirectoryListing(dir.replace(/\/+$/, "") || "/", dirRowsMap.get(dir.replace(/\/+$/, "") || "/") ?? [], segment.longFormat));
+ }
+ continue;
+ }
+
+ if (segment.kind === "find") {
+ const filenamePattern = sqlLike(segment.pattern).replace(/\*/g, "%").replace(/\?/g, "_");
+ const paths = await findVirtualPathsFn(api, memoryTable, sessionsTable, segment.dir.replace(/\/+$/, "") || "/", filenamePattern);
+ outputs.push(segment.countOnly ? String(paths.length) : (paths.join("\n") || "(no matches)"));
+ continue;
+ }
+
+ if (segment.kind === "find_grep") {
+ const dir = segment.dir.replace(/\/+$/, "") || "/";
+ const candidateBatches = await Promise.all(
+ segment.patterns.map((pattern) =>
+ findVirtualPathsFn(
+ api,
+ memoryTable,
+ sessionsTable,
+ dir,
+ sqlLike(pattern).replace(/\*/g, "%").replace(/\?/g, "_"),
+ ),
+ ),
+ );
+ const candidatePaths = [...new Set(candidateBatches.flat())];
+ if (candidatePaths.length === 0) {
+ outputs.push("(no matches)");
+ continue;
+ }
+ const candidateContents = await readVirtualPathContentsFn(api, memoryTable, sessionsTable, candidatePaths);
+ const matched = refineGrepMatches(
+ candidatePaths.flatMap((path) => {
+ const content = candidateContents.get(path);
+ if (content === null || content === undefined) return [];
+ return [{ path, content: normalizeContent(path, content) }];
+ }),
+ segment.params,
+ );
+ const limited = segment.lineLimit > 0 ? matched.slice(0, segment.lineLimit) : matched;
+ outputs.push(limited.join("\n") || "(no matches)");
+ continue;
+ }
+
+ if (segment.kind === "grep") {
+ const result = await handleGrepDirectFn(api, memoryTable, sessionsTable, segment.params);
+ if (result === null) return null;
+ if (segment.lineLimit > 0) {
+ outputs.push(result.split("\n").slice(0, segment.lineLimit).join("\n"));
+ } else {
+ outputs.push(result);
+ }
+ continue;
+ }
+ }
+
+ return outputs.join("\n");
+}
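
To make the compiler's contract concrete, a hedged usage sketch; the expected values in comments follow from the parsing rules above, and the sample paths are invented:

import { expandBraceToken, parseCompiledBashCommand } from "./bash-command-compiler.js";

// A cat piped into head compiles to one segment with a line window:
parseCompiledBashCommand('cat /notes/todo.md | head -5');
// → [{ kind: "cat", paths: ["/notes/todo.md"], lineLimit: 5, fromEnd: false,
//      countLines: false, ignoreMissing: false }]

// Brace ranges expand before path lookup:
expandBraceToken("log-{1..3}.txt"); // → ["log-1.txt", "log-2.txt", "log-3.txt"]

// Anything the compiler cannot prove safe yields null, which callers treat
// as "fall back to the real (virtual) shell":
parseCompiledBashCommand("cat a.md > out.txt");  // → null (unsupported redirection)
parseCompiledBashCommand("cat a.md || echo no"); // → null (|| short-circuit)
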
diff --git a/src/hooks/capture.ts b/src/hooks/capture.ts
index 81c8385..ae90ad8 100644
--- a/src/hooks/capture.ts
+++ b/src/hooks/capture.ts
@@ -1,8 +1,8 @@
#!/usr/bin/env node
/**
- * Capture hook — writes each session event as a separate row in the sessions table.
- * One INSERT per event, no concat, no race conditions.
+ * Capture hook — appends session events to a local queue on the hot path.
+ * Stop/SubagentStop flush that queue to the sessions table in batched INSERTs.
*
* Used by: UserPromptSubmit, PostToolUse (async), Stop, SubagentStop
*/
@@ -10,20 +10,26 @@
import { readStdin } from "../utils/stdin.js";
import { loadConfig, type Config } from "../config.js";
import { DeeplakeApi } from "../deeplake-api.js";
-import { sqlStr } from "../utils/sql.js";
import { log as _log } from "../utils/debug.js";
-import { buildSessionPath } from "../utils/session-path.js";
+import { isDirectRun } from "../utils/direct-run.js";
import {
bumpTotalCount,
loadTriggerConfig,
shouldTrigger,
tryAcquireLock,
- releaseLock,
} from "./summary-state.js";
import { bundleDirFromImportMeta, spawnWikiWorker, wikiLog } from "./spawn-wiki-worker.js";
+import {
+ appendQueuedSessionRow,
+ buildQueuedSessionRow,
+ buildSessionPath,
+ flushSessionQueue,
+} from "./session-queue.js";
+import { clearSessionQueryCache } from "./query-cache.js";
+
const log = (msg: string) => _log("capture", msg);
-interface HookInput {
+export interface HookInput {
session_id: string;
transcript_path?: string;
cwd?: string;
@@ -31,32 +37,19 @@ interface HookInput {
hook_event_name?: string;
agent_id?: string;
agent_type?: string;
- // UserPromptSubmit
prompt?: string;
- // PostToolUse
tool_name?: string;
tool_input?: Record<string, unknown>;
tool_response?: Record<string, unknown>;
tool_use_id?: string;
- // Stop / SubagentStop
last_assistant_message?: string;
stop_hook_active?: boolean;
agent_transcript_path?: string;
}
-const CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
-
-async function main(): Promise<void> {
- if (!CAPTURE) return;
- const input = await readStdin();
- const config = loadConfig();
- if (!config) { log("no config"); return; }
-
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
+const CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false";
- // Build the event entry
- const ts = new Date().toISOString();
+export function buildCaptureEntry(input: HookInput, timestamp: string): Record<string, unknown> | null {
const meta = {
session_id: input.session_id,
transcript_path: input.transcript_path,
@@ -65,22 +58,20 @@ async function main(): Promise {
hook_event_name: input.hook_event_name,
agent_id: input.agent_id,
agent_type: input.agent_type,
- timestamp: ts,
+ timestamp,
};
- let entry: Record<string, unknown>;
-
if (input.prompt !== undefined) {
- log(`user session=${input.session_id}`);
- entry = {
+ return {
id: crypto.randomUUID(),
...meta,
type: "user_message",
content: input.prompt,
};
- } else if (input.tool_name !== undefined) {
- log(`tool=${input.tool_name} session=${input.session_id}`);
- entry = {
+ }
+
+ if (input.tool_name !== undefined) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "tool_call",
@@ -89,91 +80,165 @@ async function main(): Promise {
tool_input: JSON.stringify(input.tool_input),
tool_response: JSON.stringify(input.tool_response),
};
- } else if (input.last_assistant_message !== undefined) {
- log(`assistant session=${input.session_id}`);
- entry = {
+ }
+
+ if (input.last_assistant_message !== undefined) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "assistant_message",
content: input.last_assistant_message,
...(input.agent_transcript_path ? { agent_transcript_path: input.agent_transcript_path } : {}),
};
- } else {
- log("unknown event, skipping");
- return;
}
- const sessionPath = buildSessionPath(config, input.session_id);
- const line = JSON.stringify(entry);
- log(`writing to ${sessionPath}`);
-
- // Simple INSERT — one row per event, no concat, no race conditions.
- const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
- const filename = sessionPath.split("/").pop() ?? "";
+ return null;
+}
- // For JSONB: only escape single quotes for the SQL literal, keep JSON structure intact.
- // sqlStr() would also escape backslashes and strip control chars, corrupting the JSON.
- const jsonForSql = line.replace(/'/g, "''");
+interface PeriodicSummaryDeps {
+ bundleDir?: string;
+ wikiWorker?: boolean;
+ logFn?: (msg: string) => void;
+ bumpTotalCountFn?: typeof bumpTotalCount;
+ loadTriggerConfigFn?: typeof loadTriggerConfig;
+ shouldTriggerFn?: typeof shouldTrigger;
+ tryAcquireLockFn?: typeof tryAcquireLock;
+ wikiLogFn?: typeof wikiLog;
+ spawnWikiWorkerFn?: typeof spawnWikiWorker;
+}
- const insertSql =
- `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) ` +
- `VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ` +
- `${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'claude_code', '${ts}', '${ts}')`;
+export function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config, deps: PeriodicSummaryDeps = {}): void {
+ const {
+ bundleDir = bundleDirFromImportMeta(import.meta.url),
+ wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1",
+ logFn = log,
+ bumpTotalCountFn = bumpTotalCount,
+ loadTriggerConfigFn = loadTriggerConfig,
+ shouldTriggerFn = shouldTrigger,
+ tryAcquireLockFn = tryAcquireLock,
+ wikiLogFn = wikiLog,
+ spawnWikiWorkerFn = spawnWikiWorker,
+ } = deps;
+
+ if (wikiWorker) return;
try {
- await api.query(insertSql);
- } catch (e: any) {
- // Fallback: table might not exist (session-start failed or org switched mid-session).
- // Create it and retry once.
- if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
- log("table missing, creating and retrying");
- await api.ensureSessionsTable(sessionsTable);
- await api.query(insertSql);
- } else {
- throw e;
+ const state = bumpTotalCountFn(sessionId);
+ const cfg = loadTriggerConfigFn();
+ if (!shouldTriggerFn(state, cfg)) return;
+
+ if (!tryAcquireLockFn(sessionId)) {
+ logFn(`periodic trigger suppressed (lock held) session=${sessionId}`);
+ return;
}
- }
- log("capture ok → cloud");
+ wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
+ spawnWikiWorkerFn({
+ config,
+ sessionId,
+ cwd,
+ bundleDir,
+ reason: "Periodic",
+ });
+ } catch (e: any) {
+ logFn(`periodic trigger error: ${e.message}`);
+ }
+}
- maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config);
+interface CaptureHookDeps {
+ captureEnabled?: boolean;
+ config?: Config | null;
+ now?: () => string;
+ createApi?: (config: Config) => DeeplakeApi;
+ appendQueuedSessionRowFn?: typeof appendQueuedSessionRow;
+ buildQueuedSessionRowFn?: typeof buildQueuedSessionRow;
+ flushSessionQueueFn?: typeof flushSessionQueue;
+ clearSessionQueryCacheFn?: typeof clearSessionQueryCache;
+ maybeTriggerPeriodicSummaryFn?: typeof maybeTriggerPeriodicSummary;
+ logFn?: (msg: string) => void;
}
-/** Increment the event counter and, if the threshold is crossed, spawn a background wiki worker. */
-function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config): void {
- if (process.env.HIVEMIND_WIKI_WORKER === "1") return;
+export async function runCaptureHook(input: HookInput, deps: CaptureHookDeps = {}): Promise<{
+ status: "disabled" | "no_config" | "ignored" | "queued";
+ entry?: Record<string, unknown>;
+ flushStatus?: string;
+}> {
+ const {
+ captureEnabled = CAPTURE,
+ config = loadConfig(),
+ now = () => new Date().toISOString(),
+ createApi = (activeConfig) => new DeeplakeApi(
+ activeConfig.token,
+ activeConfig.apiUrl,
+ activeConfig.orgId,
+ activeConfig.workspaceId,
+ activeConfig.sessionsTableName,
+ ),
+ appendQueuedSessionRowFn = appendQueuedSessionRow,
+ buildQueuedSessionRowFn = buildQueuedSessionRow,
+ flushSessionQueueFn = flushSessionQueue,
+ clearSessionQueryCacheFn = clearSessionQueryCache,
+ maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary,
+ logFn = log,
+ } = deps;
+
+ if (!captureEnabled) return { status: "disabled" };
+ if (!config) {
+ logFn("no config");
+ return { status: "no_config" };
+ }
- try {
- const state = bumpTotalCount(sessionId);
- const cfg = loadTriggerConfig();
- if (!shouldTrigger(state, cfg)) return;
+ const ts = now();
+ const entry = buildCaptureEntry(input, ts);
+ if (!entry) {
+ logFn("unknown event, skipping");
+ return { status: "ignored" };
+ }
- if (!tryAcquireLock(sessionId)) {
- log(`periodic trigger suppressed (lock held) session=${sessionId}`);
- return;
- }
+ if (input.prompt !== undefined) logFn(`user session=${input.session_id}`);
+ else if (input.tool_name !== undefined) logFn(`tool=${input.tool_name} session=${input.session_id}`);
+ else logFn(`assistant session=${input.session_id}`);
- wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
- try {
- spawnWikiWorker({
- config,
- sessionId,
- cwd,
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "Periodic",
- });
- } catch (e: any) {
- log(`periodic spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr: any) {
- log(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`);
- }
- throw e;
- }
- } catch (e: any) {
- log(`periodic trigger error: ${e.message}`);
+ if (input.hook_event_name === "UserPromptSubmit") {
+ clearSessionQueryCacheFn(input.session_id);
+ }
+
+ const sessionPath = buildSessionPath(config, input.session_id);
+ const line = JSON.stringify(entry);
+ const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
+ appendQueuedSessionRowFn(buildQueuedSessionRowFn({
+ sessionPath,
+ line,
+ userName: config.userName,
+ projectName,
+ description: input.hook_event_name ?? "",
+ agent: "claude_code",
+ timestamp: ts,
+ }));
+ logFn(`queued ${input.hook_event_name ?? "event"} for ${sessionPath}`);
+
+ maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config);
+
+ if (input.hook_event_name === "Stop" || input.hook_event_name === "SubagentStop") {
+ const result = await flushSessionQueueFn(createApi(config), {
+ sessionId: input.session_id,
+ sessionsTable: config.sessionsTableName,
+ drainAll: true,
+ });
+ logFn(`flush ${result.status}: rows=${result.rows} batches=${result.batches}`);
+ return { status: "queued", entry, flushStatus: result.status };
}
+
+ return { status: "queued", entry };
}
-main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); });
+/* c8 ignore start */
+async function main(): Promise<void> {
+ const input = await readStdin();
+ await runCaptureHook(input);
+}
+
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); });
+}
+/* c8 ignore stop */
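
With every collaborator injectable, the hook can be exercised in-process. A test-style sketch, assuming a stubbed Config fixture — the field names below are placeholders, not the full Config shape, and the import paths assume a test file sitting next to src/hooks/capture.ts:

import { runCaptureHook, type HookInput } from "./capture.js";
import type { Config } from "../config.js";

// Stand-in config: only the fields the queued-row path reads are stubbed.
const fakeConfig = { userName: "tester", sessionsTableName: "sessions_test" } as unknown as Config;

const input: HookInput = {
  session_id: "s-123",
  cwd: "/repos/demo",
  hook_event_name: "UserPromptSubmit",
  prompt: "hello",
};

const queued: unknown[] = [];
const result = await runCaptureHook(input, {
  captureEnabled: true,
  config: fakeConfig,
  now: () => "2025-01-01T00:00:00.000Z",
  appendQueuedSessionRowFn: (row) => { queued.push(row); }, // capture rows instead of touching disk
  clearSessionQueryCacheFn: () => {},
  maybeTriggerPeriodicSummaryFn: () => {},
  logFn: () => {},
});
// result.status === "queued" and queued.length === 1; no network I/O occurs,
// since only Stop/SubagentStop reach flushSessionQueue.
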
diff --git a/src/hooks/codex/capture.ts b/src/hooks/codex/capture.ts
index 0c80802..615b72d 100644
--- a/src/hooks/codex/capture.ts
+++ b/src/hooks/codex/capture.ts
@@ -1,61 +1,48 @@
#!/usr/bin/env node
/**
- * Codex Capture hook — writes each session event as a row in the sessions table.
+ * Codex Capture hook — appends session events to a local queue on the hot path.
*
* Used by: UserPromptSubmit, PostToolUse
- *
- * Codex input fields:
- * All events: session_id, transcript_path, cwd, hook_event_name, model
- * UserPromptSubmit: prompt (user text)
- * PostToolUse: tool_name, tool_use_id, tool_input, tool_response
- * Stop: (no extra fields — Codex has no last_assistant_message equivalent)
*/
import { readStdin } from "../../utils/stdin.js";
import { loadConfig, type Config } from "../../config.js";
-import { DeeplakeApi } from "../../deeplake-api.js";
-import { sqlStr } from "../../utils/sql.js";
import { log as _log } from "../../utils/debug.js";
-import { buildSessionPath } from "../../utils/session-path.js";
+import { isDirectRun } from "../../utils/direct-run.js";
import {
bumpTotalCount,
loadTriggerConfig,
shouldTrigger,
tryAcquireLock,
- releaseLock,
} from "../summary-state.js";
import { bundleDirFromImportMeta, spawnCodexWikiWorker, wikiLog } from "./spawn-wiki-worker.js";
+import {
+ appendQueuedSessionRow,
+ buildQueuedSessionRow,
+ buildSessionPath,
+} from "../session-queue.js";
+import { clearSessionQueryCache } from "../query-cache.js";
+
const log = (msg: string) => _log("codex-capture", msg);
-interface CodexHookInput {
+export interface CodexHookInput {
session_id: string;
transcript_path?: string | null;
cwd: string;
hook_event_name: string;
model: string;
turn_id?: string;
- // UserPromptSubmit
prompt?: string;
- // PostToolUse (Bash only in Codex)
tool_name?: string;
tool_use_id?: string;
tool_input?: { command?: string };
tool_response?: Record<string, unknown>;
}
-const CAPTURE = process.env.HIVEMIND_CAPTURE !== "false";
-
-async function main(): Promise<void> {
- if (!CAPTURE) return;
- const input = await readStdin();
- const config = loadConfig();
- if (!config) { log("no config"); return; }
+const CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false";
- const sessionsTable = config.sessionsTableName;
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable);
-
- const ts = new Date().toISOString();
+export function buildCodexCaptureEntry(input: CodexHookInput, timestamp: string): Record<string, unknown> | null {
const meta = {
session_id: input.session_id,
transcript_path: input.transcript_path,
@@ -63,22 +50,20 @@ async function main(): Promise {
hook_event_name: input.hook_event_name,
model: input.model,
turn_id: input.turn_id,
- timestamp: ts,
+ timestamp,
};
- let entry: Record<string, unknown>;
-
if (input.hook_event_name === "UserPromptSubmit" && input.prompt !== undefined) {
- log(`user session=${input.session_id}`);
- entry = {
+ return {
id: crypto.randomUUID(),
...meta,
type: "user_message",
content: input.prompt,
};
- } else if (input.hook_event_name === "PostToolUse" && input.tool_name !== undefined) {
- log(`tool=${input.tool_name} session=${input.session_id}`);
- entry = {
+ }
+
+ if (input.hook_event_name === "PostToolUse" && input.tool_name !== undefined) {
+ return {
id: crypto.randomUUID(),
...meta,
type: "tool_call",
@@ -87,75 +72,132 @@ async function main(): Promise {
tool_input: JSON.stringify(input.tool_input),
tool_response: JSON.stringify(input.tool_response),
};
- } else {
- log(`unknown event: ${input.hook_event_name}, skipping`);
- return;
}
- const sessionPath = buildSessionPath(config, input.session_id);
- const line = JSON.stringify(entry);
- log(`writing to ${sessionPath}`);
+ return null;
+}
- const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
- const filename = sessionPath.split("/").pop() ?? "";
- const jsonForSql = sqlStr(line);
+interface PeriodicSummaryDeps {
+ bundleDir?: string;
+ wikiWorker?: boolean;
+ logFn?: (msg: string) => void;
+ bumpTotalCountFn?: typeof bumpTotalCount;
+ loadTriggerConfigFn?: typeof loadTriggerConfig;
+ shouldTriggerFn?: typeof shouldTrigger;
+ tryAcquireLockFn?: typeof tryAcquireLock;
+ wikiLogFn?: typeof wikiLog;
+ spawnCodexWikiWorkerFn?: typeof spawnCodexWikiWorker;
+}
- const insertSql =
- `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) ` +
- `VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ` +
- `${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'codex', '${ts}', '${ts}')`;
+export function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config, deps: PeriodicSummaryDeps = {}): void {
+ const {
+ bundleDir = bundleDirFromImportMeta(import.meta.url),
+ wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1",
+ logFn = log,
+ bumpTotalCountFn = bumpTotalCount,
+ loadTriggerConfigFn = loadTriggerConfig,
+ shouldTriggerFn = shouldTrigger,
+ tryAcquireLockFn = tryAcquireLock,
+ wikiLogFn = wikiLog,
+ spawnCodexWikiWorkerFn = spawnCodexWikiWorker,
+ } = deps;
+
+ if (wikiWorker) return;
try {
- await api.query(insertSql);
- } catch (e: any) {
- if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) {
- log("table missing, creating and retrying");
- await api.ensureSessionsTable(sessionsTable);
- await api.query(insertSql);
- } else {
- throw e;
+ const state = bumpTotalCountFn(sessionId);
+ const cfg = loadTriggerConfigFn();
+ if (!shouldTriggerFn(state, cfg)) return;
+
+ if (!tryAcquireLockFn(sessionId)) {
+ logFn(`periodic trigger suppressed (lock held) session=${sessionId}`);
+ return;
}
- }
- log("capture ok");
+ wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
+ spawnCodexWikiWorkerFn({
+ config,
+ sessionId,
+ cwd,
+ bundleDir,
+ reason: "Periodic",
+ });
+ } catch (e: any) {
+ logFn(`periodic trigger error: ${e.message}`);
+ }
+}
- maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config);
+interface CodexCaptureDeps {
+ captureEnabled?: boolean;
+ config?: Config | null;
+ now?: () => string;
+ appendQueuedSessionRowFn?: typeof appendQueuedSessionRow;
+ buildQueuedSessionRowFn?: typeof buildQueuedSessionRow;
+ clearSessionQueryCacheFn?: typeof clearSessionQueryCache;
+ maybeTriggerPeriodicSummaryFn?: typeof maybeTriggerPeriodicSummary;
+ logFn?: (msg: string) => void;
}
-function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config): void {
- if (process.env.HIVEMIND_WIKI_WORKER === "1") return;
+export async function runCodexCaptureHook(input: CodexHookInput, deps: CodexCaptureDeps = {}): Promise<{
+ status: "disabled" | "no_config" | "ignored" | "queued";
+ entry?: Record<string, unknown>;
+}> {
+ const {
+ captureEnabled = CAPTURE,
+ config = loadConfig(),
+ now = () => new Date().toISOString(),
+ appendQueuedSessionRowFn = appendQueuedSessionRow,
+ buildQueuedSessionRowFn = buildQueuedSessionRow,
+ clearSessionQueryCacheFn = clearSessionQueryCache,
+ maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary,
+ logFn = log,
+ } = deps;
+
+ if (!captureEnabled) return { status: "disabled" };
+ if (!config) {
+ logFn("no config");
+ return { status: "no_config" };
+ }
- try {
- const state = bumpTotalCount(sessionId);
- const cfg = loadTriggerConfig();
- if (!shouldTrigger(state, cfg)) return;
+ const ts = now();
+ const entry = buildCodexCaptureEntry(input, ts);
+ if (!entry) {
+ logFn(`unknown event: ${input.hook_event_name}, skipping`);
+ return { status: "ignored" };
+ }
- if (!tryAcquireLock(sessionId)) {
- log(`periodic trigger suppressed (lock held) session=${sessionId}`);
- return;
- }
+ if (input.hook_event_name === "UserPromptSubmit") logFn(`user session=${input.session_id}`);
+ else logFn(`tool=${input.tool_name} session=${input.session_id}`);
- wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`);
- try {
- spawnCodexWikiWorker({
- config,
- sessionId,
- cwd,
- bundleDir: bundleDirFromImportMeta(import.meta.url),
- reason: "Periodic",
- });
- } catch (e: any) {
- log(`periodic spawn failed: ${e.message}`);
- try {
- releaseLock(sessionId);
- } catch (releaseErr: any) {
- log(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`);
- }
- throw e;
- }
- } catch (e: any) {
- log(`periodic trigger error: ${e.message}`);
+ if (input.hook_event_name === "UserPromptSubmit") {
+ clearSessionQueryCacheFn(input.session_id);
}
+
+ const sessionPath = buildSessionPath(config, input.session_id);
+ const line = JSON.stringify(entry);
+ const projectName = (input.cwd ?? "").split("/").pop() || "unknown";
+ appendQueuedSessionRowFn(buildQueuedSessionRowFn({
+ sessionPath,
+ line,
+ userName: config.userName,
+ projectName,
+ description: input.hook_event_name ?? "",
+ agent: "codex",
+ timestamp: ts,
+ }));
+ logFn(`queued ${input.hook_event_name} for ${sessionPath}`);
+
+ maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config);
+ return { status: "queued", entry };
}
-main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); });
+/* c8 ignore start */
+async function main(): Promise<void> {
+ const input = await readStdin();
+ await runCodexCaptureHook(input);
+}
+
+if (isDirectRun(import.meta.url)) {
+ main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); });
+}
+/* c8 ignore stop */
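
The pure entry builder is now directly testable as well. A small sketch of the event → row mapping; the session and model values are illustrative:

import { buildCodexCaptureEntry } from "./capture.js";

const ts = new Date().toISOString();

// PostToolUse with a tool_name becomes a "tool_call" entry whose
// tool_input/tool_response are JSON-encoded strings:
const entry = buildCodexCaptureEntry({
  session_id: "s-9",
  cwd: "/repos/demo",
  hook_event_name: "PostToolUse",
  model: "codex-model",
  tool_name: "Bash",
  tool_use_id: "t-1",
  tool_input: { command: "ls" },
  tool_response: { output: "README.md" },
}, ts);
// entry?.type === "tool_call"

// Events the builder does not understand (e.g. Stop, which carries neither
// prompt nor tool_name) map to null, so runCodexCaptureHook reports "ignored":
buildCodexCaptureEntry({ session_id: "s-9", cwd: "/", hook_event_name: "Stop", model: "codex-model" }, ts); // → null
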
diff --git a/src/hooks/codex/pre-tool-use.ts b/src/hooks/codex/pre-tool-use.ts
index 1d0904d..3b1aacd 100644
--- a/src/hooks/codex/pre-tool-use.ts
+++ b/src/hooks/codex/pre-tool-use.ts
@@ -4,74 +4,49 @@
* Codex PreToolUse hook — intercepts Bash commands targeting ~/.deeplake/memory/.
*
* Strategy: "block + inject"
- * Codex 0.118.0 doesn't parse JSON hook output, but supports:
- * - stderr + exit code 2 → blocks the command, stderr becomes model feedback
- * - plain text stdout → adds context (command still runs)
- * - exit 0 + no output → pass through
+ * Codex does not parse JSON hook output here, so the CLI wrapper still maps:
+ * - action=pass -> exit 0, no output
+ * - action=guide -> stdout guidance, exit 0
+ * - action=block -> stderr content, exit 2
*
- * When we detect a memory-targeting command, we:
- * 1. Fetch the real content from the cloud (SQL or virtual shell)
- * 2. Block the command (exit 2) and return the content via stderr
- * 3. The model receives the cloud content as if the command ran
- *
- * Codex input: { session_id, tool_name, tool_use_id, tool_input: { command }, cwd, ... }
+ * The source logic is exported so tests can exercise it directly without
+ * spawning the bundled script in a subprocess.
*/
-import { existsSync } from "node:fs";
import { execFileSync } from "node:child_process";
-import { join } from "node:path";
-import { homedir } from "node:os";
+import { existsSync } from "node:fs";
+import { join, dirname } from "node:path";
import { fileURLToPath } from "node:url";
-import { dirname } from "node:path";
import { readStdin } from "../../utils/stdin.js";
import { loadConfig } from "../../config.js";
import { DeeplakeApi } from "../../deeplake-api.js";
-import { sqlStr, sqlLike } from "../../utils/sql.js";
+import { sqlLike } from "../../utils/sql.js";
import { parseBashGrep, handleGrepDirect } from "../grep-direct.js";
-
+import { executeCompiledBashCommand } from "../bash-command-compiler.js";
+import {
+ findVirtualPaths,
+ readVirtualPathContents,
+ listVirtualPathRows,
+ readVirtualPathContent,
+} from "../virtual-table-query.js";
+import {
+ readCachedIndexContent,
+ writeCachedIndexContent,
+} from "../query-cache.js";
import { log as _log } from "../../utils/debug.js";
-const log = (msg: string) => _log("codex-pre", msg);
+import { isDirectRun } from "../../utils/direct-run.js";
+import { isSafe, touchesMemory, rewritePaths } from "../memory-path-utils.js";
-const MEMORY_PATH = join(homedir(), ".deeplake", "memory");
-const TILDE_PATH = "~/.deeplake/memory";
-const HOME_VAR_PATH = "$HOME/.deeplake/memory";
+export { isSafe, touchesMemory, rewritePaths };
+
+const log = (msg: string) => _log("codex-pre", msg);
const __bundleDir = dirname(fileURLToPath(import.meta.url));
const SHELL_BUNDLE = existsSync(join(__bundleDir, "shell", "deeplake-shell.js"))
? join(__bundleDir, "shell", "deeplake-shell.js")
: join(__bundleDir, "..", "shell", "deeplake-shell.js");
-// Safe builtins that can run against the virtual FS
-const SAFE_BUILTINS = new Set([
- "cat", "ls", "cp", "mv", "rm", "rmdir", "mkdir", "touch", "ln", "chmod",
- "stat", "readlink", "du", "tree", "file",
- "grep", "egrep", "fgrep", "rg", "sed", "awk", "cut", "tr", "sort", "uniq",
- "wc", "head", "tail", "tac", "rev", "nl", "fold", "expand", "unexpand",
- "paste", "join", "comm", "column", "diff", "strings", "split",
- "find", "xargs", "which",
- "jq", "yq", "xan", "base64", "od",
- "tar", "gzip", "gunzip", "zcat",
- "md5sum", "sha1sum", "sha256sum",
- "echo", "printf", "tee",
- "pwd", "cd", "basename", "dirname", "env", "printenv", "hostname", "whoami",
- "date", "seq", "expr", "sleep", "timeout", "time", "true", "false", "test",
- "alias", "unalias", "history", "help", "clear",
- "for", "while", "do", "done", "if", "then", "else", "fi", "case", "esac",
-]);
-
-function isSafe(cmd: string): boolean {
- // Reject command/process substitution before checking tokens
- if (/\$\(|`|<\(/.test(cmd)) return false;
- const stripped = cmd.replace(/'[^']*'/g, "''").replace(/"[^"]*"/g, '""');
- const stages = stripped.split(/\||;|&&|\|\||\n/);
- for (const stage of stages) {
- const firstToken = stage.trim().split(/\s+/)[0] ?? "";
- if (firstToken && !SAFE_BUILTINS.has(firstToken)) return false;
- }
- return true;
-}
-
-interface CodexPreToolUseInput {
+export interface CodexPreToolUseInput {
session_id: string;
tool_name: string;
tool_use_id: string;
@@ -82,168 +57,239 @@ interface CodexPreToolUseInput {
turn_id?: string;
}
-function touchesMemory(cmd: string): boolean {
- return cmd.includes(MEMORY_PATH) || cmd.includes(TILDE_PATH) || cmd.includes(HOME_VAR_PATH);
-}
-
-function rewritePaths(cmd: string): string {
- return cmd
- .replace(new RegExp(MEMORY_PATH.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + "/?", "g"), "/")
- .replace(/~\/.deeplake\/memory\/?/g, "/")
- .replace(/\$HOME\/.deeplake\/memory\/?/g, "/")
- .replace(/"\$HOME\/.deeplake\/memory\/?"/g, '"/"');
+export interface CodexPreToolDecision {
+ action: "pass" | "guide" | "block";
+ output?: string;
+ rewrittenCommand?: string;
}
-/** Block the command and return content to the model via stderr + exit 2. */
-function blockWithContent(content: string): never {
- process.stderr.write(content);
- process.exit(2);
+export function buildUnsupportedGuidance(): string {
+ return "This command is not supported for ~/.deeplake/memory/ operations. " +
+ "Only bash builtins are available: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find, etc. " +
+ "Do NOT use python, python3, node, curl, or other interpreters. " +
+ "Rewrite your command using only bash tools and retry.";
}
-/** Run a command through the virtual shell and return the output. */
-function runVirtualShell(cmd: string): string {
+export function runVirtualShell(cmd: string, shellBundle = SHELL_BUNDLE, logFn: (msg: string) => void = log): string {
try {
- return execFileSync("node", [SHELL_BUNDLE, "-c", cmd], {
+ return execFileSync("node", [shellBundle, "-c", cmd], {
encoding: "utf-8",
timeout: 10_000,
env: { ...process.env },
- stdio: ["pipe", "pipe", "pipe"], // capture stderr instead of inheriting
+ stdio: ["pipe", "pipe", "pipe"],
}).trim();
} catch (e: any) {
- log(`virtual shell failed: ${e.message}`);
+ logFn(`virtual shell failed: ${e.message}`);
return "";
}
}
-async function main(): Promise {
- const input = await readStdin();
+function buildIndexContent(rows: Record<string, unknown>[]): string {
+ const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""];
+ for (const row of rows) {
+ const path = row["path"] as string;
+ const project = row["project"] as string || "";
+ const description = (row["description"] as string || "").slice(0, 120);
+ const date = (row["creation_date"] as string || "").slice(0, 10);
+ lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`);
+ }
+ return lines.join("\n");
+}
+
+interface CodexPreToolDeps {
+ config?: ReturnType<typeof loadConfig>;
+ createApi?: (table: string, config: NonNullable<ReturnType<typeof loadConfig>>) => DeeplakeApi;
+ executeCompiledBashCommandFn?: typeof executeCompiledBashCommand;
+ readVirtualPathContentsFn?: typeof readVirtualPathContents;
+ readVirtualPathContentFn?: typeof readVirtualPathContent;
+ listVirtualPathRowsFn?: typeof listVirtualPathRows;
+ findVirtualPathsFn?: typeof findVirtualPaths;
+ handleGrepDirectFn?: typeof handleGrepDirect;
+ readCachedIndexContentFn?: typeof readCachedIndexContent;
+ writeCachedIndexContentFn?: typeof writeCachedIndexContent;
+ runVirtualShellFn?: typeof runVirtualShell;
+ shellBundle?: string;
+ logFn?: (msg: string) => void;
+}
+
+export async function processCodexPreToolUse(
+ input: CodexPreToolUseInput,
+ deps: CodexPreToolDeps = {},
+): Promise<CodexPreToolDecision> {
+ const {
+ config = loadConfig(),
+ createApi = (table, activeConfig) => new DeeplakeApi(
+ activeConfig.token,
+ activeConfig.apiUrl,
+ activeConfig.orgId,
+ activeConfig.workspaceId,
+ table,
+ ),
+ executeCompiledBashCommandFn = executeCompiledBashCommand,
+ readVirtualPathContentsFn = readVirtualPathContents,
+ readVirtualPathContentFn = readVirtualPathContent,
+ listVirtualPathRowsFn = listVirtualPathRows,
+ findVirtualPathsFn = findVirtualPaths,
+ handleGrepDirectFn = handleGrepDirect,
+ readCachedIndexContentFn = readCachedIndexContent,
+ writeCachedIndexContentFn = writeCachedIndexContent,
+ runVirtualShellFn = runVirtualShell,
+ shellBundle = SHELL_BUNDLE,
+ logFn = log,
+ } = deps;
+
const cmd = input.tool_input?.command ?? "";
- log(`hook fired: cmd=${cmd}`);
+ logFn(`hook fired: cmd=${cmd}`);
- if (!touchesMemory(cmd)) return;
+ if (!touchesMemory(cmd)) return { action: "pass" };
const rewritten = rewritePaths(cmd);
-
if (!isSafe(rewritten)) {
- // Instead of hard-blocking (exit code 2), output guidance so the agent self-corrects.
- const guidance = "This command is not supported for ~/.deeplake/memory/ operations. " +
- "Only bash builtins are available: cat, ls, grep, echo, jq, head, tail, sed, awk, wc, sort, find, etc. " +
- "Do NOT use python, python3, node, curl, or other interpreters. " +
- "Rewrite your command using only bash tools and retry.";
- log(`unsupported command, returning guidance: ${rewritten}`);
- process.stdout.write(guidance);
- process.exit(0);
+ const guidance = buildUnsupportedGuidance();
+ logFn(`unsupported command, returning guidance: ${rewritten}`);
+ return {
+ action: "guide",
+ output: guidance,
+ rewrittenCommand: rewritten,
+ };
}
- // ── Fast path: handle grep and cat directly via SQL ──
- const config = loadConfig();
if (config) {
const table = process.env["HIVEMIND_TABLE"] ?? "memory";
- const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table);
+ const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions";
+ const api = createApi(table, config);
+
+ const readVirtualPathContentsWithCache = async (
+ cachePaths: string[],
+ ): Promise