diff --git a/.claude-plugin/marketplace.json b/.claude-plugin/marketplace.json index 4fd6dc3..427fc2e 100644 --- a/.claude-plugin/marketplace.json +++ b/.claude-plugin/marketplace.json @@ -6,13 +6,13 @@ }, "metadata": { "description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake", - "version": "0.6.37" + "version": "0.6.38" }, "plugins": [ { "name": "hivemind", "description": "Persistent shared memory powered by Deeplake — captures all session activity and provides cross-session, cross-agent memory search", - "version": "0.6.37", + "version": "0.6.38", "source": "./claude-code", "homepage": "https://github.com/activeloopai/hivemind" } diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json index dddddbd..e178805 100644 --- a/.claude-plugin/plugin.json +++ b/.claude-plugin/plugin.json @@ -1,7 +1,7 @@ { "name": "hivemind", "description": "Cloud-backed persistent memory powered by Deeplake — read, write, and share memory across Claude Code sessions and agents", - "version": "0.6.37", + "version": "0.6.38", "author": { "name": "Activeloop", "url": "https://deeplake.ai" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 665f2a9..c39022e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,14 +3,49 @@ name: CI on: push: branches: [main, dev] + # Run on every PR regardless of base branch. The `branches` filter on + # pull_request only matches base, so stacked / long-lived branches + # (e.g. `optimizations`) would otherwise skip the whole CI job. pull_request: - branches: [main, dev] permissions: contents: read pull-requests: write jobs: + duplication: + # Code-duplication regression guard. Pulled out of the `test` job so + # the PR checks table shows a dedicated pass/fail row — reviewers see + # at a glance whether the change introduced duplicated code without + # having to open the combined "Typecheck and Test" log. 
+ name: Duplication check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install dependencies + run: npm install + + - name: Run jscpd + # Threshold 7% is the current baseline (see .jscpd.json). The job + # fails if a future change pushes duplication above it, so the + # number is a regression guard — reviewers can see the exact + # clones in the markdown report uploaded below. + run: npm run dup + + - name: Upload jscpd report + if: always() + uses: actions/upload-artifact@v4 + with: + name: jscpd-report + path: jscpd-report/ + if-no-files-found: ignore + test: name: Typecheck and Test runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 4f538ba..b952b68 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ tmp/ .env .env.* coverage/ +jscpd-report/ bench/ .claude/ CLAUDE.md diff --git a/.jscpd.json b/.jscpd.json new file mode 100644 index 0000000..842983c --- /dev/null +++ b/.jscpd.json @@ -0,0 +1,21 @@ +{ + "threshold": 7, + "reporters": ["console", "markdown"], + "output": "./jscpd-report", + "ignore": [ + "**/node_modules/**", + "**/dist/**", + "**/bundle/**", + "**/coverage/**", + "**/*.test.ts", + "**/tests/**", + "**/fixtures/**", + "**/claude-code/.claude-plugin/**", + "**/codex/.codex-plugin/**" + ], + "absolute": false, + "gitignore": true, + "format": ["typescript"], + "minLines": 10, + "minTokens": 60 +} diff --git a/claude-code/.claude-plugin/plugin.json b/claude-code/.claude-plugin/plugin.json index dddddbd..e178805 100644 --- a/claude-code/.claude-plugin/plugin.json +++ b/claude-code/.claude-plugin/plugin.json @@ -1,7 +1,7 @@ { "name": "hivemind", "description": "Cloud-backed persistent memory powered by Deeplake — read, write, and share memory across Claude Code sessions and agents", - "version": "0.6.37", + "version": "0.6.38", "author": { "name": "Activeloop", "url": "https://deeplake.ai" diff --git 
a/claude-code/bundle/capture.js b/claude-code/bundle/capture.js index 82a4aac..50551da 100755 --- a/claude-code/bundle/capture.js +++ b/claude-code/bundle/capture.js @@ -2,13 +2,13 @@ // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -79,27 +79,21 @@ function log(tag, msg) { function sqlStr(value) { return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); } -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -109,7 +103,7 @@ var MAX_CONCURRENCY = 5; var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); function sleep(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); + return new Promise((resolve) => setTimeout(resolve, ms)); } function isTimeoutError(error) { const name = error instanceof Error ? error.name.toLowerCase() : ""; @@ -142,7 +136,7 @@ var Semaphore = class { this.active++; return; } - await new Promise((resolve2) => this.waiting.push(resolve2)); + await new Promise((resolve) => this.waiting.push(resolve)); } release() { this.active--; @@ -401,24 +395,17 @@ var DeeplakeApi = class { } }; -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } +// dist/src/utils/session-path.js +function buildSessionPath(config, sessionId) { + const workspace = config.workspaceId ?? 
"default"; + return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`; } // dist/src/hooks/summary-state.js import { readFileSync as readFileSync3, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; import { homedir as homedir3 } from "node:os"; import { join as join4 } from "node:path"; +var dlog = (msg) => log("summary-state", msg); var STATE_DIR = join4(homedir3(), ".claude", "hooks", "summary-state"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); function statePath(sessionId) { @@ -456,9 +443,11 @@ function withRmwLock(sessionId, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { + dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`); } continue; } @@ -471,7 +460,8 @@ function withRmwLock(sessionId, fn) { closeSync(fd); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`); } } } @@ -511,11 +501,13 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; - } catch { + } catch (readErr) { + dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); } try { unlinkSync(p); - } catch { + } catch (unlinkErr) { + dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); return false; } } @@ -533,15 +525,45 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { throw e; } } +function releaseLock(sessionId) { + try { + unlinkSync(lockPath(sessionId)); + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: 
${e.message}`); + } + } +} // dist/src/hooks/spawn-wiki-worker.js import { spawn, execSync } from "node:child_process"; -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname, join as join5 } from "node:path"; -import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { fileURLToPath } from "node:url"; +import { dirname, join as join6 } from "node:path"; +import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4 } from "node:fs"; import { homedir as homedir4, tmpdir as tmpdir2 } from "node:os"; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join5 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join5(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync3(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } + } + }; +} + +// dist/src/hooks/spawn-wiki-worker.js var HOME = homedir4(); -var WIKI_LOG = join5(HOME, ".claude", "hooks", "deeplake-wiki.log"); +var wikiLogger = makeWikiLogger(join6(HOME, ".claude", "hooks")); +var WIKI_LOG = wikiLogger.path; var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. Think of this as building a knowledge graph, not writing a summary. SESSION JSONL path: __JSONL__ @@ -594,27 +616,20 @@ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. Future you w PRIVACY: Never include absolute filesystem paths (e.g. /home/user/..., /Users/..., C:\\\\...) in the summary. Use only project-relative paths or the project name. The Source and Project fields above are already correct \u2014 do not change them. 
LENGTH LIMIT: Keep the total summary under 4000 characters. Be dense and concise \u2014 prioritize facts over prose. If a session is short, the summary should be short too.`; -function wikiLog(msg) { - try { - mkdirSync3(join5(HOME, ".claude", "hooks"), { recursive: true }); - appendFileSync2(WIKI_LOG, `[${utcTimestamp()}] ${msg} -`); - } catch { - } -} +var wikiLog = wikiLogger.log; function findClaudeBin() { try { return execSync("which claude 2>/dev/null", { encoding: "utf-8" }).trim(); } catch { - return join5(HOME, ".claude", "local", "claude"); + return join6(HOME, ".claude", "local", "claude"); } } function spawnWikiWorker(opts) { const { config, sessionId, cwd, bundleDir, reason } = opts; const projectName = cwd.split("/").pop() || "unknown"; - const tmpDir = join5(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); - mkdirSync3(tmpDir, { recursive: true }); - const configFile = join5(tmpDir, "config.json"); + const tmpDir = join6(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); + mkdirSync4(tmpDir, { recursive: true }); + const configFile = join6(tmpDir, "config.json"); writeFileSync3(configFile, JSON.stringify({ apiUrl: config.apiUrl, token: config.token, @@ -628,11 +643,11 @@ function spawnWikiWorker(opts) { tmpDir, claudeBin: findClaudeBin(), wikiLog: WIKI_LOG, - hooksDir: join5(HOME, ".claude", "hooks"), + hooksDir: join6(HOME, ".claude", "hooks"), promptTemplate: WIKI_PROMPT_TEMPLATE })); wikiLog(`${reason}: spawning summary worker for ${sessionId}`); - const workerPath = join5(bundleDir, "wiki-worker.js"); + const workerPath = join6(bundleDir, "wiki-worker.js"); spawn("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] @@ -640,280 +655,24 @@ function spawnWikiWorker(opts) { wikiLog(`${reason}: spawned summary worker for ${sessionId}`); } function bundleDirFromImportMeta(importMetaUrl) { - return dirname(fileURLToPath2(importMetaUrl)); + return dirname(fileURLToPath(importMetaUrl)); } -// 
dist/src/hooks/session-queue.js -import { appendFileSync as appendFileSync3, closeSync as closeSync2, existsSync as existsSync4, mkdirSync as mkdirSync4, openSync as openSync2, readFileSync as readFileSync4, readdirSync, renameSync as renameSync2, rmSync, statSync, writeFileSync as writeFileSync4 } from "node:fs"; -import { dirname as dirname2, join as join6 } from "node:path"; -import { homedir as homedir5 } from "node:os"; -var DEFAULT_QUEUE_DIR = join6(homedir5(), ".deeplake", "queue"); -var DEFAULT_MAX_BATCH_ROWS = 50; -var DEFAULT_STALE_INFLIGHT_MS = 6e4; -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -var BUSY_WAIT_STEP_MS = 100; -var SessionWriteDisabledError = class extends Error { - constructor(message) { - super(message); - this.name = "SessionWriteDisabledError"; - } -}; -function buildSessionPath(config, sessionId) { - return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`; -} -function buildQueuedSessionRow(args) { - return { - id: crypto.randomUUID(), - path: args.sessionPath, - filename: args.sessionPath.split("/").pop() ?? 
"", - message: args.line, - author: args.userName, - sizeBytes: Buffer.byteLength(args.line, "utf-8"), - project: args.projectName, - description: args.description, - agent: args.agent, - creationDate: args.timestamp, - lastUpdateDate: args.timestamp - }; -} -function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync4(queueDir, { recursive: true }); - const sessionId = extractSessionId(row.path); - const queuePath = getQueuePath(queueDir, sessionId); - appendFileSync3(queuePath, `${JSON.stringify(row)} -`); - return queuePath; -} -function buildSessionInsertSql(sessionsTable, rows) { - if (rows.length === 0) - throw new Error("buildSessionInsertSql: rows must not be empty"); - const table = sqlIdent(sessionsTable); - const values = rows.map((row) => { - const jsonForSql = sqlStr(coerceJsonbPayload(row.message)); - return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`; - }).join(", "); - return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`; -} -function coerceJsonbPayload(message) { - try { - return JSON.stringify(JSON.parse(message)); - } catch { - return JSON.stringify({ - type: "raw_message", - content: message - }); - } -} -async function flushSessionQueue(api, opts) { - const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR; - const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS; - const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS; - const waitIfBusyMs = opts.waitIfBusyMs ?? 0; - const drainAll = opts.drainAll ?? 
false; - mkdirSync4(queueDir, { recursive: true }); - const queuePath = getQueuePath(queueDir, opts.sessionId); - const inflightPath = getInflightPath(queueDir, opts.sessionId); - if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) { - return existsSync4(queuePath) || existsSync4(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 }; - } - let totalRows = 0; - let totalBatches = 0; - let flushedAny = false; - while (true) { - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - if (existsSync4(inflightPath)) { - if (waitIfBusyMs > 0) { - await waitForInflightToClear(inflightPath, waitIfBusyMs); - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - } - if (existsSync4(inflightPath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 }; - } - } - if (!existsSync4(queuePath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - try { - renameSync2(queuePath, inflightPath); - } catch (e) { - if (e?.code === "ENOENT") { - return flushedAny ? 
{ status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - throw e; - } - try { - const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows); - totalRows += rows; - totalBatches += batches; - flushedAny = flushedAny || rows > 0; - } catch (e) { - requeueInflight(queuePath, inflightPath); - if (e instanceof SessionWriteDisabledError) { - return { status: "disabled", rows: totalRows, batches: totalBatches }; - } - throw e; - } - if (!drainAll) { - return { status: "flushed", rows: totalRows, batches: totalBatches }; - } - } -} -function getQueuePath(queueDir, sessionId) { - return join6(queueDir, `${sessionId}.jsonl`); -} -function getInflightPath(queueDir, sessionId) { - return join6(queueDir, `${sessionId}.inflight`); -} -function extractSessionId(sessionPath) { - const filename = sessionPath.split("/").pop() ?? ""; - return filename.replace(/\.jsonl$/, "").split("_").pop() ?? filename; -} -async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) { - const rows = readQueuedRows(inflightPath); - if (rows.length === 0) { - rmSync(inflightPath, { force: true }); - return { rows: 0, batches: 0 }; - } - let ensured = false; - let batches = 0; - const queueDir = dirname2(inflightPath); - for (let i = 0; i < rows.length; i += maxBatchRows) { - const chunk = rows.slice(i, i + maxBatchRows); - const sql = buildSessionInsertSql(sessionsTable, chunk); - try { - await api.query(sql); - } catch (e) { - if (isSessionWriteAuthError(e)) { - markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir); - throw new SessionWriteDisabledError(errorMessage(e)); - } - if (!ensured && isEnsureSessionsTableRetryable(e)) { - try { - await api.ensureSessionsTable(sessionsTable); - } catch (ensureError) { - if (isSessionWriteAuthError(ensureError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir); - throw new 
SessionWriteDisabledError(errorMessage(ensureError)); - } - throw ensureError; - } - ensured = true; - try { - await api.query(sql); - } catch (retryError) { - if (isSessionWriteAuthError(retryError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir); - throw new SessionWriteDisabledError(errorMessage(retryError)); - } - throw retryError; - } - } else { - throw e; - } - } - batches += 1; - } - clearSessionWriteDisabled(sessionsTable, queueDir); - rmSync(inflightPath, { force: true }); - return { rows: rows.length, batches }; -} -function readQueuedRows(path) { - const raw = readFileSync4(path, "utf-8"); - return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line)); -} -function requeueInflight(queuePath, inflightPath) { - if (!existsSync4(inflightPath)) +// dist/src/hooks/capture.js +var log3 = (msg) => log("capture", msg); +var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; +async function main() { + if (!CAPTURE) return; - const inflight = readFileSync4(inflightPath, "utf-8"); - appendFileSync3(queuePath, inflight); - rmSync(inflightPath, { force: true }); -} -function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) { - if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs)) + const input = await readStdin(); + const config = loadConfig(); + if (!config) { + log3("no config"); return; - requeueInflight(queuePath, inflightPath); -} -function isStale(path, staleInflightMs) { - return Date.now() - statSync(path).mtimeMs >= staleInflightMs; -} -function isEnsureSessionsTableRetryable(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found"); -} -function isSessionWriteAuthError(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("403") || message.includes("401") || 
message.includes("forbidden") || message.includes("unauthorized"); -} -function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync4(queueDir, { recursive: true }); - writeFileSync4(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({ - disabledAt: (/* @__PURE__ */ new Date()).toISOString(), - reason, - sessionsTable - })); -} -function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) { - rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true }); -} -function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) { - const path = getSessionWriteDisabledPath(queueDir, sessionsTable); - if (!existsSync4(path)) - return false; - try { - const raw = readFileSync4(path, "utf-8"); - const state = JSON.parse(raw); - const ageMs = Date.now() - new Date(state.disabledAt).getTime(); - if (Number.isNaN(ageMs) || ageMs >= ttlMs) { - rmSync(path, { force: true }); - return false; - } - return true; - } catch { - rmSync(path, { force: true }); - return false; - } -} -function getSessionWriteDisabledPath(queueDir, sessionsTable) { - return join6(queueDir, `.${sessionsTable}.disabled.json`); -} -function errorMessage(error) { - return error instanceof Error ? 
error.message : String(error); -} -async function waitForInflightToClear(inflightPath, waitIfBusyMs) { - const startedAt = Date.now(); - while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) { - await sleep2(BUSY_WAIT_STEP_MS); } -} -function sleep2(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); -} - -// dist/src/hooks/query-cache.js -import { mkdirSync as mkdirSync5, readFileSync as readFileSync5, rmSync as rmSync2, writeFileSync as writeFileSync5 } from "node:fs"; -import { join as join7 } from "node:path"; -import { homedir as homedir6 } from "node:os"; -var log3 = (msg) => log("query-cache", msg); -var DEFAULT_CACHE_ROOT = join7(homedir6(), ".deeplake", "query-cache"); -function getSessionQueryCacheDir(sessionId, deps = {}) { - const { cacheRoot = DEFAULT_CACHE_ROOT } = deps; - return join7(cacheRoot, sessionId); -} -function clearSessionQueryCache(sessionId, deps = {}) { - const { logFn = log3 } = deps; - try { - rmSync2(getSessionQueryCacheDir(sessionId, deps), { recursive: true, force: true }); - } catch (e) { - logFn(`clear failed for session=${sessionId}: ${e.message}`); - } -} - -// dist/src/hooks/capture.js -var log4 = (msg) => log("capture", msg); -var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false"; -function buildCaptureEntry(input, timestamp) { + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); + const ts = (/* @__PURE__ */ new Date()).toISOString(); const meta = { session_id: input.session_id, transcript_path: input.transcript_path, @@ -922,18 +681,20 @@ function buildCaptureEntry(input, timestamp) { hook_event_name: input.hook_event_name, agent_id: input.agent_id, agent_type: input.agent_type, - timestamp + timestamp: ts }; + let entry; if (input.prompt !== void 0) { - return { + log3(`user session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: input.prompt }; - } - if (input.tool_name !== void 0) { - return { + } else if (input.tool_name !== void 0) { + log3(`tool=${input.tool_name} session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "tool_call", @@ -942,103 +703,75 @@ function buildCaptureEntry(input, timestamp) { tool_input: JSON.stringify(input.tool_input), tool_response: JSON.stringify(input.tool_response) }; - } - if (input.last_assistant_message !== void 0) { - return { + } else if (input.last_assistant_message !== void 0) { + log3(`assistant session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "assistant_message", content: input.last_assistant_message, ...input.agent_transcript_path ? { agent_transcript_path: input.agent_transcript_path } : {} }; + } else { + log3("unknown event, skipping"); + return; + } + const sessionPath = buildSessionPath(config, input.session_id); + const line = JSON.stringify(entry); + log3(`writing to ${sessionPath}`); + const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; + const filename = sessionPath.split("/").pop() ?? 
""; + const jsonForSql = line.replace(/'/g, "''"); + const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'claude_code', '${ts}', '${ts}')`; + try { + await api.query(insertSql); + } catch (e) { + if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) { + log3("table missing, creating and retrying"); + await api.ensureSessionsTable(sessionsTable); + await api.query(insertSql); + } else { + throw e; + } } - return null; + log3("capture ok \u2192 cloud"); + maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config); } -function maybeTriggerPeriodicSummary(sessionId, cwd, config, deps = {}) { - const { bundleDir = bundleDirFromImportMeta(import.meta.url), wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", logFn = log4, bumpTotalCountFn = bumpTotalCount, loadTriggerConfigFn = loadTriggerConfig, shouldTriggerFn = shouldTrigger, tryAcquireLockFn = tryAcquireLock, wikiLogFn = wikiLog, spawnWikiWorkerFn = spawnWikiWorker } = deps; - if (wikiWorker) +function maybeTriggerPeriodicSummary(sessionId, cwd, config) { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; try { - const state = bumpTotalCountFn(sessionId); - const cfg = loadTriggerConfigFn(); - if (!shouldTriggerFn(state, cfg)) + const state = bumpTotalCount(sessionId); + const cfg = loadTriggerConfig(); + if (!shouldTrigger(state, cfg)) return; - if (!tryAcquireLockFn(sessionId)) { - logFn(`periodic trigger suppressed (lock held) session=${sessionId}`); + if (!tryAcquireLock(sessionId)) { + log3(`periodic trigger suppressed (lock held) session=${sessionId}`); return; } - wikiLogFn(`Periodic: threshold hit 
(total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); - spawnWikiWorkerFn({ - config, - sessionId, - cwd, - bundleDir, - reason: "Periodic" - }); + wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); + try { + spawnWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Periodic" + }); + } catch (e) { + log3(`periodic spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr) { + log3(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } catch (e) { - logFn(`periodic trigger error: ${e.message}`); + log3(`periodic trigger error: ${e.message}`); } } -async function runCaptureHook(input, deps = {}) { - const { captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, flushSessionQueueFn = flushSessionQueue, clearSessionQueryCacheFn = clearSessionQueryCache, maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, logFn = log4 } = deps; - if (!captureEnabled) - return { status: "disabled" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } - const ts = now(); - const entry = buildCaptureEntry(input, ts); - if (!entry) { - logFn("unknown event, skipping"); - return { status: "ignored" }; - } - if (input.prompt !== void 0) - logFn(`user session=${input.session_id}`); - else if (input.tool_name !== void 0) - logFn(`tool=${input.tool_name} session=${input.session_id}`); - else - logFn(`assistant 
session=${input.session_id}`); - if (input.hook_event_name === "UserPromptSubmit") { - clearSessionQueryCacheFn(input.session_id); - } - const sessionPath = buildSessionPath(config, input.session_id); - const line = JSON.stringify(entry); - const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: input.hook_event_name ?? "", - agent: "claude_code", - timestamp: ts - })); - logFn(`queued ${input.hook_event_name ?? "event"} for ${sessionPath}`); - maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config); - if (input.hook_event_name === "Stop" || input.hook_event_name === "SubagentStop") { - const result = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - drainAll: true - }); - logFn(`flush ${result.status}: rows=${result.rows} batches=${result.batches}`); - return { status: "queued", entry, flushStatus: result.status }; - } - return { status: "queued", entry }; -} -async function main() { - const input = await readStdin(); - await runCaptureHook(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log4(`fatal: ${e.message}`); - process.exit(0); - }); -} -export { - buildCaptureEntry, - maybeTriggerPeriodicSummary, - runCaptureHook -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/claude-code/bundle/commands/auth-login.js b/claude-code/bundle/commands/auth-login.js index ff5e179..064f11e 100755 --- a/claude-code/bundle/commands/auth-login.js +++ b/claude-code/bundle/commands/auth-login.js @@ -263,18 +263,18 @@ function sqlStr(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); diff --git a/claude-code/bundle/pre-tool-use.js b/claude-code/bundle/pre-tool-use.js index e316382..a231ff5 100755 --- a/claude-code/bundle/pre-tool-use.js +++ b/claude-code/bundle/pre-tool-use.js @@ -1,8 +1,9 @@ #!/usr/bin/env node // dist/src/hooks/pre-tool-use.js -import { existsSync as existsSync3 } from "node:fs"; -import { join as join6, dirname } from "node:path"; +import { existsSync as existsSync3, mkdirSync as mkdirSync3, writeFileSync as writeFileSync3 } from "node:fs"; +import { homedir as homedir5 } from "node:os"; +import { join as join6, dirname, sep } from "node:path"; import { fileURLToPath as fileURLToPath2 } from "node:url"; // dist/src/utils/stdin.js @@ -87,18 +88,18 @@ function sqlLike(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? 
`${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -629,13 +630,13 @@ function buildPathCondition(targetPath) { const clean = targetPath.replace(/\/+$/, ""); if (/[*?]/.test(clean)) { const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_"); - return `path LIKE '${likePattern}'`; + return `path LIKE '${likePattern}' ESCAPE '\\'`; } const base = clean.split("/").pop() ?? ""; if (base.includes(".")) { return `path = '${sqlStr(clean)}'`; } - return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`; + return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%' ESCAPE '\\')`; } async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) { const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts; @@ -794,6 +795,42 @@ async function grepBothTables(api, memoryTable, sessionsTable, params, targetPat return refineGrepMatches(normalized, params); } +// dist/src/utils/output-cap.js +var CLAUDE_OUTPUT_CAP_BYTES = 8 * 1024; +function byteLen(str) { + return Buffer.byteLength(str, "utf8"); +} +function capOutputForClaude(output, options = {}) { + const maxBytes = options.maxBytes ?? CLAUDE_OUTPUT_CAP_BYTES; + if (byteLen(output) <= maxBytes) + return output; + const kind = options.kind ?? 
"output"; + const footerReserve = 220; + const budget = Math.max(1, maxBytes - footerReserve); + let running = 0; + const lines = output.split("\n"); + const keptLines = []; + for (const line of lines) { + const lineBytes = byteLen(line) + 1; + if (running + lineBytes > budget) + break; + keptLines.push(line); + running += lineBytes; + } + if (keptLines.length === 0) { + const slice = Buffer.from(output, "utf8").slice(0, budget).toString("utf8"); + const footer2 = ` +... [${kind} truncated: ${(byteLen(output) / 1024).toFixed(1)} KB total; refine with '| head -N' or a tighter pattern]`; + return slice + footer2; + } + const totalLines = lines.length - (lines[lines.length - 1] === "" ? 1 : 0); + const elidedLines = Math.max(0, totalLines - keptLines.length); + const elidedBytes = byteLen(output) - byteLen(keptLines.join("\n")); + const footer = ` +... [${kind} truncated: ${elidedLines} more lines (${(elidedBytes / 1024).toFixed(1)} KB) elided \u2014 refine with '| head -N' or a tighter pattern]`; + return keptLines.join("\n") + footer; +} + // dist/src/hooks/grep-direct.js function splitFirstPipelineStage(cmd) { const input = cmd.trim(); @@ -1033,21 +1070,40 @@ async function handleGrepDirect(api, table, sessionsTable, params) { fixedString: params.fixedString }; const output = await grepBothTables(api, table, sessionsTable, matchParams, params.targetPath); - return output.join("\n") || "(no matches)"; + const joined = output.join("\n") || "(no matches)"; + return capOutputForClaude(joined, { kind: "grep" }); } // dist/src/hooks/virtual-table-query.js function normalizeSessionPart(path, content) { return normalizeContent(path, content); } -function buildVirtualIndexContent(rows) { - const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""]; - for (const row of rows) { - const path = row["path"]; - const project = row["project"] || ""; - const description = (row["description"] || "").slice(0, 120); - const date = (row["creation_date"] || "").slice(0, 10); - 
lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`); +function buildVirtualIndexContent(summaryRows, sessionRows = []) { + const total = summaryRows.length + sessionRows.length; + const lines = [ + "# Memory Index", + "", + `${total} entries (${summaryRows.length} summaries, ${sessionRows.length} sessions):`, + "" + ]; + if (summaryRows.length > 0) { + lines.push("## Summaries", ""); + for (const row of summaryRows) { + const path = row["path"]; + const project = row["project"] || ""; + const description = (row["description"] || "").slice(0, 120); + const date = (row["creation_date"] || "").slice(0, 10); + lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`); + } + lines.push(""); + } + if (sessionRows.length > 0) { + lines.push("## Sessions", ""); + for (const row of sessionRows) { + const path = row["path"]; + const description = (row["description"] || "").slice(0, 120); + lines.push(`- [${path}](${path}) ${description}`); + } } return lines.join("\n"); } @@ -1061,7 +1117,7 @@ function buildDirFilter(dirs) { const cleaned = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))]; if (cleaned.length === 0 || cleaned.includes("/")) return ""; - const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%'`); + const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%' ESCAPE '\\'`); return ` WHERE ${clauses.join(" OR ")}`; } async function queryUnionRows(api, memoryQuery, sessionsQuery) { @@ -1110,8 +1166,11 @@ async function readVirtualPathContents(api, memoryTable, sessionsTable, virtualP } } if (result.get("/index.md") === null && uniquePaths.includes("/index.md")) { - const rows2 = await api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []); - result.set("/index.md", buildVirtualIndexContent(rows2)); + const [summaryRows, sessionRows] = await Promise.all([ + 
api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []), + api.query(`SELECT path, description FROM "${sessionsTable}" WHERE path LIKE '/sessions/%' ORDER BY path`).catch(() => []) + ]); + result.set("/index.md", buildVirtualIndexContent(summaryRows, sessionRows)); } return result; } @@ -1148,7 +1207,7 @@ async function listVirtualPathRows(api, memoryTable, sessionsTable, dir) { async function findVirtualPaths(api, memoryTable, sessionsTable, dir, filenamePattern) { const normalizedDir = dir.replace(/\/+$/, "") || "/"; const likePath = `${sqlLike(normalizedDir === "/" ? "" : normalizedDir)}/%`; - const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`); + const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`); return [...new Set(rows.map((row) => row["path"]).filter((value) => typeof value === "string" && value.length > 0))]; } function dedupeRowsByPath(rows) { @@ -1627,7 +1686,7 @@ async function executeCompiledBashCommand(api, memoryTable, sessionsTable, cmd, continue; } } - return outputs.join("\n"); + return capOutputForClaude(outputs.join("\n"), { kind: "bash" }); } // 
dist/src/hooks/query-cache.js @@ -1785,6 +1844,23 @@ function rewritePaths(cmd) { var log4 = (msg) => log("pre", msg); var __bundleDir = dirname(fileURLToPath2(import.meta.url)); var SHELL_BUNDLE = existsSync3(join6(__bundleDir, "shell", "deeplake-shell.js")) ? join6(__bundleDir, "shell", "deeplake-shell.js") : join6(__bundleDir, "..", "shell", "deeplake-shell.js"); +var READ_CACHE_ROOT = join6(homedir5(), ".deeplake", "query-cache"); +function writeReadCacheFile(sessionId, virtualPath, content, deps = {}) { + const { cacheRoot = READ_CACHE_ROOT } = deps; + const safeSessionId = sessionId.replace(/[^a-zA-Z0-9._-]/g, "_") || "unknown"; + const rel = virtualPath.replace(/^\/+/, "") || "content"; + const expectedRoot = join6(cacheRoot, safeSessionId, "read"); + const absPath = join6(expectedRoot, rel); + if (absPath !== expectedRoot && !absPath.startsWith(expectedRoot + sep)) { + throw new Error(`writeReadCacheFile: path escapes cache root: ${absPath}`); + } + mkdirSync3(dirname(absPath), { recursive: true }); + writeFileSync3(absPath, content, "utf-8"); + return absPath; +} +function buildReadDecision(file_path, description) { + return { command: "", description, file_path }; +} function getReadTargetPath(toolInput) { const rawPath = toolInput.file_path ?? toolInput.path; return rawPath ? 
rawPath : null; @@ -1865,7 +1941,7 @@ function buildFallbackDecision(shellCmd, shellBundle = SHELL_BUNDLE) { return buildAllowDecision(`node "${shellBundle}" -c "${shellCmd.replace(/"/g, '\\"')}"`, `[DeepLake shell] ${shellCmd}`); } async function processPreToolUse(input, deps = {}) { - const { config = loadConfig(), createApi = (table2, activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, table2), executeCompiledBashCommandFn = executeCompiledBashCommand, handleGrepDirectFn = handleGrepDirect, readVirtualPathContentsFn = readVirtualPathContents, readVirtualPathContentFn = readVirtualPathContent, listVirtualPathRowsFn = listVirtualPathRows, findVirtualPathsFn = findVirtualPaths, readCachedIndexContentFn = readCachedIndexContent, writeCachedIndexContentFn = writeCachedIndexContent, shellBundle = SHELL_BUNDLE, logFn = log4 } = deps; + const { config = loadConfig(), createApi = (table2, activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, table2), executeCompiledBashCommandFn = executeCompiledBashCommand, handleGrepDirectFn = handleGrepDirect, readVirtualPathContentsFn = readVirtualPathContents, readVirtualPathContentFn = readVirtualPathContent, listVirtualPathRowsFn = listVirtualPathRows, findVirtualPathsFn = findVirtualPaths, readCachedIndexContentFn = readCachedIndexContent, writeCachedIndexContentFn = writeCachedIndexContent, writeReadCacheFileFn = writeReadCacheFile, shellBundle = SHELL_BUNDLE, logFn = log4 } = deps; const cmd = input.tool_input.command ?? ""; const shellCmd = getShellCommand(input.tool_name, input.tool_input); const toolPath = getReadTargetPath(input.tool_input) ?? input.tool_input.path ?? 
""; @@ -1978,18 +2054,6 @@ async function processPreToolUse(input, deps = {}) { if (content === null) { content = await readVirtualPathContentFn(api, table, sessionsTable, virtualPath); } - if (content === null && virtualPath === "/index.md") { - const idxRows = await api.query(`SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`); - const lines = ["# Memory Index", "", `${idxRows.length} sessions:`, ""]; - for (const r of idxRows) { - const p = r["path"]; - const proj = r["project"] || ""; - const desc = (r["description"] || "").slice(0, 120); - const date = (r["creation_date"] || "").slice(0, 10); - lines.push(`- [${p}](${p}) ${date} ${proj ? `[${proj}]` : ""} ${desc}`); - } - content = lines.join("\n"); - } if (content !== null) { if (virtualPath === "/index.md") { writeCachedIndexContentFn(input.session_id, content); @@ -2001,7 +2065,12 @@ async function processPreToolUse(input, deps = {}) { content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n"); } const label = lineLimit > 0 ? fromEnd ? `tail -${lineLimit}` : `head -${lineLimit}` : "cat"; - return buildAllowDecision(`echo ${JSON.stringify(content)}`, `[DeepLake direct] ${label} ${virtualPath}`); + if (input.tool_name === "Read") { + const file_path = writeReadCacheFileFn(input.session_id, virtualPath, content); + return buildReadDecision(file_path, `[DeepLake direct] ${label} ${virtualPath}`); + } + const capped = capOutputForClaude(content, { kind: label }); + return buildAllowDecision(`echo ${JSON.stringify(capped)}`, `[DeepLake direct] ${label} ${virtualPath}`); } } if (!lsDir && input.tool_name === "Glob") { @@ -2046,7 +2115,8 @@ async function processPreToolUse(input, deps = {}) { lines.push(name + (info.isDir ? 
"/" : "")); } } - return buildAllowDecision(`echo ${JSON.stringify(lines.join("\n") || "(empty directory)")}`, `[DeepLake direct] ls ${dir}`); + const lsOutput = capOutputForClaude(lines.join("\n") || "(empty directory)", { kind: "ls" }); + return buildAllowDecision(`echo ${JSON.stringify(lsOutput)}`, `[DeepLake direct] ls ${dir}`); } if (input.tool_name === "Bash") { const findMatch = shellCmd.match(/^find\s+(\S+)\s+(?:-type\s+\S+\s+)?-name\s+'([^']+)'/); @@ -2058,7 +2128,8 @@ async function processPreToolUse(input, deps = {}) { let result = paths.join("\n") || ""; if (/\|\s*wc\s+-l\s*$/.test(shellCmd)) result = String(paths.length); - return buildAllowDecision(`echo ${JSON.stringify(result || "(no matches)")}`, `[DeepLake direct] find ${dir}`); + const capped = capOutputForClaude(result || "(no matches)", { kind: "find" }); + return buildAllowDecision(`echo ${JSON.stringify(capped)}`, `[DeepLake direct] find ${dir}`); } } } catch (e) { @@ -2071,11 +2142,12 @@ async function main() { const decision = await processPreToolUse(input); if (!decision) return; + const updatedInput = decision.file_path !== void 0 ? 
{ file_path: decision.file_path } : { command: decision.command, description: decision.description }; console.log(JSON.stringify({ hookSpecificOutput: { hookEventName: "PreToolUse", permissionDecision: "allow", - updatedInput: decision + updatedInput } })); } @@ -2087,10 +2159,12 @@ if (isDirectRun(import.meta.url)) { } export { buildAllowDecision, + buildReadDecision, extractGrepParams, getShellCommand, isSafe, processPreToolUse, rewritePaths, - touchesMemory + touchesMemory, + writeReadCacheFile }; diff --git a/claude-code/bundle/session-end.js b/claude-code/bundle/session-end.js index 944977c..c10f5db 100755 --- a/claude-code/bundle/session-end.js +++ b/claude-code/bundle/session-end.js @@ -2,13 +2,13 @@ // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -53,12 +53,6 @@ function loadConfig() { }; } -// dist/src/deeplake-api.js -import { randomUUID } from "node:crypto"; -import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs"; -import { join as join3 } from "node:path"; -import { tmpdir } from "node:os"; - // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; import { join as join2 } from "node:path"; @@ -75,354 +69,35 @@ function log(tag, msg) { `); } -// dist/src/utils/sql.js -function sqlStr(value) { - return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); -} -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} +// dist/src/hooks/spawn-wiki-worker.js 
+import { spawn, execSync } from "node:child_process"; +import { fileURLToPath } from "node:url"; +import { dirname, join as join4 } from "node:path"; +import { writeFileSync, mkdirSync as mkdirSync2 } from "node:fs"; +import { homedir as homedir3, tmpdir } from "node:os"; -// dist/src/deeplake-api.js -var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -function summarizeSql(sql, maxLen = 220) { - const compact = sql.replace(/\s+/g, " ").trim(); - return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; -} -function traceSql(msg) { - if (!TRACE_SQL) - return; - process.stderr.write(`[deeplake-sql] ${msg} -`); - if (DEBUG_FILE_LOG) - log2(msg); -} -var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); -var MAX_RETRIES = 3; -var BASE_DELAY_MS = 500; -var MAX_CONCURRENCY = 5; -var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); -var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); -function sleep(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); -} -function isTimeoutError(error) { - const name = error instanceof Error ? error.name.toLowerCase() : ""; - const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase(); - return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out"); -} -function isDuplicateIndexError(error) { - const message = error instanceof Error ? 
error.message.toLowerCase() : String(error).toLowerCase(); - return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists"); -} -function isSessionInsertQuery(sql) { - return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql); -} -function isTransientHtml403(text) { - const body = text.toLowerCase(); - return body.includes(" this.waiting.push(resolve2)); - } - release() { - this.active--; - const next = this.waiting.shift(); - if (next) { - this.active++; - next(); - } - } -}; -var DeeplakeApi = class { - token; - apiUrl; - orgId; - workspaceId; - tableName; - _pendingRows = []; - _sem = new Semaphore(MAX_CONCURRENCY); - _tablesCache = null; - constructor(token, apiUrl, orgId, workspaceId, tableName) { - this.token = token; - this.apiUrl = apiUrl; - this.orgId = orgId; - this.workspaceId = workspaceId; - this.tableName = tableName; - } - /** Execute SQL with retry on transient errors and bounded concurrency. */ - async query(sql) { - const startedAt = Date.now(); - const summary = summarizeSql(sql); - traceSql(`query start: ${summary}`); - await this._sem.acquire(); - try { - const rows = await this._queryWithRetry(sql); - traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`); - return rows; - } catch (e) { - const message = e instanceof Error ? 
e.message : String(e); - traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`); - throw e; - } finally { - this._sem.release(); - } - } - async _queryWithRetry(sql) { - let lastError; - for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { - let resp; - try { - const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS); - resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, { - method: "POST", - headers: { - Authorization: `Bearer ${this.token}`, - "Content-Type": "application/json", - "X-Activeloop-Org-Id": this.orgId - }, - signal, - body: JSON.stringify({ query: sql }) - }); - } catch (e) { - if (isTimeoutError(e)) { - lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`); - throw lastError; - } - lastError = e instanceof Error ? e : new Error(String(e)); - if (attempt < MAX_RETRIES) { - const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; - log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`); - await sleep(delay); - continue; - } - throw lastError; - } - if (resp.ok) { - const raw = await resp.json(); - if (!raw?.rows || !raw?.columns) - return []; - return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]]))); - } - const text = await resp.text().catch(() => ""); - const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text))); - if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) { - const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; - log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`); - await sleep(delay); - continue; - } - throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`); - } - throw lastError ?? 
new Error("Query failed: max retries exceeded"); - } - // ── Writes ────────────────────────────────────────────────────────────────── - /** Queue rows for writing. Call commit() to flush. */ - appendRows(rows) { - this._pendingRows.push(...rows); - } - /** Flush pending rows via SQL. */ - async commit() { - if (this._pendingRows.length === 0) - return; - const rows = this._pendingRows; - this._pendingRows = []; - const CONCURRENCY = 10; - for (let i = 0; i < rows.length; i += CONCURRENCY) { - const chunk = rows.slice(i, i + CONCURRENCY); - await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r))); - } - log2(`commit: ${rows.length} rows`); - } - async upsertRowSql(row) { - const ts = (/* @__PURE__ */ new Date()).toISOString(); - const cd = row.creationDate ?? ts; - const lud = row.lastUpdateDate ?? ts; - const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); - if (exists.length > 0) { - let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; - if (row.project !== void 0) - setClauses += `, project = '${sqlStr(row.project)}'`; - if (row.description !== void 0) - setClauses += `, description = '${sqlStr(row.description)}'`; - await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); - } else { - const id = randomUUID(); - let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; - if (row.project !== void 0) { - cols += ", project"; - vals += `, '${sqlStr(row.project)}'`; - } - if (row.description !== void 0) { - cols += ", description"; - vals += `, '${sqlStr(row.description)}'`; - } - await this.query(`INSERT INTO "${this.tableName}" (${cols}) 
VALUES (${vals})`); - } - } - /** Update specific columns on a row by path. */ - async updateColumns(path, columns) { - const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", "); - await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`); - } - // ── Convenience ───────────────────────────────────────────────────────────── - /** Create a BM25 search index on a column. */ - async createIndex(column) { - await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`); - } - buildLookupIndexName(table, suffix) { - return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_"); - } - getLookupIndexMarkerPath(table, suffix) { - const markerKey = [ - this.workspaceId, - this.orgId, - table, - suffix - ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_"); - return join3(getIndexMarkerDir(), `${markerKey}.json`); - } - hasFreshLookupIndexMarker(table, suffix) { - const markerPath = this.getLookupIndexMarkerPath(table, suffix); - if (!existsSync2(markerPath)) - return false; - try { - const raw = JSON.parse(readFileSync2(markerPath, "utf-8")); - const updatedAt = raw.updatedAt ? 
new Date(raw.updatedAt).getTime() : NaN; - if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS) - return false; - return true; - } catch { - return false; - } - } - markLookupIndexReady(table, suffix) { - mkdirSync(getIndexMarkerDir(), { recursive: true }); - writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8"); - } - async ensureLookupIndex(table, suffix, columnsSql) { - if (this.hasFreshLookupIndexMarker(table, suffix)) - return; - const indexName = this.buildLookupIndexName(table, suffix); - try { - await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`); - this.markLookupIndexReady(table, suffix); - } catch (e) { - if (isDuplicateIndexError(e)) { - this.markLookupIndexReady(table, suffix); - return; - } - log2(`index "${indexName}" skipped: ${e.message}`); - } - } - /** List all tables in the workspace (with retry). */ - async listTables(forceRefresh = false) { - if (!forceRefresh && this._tablesCache) - return [...this._tablesCache]; - const { tables, cacheable } = await this._fetchTables(); - if (cacheable) - this._tablesCache = [...tables]; - return tables; - } - async _fetchTables() { - for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { +// dist/src/utils/wiki-log.js +import { mkdirSync, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join3 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join3(hooksDir, filename); + return { + path, + log(msg) { try { - const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, { - headers: { - Authorization: `Bearer ${this.token}`, - "X-Activeloop-Org-Id": this.orgId - } - }); - if (resp.ok) { - const data = await resp.json(); - return { - tables: (data.tables ?? 
[]).map((t) => t.table_name), - cacheable: true - }; - } - if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) { - await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200); - continue; - } - return { tables: [], cacheable: false }; + mkdirSync(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); } catch { - if (attempt < MAX_RETRIES) { - await sleep(BASE_DELAY_MS * Math.pow(2, attempt)); - continue; - } - return { tables: [], cacheable: false }; } } - return { tables: [], cacheable: false }; - } - /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */ - async ensureTable(name) { - const tbl = name ?? this.tableName; - const tables = await this.listTables(); - if (!tables.includes(tbl)) { - log2(`table "${tbl}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); - log2(`table "${tbl}" created`); - if (!tables.includes(tbl)) - this._tablesCache = [...tables, tbl]; - } - } - /** Create the sessions table (uses JSONB for message since every row is a JSON event). 
*/ - async ensureSessionsTable(name) { - const tables = await this.listTables(); - if (!tables.includes(name)) { - log2(`table "${name}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); - log2(`table "${name}" created`); - if (!tables.includes(name)) - this._tablesCache = [...tables, name]; - } - await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`); - } -}; - -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } + }; } // dist/src/hooks/spawn-wiki-worker.js -import { spawn, execSync } from "node:child_process"; -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname, join as join4 } from "node:path"; -import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs"; -import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os"; var HOME = homedir3(); -var WIKI_LOG = join4(HOME, ".claude", "hooks", "deeplake-wiki.log"); +var wikiLogger = makeWikiLogger(join4(HOME, ".claude", "hooks")); +var WIKI_LOG = wikiLogger.path; var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. 
Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. Think of this as building a knowledge graph, not writing a summary. SESSION JSONL path: __JSONL__ @@ -475,14 +150,7 @@ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. Future you w PRIVACY: Never include absolute filesystem paths (e.g. /home/user/..., /Users/..., C:\\\\...) in the summary. Use only project-relative paths or the project name. The Source and Project fields above are already correct \u2014 do not change them. LENGTH LIMIT: Keep the total summary under 4000 characters. Be dense and concise \u2014 prioritize facts over prose. If a session is short, the summary should be short too.`; -function wikiLog(msg) { - try { - mkdirSync2(join4(HOME, ".claude", "hooks"), { recursive: true }); - appendFileSync2(WIKI_LOG, `[${utcTimestamp()}] ${msg} -`); - } catch { - } -} +var wikiLog = wikiLogger.log; function findClaudeBin() { try { return execSync("which claude 2>/dev/null", { encoding: "utf-8" }).trim(); @@ -493,10 +161,10 @@ function findClaudeBin() { function spawnWikiWorker(opts) { const { config, sessionId, cwd, bundleDir, reason } = opts; const projectName = cwd.split("/").pop() || "unknown"; - const tmpDir = join4(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); + const tmpDir = join4(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`); mkdirSync2(tmpDir, { recursive: true }); const configFile = join4(tmpDir, "config.json"); - writeFileSync2(configFile, JSON.stringify({ + writeFileSync(configFile, JSON.stringify({ apiUrl: config.apiUrl, token: config.token, orgId: config.orgId, @@ -521,264 +189,102 @@ function spawnWikiWorker(opts) { wikiLog(`${reason}: spawned summary worker for ${sessionId}`); } function bundleDirFromImportMeta(importMetaUrl) { - return dirname(fileURLToPath2(importMetaUrl)); + return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/hooks/session-queue.js 
-import { appendFileSync as appendFileSync3, closeSync, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync3, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; -import { dirname as dirname2, join as join5 } from "node:path"; +// dist/src/hooks/summary-state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync3, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs"; import { homedir as homedir4 } from "node:os"; -var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue"); -var DEFAULT_MAX_BATCH_ROWS = 50; -var DEFAULT_STALE_INFLIGHT_MS = 6e4; -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -var BUSY_WAIT_STEP_MS = 100; -var SessionWriteDisabledError = class extends Error { - constructor(message) { - super(message); - this.name = "SessionWriteDisabledError"; - } -}; -function buildSessionInsertSql(sessionsTable, rows) { - if (rows.length === 0) - throw new Error("buildSessionInsertSql: rows must not be empty"); - const table = sqlIdent(sessionsTable); - const values = rows.map((row) => { - const jsonForSql = sqlStr(coerceJsonbPayload(row.message)); - return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`; - }).join(", "); - return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`; -} -function coerceJsonbPayload(message) { - try { - return JSON.stringify(JSON.parse(message)); - } catch { - return JSON.stringify({ - type: "raw_message", - content: message - }); - } -} -async function flushSessionQueue(api, opts) { - const queueDir = opts.queueDir ?? 
DEFAULT_QUEUE_DIR; - const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS; - const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS; - const waitIfBusyMs = opts.waitIfBusyMs ?? 0; - const drainAll = opts.drainAll ?? false; - mkdirSync3(queueDir, { recursive: true }); - const queuePath = getQueuePath(queueDir, opts.sessionId); - const inflightPath = getInflightPath(queueDir, opts.sessionId); - if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) { - return existsSync3(queuePath) || existsSync3(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 }; - } - let totalRows = 0; - let totalBatches = 0; - let flushedAny = false; - while (true) { - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - if (existsSync3(inflightPath)) { - if (waitIfBusyMs > 0) { - await waitForInflightToClear(inflightPath, waitIfBusyMs); - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - } - if (existsSync3(inflightPath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 }; - } - } - if (!existsSync3(queuePath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } +import { join as join5 } from "node:path"; +var dlog = (msg) => log("summary-state", msg); +var STATE_DIR = join5(homedir4(), ".claude", "hooks", "summary-state"); +var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); +function lockPath(sessionId) { + return join5(STATE_DIR, `${sessionId}.lock`); +} +function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { + mkdirSync3(STATE_DIR, { recursive: true }); + const p = lockPath(sessionId); + if (existsSync2(p)) { try { - renameSync(queuePath, inflightPath); - } catch (e) { - if (e?.code === "ENOENT") { - return flushedAny ? 
{ status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - throw e; + const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10); + if (Number.isFinite(ageMs) && ageMs < maxAgeMs) + return false; + } catch (readErr) { + dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); } try { - const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows); - totalRows += rows; - totalBatches += batches; - flushedAny = flushedAny || rows > 0; - } catch (e) { - requeueInflight(queuePath, inflightPath); - if (e instanceof SessionWriteDisabledError) { - return { status: "disabled", rows: totalRows, batches: totalBatches }; - } - throw e; - } - if (!drainAll) { - return { status: "flushed", rows: totalRows, batches: totalBatches }; + unlinkSync(p); + } catch (unlinkErr) { + dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); + return false; } } -} -function getQueuePath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.jsonl`); -} -function getInflightPath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.inflight`); -} -async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) { - const rows = readQueuedRows(inflightPath); - if (rows.length === 0) { - rmSync(inflightPath, { force: true }); - return { rows: 0, batches: 0 }; - } - let ensured = false; - let batches = 0; - const queueDir = dirname2(inflightPath); - for (let i = 0; i < rows.length; i += maxBatchRows) { - const chunk = rows.slice(i, i + maxBatchRows); - const sql = buildSessionInsertSql(sessionsTable, chunk); + try { + const fd = openSync(p, "wx"); try { - await api.query(sql); - } catch (e) { - if (isSessionWriteAuthError(e)) { - markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir); - throw new SessionWriteDisabledError(errorMessage(e)); - } - if (!ensured && isEnsureSessionsTableRetryable(e)) { - try { 
- await api.ensureSessionsTable(sessionsTable); - } catch (ensureError) { - if (isSessionWriteAuthError(ensureError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir); - throw new SessionWriteDisabledError(errorMessage(ensureError)); - } - throw ensureError; - } - ensured = true; - try { - await api.query(sql); - } catch (retryError) { - if (isSessionWriteAuthError(retryError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir); - throw new SessionWriteDisabledError(errorMessage(retryError)); - } - throw retryError; - } - } else { - throw e; - } + writeSync(fd, String(Date.now())); + } finally { + closeSync(fd); } - batches += 1; + return true; + } catch (e) { + if (e.code === "EEXIST") + return false; + throw e; } - clearSessionWriteDisabled(sessionsTable, queueDir); - rmSync(inflightPath, { force: true }); - return { rows: rows.length, batches }; } -function readQueuedRows(path) { - const raw = readFileSync3(path, "utf-8"); - return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line)); -} -function requeueInflight(queuePath, inflightPath) { - if (!existsSync3(inflightPath)) - return; - const inflight = readFileSync3(inflightPath, "utf-8"); - appendFileSync3(queuePath, inflight); - rmSync(inflightPath, { force: true }); -} -function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) { - if (!existsSync3(inflightPath) || !isStale(inflightPath, staleInflightMs)) - return; - requeueInflight(queuePath, inflightPath); -} -function isStale(path, staleInflightMs) { - return Date.now() - statSync(path).mtimeMs >= staleInflightMs; -} -function isEnsureSessionsTableRetryable(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found"); -} -function isSessionWriteAuthError(error) { - const message = 
errorMessage(error).toLowerCase(); - return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized"); -} -function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({ - disabledAt: (/* @__PURE__ */ new Date()).toISOString(), - reason, - sessionsTable - })); -} -function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) { - rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true }); -} -function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) { - const path = getSessionWriteDisabledPath(queueDir, sessionsTable); - if (!existsSync3(path)) - return false; +function releaseLock(sessionId) { try { - const raw = readFileSync3(path, "utf-8"); - const state = JSON.parse(raw); - const ageMs = Date.now() - new Date(state.disabledAt).getTime(); - if (Number.isNaN(ageMs) || ageMs >= ttlMs) { - rmSync(path, { force: true }); - return false; + unlinkSync(lockPath(sessionId)); + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); } - return true; - } catch { - rmSync(path, { force: true }); - return false; - } -} -function getSessionWriteDisabledPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.disabled.json`); -} -function errorMessage(error) { - return error instanceof Error ? 
error.message : String(error); -} -async function waitForInflightToClear(inflightPath, waitIfBusyMs) { - const startedAt = Date.now(); - while (existsSync3(inflightPath) && Date.now() - startedAt < waitIfBusyMs) { - await sleep2(BUSY_WAIT_STEP_MS); } } -function sleep2(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); -} // dist/src/hooks/session-end.js -var log3 = (msg) => log("session-end", msg); -async function runSessionEndHook(input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", captureEnabled = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false", config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), flushSessionQueueFn = flushSessionQueue, spawnWikiWorkerFn = spawnWikiWorker, wikiLogFn = wikiLog, bundleDir = bundleDirFromImportMeta(import.meta.url), logFn = log3 } = deps; - if (wikiWorker || !captureEnabled || !input.session_id) - return { status: "skipped" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } - const flush = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - waitIfBusyMs: 5e3, - drainAll: true - }); - logFn(`flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`); - wikiLogFn(`SessionEnd: triggering summary for ${input.session_id}`); - spawnWikiWorkerFn({ - config, - sessionId: input.session_id, - cwd: input.cwd ?? 
"", - bundleDir, - reason: "SessionEnd" - }); - return { status: "flushed", flushStatus: flush.status }; -} +var log2 = (msg) => log("session-end", msg); async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") + return; + if (process.env.HIVEMIND_CAPTURE === "false") + return; const input = await readStdin(); - await runSessionEndHook(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log3(`fatal: ${e.message}`); - process.exit(0); - }); + const sessionId = input.session_id; + const cwd = input.cwd ?? ""; + if (!sessionId) + return; + const config = loadConfig(); + if (!config) { + log2("no config"); + return; + } + if (!tryAcquireLock(sessionId)) { + wikiLog(`SessionEnd: periodic worker already running for ${sessionId}, skipping`); + return; + } + wikiLog(`SessionEnd: triggering summary for ${sessionId}`); + try { + spawnWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "SessionEnd" + }); + } catch (e) { + log2(`spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr) { + log2(`releaseLock after spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } -export { - runSessionEndHook -}; +main().catch((e) => { + log2(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/claude-code/bundle/session-start-setup.js b/claude-code/bundle/session-start-setup.js index 77621bc..c0f05cc 100755 --- a/claude-code/bundle/session-start-setup.js +++ b/claude-code/bundle/session-start-setup.js @@ -1,11 +1,10 @@ #!/usr/bin/env node // dist/src/hooks/session-start-setup.js -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname as dirname3, join as join7 } from "node:path"; -import { mkdirSync as mkdirSync5, appendFileSync as appendFileSync3 } from "node:fs"; +import { fileURLToPath } from "node:url"; +import { dirname as dirname2, join as join7 } from "node:path"; import { execSync as execSync2 } from 
"node:child_process"; -import { homedir as homedir6 } from "node:os"; +import { homedir as homedir4 } from "node:os"; // dist/src/commands/auth.js import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs"; @@ -91,27 +90,21 @@ function log(tag, msg) { function sqlStr(value) { return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); } -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -121,7 +114,7 @@ var MAX_CONCURRENCY = 5; var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 
6 * 60 * 6e4); function sleep(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); + return new Promise((resolve) => setTimeout(resolve, ms)); } function isTimeoutError(error) { const name = error instanceof Error ? error.name.toLowerCase() : ""; @@ -154,7 +147,7 @@ var Semaphore = class { this.active++; return; } - await new Promise((resolve2) => this.waiting.push(resolve2)); + await new Promise((resolve) => this.waiting.push(resolve)); } release() { this.active--; @@ -415,13 +408,13 @@ var DeeplakeApi = class { // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -430,538 +423,140 @@ function readStdin() { }); } -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } -} - -// dist/src/hooks/session-queue.js -import { appendFileSync as appendFileSync2, closeSync, existsSync as existsSync4, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync4, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; +// dist/src/utils/version-check.js +import { readFileSync as readFileSync4 } from "node:fs"; import { dirname, join as join5 } from "node:path"; -import { homedir as homedir4 } from "node:os"; -var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue"); -var DEFAULT_MAX_BATCH_ROWS = 50; -var DEFAULT_STALE_INFLIGHT_MS = 6e4; -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -var 
DEFAULT_DRAIN_LOCK_STALE_MS = 3e4; -var BUSY_WAIT_STEP_MS = 100; -var SessionWriteDisabledError = class extends Error { - constructor(message) { - super(message); - this.name = "SessionWriteDisabledError"; - } -}; -function buildSessionInsertSql(sessionsTable, rows) { - if (rows.length === 0) - throw new Error("buildSessionInsertSql: rows must not be empty"); - const table = sqlIdent(sessionsTable); - const values = rows.map((row) => { - const jsonForSql = sqlStr(coerceJsonbPayload(row.message)); - return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`; - }).join(", "); - return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`; -} -function coerceJsonbPayload(message) { - try { - return JSON.stringify(JSON.parse(message)); - } catch { - return JSON.stringify({ - type: "raw_message", - content: message - }); - } -} -async function flushSessionQueue(api, opts) { - const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR; - const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS; - const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS; - const waitIfBusyMs = opts.waitIfBusyMs ?? 0; - const drainAll = opts.drainAll ?? false; - mkdirSync3(queueDir, { recursive: true }); - const queuePath = getQueuePath(queueDir, opts.sessionId); - const inflightPath = getInflightPath(queueDir, opts.sessionId); - if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) { - return existsSync4(queuePath) || existsSync4(inflightPath) ? 
{ status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 }; - } - let totalRows = 0; - let totalBatches = 0; - let flushedAny = false; - while (true) { - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - if (existsSync4(inflightPath)) { - if (waitIfBusyMs > 0) { - await waitForInflightToClear(inflightPath, waitIfBusyMs); - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - } - if (existsSync4(inflightPath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 }; - } - } - if (!existsSync4(queuePath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - try { - renameSync(queuePath, inflightPath); - } catch (e) { - if (e?.code === "ENOENT") { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - throw e; - } - try { - const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows); - totalRows += rows; - totalBatches += batches; - flushedAny = flushedAny || rows > 0; - } catch (e) { - requeueInflight(queuePath, inflightPath); - if (e instanceof SessionWriteDisabledError) { - return { status: "disabled", rows: totalRows, batches: totalBatches }; - } - throw e; - } - if (!drainAll) { - return { status: "flushed", rows: totalRows, batches: totalBatches }; - } - } -} -async function drainSessionQueues(api, opts) { - const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR; - mkdirSync3(queueDir, { recursive: true }); - const sessionIds = listQueuedSessionIds(queueDir, opts.staleInflightMs ?? 
DEFAULT_STALE_INFLIGHT_MS); - let flushedSessions = 0; - let rows = 0; - let batches = 0; - for (const sessionId of sessionIds) { - const result = await flushSessionQueue(api, { - sessionId, - sessionsTable: opts.sessionsTable, - queueDir, - maxBatchRows: opts.maxBatchRows, - allowStaleInflight: true, - staleInflightMs: opts.staleInflightMs, - drainAll: true - }); - if (result.status === "flushed") { - flushedSessions += 1; - rows += result.rows; - batches += result.batches; - } - } - return { - queuedSessions: sessionIds.length, - flushedSessions, - rows, - batches - }; -} -function tryAcquireSessionDrainLock(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, staleMs = DEFAULT_DRAIN_LOCK_STALE_MS) { - mkdirSync3(queueDir, { recursive: true }); - const lockPath = getSessionDrainLockPath(queueDir, sessionsTable); - for (let attempt = 0; attempt < 2; attempt++) { - try { - const fd = openSync(lockPath, "wx"); - closeSync(fd); - return () => rmSync(lockPath, { force: true }); - } catch (e) { - if (e?.code !== "EEXIST") - throw e; - if (existsSync4(lockPath) && isStale(lockPath, staleMs)) { - rmSync(lockPath, { force: true }); - continue; - } - return null; - } - } - return null; -} -function getQueuePath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.jsonl`); -} -function getInflightPath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.inflight`); -} -async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) { - const rows = readQueuedRows(inflightPath); - if (rows.length === 0) { - rmSync(inflightPath, { force: true }); - return { rows: 0, batches: 0 }; - } - let ensured = false; - let batches = 0; - const queueDir = dirname(inflightPath); - for (let i = 0; i < rows.length; i += maxBatchRows) { - const chunk = rows.slice(i, i + maxBatchRows); - const sql = buildSessionInsertSql(sessionsTable, chunk); - try { - await api.query(sql); - } catch (e) { - if (isSessionWriteAuthError(e)) { - 
markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir); - throw new SessionWriteDisabledError(errorMessage(e)); - } - if (!ensured && isEnsureSessionsTableRetryable(e)) { - try { - await api.ensureSessionsTable(sessionsTable); - } catch (ensureError) { - if (isSessionWriteAuthError(ensureError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir); - throw new SessionWriteDisabledError(errorMessage(ensureError)); - } - throw ensureError; - } - ensured = true; - try { - await api.query(sql); - } catch (retryError) { - if (isSessionWriteAuthError(retryError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir); - throw new SessionWriteDisabledError(errorMessage(retryError)); - } - throw retryError; - } - } else { - throw e; - } - } - batches += 1; - } - clearSessionWriteDisabled(sessionsTable, queueDir); - rmSync(inflightPath, { force: true }); - return { rows: rows.length, batches }; -} -function readQueuedRows(path) { - const raw = readFileSync4(path, "utf-8"); - return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line)); -} -function requeueInflight(queuePath, inflightPath) { - if (!existsSync4(inflightPath)) - return; - const inflight = readFileSync4(inflightPath, "utf-8"); - appendFileSync2(queuePath, inflight); - rmSync(inflightPath, { force: true }); -} -function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) { - if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs)) - return; - requeueInflight(queuePath, inflightPath); -} -function isStale(path, staleInflightMs) { - return Date.now() - statSync(path).mtimeMs >= staleInflightMs; -} -function listQueuedSessionIds(queueDir, staleInflightMs) { - const sessionIds = /* @__PURE__ */ new Set(); - for (const name of readdirSync(queueDir)) { - if (name.endsWith(".jsonl")) { - sessionIds.add(name.slice(0, -".jsonl".length)); - } else if (name.endsWith(".inflight")) { - const path 
= join5(queueDir, name); - if (isStale(path, staleInflightMs)) { - sessionIds.add(name.slice(0, -".inflight".length)); - } - } - } - return [...sessionIds].sort(); -} -function isEnsureSessionsTableRetryable(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found"); -} -function isSessionWriteAuthError(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized"); -} -function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({ - disabledAt: (/* @__PURE__ */ new Date()).toISOString(), - reason, - sessionsTable - })); -} -function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) { - rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true }); -} -function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) { - const path = getSessionWriteDisabledPath(queueDir, sessionsTable); - if (!existsSync4(path)) - return false; - try { - const raw = readFileSync4(path, "utf-8"); - const state = JSON.parse(raw); - const ageMs = Date.now() - new Date(state.disabledAt).getTime(); - if (Number.isNaN(ageMs) || ageMs >= ttlMs) { - rmSync(path, { force: true }); - return false; - } - return true; - } catch { - rmSync(path, { force: true }); - return false; - } -} -function getSessionWriteDisabledPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.disabled.json`); -} -function getSessionDrainLockPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.drain.lock`); -} -function errorMessage(error) { - return error 
instanceof Error ? error.message : String(error); -} -async function waitForInflightToClear(inflightPath, waitIfBusyMs) { - const startedAt = Date.now(); - while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) { - await sleep2(BUSY_WAIT_STEP_MS); - } -} -function sleep2(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); -} - -// dist/src/hooks/version-check.js -import { existsSync as existsSync5, mkdirSync as mkdirSync4, readFileSync as readFileSync5, writeFileSync as writeFileSync4 } from "node:fs"; -import { dirname as dirname2, join as join6 } from "node:path"; -import { homedir as homedir5 } from "node:os"; -var DEFAULT_VERSION_CACHE_PATH = join6(homedir5(), ".deeplake", ".version-check.json"); -var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3; +var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; function getInstalledVersion(bundleDir, pluginManifestDir) { try { - const pluginJson = join6(bundleDir, "..", pluginManifestDir, "plugin.json"); - const plugin = JSON.parse(readFileSync5(pluginJson, "utf-8")); + const pluginJson = join5(bundleDir, "..", pluginManifestDir, "plugin.json"); + const plugin = JSON.parse(readFileSync4(pluginJson, "utf-8")); if (plugin.version) return plugin.version; } catch { } let dir = bundleDir; for (let i = 0; i < 5; i++) { - const candidate = join6(dir, "package.json"); + const candidate = join5(dir, "package.json"); try { - const pkg = JSON.parse(readFileSync5(candidate, "utf-8")); + const pkg = JSON.parse(readFileSync4(candidate, "utf-8")); if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version) return pkg.version; } catch { } - const parent = dirname2(dir); + const parent = dirname(dir); if (parent === dir) break; dir = parent; } return null; } +async function getLatestVersion(timeoutMs = 3e3) { + try { + const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) }); + if (!res.ok) + return null; + const 
pkg = await res.json(); + return pkg.version ?? null; + } catch { + return null; + } +} function isNewer(latest, current) { - const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number); + const parse = (v) => v.split(".").map(Number); const [la, lb, lc] = parse(latest); const [ca, cb, cc] = parse(current); return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc; } -function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) { - if (!existsSync5(cachePath)) - return null; - try { - const parsed = JSON.parse(readFileSync5(cachePath, "utf-8")); - if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) { - return parsed; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join6 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join6(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync3(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } } - } catch { - } - return null; -} -function writeVersionCache(entry, cachePath = DEFAULT_VERSION_CACHE_PATH) { - mkdirSync4(dirname2(cachePath), { recursive: true }); - writeFileSync4(cachePath, JSON.stringify(entry)); -} -function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) { - const cached = readVersionCache(cachePath); - if (!cached || cached.url !== url) - return void 0; - if (nowMs - cached.checkedAt > ttlMs) - return void 0; - return cached.latest; -} -async function getLatestVersionCached(opts) { - const ttlMs = opts.ttlMs ?? DEFAULT_VERSION_CACHE_TTL_MS; - const cachePath = opts.cachePath ?? DEFAULT_VERSION_CACHE_PATH; - const nowMs = opts.nowMs ?? Date.now(); - const fetchImpl = opts.fetchImpl ?? 
fetch; - const fresh = readFreshCachedLatestVersion(opts.url, ttlMs, cachePath, nowMs); - if (fresh !== void 0) - return fresh; - const stale = readVersionCache(cachePath); - try { - const res = await fetchImpl(opts.url, { signal: AbortSignal.timeout(opts.timeoutMs) }); - const latest = res.ok ? (await res.json()).version ?? null : stale?.latest ?? null; - writeVersionCache({ - checkedAt: nowMs, - latest, - url: opts.url - }, cachePath); - return latest; - } catch { - const latest = stale?.latest ?? null; - writeVersionCache({ - checkedAt: nowMs, - latest, - url: opts.url - }, cachePath); - return latest; - } + }; } // dist/src/hooks/session-start-setup.js var log3 = (msg) => log("session-setup", msg); -var __bundleDir = dirname3(fileURLToPath2(import.meta.url)); -var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; -var VERSION_CHECK_TIMEOUT = 3e3; -var HOME = homedir6(); -var WIKI_LOG = join7(HOME, ".claude", "hooks", "deeplake-wiki.log"); -function wikiLog(msg) { - try { - mkdirSync5(join7(HOME, ".claude", "hooks"), { recursive: true }); - appendFileSync3(WIKI_LOG, `[${utcTimestamp()}] ${msg} -`); - } catch { - } -} -async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) { - const summaryPath = `/summaries/${userName}/${sessionId}.md`; - const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`); - if (existing.length > 0) { - wikiLog(`SessionSetup: summary exists for ${sessionId} (resumed)`); +var __bundleDir = dirname2(fileURLToPath(import.meta.url)); +var { log: wikiLog } = makeWikiLogger(join7(homedir4(), ".claude", "hooks")); +async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; - } - const now = (/* @__PURE__ */ new Date()).toISOString(); - const projectName = cwd.split("/").pop() || "unknown"; - const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; 
- const content = [ - `# Session ${sessionId}`, - `- **Source**: ${sessionSource}`, - `- **Started**: ${now}`, - `- **Project**: ${projectName}`, - `- **Status**: in-progress`, - "" - ].join("\n"); - const filename = `${sessionId}.md`; - await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')`); - wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`); -} -async function runSessionStartSetup(input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.tableName), captureEnabled = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false", drainSessionQueuesFn = drainSessionQueues, isSessionWriteDisabledFn = isSessionWriteDisabled, isSessionWriteAuthErrorFn = isSessionWriteAuthError, markSessionWriteDisabledFn = markSessionWriteDisabled, tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, createPlaceholderFn = createPlaceholder, getInstalledVersionFn = getInstalledVersion, getLatestVersionCachedFn = getLatestVersionCached, isNewerFn = isNewer, execSyncFn = execSync2, logFn = log3, wikiLogFn = wikiLog } = deps; - if (wikiWorker) - return { status: "skipped" }; + const input = await readStdin(); + const creds = loadCredentials(); if (!creds?.token) { - logFn("no credentials"); - return { status: "no_credentials" }; + log3("no credentials"); + return; } if (!creds.userName) { try { const { userInfo: userInfo2 } = await import("node:os"); creds.userName = userInfo2().username ?? "unknown"; - saveCredentialsFn(creds); - logFn(`backfilled userName: ${creds.userName}`); + saveCredentials(creds); + log3(`backfilled userName: ${creds.userName}`); } catch { } } - if (input.session_id && config) { + if (input.session_id) { try { - const api = createApi(config); - await api.ensureTable(); - if (captureEnabled) { - if (isSessionWriteDisabledFn(config.sessionsTableName)) { - logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`); - } else { - const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName); - if (!releaseDrainLock) { - logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`); - } else { - try { - await api.ensureSessionsTable(config.sessionsTableName); - const drain = await drainSessionQueuesFn(api, { - sessionsTable: config.sessionsTableName - }); - if (drain.flushedSessions > 0) { - logFn(`drained ${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`); - } - } catch (e) { - if (isSessionWriteAuthErrorFn(e)) { - 
markSessionWriteDisabledFn(config.sessionsTableName, e.message); - logFn(`sessions table unavailable, skipping setup: ${e.message}`); - } else { - throw e; - } - } finally { - releaseDrainLock(); - } - } - } - await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId); + const config = loadConfig(); + if (config) { + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName); + await api.ensureTable(); + await api.ensureSessionsTable(config.sessionsTableName); + log3("setup complete"); } - logFn("setup complete"); } catch (e) { - logFn(`setup failed: ${e.message}`); - wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`); + log3(`setup failed: ${e.message}`); + wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`); } } const autoupdate = creds.autoupdate !== false; try { - const current = getInstalledVersionFn(__bundleDir, ".claude-plugin"); + const current = getInstalledVersion(__bundleDir, ".claude-plugin"); if (current) { - const latest = await getLatestVersionCachedFn({ - url: GITHUB_RAW_PKG, - timeoutMs: VERSION_CHECK_TIMEOUT - }); - if (latest && isNewerFn(latest, current)) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { if (autoupdate) { - logFn(`autoupdate: updating ${current} \u2192 ${latest}`); + log3(`autoupdate: updating ${current} \u2192 ${latest}`); try { const scopes = ["user", "project", "local", "managed"]; const cmd = scopes.map((s) => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null`).join("; "); - execSyncFn(cmd, { stdio: "ignore", timeout: 6e4 }); + execSync2(cmd, { stdio: "ignore", timeout: 6e4 }); process.stderr.write(`\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply. 
`); - logFn(`autoupdate succeeded: ${current} \u2192 ${latest}`); + log3(`autoupdate succeeded: ${current} \u2192 ${latest}`); } catch (e) { process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually. `); - logFn(`autoupdate failed: ${e.message}`); + log3(`autoupdate failed: ${e.message}`); } } else { process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade. `); - logFn(`update available (autoupdate off): ${current} \u2192 ${latest}`); + log3(`update available (autoupdate off): ${current} \u2192 ${latest}`); } } else { - logFn(`version up to date: ${current}`); + log3(`version up to date: ${current}`); } } } catch (e) { - logFn(`version check failed: ${e.message}`); + log3(`version check failed: ${e.message}`); } - return { status: "complete" }; -} -async function main() { - const input = await readStdin(); - await runSessionStartSetup(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log3(`fatal: ${e.message}`); - process.exit(0); - }); } -export { - createPlaceholder, - runSessionStartSetup, - wikiLog -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/claude-code/bundle/session-start.js b/claude-code/bundle/session-start.js index ea84c9c..1f815ee 100755 --- a/claude-code/bundle/session-start.js +++ b/claude-code/bundle/session-start.js @@ -1,8 +1,11 @@ #!/usr/bin/env node // dist/src/hooks/session-start.js -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname as dirname2, join as join4 } from "node:path"; +import { fileURLToPath } from "node:url"; +import { dirname as dirname2, join as join7 } from "node:path"; +import { readdirSync, rmSync } from "node:fs"; +import { execSync as execSync2 } from "node:child_process"; +import { homedir as homedir4 } from "node:os"; // dist/src/commands/auth.js import { 
readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs"; @@ -26,29 +29,57 @@ function saveCredentials(creds) { writeFileSync(CREDS_PATH, JSON.stringify({ ...creds, savedAt: (/* @__PURE__ */ new Date()).toISOString() }, null, 2), { mode: 384 }); } -// dist/src/utils/stdin.js -function readStdin() { - return new Promise((resolve2, reject) => { - let data = ""; - process.stdin.setEncoding("utf-8"); - process.stdin.on("data", (chunk) => data += chunk); - process.stdin.on("end", () => { - try { - resolve2(JSON.parse(data)); - } catch (err) { - reject(new Error(`Failed to parse hook input: ${err}`)); - } - }); - process.stdin.on("error", reject); - }); +// dist/src/config.js +import { readFileSync as readFileSync2, existsSync as existsSync2 } from "node:fs"; +import { join as join2 } from "node:path"; +import { homedir as homedir2, userInfo } from "node:os"; +function loadConfig() { + const home = homedir2(); + const credPath = join2(home, ".deeplake", "credentials.json"); + let creds = null; + if (existsSync2(credPath)) { + try { + creds = JSON.parse(readFileSync2(credPath, "utf-8")); + } catch { + return null; + } + } + const env = process.env; + if (!env.HIVEMIND_TOKEN && env.DEEPLAKE_TOKEN) { + process.stderr.write("[hivemind] DEEPLAKE_* env vars are deprecated; use HIVEMIND_* instead\n"); + } + const token = env.HIVEMIND_TOKEN ?? env.DEEPLAKE_TOKEN ?? creds?.token; + const orgId = env.HIVEMIND_ORG_ID ?? env.DEEPLAKE_ORG_ID ?? creds?.orgId; + if (!token || !orgId) + return null; + return { + token, + orgId, + orgName: creds?.orgName ?? orgId, + userName: creds?.userName || userInfo().username || "unknown", + workspaceId: env.HIVEMIND_WORKSPACE_ID ?? env.DEEPLAKE_WORKSPACE_ID ?? creds?.workspaceId ?? "default", + apiUrl: env.HIVEMIND_API_URL ?? env.DEEPLAKE_API_URL ?? creds?.apiUrl ?? "https://api.deeplake.ai", + tableName: env.HIVEMIND_TABLE ?? env.DEEPLAKE_TABLE ?? "memory", + sessionsTableName: env.HIVEMIND_SESSIONS_TABLE ?? 
env.DEEPLAKE_SESSIONS_TABLE ?? "sessions", + memoryPath: env.HIVEMIND_MEMORY_PATH ?? env.DEEPLAKE_MEMORY_PATH ?? join2(home, ".deeplake", "memory") + }; } +// dist/src/deeplake-api.js +import { randomUUID } from "node:crypto"; +import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync as writeFileSync2 } from "node:fs"; +import { join as join4 } from "node:path"; +import { tmpdir } from "node:os"; + // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; -import { join as join2 } from "node:path"; -import { homedir as homedir2 } from "node:os"; +import { join as join3 } from "node:path"; +import { homedir as homedir3 } from "node:os"; var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var LOG = join2(homedir2(), ".deeplake", "hook-debug.log"); +var LOG = join3(homedir3(), ".deeplake", "hook-debug.log"); +function utcTimestamp(d = /* @__PURE__ */ new Date()) { + return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; +} function log(tag, msg) { if (!DEBUG) return; @@ -56,39 +87,360 @@ function log(tag, msg) { `); } -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; +// dist/src/utils/sql.js +function sqlStr(value) { + return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); +} + +// dist/src/deeplake-api.js +var log2 = (msg) => log("sdk", msg); +function summarizeSql(sql, maxLen = 220) { + const compact = sql.replace(/\s+/g, " ").trim(); + return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; +} +function traceSql(msg) { + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? 
process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) + return; + process.stderr.write(`[deeplake-sql] ${msg} +`); + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) + log2(msg); +} +var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); +var MAX_RETRIES = 3; +var BASE_DELAY_MS = 500; +var MAX_CONCURRENCY = 5; +var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); +var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); +function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +function isTimeoutError(error) { + const name = error instanceof Error ? error.name.toLowerCase() : ""; + const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase(); + return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out"); +} +function isDuplicateIndexError(error) { + const message = error instanceof Error ? 
error.message.toLowerCase() : String(error).toLowerCase(); + return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists"); +} +function isSessionInsertQuery(sql) { + return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql); +} +function isTransientHtml403(text) { + const body = text.toLowerCase(); + return body.includes(" this.waiting.push(resolve)); + } + release() { + this.active--; + const next = this.waiting.shift(); + if (next) { + this.active++; + next(); + } + } +}; +var DeeplakeApi = class { + token; + apiUrl; + orgId; + workspaceId; + tableName; + _pendingRows = []; + _sem = new Semaphore(MAX_CONCURRENCY); + _tablesCache = null; + constructor(token, apiUrl, orgId, workspaceId, tableName) { + this.token = token; + this.apiUrl = apiUrl; + this.orgId = orgId; + this.workspaceId = workspaceId; + this.tableName = tableName; + } + /** Execute SQL with retry on transient errors and bounded concurrency. */ + async query(sql) { + const startedAt = Date.now(); + const summary = summarizeSql(sql); + traceSql(`query start: ${summary}`); + await this._sem.acquire(); + try { + const rows = await this._queryWithRetry(sql); + traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`); + return rows; + } catch (e) { + const message = e instanceof Error ? 
e.message : String(e); + traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`); + throw e; + } finally { + this._sem.release(); + } + } + async _queryWithRetry(sql) { + let lastError; + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + let resp; + try { + const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS); + resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, { + method: "POST", + headers: { + Authorization: `Bearer ${this.token}`, + "Content-Type": "application/json", + "X-Activeloop-Org-Id": this.orgId + }, + signal, + body: JSON.stringify({ query: sql }) + }); + } catch (e) { + if (isTimeoutError(e)) { + lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`); + throw lastError; + } + lastError = e instanceof Error ? e : new Error(String(e)); + if (attempt < MAX_RETRIES) { + const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; + log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`); + await sleep(delay); + continue; + } + throw lastError; + } + if (resp.ok) { + const raw = await resp.json(); + if (!raw?.rows || !raw?.columns) + return []; + return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]]))); + } + const text = await resp.text().catch(() => ""); + const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text))); + if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) { + const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; + log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`); + await sleep(delay); + continue; + } + throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`); + } + throw lastError ?? 
new Error("Query failed: max retries exceeded"); + } + // ── Writes ────────────────────────────────────────────────────────────────── + /** Queue rows for writing. Call commit() to flush. */ + appendRows(rows) { + this._pendingRows.push(...rows); + } + /** Flush pending rows via SQL. */ + async commit() { + if (this._pendingRows.length === 0) + return; + const rows = this._pendingRows; + this._pendingRows = []; + const CONCURRENCY = 10; + for (let i = 0; i < rows.length; i += CONCURRENCY) { + const chunk = rows.slice(i, i + CONCURRENCY); + await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r))); + } + log2(`commit: ${rows.length} rows`); + } + async upsertRowSql(row) { + const ts = (/* @__PURE__ */ new Date()).toISOString(); + const cd = row.creationDate ?? ts; + const lud = row.lastUpdateDate ?? ts; + const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); + if (exists.length > 0) { + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + if (row.project !== void 0) + setClauses += `, project = '${sqlStr(row.project)}'`; + if (row.description !== void 0) + setClauses += `, description = '${sqlStr(row.description)}'`; + await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); + } else { + const id = randomUUID(); + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + if (row.project !== void 0) { + cols += ", project"; + vals += `, '${sqlStr(row.project)}'`; + } + if (row.description !== void 0) { + cols += ", description"; + vals += `, '${sqlStr(row.description)}'`; + } + await this.query(`INSERT INTO "${this.tableName}" (${cols}) 
VALUES (${vals})`); + } + } + /** Update specific columns on a row by path. */ + async updateColumns(path, columns) { + const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", "); + await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`); + } + // ── Convenience ───────────────────────────────────────────────────────────── + /** Create a BM25 search index on a column. */ + async createIndex(column) { + await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`); + } + buildLookupIndexName(table, suffix) { + return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_"); + } + getLookupIndexMarkerPath(table, suffix) { + const markerKey = [ + this.workspaceId, + this.orgId, + table, + suffix + ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_"); + return join4(getIndexMarkerDir(), `${markerKey}.json`); + } + hasFreshLookupIndexMarker(table, suffix) { + const markerPath = this.getLookupIndexMarkerPath(table, suffix); + if (!existsSync3(markerPath)) + return false; + try { + const raw = JSON.parse(readFileSync3(markerPath, "utf-8")); + const updatedAt = raw.updatedAt ? 
new Date(raw.updatedAt).getTime() : NaN; + if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS) + return false; + return true; + } catch { + return false; + } + } + markLookupIndexReady(table, suffix) { + mkdirSync2(getIndexMarkerDir(), { recursive: true }); + writeFileSync2(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8"); + } + async ensureLookupIndex(table, suffix, columnsSql) { + if (this.hasFreshLookupIndexMarker(table, suffix)) + return; + const indexName = this.buildLookupIndexName(table, suffix); + try { + await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`); + this.markLookupIndexReady(table, suffix); + } catch (e) { + if (isDuplicateIndexError(e)) { + this.markLookupIndexReady(table, suffix); + return; + } + log2(`index "${indexName}" skipped: ${e.message}`); + } + } + /** List all tables in the workspace (with retry). */ + async listTables(forceRefresh = false) { + if (!forceRefresh && this._tablesCache) + return [...this._tablesCache]; + const { tables, cacheable } = await this._fetchTables(); + if (cacheable) + this._tablesCache = [...tables]; + return tables; + } + async _fetchTables() { + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + try { + const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, { + headers: { + Authorization: `Bearer ${this.token}`, + "X-Activeloop-Org-Id": this.orgId + } + }); + if (resp.ok) { + const data = await resp.json(); + return { + tables: (data.tables ?? 
[]).map((t) => t.table_name), + cacheable: true + }; + } + if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) { + await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200); + continue; + } + return { tables: [], cacheable: false }; + } catch { + if (attempt < MAX_RETRIES) { + await sleep(BASE_DELAY_MS * Math.pow(2, attempt)); + continue; + } + return { tables: [], cacheable: false }; + } + } + return { tables: [], cacheable: false }; } + /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */ + async ensureTable(name) { + const tbl = name ?? this.tableName; + const tables = await this.listTables(); + if (!tables.includes(tbl)) { + log2(`table "${tbl}" not found, creating`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + log2(`table "${tbl}" created`); + if (!tables.includes(tbl)) + this._tablesCache = [...tables, tbl]; + } + } + /** Create the sessions table (uses JSONB for message since every row is a JSON event). 
*/ + async ensureSessionsTable(name) { + const tables = await this.listTables(); + if (!tables.includes(name)) { + log2(`table "${name}" not found, creating`); + await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + log2(`table "${name}" created`); + if (!tables.includes(name)) + this._tablesCache = [...tables, name]; + } + await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`); + } +}; + +// dist/src/utils/stdin.js +function readStdin() { + return new Promise((resolve, reject) => { + let data = ""; + process.stdin.setEncoding("utf-8"); + process.stdin.on("data", (chunk) => data += chunk); + process.stdin.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (err) { + reject(new Error(`Failed to parse hook input: ${err}`)); + } + }); + process.stdin.on("error", reject); + }); } -// dist/src/hooks/version-check.js -import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "node:fs"; -import { dirname, join as join3 } from "node:path"; -import { homedir as homedir3 } from "node:os"; -var DEFAULT_VERSION_CACHE_PATH = join3(homedir3(), ".deeplake", ".version-check.json"); -var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3; +// dist/src/utils/version-check.js +import { readFileSync as readFileSync4 } from "node:fs"; +import { dirname, join as join5 } from "node:path"; +var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; function getInstalledVersion(bundleDir, pluginManifestDir) { 
try { - const pluginJson = join3(bundleDir, "..", pluginManifestDir, "plugin.json"); - const plugin = JSON.parse(readFileSync2(pluginJson, "utf-8")); + const pluginJson = join5(bundleDir, "..", pluginManifestDir, "plugin.json"); + const plugin = JSON.parse(readFileSync4(pluginJson, "utf-8")); if (plugin.version) return plugin.version; } catch { } let dir = bundleDir; for (let i = 0; i < 5; i++) { - const candidate = join3(dir, "package.json"); + const candidate = join5(dir, "package.json"); try { - const pkg = JSON.parse(readFileSync2(candidate, "utf-8")); + const pkg = JSON.parse(readFileSync4(candidate, "utf-8")); if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version) return pkg.version; } catch { @@ -100,38 +452,47 @@ function getInstalledVersion(bundleDir, pluginManifestDir) { } return null; } +async function getLatestVersion(timeoutMs = 3e3) { + try { + const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) }); + if (!res.ok) + return null; + const pkg = await res.json(); + return pkg.version ?? 
null; + } catch { + return null; + } +} function isNewer(latest, current) { - const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number); + const parse = (v) => v.split(".").map(Number); const [la, lb, lc] = parse(latest); const [ca, cb, cc] = parse(current); return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc; } -function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) { - if (!existsSync2(cachePath)) - return null; - try { - const parsed = JSON.parse(readFileSync2(cachePath, "utf-8")); - if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) { - return parsed; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join6 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join6(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync3(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } } - } catch { - } - return null; -} -function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) { - const cached = readVersionCache(cachePath); - if (!cached || cached.url !== url) - return void 0; - if (nowMs - cached.checkedAt > ttlMs) - return void 0; - return cached.latest; + }; } // dist/src/hooks/session-start.js -var log2 = (msg) => log("session-start", msg); -var __bundleDir = dirname2(fileURLToPath2(import.meta.url)); -var AUTH_CMD = join4(__bundleDir, "commands", "auth-login.js"); -var CLAUDE_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: You have TWO memory sources. 
ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information: +var log3 = (msg) => log("session-start", msg); +var __bundleDir = dirname2(fileURLToPath(import.meta.url)); +var AUTH_CMD = join7(__bundleDir, "commands", "auth-login.js"); +var context = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information: 1. Your built-in memory (~/.claude/) \u2014 personal per-project notes 2. Deeplake global memory (~/.deeplake/memory/) \u2014 global memory shared across all sessions, users, and agents in the org @@ -139,19 +500,9 @@ var CLAUDE_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: You have TWO memory sources Deeplake memory structure: - ~/.deeplake/memory/index.md \u2014 START HERE, table of all sessions - ~/.deeplake/memory/summaries/username/*.md \u2014 AI-generated wiki summaries per session -- ~/.deeplake/memory/sessions/{author}/* \u2014 raw session data (last resort) - -SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw session files if summaries don't have enough detail. Do NOT jump straight to raw session files. -When index.md points to a likely match, read that exact summary or session file directly before trying broader grep variants. -If index.md already points to likely candidate files, open those exact files before broadening into synonym greps or wide exploratory scans. -Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots when the question is about Deeplake memory. -TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date. 
-TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding. -ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging. -NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files. -NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent. -SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases. -RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found." +- ~/.deeplake/memory/sessions/username/*.jsonl \u2014 raw session data (last resort) + +SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw JSONL if summaries don't have enough detail. Do NOT jump straight to JSONL files. 
Search command: Grep pattern="keyword" path="~/.deeplake/memory" @@ -171,71 +522,142 @@ IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, etc.) to LIMITS: Do NOT spawn subagents to read deeplake memory. If a file returns empty after 2 attempts, skip it and move on. Report what you found rather than exhaustively retrying. Debugging: Set HIVEMIND_DEBUG=1 to enable verbose logging to ~/.deeplake/hook-debug.log`; -var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; -function buildSessionStartAdditionalContext(args) { - const resolvedContext = CLAUDE_SESSION_START_CONTEXT.replace(/HIVEMIND_AUTH_CMD/g, args.authCommand); - let updateNotice = ""; - if (args.currentVersion) { - if (args.latestVersion && isNewer(args.latestVersion, args.currentVersion)) { - updateNotice = ` - -\u2B06\uFE0F Hivemind update available: ${args.currentVersion} \u2192 ${args.latestVersion}.`; - } else { - updateNotice = ` - -\u2705 Hivemind v${args.currentVersion}`; - } +var HOME = homedir4(); +var { log: wikiLog } = makeWikiLogger(join7(HOME, ".claude", "hooks")); +async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) { + const summaryPath = `/summaries/${userName}/${sessionId}.md`; + const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`); + if (existing.length > 0) { + wikiLog(`SessionStart: summary exists for ${sessionId} (resumed)`); + return; } - return args.creds?.token ? `${resolvedContext} - -Logged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${updateNotice}` : `${resolvedContext} - -\u26A0\uFE0F Not logged in to Deeplake. Memory search will not work. Ask the user to run /hivemind:login to authenticate.${updateNotice}`; + const now = (/* @__PURE__ */ new Date()).toISOString(); + const projectName = cwd.split("/").pop() ?? 
"unknown"; + const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; + const content = [ + `# Session ${sessionId}`, + `- **Source**: ${sessionSource}`, + `- **Started**: ${now}`, + `- **Project**: ${projectName}`, + `- **Status**: in-progress`, + "" + ].join("\n"); + const filename = `${sessionId}.md`; + await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')`); + wikiLog(`SessionStart: created placeholder for ${sessionId} (${cwd})`); } -async function runSessionStartHook(_input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, currentVersion = getInstalledVersion(__bundleDir, ".claude-plugin"), latestVersion = currentVersion ? readFreshCachedLatestVersion(GITHUB_RAW_PKG, DEFAULT_VERSION_CACHE_TTL_MS) ?? null : null, authCommand = AUTH_CMD, logFn = log2 } = deps; - if (wikiWorker) - return null; +async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") + return; + const input = await readStdin(); + let creds = loadCredentials(); if (!creds?.token) { - logFn("no credentials found \u2014 run /hivemind:login to authenticate"); + log3("no credentials found \u2014 run /hivemind:login to authenticate"); } else { - logFn(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); + log3(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); if (creds.token && !creds.userName) { try { - const { userInfo } = await import("node:os"); - creds.userName = userInfo().username ?? 
"unknown"; - saveCredentialsFn(creds); - logFn(`backfilled and persisted userName: ${creds.userName}`); + const { userInfo: userInfo2 } = await import("node:os"); + creds.userName = userInfo2().username ?? "unknown"; + saveCredentials(creds); + log3(`backfilled and persisted userName: ${creds.userName}`); } catch { } } } - return { + const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false"; + if (input.session_id && creds?.token) { + try { + const config = loadConfig(); + if (config) { + const table = config.tableName; + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table); + await api.ensureTable(); + await api.ensureSessionsTable(sessionsTable); + if (captureEnabled) { + await createPlaceholder(api, table, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId); + log3("placeholder created"); + } else { + log3("placeholder skipped (HIVEMIND_CAPTURE=false)"); + } + } + } catch (e) { + log3(`placeholder failed: ${e.message}`); + wikiLog(`SessionStart: placeholder failed for ${input.session_id}: ${e.message}`); + } + } + const autoupdate = creds?.autoupdate !== false; + let updateNotice = ""; + try { + const current = getInstalledVersion(__bundleDir, ".claude-plugin"); + if (current) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { + if (autoupdate) { + log3(`autoupdate: updating ${current} \u2192 ${latest}`); + try { + const scopes = ["user", "project", "local", "managed"]; + const cmd = scopes.map((s) => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null || true`).join("; "); + execSync2(cmd, { stdio: "ignore", timeout: 6e4 }); + try { + const cacheParent = join7(homedir4(), ".claude", "plugins", "cache", "hivemind", "hivemind"); + const entries = readdirSync(cacheParent, { withFileTypes: true }); + for (const e of entries) { + if (e.isDirectory() && e.name !== latest) { + 
rmSync(join7(cacheParent, e.name), { recursive: true, force: true }); + log3(`cache cleanup: removed old version ${e.name}`); + } + } + } catch (e) { + log3(`cache cleanup failed: ${e.message}`); + } + updateNotice = ` + +\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply.`; + process.stderr.write(`\u2705 Hivemind auto-updated: ${current} \u2192 ${latest}. Run /reload-plugins to apply. +`); + log3(`autoupdate succeeded: ${current} \u2192 ${latest}`); + } catch (e) { + updateNotice = ` + +\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually.`; + process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed \u2014 run /hivemind:update to upgrade manually. +`); + log3(`autoupdate failed: ${e.message}`); + } + } else { + updateNotice = ` + +\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade.`; + process.stderr.write(`\u2B06\uFE0F Hivemind update available: ${current} \u2192 ${latest}. Run /hivemind:update to upgrade. +`); + log3(`update available (autoupdate off): ${current} \u2192 ${latest}`); + } + } else { + log3(`version up to date: ${current}`); + updateNotice = ` + +\u2705 Hivemind v${current} (up to date)`; + } + } + } catch (e) { + log3(`version check failed: ${e.message}`); + } + const resolvedContext = context.replace(/HIVEMIND_AUTH_CMD/g, AUTH_CMD); + const additionalContext = creds?.token ? `${resolvedContext} + +Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${updateNotice}` : `${resolvedContext} + +\u26A0\uFE0F Not logged in to Deeplake. Memory search will not work. 
Ask the user to run /hivemind:login to authenticate.${updateNotice}`; + console.log(JSON.stringify({ hookSpecificOutput: { hookEventName: "SessionStart", - additionalContext: buildSessionStartAdditionalContext({ - authCommand, - creds, - currentVersion, - latestVersion - }) + additionalContext } - }; + })); } -async function main() { - await readStdin(); - const result = await runSessionStartHook({}); - if (result) - console.log(JSON.stringify(result)); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log2(`fatal: ${e.message}`); - process.exit(0); - }); -} -export { - CLAUDE_SESSION_START_CONTEXT, - buildSessionStartAdditionalContext, - runSessionStartHook -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/claude-code/bundle/shell/deeplake-shell.js b/claude-code/bundle/shell/deeplake-shell.js index 5872059..0793149 100755 --- a/claude-code/bundle/shell/deeplake-shell.js +++ b/claude-code/bundle/shell/deeplake-shell.js @@ -66785,18 +66785,18 @@ function sqlLike(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -67317,13 +67317,13 @@ function buildPathCondition(targetPath) { const clean = targetPath.replace(/\/+$/, ""); if (/[*?]/.test(clean)) { const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_"); - return `path LIKE '${likePattern}'`; + return `path LIKE '${likePattern}' ESCAPE '\\'`; } const base = clean.split("/").pop() ?? ""; if (base.includes(".")) { return `path = '${sqlStr(clean)}'`; } - return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`; + return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%' ESCAPE '\\')`; } async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) { const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts; @@ -69147,6 +69147,13 @@ function createGrepCommand(client, fs3, table, sessionsTable) { // dist/src/shell/deeplake-shell.js async function main() { + const isOneShot = process.argv.includes("-c"); + if (isOneShot) { + delete process.env["HIVEMIND_TRACE_SQL"]; + delete process.env["DEEPLAKE_TRACE_SQL"]; + delete process.env["HIVEMIND_DEBUG"]; + delete process.env["DEEPLAKE_DEBUG"]; + } const config = loadConfig(); if (!config) { process.stderr.write("Deeplake credentials not found.\nSet HIVEMIND_TOKEN + HIVEMIND_ORG_ID in environment, or create ~/.deeplake/credentials.json\n"); @@ -69155,7 +69162,6 @@ async function main() { const table = process.env["HIVEMIND_TABLE"] ?? "memory"; const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions"; const mount = process.env["HIVEMIND_MOUNT"] ?? "/"; - const isOneShot = process.argv.includes("-c"); const client = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table); if (!isOneShot) { process.stderr.write(`Connecting to deeplake://${config.workspaceId}/${table} ... 
diff --git a/claude-code/bundle/wiki-worker.js b/claude-code/bundle/wiki-worker.js index cd53b4e..02468a3 100755 --- a/claude-code/bundle/wiki-worker.js +++ b/claude-code/bundle/wiki-worker.js @@ -14,11 +14,18 @@ var LOG = join(homedir(), ".deeplake", "hook-debug.log"); function utcTimestamp(d = /* @__PURE__ */ new Date()) { return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; } +function log(tag, msg) { + if (!DEBUG) + return; + appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg} +`); +} // dist/src/hooks/summary-state.js import { readFileSync, writeFileSync, writeSync, mkdirSync, renameSync, existsSync, unlinkSync, openSync, closeSync } from "node:fs"; import { homedir as homedir2 } from "node:os"; import { join as join2 } from "node:path"; +var dlog = (msg) => log("summary-state", msg); var STATE_DIR = join2(homedir2(), ".claude", "hooks", "summary-state"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); function statePath(sessionId) { @@ -56,9 +63,11 @@ function withRmwLock(sessionId, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { + dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`); } continue; } @@ -71,7 +80,8 @@ function withRmwLock(sessionId, fn) { closeSync(fd); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`); } } } @@ -88,7 +98,10 @@ function finalizeSummary(sessionId, jsonlLines) { function releaseLock(sessionId) { try { unlinkSync(lockPath(sessionId)); - } catch { + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); + } } } @@ -118,6 +131,7 @@ async function uploadSummary(query2, params) { } // dist/src/hooks/wiki-worker.js +var dlog2 = (msg) => log("wiki-worker", msg); 
var cfg = JSON.parse(readFileSync2(process.argv[2], "utf-8")); var tmpDir = cfg.tmpDir; var tmpJsonl = join3(tmpDir, "session.jsonl"); @@ -165,7 +179,8 @@ async function query(sql, retries = 4) { function cleanup() { try { rmSync(tmpDir, { recursive: true, force: true }); - } catch { + } catch (cleanupErr) { + dlog2(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`); } } async function main() { @@ -248,7 +263,8 @@ async function main() { cleanup(); try { releaseLock(cfg.sessionId); - } catch { + } catch (releaseErr) { + dlog2(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`); } } } diff --git a/claude-code/tests/capture-hook.test.ts b/claude-code/tests/capture-hook.test.ts new file mode 100644 index 0000000..c40e8e6 --- /dev/null +++ b/claude-code/tests/capture-hook.test.ts @@ -0,0 +1,313 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +/** + * Direct source-level tests for src/hooks/capture.ts. The module runs + * main() at import time; each scenario resets the registry and imports + * fresh. Mocks: readStdin, loadConfig, DeeplakeApi, spawn-wiki-worker, + * summary-state. Everything else (SQL assembly, entry shape, meta + * merging, JSON escaping) runs for real. + * + * Coverage target: each event-type branch (prompt / tool / assistant / + * unknown), the CAPTURE guard, the table-missing retry, the unrelated + * error re-throw, and every leg of the periodic-trigger helper + * (threshold not met / met + lock free / met + lock held / spawn + * throws / outer catch). 
+ */ + +const stdinMock = vi.fn(); +const loadConfigMock = vi.fn(); +const spawnMock = vi.fn(); +const wikiLogMock = vi.fn(); +const tryAcquireLockMock = vi.fn(); +const releaseLockMock = vi.fn(); +const bumpTotalCountMock = vi.fn(); +const loadTriggerConfigMock = vi.fn(); +const shouldTriggerMock = vi.fn(); +const debugLogMock = vi.fn(); +const queryMock = vi.fn(); +const ensureSessionsTableMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/hooks/spawn-wiki-worker.js", () => ({ + spawnWikiWorker: (...a: any[]) => spawnMock(...a), + wikiLog: (...a: any[]) => wikiLogMock(...a), + bundleDirFromImportMeta: () => "/fake/bundle", +})); +vi.mock("../../src/hooks/summary-state.js", () => ({ + tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a), + releaseLock: (...a: any[]) => releaseLockMock(...a), + bumpTotalCount: (...a: any[]) => bumpTotalCountMock(...a), + loadTriggerConfig: (...a: any[]) => loadTriggerConfigMock(...a), + shouldTrigger: (...a: any[]) => shouldTriggerMock(...a), +})); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_tag: string, msg: string) => debugLogMock(msg), +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { + query(sql: string) { return queryMock(sql); } + ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); } + }, +})); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + await import("../../src/hooks/capture.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +const validConfig = { + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + 
userName: "alice", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +beforeEach(() => { + stdinMock.mockReset().mockResolvedValue({ + session_id: "sid-1", + cwd: "/workspaces/proj", + hook_event_name: "UserPromptSubmit", + prompt: "hello", + }); + loadConfigMock.mockReset().mockReturnValue(validConfig); + spawnMock.mockReset(); + wikiLogMock.mockReset(); + tryAcquireLockMock.mockReset().mockReturnValue(true); + releaseLockMock.mockReset(); + bumpTotalCountMock.mockReset().mockReturnValue({ + lastSummaryAt: Date.now(), lastSummaryCount: 0, totalCount: 1, + }); + loadTriggerConfigMock.mockReset().mockReturnValue({ everyNMessages: 50, everyHours: 2 }); + shouldTriggerMock.mockReset().mockReturnValue(false); + debugLogMock.mockReset(); + queryMock.mockReset().mockResolvedValue([]); + ensureSessionsTableMock.mockReset().mockResolvedValue(undefined); +}); + +afterEach(() => { vi.restoreAllMocks(); }); + +describe("capture hook — guard", () => { + it("returns without touching stdin when HIVEMIND_CAPTURE=false", async () => { + await runHook({ HIVEMIND_CAPTURE: "false" }); + expect(stdinMock).not.toHaveBeenCalled(); + expect(queryMock).not.toHaveBeenCalled(); + }); + + it("returns when loadConfig returns null", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("no config"); + expect(queryMock).not.toHaveBeenCalled(); + }); +}); + +describe("capture hook — event-type branches", () => { + it("user_message: INSERT contains prompt content", async () => { + await runHook(); + expect(queryMock).toHaveBeenCalledTimes(1); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toMatch(/INSERT INTO "sessions"/); + expect(sql).toContain('"type":"user_message"'); + expect(sql).toContain('"content":"hello"'); + expect(debugLogMock).toHaveBeenCalledWith(expect.stringMatching(/^user session=sid-1$/)); + }); + + it("tool_call: INSERT contains tool_name + serialized 
input/response", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-2", + cwd: "/p", + hook_event_name: "PostToolUse", + tool_name: "Bash", + tool_use_id: "tu-1", + tool_input: { command: "ls" }, + tool_response: { stdout: "file" }, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"tool_call"'); + expect(sql).toContain('"tool_name":"Bash"'); + expect(sql).toContain('tool_input'); + expect(sql).toContain('tool_response'); + expect(debugLogMock).toHaveBeenCalledWith(expect.stringMatching(/^tool=Bash session=sid-2$/)); + }); + + it("assistant_message without agent_transcript_path", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-3", + cwd: "/p", + hook_event_name: "Stop", + last_assistant_message: "reply text", + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"assistant_message"'); + expect(sql).toContain('"content":"reply text"'); + expect(sql).not.toContain("agent_transcript_path"); + }); + + it("assistant_message WITH agent_transcript_path", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-4", + cwd: "/p", + hook_event_name: "SubagentStop", + last_assistant_message: "sub reply", + agent_transcript_path: "/tmp/agent.jsonl", + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"agent_transcript_path":"/tmp/agent.jsonl"'); + }); + + it("unknown event: skipped, no INSERT", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-x", cwd: "/p", hook_event_name: "WeirdHook", + // no prompt, no tool_name, no last_assistant_message + }); + await runHook(); + expect(queryMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith("unknown event, skipping"); + }); +}); + +describe("capture hook — INSERT fallback + error paths", () => { + it("creates the sessions table and retries when table is missing", async () => { + queryMock + 
.mockRejectedValueOnce(new Error('relation "sessions" does not exist')) + .mockResolvedValueOnce([]); + await runHook(); + expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions"); + expect(queryMock).toHaveBeenCalledTimes(2); + expect(debugLogMock).toHaveBeenCalledWith("table missing, creating and retrying"); + }); + + it("creates the sessions table when the API returns 'permission denied'", async () => { + queryMock + .mockRejectedValueOnce(new Error("permission denied for relation sessions")) + .mockResolvedValueOnce([]); + await runHook(); + expect(ensureSessionsTableMock).toHaveBeenCalled(); + expect(queryMock).toHaveBeenCalledTimes(2); + }); + + it("re-throws unrelated errors (caught by main().catch)", async () => { + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + queryMock.mockRejectedValue(new Error("random SQL boom")); + await runHook(); + // The outer catch wraps the throw into the fatal log and exits. + expect(debugLogMock).toHaveBeenCalledWith("fatal: random SQL boom"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +describe("capture hook — periodic trigger helper", () => { + it("does nothing when HIVEMIND_WIKI_WORKER=1 (nested worker)", async () => { + await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + // The inner call is bypassed — but CAPTURE is also computed at load, + // so with WIKI_WORKER=1 the capture itself still runs (CAPTURE default + // is true). We just assert bumpTotalCount was NOT called. 
+ expect(bumpTotalCountMock).not.toHaveBeenCalled(); + }); + + it("does not spawn when shouldTrigger returns false", async () => { + shouldTriggerMock.mockReturnValue(false); + await runHook(); + expect(bumpTotalCountMock).toHaveBeenCalledTimes(1); + expect(tryAcquireLockMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("spawns the wiki worker when shouldTrigger=true and lock acquired", async () => { + shouldTriggerMock.mockReturnValue(true); + bumpTotalCountMock.mockReturnValue({ + lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 10, + }); + await runHook(); + expect(tryAcquireLockMock).toHaveBeenCalledWith("sid-1"); + expect(wikiLogMock).toHaveBeenCalledWith( + expect.stringMatching(/^Periodic: threshold hit \(total=10,/), + ); + expect(spawnMock).toHaveBeenCalledTimes(1); + expect(spawnMock.mock.calls[0][0]).toMatchObject({ sessionId: "sid-1", reason: "Periodic" }); + }); + + it("logs 'periodic trigger suppressed' when the lock is already held", async () => { + shouldTriggerMock.mockReturnValue(true); + tryAcquireLockMock.mockReturnValue(false); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger suppressed (lock held)"), + ); + }); + + it("releases the lock if spawnWikiWorker throws", async () => { + shouldTriggerMock.mockReturnValue(true); + spawnMock.mockImplementation(() => { throw new Error("spawn failed"); }); + await runHook(); + expect(releaseLockMock).toHaveBeenCalledWith("sid-1"); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: spawn failed"), + ); + }); + + it("still swallows the error when releaseLock ALSO throws", async () => { + shouldTriggerMock.mockReturnValue(true); + spawnMock.mockImplementation(() => { throw new Error("spawn failed"); }); + releaseLockMock.mockImplementation(() => { throw new Error("release failed"); }); + await runHook(); + // We should still 
see the outer periodic-trigger error log — the + // release throw is deliberately swallowed. + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: spawn failed"), + ); + }); + + it("catches errors thrown by bumpTotalCount itself (outer try)", async () => { + bumpTotalCountMock.mockImplementation(() => { throw new Error("bump boom"); }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: bump boom"), + ); + }); +}); + +describe("capture hook — defensive fallback branches", () => { + it("falls back to 'default' workspace when config.workspaceId is undefined", async () => { + loadConfigMock.mockReturnValue({ ...validConfig, workspaceId: undefined }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + // sessionPath uses workspace; with undefined it should land on 'default' + expect(sql).toContain("alice_acme_default_sid-1.jsonl"); + }); + + it("projectName falls back to 'unknown' when cwd is undefined", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-cwd", hook_event_name: "UserPromptSubmit", prompt: "x", + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("'unknown'"); + }); + + it("hook_event_name defaults to empty string when missing", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-no-evt", cwd: "/p", prompt: "hi", + // no hook_event_name + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + // description column (hook_event_name ?? '') should land as '' + // It appears between the projectName and the author — we just + // assert the INSERT still went through. 
+ expect(queryMock).toHaveBeenCalledTimes(1); + expect(sql).toMatch(/'[^']*', 'claude_code'/); + }); +}); diff --git a/claude-code/tests/codex-capture-hook.test.ts b/claude-code/tests/codex-capture-hook.test.ts new file mode 100644 index 0000000..9992182 --- /dev/null +++ b/claude-code/tests/codex-capture-hook.test.ts @@ -0,0 +1,284 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +/** + * Direct source-level tests for src/hooks/codex/capture.ts. Mirrors the + * claude-code capture-hook test: mocks the stdin / config / API / + * summary-state seams and asserts SQL shape, branch coverage for + * UserPromptSubmit / PostToolUse / unknown, and the periodic trigger + * helper. + */ + +const stdinMock = vi.fn(); +const loadConfigMock = vi.fn(); +const spawnMock = vi.fn(); +const wikiLogMock = vi.fn(); +const tryAcquireLockMock = vi.fn(); +const releaseLockMock = vi.fn(); +const bumpTotalCountMock = vi.fn(); +const loadTriggerConfigMock = vi.fn(); +const shouldTriggerMock = vi.fn(); +const debugLogMock = vi.fn(); +const queryMock = vi.fn(); +const ensureSessionsTableMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/hooks/codex/spawn-wiki-worker.js", () => ({ + spawnCodexWikiWorker: (...a: any[]) => spawnMock(...a), + wikiLog: (...a: any[]) => wikiLogMock(...a), + bundleDirFromImportMeta: () => "/fake/codex/bundle", +})); +vi.mock("../../src/hooks/summary-state.js", () => ({ + tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a), + releaseLock: (...a: any[]) => releaseLockMock(...a), + bumpTotalCount: (...a: any[]) => bumpTotalCountMock(...a), + loadTriggerConfig: (...a: any[]) => loadTriggerConfigMock(...a), + shouldTrigger: (...a: any[]) => shouldTriggerMock(...a), +})); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_tag: string, msg: string) => 
debugLogMock(msg), +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { + query(sql: string) { return queryMock(sql); } + ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); } + }, +})); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + await import("../../src/hooks/codex/capture.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +const validConfig = { + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + userName: "alice", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +beforeEach(() => { + stdinMock.mockReset().mockResolvedValue({ + session_id: "sid-1", + cwd: "/workspaces/proj", + hook_event_name: "UserPromptSubmit", + model: "gpt-5", + prompt: "hello", + }); + loadConfigMock.mockReset().mockReturnValue(validConfig); + spawnMock.mockReset(); + wikiLogMock.mockReset(); + tryAcquireLockMock.mockReset().mockReturnValue(true); + releaseLockMock.mockReset(); + bumpTotalCountMock.mockReset().mockReturnValue({ + lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 1, + }); + loadTriggerConfigMock.mockReset().mockReturnValue({ everyNMessages: 50, everyHours: 2 }); + shouldTriggerMock.mockReset().mockReturnValue(false); + debugLogMock.mockReset(); + queryMock.mockReset().mockResolvedValue([]); + ensureSessionsTableMock.mockReset().mockResolvedValue(undefined); +}); + +afterEach(() => { vi.restoreAllMocks(); }); + +describe("codex capture hook — guards", () => { + it("returns when HIVEMIND_CAPTURE=false", async () => { + await runHook({ HIVEMIND_CAPTURE: "false" }); + expect(stdinMock).not.toHaveBeenCalled(); + }); + + it("returns when loadConfig is null", async () => { + loadConfigMock.mockReturnValue(null); 
+ await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("no config"); + expect(queryMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex capture hook — event-type branches", () => { + it("user_message: INSERT contains prompt", async () => { + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toMatch(/INSERT INTO "sessions"/); + expect(sql).toContain('"type":"user_message"'); + expect(sql).toContain('"content":"hello"'); + expect(sql).toContain("'codex'"); + }); + + it("tool_call: INSERT contains tool_name and model metadata", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-2", cwd: "/p", + hook_event_name: "PostToolUse", + model: "gpt-5", + tool_name: "Bash", + tool_use_id: "tu-1", + tool_input: { command: "ls" }, + tool_response: { stdout: "x" }, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"tool_call"'); + expect(sql).toContain('"tool_name":"Bash"'); + expect(sql).toContain('"model":"gpt-5"'); + }); + + it("unknown hook_event_name → log and skip", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-x", cwd: "/p", hook_event_name: "SomethingElse", model: "m", + }); + await runHook(); + expect(queryMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith("unknown event: SomethingElse, skipping"); + }); + + it("UserPromptSubmit without prompt → skipped (defensive)", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-y", cwd: "/p", hook_event_name: "UserPromptSubmit", model: "m", + }); + await runHook(); + expect(queryMock).not.toHaveBeenCalled(); + }); + + it("PostToolUse without tool_name → skipped (defensive)", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-z", cwd: "/p", hook_event_name: "PostToolUse", model: "m", + }); + await runHook(); + expect(queryMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex capture hook — INSERT fallbacks", () => { + it("retries after 
creating the sessions table on 'does not exist'", async () => { + queryMock + .mockRejectedValueOnce(new Error('relation "sessions" does not exist')) + .mockResolvedValueOnce([]); + await runHook(); + expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions"); + expect(queryMock).toHaveBeenCalledTimes(2); + }); + + it("retries on 'permission denied' too", async () => { + queryMock + .mockRejectedValueOnce(new Error("permission denied")) + .mockResolvedValueOnce([]); + await runHook(); + expect(ensureSessionsTableMock).toHaveBeenCalled(); + }); + + it("re-throws an unrelated SQL error", async () => { + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + queryMock.mockRejectedValue(new Error("syntax error")); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("fatal: syntax error"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +describe("codex capture hook — periodic trigger", () => { + it("bypasses the trigger when HIVEMIND_WIKI_WORKER=1", async () => { + await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + expect(bumpTotalCountMock).not.toHaveBeenCalled(); + }); + + it("no spawn when shouldTrigger=false", async () => { + shouldTriggerMock.mockReturnValue(false); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("spawns when shouldTrigger=true + lock free", async () => { + shouldTriggerMock.mockReturnValue(true); + bumpTotalCountMock.mockReturnValue({ + lastSummaryAt: 0, lastSummaryCount: 0, totalCount: 10, + }); + await runHook(); + expect(spawnMock).toHaveBeenCalledTimes(1); + expect(spawnMock.mock.calls[0][0]).toMatchObject({ sessionId: "sid-1", reason: "Periodic" }); + }); + + it("suppresses when lock held", async () => { + shouldTriggerMock.mockReturnValue(true); + tryAcquireLockMock.mockReturnValue(false); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger suppressed (lock 
held)"), + ); + }); + + it("releases the lock when spawn throws", async () => { + shouldTriggerMock.mockReturnValue(true); + spawnMock.mockImplementation(() => { throw new Error("spawn boom"); }); + await runHook(); + expect(releaseLockMock).toHaveBeenCalledWith("sid-1"); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: spawn boom"), + ); + }); + + it("swallows release failure on top of spawn failure", async () => { + shouldTriggerMock.mockReturnValue(true); + spawnMock.mockImplementation(() => { throw new Error("spawn boom"); }); + releaseLockMock.mockImplementation(() => { throw new Error("release boom"); }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: spawn boom"), + ); + }); + + it("outer try catches bumpTotalCount throw", async () => { + bumpTotalCountMock.mockImplementation(() => { throw new Error("bump boom"); }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic trigger error: bump boom"), + ); + }); +}); + +describe("codex capture hook — defensive fallbacks", () => { + it("falls back projectName='unknown' when cwd is '' ", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-c", cwd: "", hook_event_name: "UserPromptSubmit", model: "m", prompt: "x", + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("'unknown'"); + }); + + it("falls back projectName='unknown' when cwd is undefined at runtime", async () => { + // The interface types cwd as string, but runtime values can arrive + // undefined from untyped hook inputs. The ?? fallbacks exist for this. 
+ stdinMock.mockResolvedValue({ + session_id: "sid-d", hook_event_name: "UserPromptSubmit", model: "m", prompt: "x", + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("'unknown'"); + }); + + it("passes empty hook_event_name through the description column fallback", async () => { + // `input.hook_event_name ?? ''` — construct an input where the field + // is legitimately missing to exercise the nullish coalesce. + stdinMock.mockResolvedValue({ + session_id: "sid-e", cwd: "/p", model: "m", + }); + await runHook(); + // UserPromptSubmit / PostToolUse are the only types the codex + // capture handles, so this falls into "unknown event, skipping". + // That's fine — the branch we want is the `?? ''` in the INSERT + // string which runs later; to reach it we supply a prompt and + // leave hook_event_name undefined. Codex capture gates on + // hook_event_name === 'UserPromptSubmit', so undefined won't match + // and the INSERT is skipped. That is itself a useful branch. + expect(queryMock).not.toHaveBeenCalled(); + }); +}); diff --git a/claude-code/tests/codex-session-start-hook.test.ts b/claude-code/tests/codex-session-start-hook.test.ts new file mode 100644 index 0000000..5f47909 --- /dev/null +++ b/claude-code/tests/codex-session-start-hook.test.ts @@ -0,0 +1,175 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { EventEmitter } from "node:events"; + +/** + * Source-level tests for src/hooks/codex/session-start.ts. Codex has + * no async-hook mechanism, so this fast-path hook synchronously reads + * creds, emits context on stdout, and SPAWNS a detached node process + * running session-start-setup.js for the heavy work. + * + * Mocks: readStdin, loadCredentials, and child_process.spawn. The + * spawn mock returns a fake child with a writable stdin and an + * unref() method so the hook body can drive it end-to-end without + * actually forking a process. 
+ */ + +const stdinMock = vi.fn(); +const loadCredsMock = vi.fn(); +const debugLogMock = vi.fn(); +const spawnMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/commands/auth.js", () => ({ + loadCredentials: (...a: any[]) => loadCredsMock(...a), +})); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_t: string, msg: string) => debugLogMock(msg), +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, spawn: (...a: any[]) => spawnMock(...a) }; +}); + +function makeFakeChild() { + const stdin = new EventEmitter() as any; + stdin.write = vi.fn(); + stdin.end = vi.fn(); + return { + stdin, + unref: vi.fn(), + }; +} + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + const collected: string[] = []; + const originalLog = console.log; + console.log = (...args: any[]) => { collected.push(args.join(" ")); }; + try { + await import("../../src/hooks/codex/session-start.js"); + await new Promise(r => setImmediate(r)); + return collected.join("\n") || null; + } finally { + console.log = originalLog; + } +} + +beforeEach(() => { + stdinMock.mockReset().mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "SessionStart", model: "gpt-5", + }); + loadCredsMock.mockReset().mockReturnValue({ + token: "tok", orgId: "org-id", orgName: "acme", userName: "alice", workspaceId: "default", + }); + debugLogMock.mockReset(); + spawnMock.mockReset().mockImplementation(() => makeFakeChild()); +}); + +afterEach(() => { vi.restoreAllMocks(); }); + +describe("codex session-start hook — guards", () => { + it("returns immediately when HIVEMIND_WIKI_WORKER=1 (nested worker)", async () => { + const out = await runHook({ 
HIVEMIND_WIKI_WORKER: "1" }); + expect(stdinMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(out).toBeNull(); + }); + + it("emits not-logged-in context when creds are missing (no token)", async () => { + loadCredsMock.mockReturnValue(null); + const out = await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(out).toContain("Not logged in to Deeplake"); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("no credentials found"), + ); + }); + + it("logs org name when creds are present", async () => { + const out = await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("credentials loaded: org=acme"), + ); + expect(out).toContain("Logged in to Deeplake as org: acme"); + expect(out).toContain("workspace: default"); + }); + + it("falls back to orgId when orgName is missing", async () => { + loadCredsMock.mockReturnValue({ + token: "tok", orgId: "org-uuid-123", userName: "alice", workspaceId: "staging", + }); + const out = await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("credentials loaded: org=org-uuid-123"), + ); + expect(out).toContain("Logged in to Deeplake as org: org-uuid-123"); + expect(out).toContain("workspace: staging"); + }); + + it("defaults workspace to 'default' when creds omit workspaceId", async () => { + loadCredsMock.mockReturnValue({ + token: "tok", orgId: "o", orgName: "acme", userName: "alice", + }); + const out = await runHook(); + expect(out).toContain("workspace: default"); + }); +}); + +describe("codex session-start hook — spawn async setup", () => { + it("spawns session-start-setup.js and feeds the same stdin input", async () => { + const fake = makeFakeChild(); + spawnMock.mockReturnValue(fake); + await runHook(); + expect(spawnMock).toHaveBeenCalledTimes(1); + const [cmd, args, opts] = spawnMock.mock.calls[0]; + expect(cmd).toBe("node"); + expect(args[0]).toContain("session-start-setup.js"); + 
expect(opts.detached).toBe(true); + expect(fake.stdin.write).toHaveBeenCalledWith(expect.stringContaining("sid-1")); + expect(fake.stdin.end).toHaveBeenCalled(); + expect(fake.unref).toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith("spawned async setup process"); + }); + + it("does not spawn when creds are missing", async () => { + loadCredsMock.mockReturnValue({ token: "" }); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex session-start hook — fatal catch", () => { + it("catches a stdin throw and exits 0", async () => { + stdinMock.mockRejectedValue(new Error("stdin boom")); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +describe("codex session-start hook — spawn pipes the hook input verbatim", () => { + it("forwards the full CodexSessionStartInput JSON to the setup process stdin", async () => { + // The detached setup process parses the SAME stdin input that was + // fed to this hook. If the contract breaks (e.g. we re-serialize a + // subset), the async setup would receive a different payload and + // the placeholder row would have the wrong session/cwd. Assert the + // exact JSON round-trips. 
+ const fake = makeFakeChild(); + spawnMock.mockReturnValue(fake); + const customInput = { + session_id: "custom-sid", cwd: "/custom/path", + hook_event_name: "SessionStart", model: "gpt-5", source: "codex-cli", + }; + stdinMock.mockResolvedValue(customInput); + await runHook(); + const written = fake.stdin.write.mock.calls[0][0]; + expect(JSON.parse(written)).toMatchObject(customInput); + }); +}); diff --git a/claude-code/tests/codex-session-start-setup-hook.test.ts b/claude-code/tests/codex-session-start-setup-hook.test.ts new file mode 100644 index 0000000..3c05a71 --- /dev/null +++ b/claude-code/tests/codex-session-start-setup-hook.test.ts @@ -0,0 +1,276 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +/** + * Source-level tests for src/hooks/codex/session-start-setup.ts. The + * codex async setup hook does the same work as its claude-code + * counterpart (table setup, placeholder, version check + autoupdate) + * but with a different autoupdate strategy — it runs a shell pipeline + * that git clones the release tag into the codex plugin cache. + * + * Mocks: readStdin, loadCredentials/saveCredentials, loadConfig, + * DeeplakeApi (ensureTable, ensureSessionsTable, query), global.fetch, + * child_process.execSync. 
+ */ + +const stdinMock = vi.fn(); +const loadCredsMock = vi.fn(); +const saveCredsMock = vi.fn(); +const loadConfigMock = vi.fn(); +const debugLogMock = vi.fn(); +const ensureTableMock = vi.fn(); +const ensureSessionsTableMock = vi.fn(); +const queryMock = vi.fn(); +const execSyncMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/commands/auth.js", () => ({ + loadCredentials: (...a: any[]) => loadCredsMock(...a), + saveCredentials: (...a: any[]) => saveCredsMock(...a), +})); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_t: string, msg: string) => debugLogMock(msg), +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { + ensureTable() { return ensureTableMock(); } + ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); } + query(sql: string) { return queryMock(sql); } + }, +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) }; +}); + +const originalFetch = global.fetch; +const fetchMock = vi.fn(); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + // @ts-expect-error + global.fetch = fetchMock; + await import("../../src/hooks/codex/session-start-setup.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +const validConfig = { + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + userName: "alice", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +beforeEach(() => { + 
stdinMock.mockReset().mockResolvedValue({ + session_id: "sid-1", cwd: "/workspaces/proj", + hook_event_name: "SessionStart", model: "gpt-5", + }); + loadCredsMock.mockReset().mockReturnValue({ + token: "tok", orgId: "o", orgName: "acme", userName: "alice", + }); + saveCredsMock.mockReset(); + loadConfigMock.mockReset().mockReturnValue(validConfig); + debugLogMock.mockReset(); + ensureTableMock.mockReset().mockResolvedValue(undefined); + ensureSessionsTableMock.mockReset().mockResolvedValue(undefined); + queryMock.mockReset().mockResolvedValue([]); // placeholder SELECT → empty, INSERT will follow + execSyncMock.mockReset(); + fetchMock.mockReset().mockResolvedValue({ + ok: true, + json: async () => ({ version: "0.0.1" }), + }); +}); + +afterEach(() => { + vi.restoreAllMocks(); + // @ts-expect-error + global.fetch = originalFetch; +}); + +describe("codex session-start-setup hook — guards", () => { + it("returns when HIVEMIND_WIKI_WORKER=1", async () => { + await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + expect(stdinMock).not.toHaveBeenCalled(); + }); + + it("returns when no credentials are loaded", async () => { + loadCredsMock.mockReturnValue(null); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("no credentials"); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex session-start-setup hook — userName backfill", () => { + it("backfills userName when missing and saves creds", async () => { + loadCredsMock.mockReturnValue({ token: "tok", orgId: "o", orgName: "acme" }); + await runHook(); + expect(saveCredsMock).toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringMatching(/^backfilled userName: /), + ); + }); + + it("does not save when userName present", async () => { + await runHook(); + expect(saveCredsMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex session-start-setup hook — placeholder branching", () => { + it("creates placeholder when none exists (SELECT returns [] → INSERT)", 
async () => { + await runHook(); + expect(ensureTableMock).toHaveBeenCalled(); + expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions"); + expect(queryMock).toHaveBeenCalledTimes(2); + expect(queryMock.mock.calls[0][0]).toMatch(/^SELECT path FROM/); + expect(queryMock.mock.calls[1][0]).toMatch(/^INSERT INTO/); + expect(queryMock.mock.calls[1][0]).toContain("'codex'"); + expect(debugLogMock).toHaveBeenCalledWith("setup complete"); + }); + + it("skips INSERT on resumed session (SELECT returns a row)", async () => { + queryMock.mockResolvedValueOnce([{ path: "/summaries/alice/sid-1.md" }]); + await runHook(); + expect(queryMock).toHaveBeenCalledTimes(1); + }); + + it("skips placeholder when HIVEMIND_CAPTURE=false but still ensures tables", async () => { + await runHook({ HIVEMIND_CAPTURE: "false" }); + expect(ensureTableMock).toHaveBeenCalled(); + expect(ensureSessionsTableMock).toHaveBeenCalled(); + expect(queryMock).not.toHaveBeenCalled(); + }); + + it("swallows setup errors and logs them", async () => { + ensureTableMock.mockRejectedValue(new Error("table boom")); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("setup failed: table boom"), + ); + }); + + it("skips setup when session_id is empty", async () => { + stdinMock.mockResolvedValue({ + session_id: "", cwd: "/x", hook_event_name: "SessionStart", model: "m", + }); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); + + it("skips setup when loadConfig returns null", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex session-start-setup hook — version check + autoupdate", () => { + it("runs the git-clone autoupdate when a newer version is available", async () => { + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0" }), + }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + 
await runHook(); + expect(execSyncMock).toHaveBeenCalled(); + // The shell pipeline builds the tag from the version — verify the + // safe version regex accepted it and the tag embedded. + expect(execSyncMock.mock.calls[0][0]).toContain("v999.0.0"); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("auto-updated"), + ); + }); + + it("uses the manual-upgrade message when autoupdate is disabled", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "o", orgName: "acme", userName: "u", + autoupdate: false, + }); + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("update available"), + ); + }); + + it("emits 'Auto-update failed' when execSync throws", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) }); + execSyncMock.mockImplementation(() => { throw new Error("git fail"); }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("Auto-update failed"), + ); + }); + + it("tolerates a fetch error (GitHub unreachable)", async () => { + fetchMock.mockRejectedValue(new Error("offline")); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex session-start-setup hook — fatal catch", () => { + it("catches stdin throw and exits 0", async () => { + stdinMock.mockRejectedValue(new Error("stdin boom")); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +// Additional branch coverage 
for version helpers +describe("codex session-start-setup hook — version helpers edge cases", () => { + it("fetch ok:false short-circuits getLatestVersion", async () => { + fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("response without 'version' field falls through to null", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("rejects unsafe version tags without executing git clone", async () => { + // The hook builds `v${latest}` and validates against /^v\d+\.\d+\.\d+$/. + // Feed a version that fails the regex; the inner try throws the + // 'unsafe version tag' guard error, which is caught and surfaces + // the manual-upgrade path. + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0-dangerous;rm -rf" }), + }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("Auto-update failed"), + ); + }); + + it("treats latest == current as 'up to date' (isNewer false)", async () => { + const pkg = JSON.parse( + require("node:fs").readFileSync( + require("node:path").join(__dirname, "..", ".claude-plugin", "plugin.json"), + "utf-8", + ), + ); + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: pkg.version }) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); +}); diff --git a/claude-code/tests/codex-stop-hook.test.ts b/claude-code/tests/codex-stop-hook.test.ts new file mode 100644 index 0000000..0f3cbbf --- /dev/null +++ b/claude-code/tests/codex-stop-hook.test.ts @@ -0,0 +1,279 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { mkdtempSync, writeFileSync, rmSync } from 
"node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +/** + * Direct source-level tests for src/hooks/codex/stop.ts. Covers the + * whole hook: WIKI_WORKER guard, CAPTURE guard (computed at module + * load — we resetModules per scenario), missing session_id, missing + * config, transcript parsing (string / array / bad / missing), INSERT + * failure path, lock held vs free, the spawn call, and the fatal catch. + */ + +const stdinMock = vi.fn(); +const loadConfigMock = vi.fn(); +const spawnMock = vi.fn(); +const wikiLogMock = vi.fn(); +const tryAcquireLockMock = vi.fn(); +const releaseLockMock = vi.fn(); +const debugLogMock = vi.fn(); +const queryMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...args: any[]) => stdinMock(...args) })); +vi.mock("../../src/config.js", () => ({ loadConfig: (...args: any[]) => loadConfigMock(...args) })); +vi.mock("../../src/hooks/codex/spawn-wiki-worker.js", () => ({ + spawnCodexWikiWorker: (...args: any[]) => spawnMock(...args), + wikiLog: (...args: any[]) => wikiLogMock(...args), + bundleDirFromImportMeta: () => "/fake/codex/bundle", +})); +vi.mock("../../src/hooks/summary-state.js", () => ({ + tryAcquireLock: (...args: any[]) => tryAcquireLockMock(...args), + releaseLock: (...args: any[]) => releaseLockMock(...args), +})); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_tag: string, msg: string) => debugLogMock(msg), +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { query(sql: string) { return queryMock(sql); } }, +})); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + await import("../../src/hooks/codex/stop.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +const 
validConfig = { + token: "t", orgId: "o", orgName: "org", workspaceId: "default", + userName: "u", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +let tmpDir: string; + +beforeEach(() => { + tmpDir = mkdtempSync(join(tmpdir(), "codex-stop-test-")); + stdinMock.mockReset().mockResolvedValue({ + session_id: "sid-1", cwd: "/proj/foo", hook_event_name: "Stop", model: "gpt-5", + transcript_path: null, + }); + loadConfigMock.mockReset().mockReturnValue(validConfig); + spawnMock.mockReset(); + wikiLogMock.mockReset(); + tryAcquireLockMock.mockReset().mockReturnValue(true); + releaseLockMock.mockReset(); + debugLogMock.mockReset(); + queryMock.mockReset().mockResolvedValue([]); +}); + +afterEach(() => { + vi.restoreAllMocks(); + try { rmSync(tmpDir, { recursive: true, force: true }); } catch { /* ignore */ } +}); + +describe("codex stop hook — guard paths", () => { + it("returns immediately when HIVEMIND_WIKI_WORKER=1", async () => { + await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + expect(stdinMock).not.toHaveBeenCalled(); + expect(queryMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("returns without spawning when session_id is empty", async () => { + stdinMock.mockResolvedValue({ session_id: "", cwd: "/x", hook_event_name: "Stop", model: "m" }); + await runHook(); + expect(loadConfigMock).not.toHaveBeenCalled(); + expect(queryMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("returns without spawning when loadConfig returns null", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(queryMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith("no config"); + }); + + it("skips capture AND spawn when HIVEMIND_CAPTURE=false", async () => { + await runHook({ HIVEMIND_CAPTURE: "false" }); + expect(queryMock).not.toHaveBeenCalled(); + 
expect(tryAcquireLockMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); +}); + +describe("codex stop hook — capture path + INSERT shape", () => { + it("issues exactly one INSERT against the sessions table on the happy path", async () => { + await runHook(); + expect(queryMock).toHaveBeenCalledTimes(1); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toMatch(/^INSERT INTO "sessions"/); + expect(sql).toContain("'Stop'"); + expect(sql).toContain("'codex'"); + expect(sql).toContain("sid-1"); + expect(sql).toContain("::jsonb"); + expect(debugLogMock).toHaveBeenCalledWith("stop event captured"); + }); + + it("swallows an INSERT failure and still tries to spawn the wiki worker", async () => { + queryMock.mockRejectedValue(new Error("network down")); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("capture failed: network down"); + expect(spawnMock).toHaveBeenCalledTimes(1); + }); + + it("derives projectName=unknown when cwd is the empty string", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-x", cwd: "", hook_event_name: "Stop", model: "m", transcript_path: null, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("'unknown'"); + }); +}); + +describe("codex stop hook — transcript parsing", () => { + const writeTranscript = (lines: string[]): string => { + const p = join(tmpDir, "transcript.jsonl"); + writeFileSync(p, lines.join("\n")); + return p; + }; + + it("extracts the last assistant message when content is a plain string", async () => { + const path = writeTranscript([ + JSON.stringify({ payload: { role: "user", content: "hi" } }), + JSON.stringify({ payload: { role: "assistant", content: "hello there" } }), + ]); + stdinMock.mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path, + }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + 
expect.stringContaining("extracted assistant message from transcript"), + ); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("hello there"); + expect(sql).toContain('"type":"assistant_message"'); + }); + + it("extracts from content arrays, joining output_text / text blocks", async () => { + const path = writeTranscript([ + JSON.stringify({ + payload: { + role: "assistant", + content: [ + { type: "output_text", text: "part A" }, + { type: "reasoning", text: "ignored" }, + { type: "text", text: "part B" }, + ], + }, + }), + ]); + stdinMock.mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain("part A"); + expect(sql).toContain("part B"); + }); + + it("skips malformed JSONL lines and falls back to assistant_stop when no valid message", async () => { + const path = writeTranscript([ + "{not json", + JSON.stringify({ payload: { role: "user", content: "hey" } }), + ]); + stdinMock.mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"assistant_stop"'); + }); + + it("handles a transcript_path that does not exist on disk (no log, no content)", async () => { + stdinMock.mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", + transcript_path: join(tmpDir, "missing.jsonl"), + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"assistant_stop"'); + expect(debugLogMock).not.toHaveBeenCalledWith( + expect.stringContaining("extracted assistant message"), + ); + }); + + it("treats content as empty when it is neither string nor array (defensive branch)", async () => { + const path = writeTranscript([ + JSON.stringify({ payload: { role: 
"assistant", content: { weird: true } } }), + ]); + stdinMock.mockResolvedValue({ + session_id: "sid-1", cwd: "/x", hook_event_name: "Stop", model: "m", transcript_path: path, + }); + await runHook(); + const sql = queryMock.mock.calls[0][0] as string; + expect(sql).toContain('"type":"assistant_stop"'); + }); +}); + +describe("codex stop hook — wiki spawn + lock coordination", () => { + it("skips the wiki spawn with a log line when tryAcquireLock returns false", async () => { + tryAcquireLockMock.mockReturnValue(false); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(wikiLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic worker already running for sid-1, skipping"), + ); + }); + + it("spawns the codex wiki worker on the happy path with the right arguments", async () => { + await runHook(); + expect(spawnMock).toHaveBeenCalledTimes(1); + const arg = spawnMock.mock.calls[0][0]; + expect(arg.sessionId).toBe("sid-1"); + expect(arg.cwd).toBe("/proj/foo"); + expect(arg.reason).toBe("Stop"); + expect(arg.config).toBe(validConfig); + }); +}); + +describe("codex stop hook — fatal catch", () => { + it("catches a thrown readStdin error and exits 0 without crashing", async () => { + stdinMock.mockRejectedValue(new Error("bad stdin")); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: bad stdin"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); + + it("releases the lock if spawnCodexWikiWorker throws (no lock leak)", async () => { + spawnMock.mockImplementation(() => { throw new Error("codex spawn exploded"); }); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(releaseLockMock).toHaveBeenCalledWith("sid-1"); + expect(debugLogMock).toHaveBeenCalledWith("fatal: codex spawn 
exploded"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); + + it("swallows release errors when spawn also throws (no double-fault)", async () => { + spawnMock.mockImplementation(() => { throw new Error("codex spawn exploded"); }); + releaseLockMock.mockImplementation(() => { throw new Error("release broken"); }); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: codex spawn exploded"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); diff --git a/claude-code/tests/codex-wiki-worker.test.ts b/claude-code/tests/codex-wiki-worker.test.ts new file mode 100644 index 0000000..6a4260a --- /dev/null +++ b/claude-code/tests/codex-wiki-worker.test.ts @@ -0,0 +1,358 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { mkdtempSync, rmSync, writeFileSync, readFileSync, mkdirSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +/** + * Source-level tests for src/hooks/codex/wiki-worker.ts. Mirrors the + * CC wiki-worker test: mock fetch + execFileSync + summary-state + + * upload-summary, feed a config file via process.argv[2], drive the + * module through every branch. 
+ * + * Codex-specific differences vs the CC worker: + * - binary key is `codexBin` (not `claudeBin`) + * - invoked as `codex exec --dangerously-bypass-approvals-and-sandbox ` + * - agent label on upload is `"codex"` (not `"claude_code"`) + */ + +const finalizeSummaryMock = vi.fn(); +const releaseLockMock = vi.fn(); +const uploadSummaryMock = vi.fn(); +const execFileSyncMock = vi.fn(); + +vi.mock("../../src/hooks/summary-state.js", () => ({ + finalizeSummary: (...a: any[]) => finalizeSummaryMock(...a), + releaseLock: (...a: any[]) => releaseLockMock(...a), +})); +vi.mock("../../src/hooks/upload-summary.js", () => ({ + uploadSummary: (...a: any[]) => uploadSummaryMock(...a), +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, execFileSync: (...a: any[]) => execFileSyncMock(...a) }; +}); + +const originalFetch = global.fetch; +const fetchMock = vi.fn(); +const originalArgv2 = process.argv[2]; + +let rootDir: string; +let tmpDir: string; +let hooksDir: string; +let configPath: string; + +const defaultConfig = () => ({ + apiUrl: "http://fake.local", + token: "tok", + orgId: "org", + workspaceId: "default", + memoryTable: "memory", + sessionsTable: "sessions", + sessionId: "sid-codex", + userName: "alice", + project: "proj", + tmpDir, + codexBin: "/fake/codex", + wikiLog: join(hooksDir, "wiki.log"), + hooksDir, + promptTemplate: "JSONL=__JSONL__ SUMMARY=__SUMMARY__ SID=__SESSION_ID__ PROJ=__PROJECT__ OFFSET=__PREV_OFFSET__ LINES=__JSONL_LINES__ SRC=__JSONL_SERVER_PATH__", +}); + +function writeConfig(overrides: Partial> = {}): void { + const cfg = { ...defaultConfig(), ...overrides }; + writeFileSync(configPath, JSON.stringify(cfg)); +} + +function jsonResp(body: unknown, ok = true, status = 200): Response { + return { + ok, status, + json: async () => body, + text: async () => typeof body === "string" ? 
body : JSON.stringify(body), + } as Response; +} + +async function runWorker(): Promise { + vi.resetModules(); + // @ts-expect-error + global.fetch = fetchMock; + await import("../../src/hooks/codex/wiki-worker.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +beforeEach(() => { + rootDir = mkdtempSync(join(tmpdir(), "codex-wiki-worker-test-")); + tmpDir = join(rootDir, "tmp"); + hooksDir = join(rootDir, "hooks"); + mkdirSync(tmpDir, { recursive: true }); + mkdirSync(hooksDir, { recursive: true }); + configPath = join(rootDir, "config.json"); + writeConfig(); + process.argv[2] = configPath; + fetchMock.mockReset(); + finalizeSummaryMock.mockReset(); + releaseLockMock.mockReset(); + uploadSummaryMock.mockReset().mockResolvedValue({ path: "insert", summaryLength: 80, descLength: 15, sql: "..." }); + execFileSyncMock.mockReset(); +}); + +afterEach(() => { + // @ts-expect-error + global.fetch = originalFetch; + process.argv[2] = originalArgv2; + try { rmSync(rootDir, { recursive: true, force: true }); } catch { /* ignore */ } + vi.restoreAllMocks(); +}); + +// ═══ early exit ═════════════════════════════════════════════════════════════ + +describe("codex wiki-worker — no events", () => { + it("exits early when the sessions table has no rows for this session", async () => { + fetchMock.mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] })); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("no session events found — exiting"); + expect(execFileSyncMock).not.toHaveBeenCalled(); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(releaseLockMock).toHaveBeenCalledWith("sid-codex"); + }); + + it("handles a response with null rows as empty", async () => { + fetchMock.mockResolvedValue(jsonResp({})); + await runWorker(); + expect(execFileSyncMock).not.toHaveBeenCalled(); + }); +}); + +// ═══ 
happy path ═════════════════════════════════════════════════════════════ + +describe("codex wiki-worker — happy path", () => { + const eventRow = [ + { message: JSON.stringify({ type: "user_message", content: "hello codex" }), creation_date: "2026-04-20T00:00:00Z" }, + ]; + + const mkFetch = (pathRows = 1, hasSummary = false) => { + return fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message, creation_date")) { + return jsonResp({ columns: ["message", "creation_date"], rows: eventRow.map(r => [r.message, r.creation_date]) }); + } + if (sql.startsWith("SELECT DISTINCT path")) { + return jsonResp({ + columns: ["path"], + rows: pathRows > 0 ? [["/sessions/alice/alice_org_default_sid-codex.jsonl"]] : [], + }); + } + if (sql.startsWith("SELECT summary FROM")) { + if (hasSummary) { + return jsonResp({ columns: ["summary"], rows: [["# Session X\n- **JSONL offset**: 7\n\n## What Happened\nprior"]] }); + } + return jsonResp({ columns: ["summary"], rows: [] }); + } + throw new Error(`unexpected query: ${sql}`); + }); + }; + + it("runs `codex exec --dangerously-bypass-approvals-and-sandbox ` and uploads summary", async () => { + mkFetch(); + let capturedJsonl: string | null = null; + execFileSyncMock.mockImplementation((bin: string, args: string[]) => { + expect(bin).toBe("/fake/codex"); + expect(args[0]).toBe("exec"); + expect(args[1]).toBe("--dangerously-bypass-approvals-and-sandbox"); + const prompt = args[2]; + const jsonlPath = prompt.match(/JSONL=(\S+)/)![1]; + capturedJsonl = readFileSync(jsonlPath, "utf-8"); + const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# Session sid-codex\n\n## What Happened\ndone.\n"); + return Buffer.from(""); + }); + await runWorker(); + + expect(capturedJsonl).toContain('"type":"user_message"'); + expect(capturedJsonl).toContain('"content":"hello codex"'); + + // codex exec is invoked with 
HIVEMIND_WIKI_WORKER=1 to prevent the + // child's own capture hook from recursing back into this worker. + const execOpts = execFileSyncMock.mock.calls[0][2]; + expect(execOpts.env.HIVEMIND_WIKI_WORKER).toBe("1"); + expect(execOpts.env.HIVEMIND_CAPTURE).toBe("false"); + + // Upload agent is 'codex' (not 'claude_code') + expect(uploadSummaryMock).toHaveBeenCalledTimes(1); + const params = uploadSummaryMock.mock.calls[0][1]; + expect(params.agent).toBe("codex"); + expect(params.sessionId).toBe("sid-codex"); + + expect(finalizeSummaryMock).toHaveBeenCalledWith("sid-codex", 1); + expect(releaseLockMock).toHaveBeenCalledWith("sid-codex"); + }); + + it("parses JSONL offset from an existing summary on resumed session", async () => { + mkFetch(1, true); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const prompt = args[2]; + const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# updated\n\n## What Happened\n...\n"); + return Buffer.from(""); + }); + await runWorker(); + const prompt = execFileSyncMock.mock.calls[0][1][2] as string; + expect(prompt).toContain("OFFSET=7"); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("existing summary found, offset=7"); + }); + + it("falls back to /sessions/unknown/ when path SELECT empty", async () => { + mkFetch(0); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "x\n"); + return Buffer.from(""); + }); + await runWorker(); + const prompt = execFileSyncMock.mock.calls[0][1][2] as string; + expect(prompt).toContain("SRC=/sessions/unknown/sid-codex.jsonl"); + }); + + it("serializes JSONB object rows by stringifying them", async () => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message, creation_date")) { + return 
jsonResp({ + columns: ["message", "creation_date"], + rows: [[{ type: "user_message", content: "obj" }, "t"]], + }); + } + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] }); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + let capturedJsonl: string | null = null; + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const jsonlPath = args[2].match(/JSONL=(\S+)/)![1]; + capturedJsonl = readFileSync(jsonlPath, "utf-8"); + const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "x"); + return Buffer.from(""); + }); + await runWorker(); + expect(capturedJsonl).toContain('"type":"user_message"'); + }); +}); + +// ═══ codex exec failure ════════════════════════════════════════════════════ + +describe("codex wiki-worker — codex exec failure", () => { + beforeEach(() => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] }); + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] }); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + }); + + it("logs status and skips upload when codex exec throws without producing a summary", async () => { + const err: any = new Error("codex crashed"); + err.status = 99; + execFileSyncMock.mockImplementation(() => { throw err; }); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("codex exec failed: 99"); + expect(log).toContain("no summary file generated"); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("falls back to err.message when err.status is absent", async () => { + execFileSyncMock.mockImplementation(() => { throw new Error("no status here"); }); + 
await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("codex exec failed: no status here"); + }); +}); + +// ═══ query retry logic ═════════════════════════════════════════════════════ + +describe("codex wiki-worker — query retry logic", () => { + beforeEach(() => { + vi.spyOn(global, "setTimeout").mockImplementation(((cb: any) => { + cb(); + return 0 as any; + }) as any); + }); + + it("retries on 500 until success", async () => { + const responses = [ + jsonResp("server error", false, 500), + jsonResp({ columns: ["message", "creation_date"], rows: [] }), + ]; + fetchMock.mockImplementation(async () => responses.shift()!); + await runWorker(); + expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2); + }); + + it("retries on CloudFlare rate-limit class 401/403/429", async () => { + for (const status of [401, 403, 429]) { + fetchMock.mockReset(); + fetchMock + .mockResolvedValueOnce(jsonResp("", false, status)) + .mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] })); + await runWorker(); + expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2); + } + }); + + it("throws on 400 (non-retryable) and main catches", async () => { + fetchMock.mockResolvedValue(jsonResp("bad", false, 400)); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toMatch(/fatal: API 400/); + expect(releaseLockMock).toHaveBeenCalled(); + }); +}); + +// ═══ finalize + release + empty summary ═══════════════════════════════════ + +describe("codex wiki-worker — finalize + release edges", () => { + beforeEach(() => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] }); + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] 
}); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# s\n\n## What Happened\nX\n"); + return Buffer.from(""); + }); + }); + + it("logs sidecar update failure but still releases lock", async () => { + finalizeSummaryMock.mockImplementation(() => { throw new Error("sidecar boom"); }); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("sidecar update failed: sidecar boom"); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("swallows releaseLock throw in finally", async () => { + releaseLockMock.mockImplementation(() => { throw new Error("release boom"); }); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("done"); + }); + + it("skips upload when summary file is whitespace-only", async () => { + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[2].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, " \n\n"); + return Buffer.from(""); + }); + await runWorker(); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(finalizeSummaryMock).not.toHaveBeenCalled(); + }); +}); diff --git a/claude-code/tests/grep-core.test.ts b/claude-code/tests/grep-core.test.ts index 2a9a409..51339ff 100644 --- a/claude-code/tests/grep-core.test.ts +++ b/claude-code/tests/grep-core.test.ts @@ -447,11 +447,16 @@ describe("buildPathFilter", () => { ); }); it("uses LIKE matching for glob targets instead of exact file matching", () => { + // Fix #4 appends `ESCAPE '\'` so sqlLike-escaped underscores (`\_`) and + // percent signs (`\%`) in the pattern match their literal characters on + // the Deeplake backend. 
Without the ESCAPE clause `\_` was treated as + // two literal characters and `/sessions/conv_0_session_*.json`-style + // globs silently returned zero rows. expect(buildPathFilter("/summaries/projects/*.md")).toBe( - " AND path LIKE '/summaries/projects/%.md'", + " AND path LIKE '/summaries/projects/%.md' ESCAPE '\\'", ); const filter = buildPathFilter("/sessions/alice/chat_?.json"); - expect(filter).toMatch(/^ AND path LIKE '\/sessions\/alice\/chat.*\.json'$/); + expect(filter).toMatch(/^ AND path LIKE '\/sessions\/alice\/chat.*\.json' ESCAPE '\\'$/); }); }); diff --git a/claude-code/tests/hooks-source.test.ts b/claude-code/tests/hooks-source.test.ts deleted file mode 100644 index 10c4595..0000000 --- a/claude-code/tests/hooks-source.test.ts +++ /dev/null @@ -1,872 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import type { Config } from "../../src/config.js"; -import type { Credentials } from "../../src/commands/auth.js"; -import { - buildCaptureEntry, - maybeTriggerPeriodicSummary, - runCaptureHook, -} from "../../src/hooks/capture.js"; -import { - extractGrepParams, - getShellCommand, - isSafe, - processPreToolUse, - rewritePaths, - touchesMemory, -} from "../../src/hooks/pre-tool-use.js"; -import { - buildSessionStartAdditionalContext, - runSessionStartHook, -} from "../../src/hooks/session-start.js"; -import { - createPlaceholder, - runSessionStartSetup, -} from "../../src/hooks/session-start-setup.js"; -import { runSessionEndHook } from "../../src/hooks/session-end.js"; -import { isDirectRun } from "../../src/utils/direct-run.js"; - -const baseConfig: Config = { - token: "token", - orgId: "org-1", - orgName: "Acme", - userName: "alice", - workspaceId: "default", - apiUrl: "https://api.example.com", - tableName: "memory", - sessionsTableName: "sessions", - memoryPath: "/tmp/.deeplake/memory", -}; - -const baseCreds: Credentials = { - token: "token", - orgId: "org-1", - orgName: "Acme", - userName: "alice", - workspaceId: 
"default", - apiUrl: "https://api.example.com", - savedAt: "2026-01-01T00:00:00.000Z", -}; - -let originalArgv1: string | undefined; - -beforeEach(() => { - originalArgv1 = process.argv[1]; -}); - -afterEach(() => { - if (originalArgv1 === undefined) delete process.argv[1]; - else process.argv[1] = originalArgv1; - vi.restoreAllMocks(); -}); - -describe("direct-run", () => { - it("returns true when the current entry matches the module path", () => { - process.argv[1] = "/tmp/hook.js"; - expect(isDirectRun("file:///tmp/hook.js")).toBe(true); - }); - - it("returns false when the current entry differs", () => { - process.argv[1] = "/tmp/other.js"; - expect(isDirectRun("file:///tmp/hook.js")).toBe(false); - }); - - it("returns false when there is no entry script", () => { - delete process.argv[1]; - expect(isDirectRun("file:///tmp/hook.js")).toBe(false); - }); - - it("returns false when the meta url cannot be converted to a file path", () => { - process.argv[1] = "/tmp/hook.js"; - expect(isDirectRun("not-a-valid-file-url")).toBe(false); - }); -}); - -describe("claude capture source", () => { - it("builds user, tool, and assistant entries", () => { - const user = buildCaptureEntry({ - session_id: "s1", - hook_event_name: "UserPromptSubmit", - prompt: "hello", - }, "2026-01-01T00:00:00.000Z"); - const tool = buildCaptureEntry({ - session_id: "s1", - hook_event_name: "PostToolUse", - tool_name: "Read", - tool_input: { file_path: "/tmp/a.ts" }, - tool_response: { content: "ok" }, - tool_use_id: "tu-1", - }, "2026-01-01T00:00:01.000Z"); - const assistant = buildCaptureEntry({ - session_id: "s1", - hook_event_name: "Stop", - last_assistant_message: "done", - agent_transcript_path: "/tmp/agent.jsonl", - }, "2026-01-01T00:00:02.000Z"); - - expect(user?.type).toBe("user_message"); - expect(user?.content).toBe("hello"); - expect(tool?.type).toBe("tool_call"); - expect(tool?.tool_name).toBe("Read"); - expect(JSON.parse(tool?.tool_input as string)).toEqual({ file_path: "/tmp/a.ts" 
}); - expect(assistant?.type).toBe("assistant_message"); - expect(assistant?.agent_transcript_path).toBe("/tmp/agent.jsonl"); - expect(buildCaptureEntry({ session_id: "s1" }, "2026-01-01T00:00:00.000Z")).toBeNull(); - }); - - it("triggers periodic summaries only when the threshold is met and the lock is acquired", () => { - const bump = vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 })); - const load = vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })); - const should = vi.fn(() => true); - const lock = vi.fn(() => true); - const spawn = vi.fn(); - const wiki = vi.fn(); - - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: bump as any, - loadTriggerConfigFn: load as any, - shouldTriggerFn: should as any, - tryAcquireLockFn: lock as any, - spawnWikiWorkerFn: spawn as any, - wikiLogFn: wiki as any, - bundleDir: "/tmp/bundle", - }); - - expect(spawn).toHaveBeenCalledWith({ - config: baseConfig, - sessionId: "s1", - cwd: "/repo", - bundleDir: "/tmp/bundle", - reason: "Periodic", - }); - expect(wiki).toHaveBeenCalled(); - }); - - it("suppresses periodic summaries when the lock is held", () => { - const spawn = vi.fn(); - const logFn = vi.fn(); - - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 })) as any, - loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any, - shouldTriggerFn: vi.fn(() => true) as any, - tryAcquireLockFn: vi.fn(() => false) as any, - spawnWikiWorkerFn: spawn as any, - logFn, - }); - - expect(spawn).not.toHaveBeenCalled(); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("lock held")); - }); - - it("returns disabled, no_config, ignored, queued, and flushed states", async () => { - expect(await runCaptureHook({ session_id: "s1", prompt: "hi" }, { - captureEnabled: false, - config: baseConfig, - })).toEqual({ status: "disabled" }); - - expect(await runCaptureHook({ session_id: "s1", prompt: "hi" }, { - 
config: null, - })).toEqual({ status: "no_config" }); - - expect(await runCaptureHook({ session_id: "s1" }, { - config: baseConfig, - })).toEqual({ status: "ignored" }); - - const append = vi.fn(); - const maybe = vi.fn(); - const clear = vi.fn(); - const queued = await runCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "UserPromptSubmit", - prompt: "hi", - }, { - config: baseConfig, - now: () => "2026-01-01T00:00:00.000Z", - appendQueuedSessionRowFn: append as any, - clearSessionQueryCacheFn: clear as any, - maybeTriggerPeriodicSummaryFn: maybe as any, - }); - expect(queued.status).toBe("queued"); - expect(append).toHaveBeenCalledTimes(1); - expect(clear).toHaveBeenCalledWith("s1"); - expect(maybe).toHaveBeenCalledWith("s1", "/repo", baseConfig); - - const flush = vi.fn(async () => ({ status: "flushed", rows: 2, batches: 1 })); - const flushed = await runCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Stop", - last_assistant_message: "done", - }, { - config: baseConfig, - now: () => "2026-01-01T00:00:01.000Z", - appendQueuedSessionRowFn: vi.fn() as any, - flushSessionQueueFn: flush as any, - }); - expect(flushed).toMatchObject({ status: "queued", flushStatus: "flushed" }); - expect(flush).toHaveBeenCalledTimes(1); - }); - - it("suppresses periodic summaries when skipped or when the helper throws", () => { - const spawn = vi.fn(); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - wikiWorker: true, - spawnWikiWorkerFn: spawn as any, - }); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => { throw new Error("boom"); }) as any, - spawnWikiWorkerFn: spawn as any, - logFn: vi.fn(), - }); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => ({ totalCount: 1, lastSummaryCount: 1 })) as any, - loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any, - shouldTriggerFn: vi.fn(() => false) as any, - spawnWikiWorkerFn: 
spawn as any, - }); - expect(spawn).not.toHaveBeenCalled(); - }); - - it("queues assistant events with fallback project and description metadata", async () => { - const append = vi.fn(); - const build = vi.fn((row) => row); - const result = await runCaptureHook({ - session_id: "s1", - last_assistant_message: "done", - }, { - config: baseConfig, - appendQueuedSessionRowFn: append as any, - buildQueuedSessionRowFn: build as any, - maybeTriggerPeriodicSummaryFn: vi.fn() as any, - now: () => "2026-01-01T00:00:00.000Z", - }); - expect(result.status).toBe("queued"); - expect(build).toHaveBeenCalledWith(expect.objectContaining({ - projectName: "unknown", - description: "", - })); - }); -}); - -describe("claude pre-tool source", () => { - it("detects, rewrites, and validates memory commands", () => { - expect(touchesMemory("cat ~/.deeplake/memory/index.md")).toBe(true); - expect(rewritePaths("cat ~/.deeplake/memory/index.md")).toBe("cat /index.md"); - expect(isSafe("cat /index.md | head -20")).toBe(true); - expect(isSafe("python3 -c 'print(1)' /index.md")).toBe(false); - }); - - it("builds shell commands and grep params for supported tools", () => { - expect(getShellCommand("Read", { file_path: "~/.deeplake/memory/index.md" })).toBe("cat /index.md"); - expect(getShellCommand("Read", { path: "~/.deeplake/memory" })).toBe("ls /"); - expect(getShellCommand("Glob", { path: "~/.deeplake/memory/summaries" })).toBe("ls /"); - expect(getShellCommand("Bash", { command: "cat ~/.deeplake/memory/index.md" })).toBe("cat /index.md"); - expect(getShellCommand("Bash", { command: "python3 ~/.deeplake/memory/index.md" })).toBeNull(); - - const grep = extractGrepParams("Grep", { - pattern: "needle", - path: "~/.deeplake/memory/index.md", - output_mode: "count", - "-i": true, - "-n": true, - }, "grep -r needle /"); - expect(grep).toMatchObject({ - pattern: "needle", - targetPath: "/index.md", - ignoreCase: true, - countOnly: true, - lineNumber: true, - }); - }); - - it("returns guidance for 
unsupported memory commands and passthrough for non-memory commands", async () => { - const guidance = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "python3 -c 'print(1)' ~/.deeplake/memory" }, - tool_use_id: "tu-1", - }, { - config: baseConfig, - }); - expect(guidance?.command).toContain("RETRY REQUIRED"); - - const passthrough = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "ls -la /tmp" }, - tool_use_id: "tu-2", - }, { - config: baseConfig, - }); - expect(passthrough).toBeNull(); - }); - - it("uses direct grep, direct reads, listings, finds, and shell fallback", async () => { - const grepDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Grep", - tool_input: { - pattern: "needle", - path: "~/.deeplake/memory/index.md", - output_mode: "files_with_matches", - }, - tool_use_id: "tu-1", - }, { - config: baseConfig, - handleGrepDirectFn: vi.fn(async () => "/index.md:needle") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(grepDecision?.command).toContain("/index.md:needle"); - - const api = { - query: vi.fn(async () => [ - { - path: "/summaries/alice/s1.md", - project: "repo", - description: "session summary", - creation_date: "2026-01-01T00:00:00.000Z", - }, - ]), - }; - const readDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Read", - tool_input: { file_path: "~/.deeplake/memory/index.md" }, - tool_use_id: "tu-2", - }, { - config: baseConfig, - createApi: vi.fn(() => api as any), - readVirtualPathContentFn: vi.fn(async () => null) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(readDecision?.command).toContain("# Memory Index"); - - const readDirDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Read", - tool_input: { path: "~/.deeplake/memory" }, - tool_use_id: "tu-2b", - }, { - config: baseConfig, - listVirtualPathRowsFn: 
vi.fn(async () => [ - { path: "/summaries/alice/s1.md", size_bytes: 42 }, - ]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(readDirDecision?.command).toContain("summaries/"); - - const lsDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "ls -la ~/.deeplake/memory/summaries" }, - tool_use_id: "tu-3", - }, { - config: baseConfig, - listVirtualPathRowsFn: vi.fn(async () => [ - { path: "/summaries/alice/s1.md", size_bytes: 42 }, - ]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(lsDecision?.command).toContain("drwxr-xr-x"); - expect(lsDecision?.command).toContain("alice/"); - - const findDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md'" }, - tool_use_id: "tu-4", - }, { - config: baseConfig, - findVirtualPathsFn: vi.fn(async () => ["/summaries/alice/s1.md"]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(findDecision?.command).toContain("/summaries/alice/s1.md"); - - const fallback = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" }, - tool_use_id: "tu-5", - }, { - config: null, - shellBundle: "/tmp/deeplake-shell.js", - }); - expect(fallback?.command).toContain('node "/tmp/deeplake-shell.js"'); - }); - - it("reuses cached /index.md content for direct and compiled reads within a session", async () => { - const readVirtualPathContentFn = vi.fn(async () => "fresh index"); - const readVirtualPathContentsFn = vi.fn(async (_api, _memory, _sessions, paths: string[]) => new Map( - paths.map((path) => [path, path === "/index.md" ? 
"fresh index" : null]), - )) as any; - const readCachedIndexContentFn = vi.fn(() => "cached index"); - const writeCachedIndexContentFn = vi.fn(); - - const directDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Read", - tool_input: { file_path: "~/.deeplake/memory/index.md" }, - tool_use_id: "tu-cache-1", - }, { - config: baseConfig, - readCachedIndexContentFn: readCachedIndexContentFn as any, - writeCachedIndexContentFn: writeCachedIndexContentFn as any, - readVirtualPathContentFn: readVirtualPathContentFn as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(directDecision?.command).toContain("cached index"); - expect(readVirtualPathContentFn).not.toHaveBeenCalled(); - expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s1", "cached index"); - - const compiledDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" }, - tool_use_id: "tu-cache-2", - }, { - config: baseConfig, - readCachedIndexContentFn: readCachedIndexContentFn as any, - writeCachedIndexContentFn: writeCachedIndexContentFn as any, - readVirtualPathContentsFn, - executeCompiledBashCommandFn: vi.fn(async (_api, _table, _sessions, _cmd, deps) => { - const map = await deps.readVirtualPathContentsFn(_api, _table, _sessions, ["/index.md"]); - return map.get("/index.md") ?? 
null; - }) as any, - }); - expect(compiledDecision?.command).toContain("cached index"); - expect(readVirtualPathContentsFn).not.toHaveBeenCalled(); - }); - - it("supports head, tail, wc -l, empty directories, and shell fallback after direct-query errors", async () => { - const contentReader = vi.fn(async () => "line1\nline2\nline3"); - - const headDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "head -2 ~/.deeplake/memory/index.md" }, - tool_use_id: "tu-6", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(headDecision?.command).toContain("line1\\nline2"); - - const tailDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "tail -2 ~/.deeplake/memory/index.md" }, - tool_use_id: "tu-7", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(tailDecision?.command).toContain("line2\\nline3"); - - const wcDecision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "wc -l ~/.deeplake/memory/index.md" }, - tool_use_id: "tu-8", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(wcDecision?.command).toContain("3 /index.md"); - - const emptyDir = await processPreToolUse({ - session_id: "s1", - tool_name: "Glob", - tool_input: { path: "~/.deeplake/memory/empty" }, - tool_use_id: "tu-9", - }, { - config: baseConfig, - 
listVirtualPathRowsFn: vi.fn(async () => []) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(emptyDir?.command).toContain("(empty directory)"); - - const fallback = await processPreToolUse({ - session_id: "s1", - tool_name: "Grep", - tool_input: { - pattern: "needle", - path: "~/.deeplake/memory/index.md", - }, - tool_use_id: "tu-10", - }, { - config: baseConfig, - handleGrepDirectFn: vi.fn(async () => { throw new Error("boom"); }) as any, - shellBundle: "/tmp/deeplake-shell.js", - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(fallback?.description).toContain("DeepLake shell"); - }); - - it("returns compiled output when the bash compiler can satisfy the command directly", async () => { - const decision = await processPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" }, - tool_use_id: "tu-11", - }, { - config: baseConfig, - executeCompiledBashCommandFn: vi.fn(async () => "compiled output") as any, - }); - - expect(decision?.command).toContain("compiled output"); - expect(decision?.description).toContain("DeepLake compiled"); - }); -}); - -describe("claude session start source", () => { - it("builds logged-in and logged-out context with update notices", () => { - const loggedIn = buildSessionStartAdditionalContext({ - authCommand: "/tmp/auth-login.js", - creds: baseCreds, - currentVersion: "0.6.0", - latestVersion: "0.6.0", - }); - const loggedOut = buildSessionStartAdditionalContext({ - authCommand: "/tmp/auth-login.js", - creds: null, - currentVersion: "0.6.0", - latestVersion: "0.7.0", - }); - - expect(loggedIn).toContain("Logged in to Deeplake"); - expect(loggedIn).toContain("Hivemind v0.6.0"); - expect(loggedIn).toContain("resolve it against that session's own date/date_time metadata"); - expect(loggedIn).toContain("convert the final answer into an absolute month/date/year"); - 
expect(loggedIn).toContain("answer with the smallest exact phrase supported by memory"); - expect(loggedIn).toContain('Do NOT answer "not found"'); - expect(loggedOut).toContain("Not logged in to Deeplake"); - expect(loggedOut).toContain("update available"); - }); - - it("skips in wiki-worker mode and backfills usernames when needed", async () => { - expect(await runSessionStartHook({}, { wikiWorker: true })).toBeNull(); - - const save = vi.fn(); - const result = await runSessionStartHook({}, { - creds: { ...baseCreds, userName: undefined }, - saveCredentialsFn: save as any, - currentVersion: "0.6.0", - latestVersion: "0.6.0", - authCommand: "/tmp/auth-login.js", - }); - - expect(result?.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake"); - expect(save).toHaveBeenCalledTimes(1); - }); - - it("logs unauthenticated startup and still returns context", async () => { - const logFn = vi.fn(); - const result = await runSessionStartHook({}, { - creds: null, - currentVersion: null, - latestVersion: null, - authCommand: "/tmp/auth-login.js", - logFn, - }); - - expect(result?.hookSpecificOutput.additionalContext).toContain("Not logged in to Deeplake"); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("no credentials")); - }); - - it("falls back to org id and default workspace when names are missing", () => { - const context = buildSessionStartAdditionalContext({ - authCommand: "/tmp/auth-login.js", - creds: { ...baseCreds, orgName: undefined, workspaceId: undefined } as any, - currentVersion: null, - latestVersion: null, - }); - expect(context).toContain("org-1"); - expect(context).toContain("workspace: default"); - expect(context).not.toContain("Hivemind v"); - }); - - it("logs authenticated startup without backfilling when the username is already present", async () => { - const logFn = vi.fn(); - const save = vi.fn(); - await runSessionStartHook({}, { - creds: { ...baseCreds, orgName: undefined }, - saveCredentialsFn: save as any, - 
currentVersion: "0.6.0", - latestVersion: null, - authCommand: "/tmp/auth-login.js", - logFn, - }); - expect(save).not.toHaveBeenCalled(); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("org=org-1")); - }); -}); - -describe("claude session start setup source", () => { - it("creates placeholders only when summaries do not already exist", async () => { - const query = vi.fn(async (sql: string) => { - if (sql.startsWith("SELECT path")) return []; - return []; - }); - const api = { query } as any; - - await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default"); - - expect(query).toHaveBeenCalledTimes(2); - expect(String(query.mock.calls[1]?.[0])).toContain('INSERT INTO "memory"'); - expect(String(query.mock.calls[1]?.[0])).toContain("/summaries/alice/s1.md"); - expect(String(query.mock.calls[1]?.[0])).toContain("/sessions/alice/alice_Acme_default_s1.jsonl"); - - query.mockReset(); - query.mockResolvedValueOnce([{ path: "/summaries/alice/s1.md" }]); - await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default"); - expect(query).toHaveBeenCalledTimes(1); - }); - - it("handles no credentials, disabled session writes, auth failures, and update notices", async () => { - expect(await runSessionStartSetup({ session_id: "s1" }, { - creds: null, - })).toEqual({ status: "no_credentials" }); - - const createApi = vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => undefined), - query: vi.fn(async () => []), - }) as any); - const placeholder = vi.fn(async () => undefined); - - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - createApi, - isSessionWriteDisabledFn: vi.fn(() => true) as any, - createPlaceholderFn: placeholder as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any, - execSyncFn: vi.fn() as any, - }); - 
expect(placeholder).toHaveBeenCalledTimes(1); - expect(createApi).toHaveBeenCalledTimes(1); - - const markDisabled = vi.fn(); - const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any); - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: { ...baseCreds, autoupdate: false }, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => { throw new Error("403 Forbidden"); }), - query: vi.fn(async () => []), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, - isSessionWriteAuthErrorFn: vi.fn(() => true) as any, - markSessionWriteDisabledFn: markDisabled as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: vi.fn(async () => undefined) as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any, - }); - expect(markDisabled).toHaveBeenCalledTimes(1); - expect(stderr).toHaveBeenCalledWith(expect.stringContaining("update available")); - }); - - it("backfills usernames, logs drained queues, and handles setup/version failures", async () => { - const save = vi.fn(); - const logFn = vi.fn(); - const wikiLogFn = vi.fn(); - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: { ...baseCreds, userName: undefined, autoupdate: true }, - saveCredentialsFn: save as any, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => undefined), - query: vi.fn(async () => []), - }) as any), - drainSessionQueuesFn: vi.fn(async () => ({ - queuedSessions: 1, - flushedSessions: 1, - rows: 3, - batches: 1, - })) as any, - isSessionWriteDisabledFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: vi.fn(async () => undefined) as any, - getInstalledVersionFn: vi.fn(() => 
"0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.6.0") as any, - logFn, - wikiLogFn, - }); - expect(save).toHaveBeenCalledTimes(1); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("drained 1 queued session")); - expect(logFn).toHaveBeenCalledWith("version up to date: 0.6.0"); - expect(wikiLogFn).not.toHaveBeenCalledWith(expect.stringContaining("failed")); - - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => { throw new Error("boom"); }), - }) as any), - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => { throw new Error("offline"); }) as any, - logFn, - wikiLogFn, - }); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("setup failed: boom")); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("version check failed: offline")); - expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom")); - }); - - it("skips duplicate queue drains while another session-start setup is already handling sessions", async () => { - const logFn = vi.fn(); - const createPlaceholderFn = vi.fn(async () => undefined); - const ensureSessionsTable = vi.fn(async () => undefined); - const drainSessionQueuesFn = vi.fn(async () => ({ - queuedSessions: 1, - flushedSessions: 1, - rows: 1, - batches: 1, - })); - - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable, - query: vi.fn(async () => []), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => null) as any, - drainSessionQueuesFn: drainSessionQueuesFn as any, - createPlaceholderFn: createPlaceholderFn as any, - getInstalledVersionFn: vi.fn(() => null) as any, - logFn, - }); - - 
expect(ensureSessionsTable).not.toHaveBeenCalled(); - expect(drainSessionQueuesFn).not.toHaveBeenCalled(); - expect(createPlaceholderFn).toHaveBeenCalledTimes(1); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("sessions drain already in progress")); - }); - - it("handles capture-disabled, successful autoupdate, and skipped setup work", async () => { - const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any); - const execSyncFn = vi.fn(); - const createPlaceholderFn = vi.fn(); - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - captureEnabled: false, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - }) as any), - createPlaceholderFn: createPlaceholderFn as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any, - execSyncFn: execSyncFn as any, - }); - expect(createPlaceholderFn).not.toHaveBeenCalled(); - expect(execSyncFn).toHaveBeenCalledTimes(1); - expect(stderr).toHaveBeenCalledWith(expect.stringContaining("auto-updated")); - - await expect(runSessionStartSetup({ session_id: "", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - getInstalledVersionFn: vi.fn(() => null) as any, - })).resolves.toEqual({ status: "complete" }); - }); - - it("treats non-auth session setup errors as setup failures", async () => { - const wikiLogFn = vi.fn(); - const createPlaceholderFn = vi.fn(); - await runSessionStartSetup({ session_id: "s1", cwd: "/repo" }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => { throw new Error("boom"); }), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, - isSessionWriteAuthErrorFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: createPlaceholderFn as 
any, - getInstalledVersionFn: vi.fn(() => null) as any, - wikiLogFn, - }); - expect(createPlaceholderFn).not.toHaveBeenCalled(); - expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom")); - }); - - it("skips in wiki-worker mode and handles zero-drain session writes", async () => { - expect(await runSessionStartSetup({ session_id: "s1" }, { - wikiWorker: true, - })).toEqual({ status: "skipped" }); - - const createPlaceholderFn = vi.fn(async () => undefined); - await runSessionStartSetup({ session_id: "s1", cwd: undefined as any }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => undefined), - }) as any), - drainSessionQueuesFn: vi.fn(async () => ({ - queuedSessions: 0, - flushedSessions: 0, - rows: 0, - batches: 0, - })) as any, - isSessionWriteDisabledFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: createPlaceholderFn as any, - getInstalledVersionFn: vi.fn(() => null) as any, - }); - expect(createPlaceholderFn).toHaveBeenCalledWith(expect.anything(), "memory", "s1", "", "alice", "Acme", "default"); - }); -}); - -describe("claude session end source", () => { - it("skips when disabled, returns no_config, and flushes when active", async () => { - expect(await runSessionEndHook({ session_id: "s1" }, { - captureEnabled: false, - config: baseConfig, - })).toEqual({ status: "skipped" }); - - expect(await runSessionEndHook({ session_id: "s1" }, { - config: null, - })).toEqual({ status: "no_config" }); - - const flush = vi.fn(async () => ({ status: "flushed", rows: 3, batches: 1 })); - const spawn = vi.fn(); - const wiki = vi.fn(); - const result = await runSessionEndHook({ session_id: "s1", cwd: "/repo" }, { - config: baseConfig, - flushSessionQueueFn: flush as any, - spawnWikiWorkerFn: spawn as any, - wikiLogFn: wiki as any, - bundleDir: "/tmp/bundle", - }); 
- - expect(result).toEqual({ status: "flushed", flushStatus: "flushed" }); - expect(flush).toHaveBeenCalledTimes(1); - expect(spawn).toHaveBeenCalledWith({ - config: baseConfig, - sessionId: "s1", - cwd: "/repo", - bundleDir: "/tmp/bundle", - reason: "SessionEnd", - }); - expect(wiki).toHaveBeenCalled(); - }); -}); diff --git a/claude-code/tests/output-cap.test.ts b/claude-code/tests/output-cap.test.ts new file mode 100644 index 0000000..cebc217 --- /dev/null +++ b/claude-code/tests/output-cap.test.ts @@ -0,0 +1,134 @@ +/** + * Cap for large tool outputs (fix #5). + * + * Claude Code's Bash tool silently persists tool_result strings larger + * than ~16 KB to disk and shows the model a 2 KB preview plus a path. + * In the locomo baseline_cloud_100qa_fix123 run, 11 of 14 losing QAs + * that hit this path never recovered the persisted file — the preview + * was too small to carry the answer and the model gave up. `capOutput- + * ForClaude` truncates at line boundaries below Claude Code's threshold + * and replaces the tail with a footer that tells the model how to + * refine the next call. 
+ */ + +import { describe, expect, it } from "vitest"; +import { + CLAUDE_OUTPUT_CAP_BYTES, + capOutputForClaude, +} from "../../src/utils/output-cap.js"; + +describe("capOutputForClaude", () => { + it("returns the input unchanged when it fits under the cap", () => { + const short = "line1\nline2\nline3"; + expect(capOutputForClaude(short)).toBe(short); + }); + + it("is a no-op for an empty string and single short line", () => { + expect(capOutputForClaude("")).toBe(""); + expect(capOutputForClaude("hello")).toBe("hello"); + }); + + it("truncates at a line boundary once the input exceeds the cap", () => { + const line = "x".repeat(100); + const input = Array.from({ length: 200 }, (_, i) => `${i}:${line}`).join("\n"); + const out = capOutputForClaude(input, { kind: "grep" }); + + expect(Buffer.byteLength(out, "utf8")).toBeLessThanOrEqual(CLAUDE_OUTPUT_CAP_BYTES); + // Last surviving line must be whole — no dangling partial line before the footer. + const body = out.split("\n... [")[0]; + expect(body.split("\n").every((l) => l.startsWith(""))).toBe(true); + // Footer names the kind and reports elided line count / byte count. + expect(out).toMatch(/\[grep truncated: \d+ more lines \([\d.]+ KB\) elided — refine with '\| head -N' or a tighter pattern\]/); + }); + + it("reports the correct number of elided lines in the footer", () => { + const line = "x".repeat(100); + const input = Array.from({ length: 500 }, () => line).join("\n"); + const out = capOutputForClaude(input, { kind: "cat" }); + + const bodyLines = out.split("\n... [")[0].split("\n").length; + const footerMatch = out.match(/(\d+) more lines/); + expect(footerMatch).not.toBeNull(); + const elided = Number(footerMatch![1]); + // Body + elided should account for all original lines. + expect(bodyLines + elided).toBe(500); + }); + + it("handles a single oversized line by taking a byte prefix", () => { + // One giant line — no newlines to cut on. 
+ const input = "a".repeat(CLAUDE_OUTPUT_CAP_BYTES * 3); + const out = capOutputForClaude(input, { kind: "grep" }); + + expect(Buffer.byteLength(out, "utf8")).toBeLessThanOrEqual(CLAUDE_OUTPUT_CAP_BYTES); + expect(out).toContain("[grep truncated:"); + expect(out).toMatch(/[\d.]+ KB total/); + }); + + it("uses a custom maxBytes when provided", () => { + const input = Array.from({ length: 20 }, (_, i) => `line${i}:${"x".repeat(80)}`).join("\n"); + const out = capOutputForClaude(input, { maxBytes: 500, kind: "ls" }); + + expect(Buffer.byteLength(out, "utf8")).toBeLessThanOrEqual(500); + expect(out).toContain("[ls truncated:"); + }); + + it("defaults the footer kind to 'output' when no kind is provided", () => { + const input = "x".repeat(CLAUDE_OUTPUT_CAP_BYTES * 2); + const out = capOutputForClaude(input); + expect(out).toContain("[output truncated:"); + }); + + it("produces output well under Claude Code's ~16 KB persist threshold", () => { + const bigGrepLine = (i: number) => + `/sessions/conv_${i % 10}_session_${i}.json:[D${i}:1] Caroline: ${"x".repeat(160)}`; + const input = Array.from({ length: 400 }, (_, i) => bigGrepLine(i)).join("\n"); + const inputSize = Buffer.byteLength(input, "utf8"); + expect(inputSize).toBeGreaterThan(16 * 1024); // confirm the fixture triggers truncation + + const out = capOutputForClaude(input, { kind: "grep" }); + // 2 KB preview was the painful case — we must give the model notably more + // than that, but still fit comfortably below the 16 KB persist threshold. + expect(Buffer.byteLength(out, "utf8")).toBeGreaterThan(4 * 1024); + expect(Buffer.byteLength(out, "utf8")).toBeLessThanOrEqual(CLAUDE_OUTPUT_CAP_BYTES); + }); + + // ── Regression: trailing newline shouldn't inflate the elided-line count ── + // + // `output.split("\n")` on "a\nb\n" returns ["a", "b", ""]. 
Treating the + // trailing empty entry as a "real" line made the footer's "N more lines + // elided" number off by one whenever the original input ended with a + // newline (which grep and cat both do in practice). + + it("does not count a trailing newline as an extra line when reporting elided lines", () => { + const line = "x".repeat(100); + // 500 real content lines followed by a terminating "\n". Input ends with \n. + const input = Array.from({ length: 500 }, () => line).join("\n") + "\n"; + const out = capOutputForClaude(input, { kind: "grep" }); + + const footerMatch = out.match(/(\d+) more lines/); + expect(footerMatch).not.toBeNull(); + const elided = Number(footerMatch![1]); + + // Parse the kept-body to count surviving real lines. Split produces a + // trailing "" entry when the kept body itself ends with a newline; drop + // it the same way the production code does. + const body = out.split("\n... [")[0]; + const bodySplit = body.split("\n"); + const keptLines = bodySplit[bodySplit.length - 1] === "" ? bodySplit.length - 1 : bodySplit.length; + + // The 500 real lines must be accounted for exactly once — no double + // counting of the trailing newline. + expect(keptLines + elided).toBe(500); + }); + + it("the elided count matches exactly when there is no trailing newline", () => { + const line = "x".repeat(100); + const input = Array.from({ length: 500 }, () => line).join("\n"); // no trailing \n + const out = capOutputForClaude(input, { kind: "grep" }); + + const bodyLines = out.split("\n... 
[")[0].split("\n").length; + const footerMatch = out.match(/(\d+) more lines/); + expect(footerMatch).not.toBeNull(); + expect(bodyLines + Number(footerMatch![1])).toBe(500); + }); +}); diff --git a/claude-code/tests/periodic-summary-bundles.test.ts b/claude-code/tests/periodic-summary-bundles.test.ts new file mode 100644 index 0000000..e0ee786 --- /dev/null +++ b/claude-code/tests/periodic-summary-bundles.test.ts @@ -0,0 +1,129 @@ +import { describe, it, expect } from "vitest"; +import { readFileSync, existsSync } from "node:fs"; +import { resolve } from "node:path"; + +/** + * Bundle-level anti-regression for the periodic-summary feature. These + * tests scan the SHIPPED bundles (claude-code + codex) to confirm: + * + * 1. The SessionEnd race fix is present: before spawning the worker, the + * hook checks tryAcquireLock and bails when another worker is running. + * Two concurrent workers writing the same summary row trip the Deeplake + * UPDATE-coalescing quirk and drop one write. + * + * 2. The periodic trigger in the capture hook also acquires the lock + * before spawning — same reason. + * + * 3. The internal wiki-worker flag uses ONLY the new HIVEMIND_WIKI_WORKER + * name. DEEPLAKE_WIKI_WORKER was a migration-only fallback and is a + * plugin-internal signal, so there is no reason to keep it shipped. + * + * 4. HIVEMIND_CAPTURE=false is respected everywhere the guard existed — + * the rename left one path reading the old name only, which we fixed. + * + * Source tests (summary-state.test.ts) prove the lock module is correct; + * these bundle checks prove the build didn't drop the call sites. 
+ */ + +const BUNDLE_ROOT = resolve(__dirname, "..", ".."); + +const SESSION_END_HOOKS: Array<[string, string]> = [ + ["claude-code session-end", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-end.js")], + ["codex stop", resolve(BUNDLE_ROOT, "codex", "bundle", "stop.js")], +]; + +const CAPTURE_HOOKS: Array<[string, string]> = [ + ["claude-code capture", resolve(BUNDLE_ROOT, "claude-code", "bundle", "capture.js")], + ["codex capture", resolve(BUNDLE_ROOT, "codex", "bundle", "capture.js")], +]; + +const ALL_BUNDLES: Array<[string, string]> = [ + ...SESSION_END_HOOKS, + ...CAPTURE_HOOKS, + ["claude-code session-start", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-start.js")], + ["claude-code session-start-setup", resolve(BUNDLE_ROOT, "claude-code", "bundle", "session-start-setup.js")], + ["codex session-start", resolve(BUNDLE_ROOT, "codex", "bundle", "session-start.js")], + ["codex session-start-setup", resolve(BUNDLE_ROOT, "codex", "bundle", "session-start-setup.js")], +]; + +describe("bundles exist", () => { + it.each(ALL_BUNDLES)("%s bundle file is present", (_label, path) => { + expect(existsSync(path)).toBe(true); + }); +}); + +// ══ SessionEnd-style hooks: must acquire the lock before spawning ══════════ +describe.each(SESSION_END_HOOKS)("%s bundle — race fix", (_label, path) => { + const src = readFileSync(path, "utf-8"); + + it("calls tryAcquireLock before spawning the worker", () => { + expect(src).toMatch(/tryAcquireLock/); + // The bail-out branch that exists only because of the race fix: when + // the lock is held, we log and return without spawning. + expect(src).toMatch(/periodic worker already running/); + }); + + it("spawns the wiki worker only on the happy path", () => { + // Must still reference the spawn helper — a full removal would also + // match "no race" but would break the feature. 
+ expect(src).toMatch(/spawn(Codex)?WikiWorker/); + }); +}); + +// ══ Capture hooks: periodic trigger also acquires the lock ═════════════════ +describe.each(CAPTURE_HOOKS)("%s bundle — periodic trigger", (_label, path) => { + const src = readFileSync(path, "utf-8"); + + it("acquires the lock before spawning from the periodic path", () => { + expect(src).toMatch(/tryAcquireLock/); + expect(src).toMatch(/shouldTrigger/); + expect(src).toMatch(/bumpTotalCount/); + }); + + it("references the summary-state helpers (feature wired end-to-end)", () => { + expect(src).toMatch(/loadTriggerConfig/); + }); +}); + +// ══ Internal flag uses only the new name ═══════════════════════════════════ +describe.each(ALL_BUNDLES)("%s bundle — clean env flags", (_label, path) => { + const src = readFileSync(path, "utf-8"); + + it("uses HIVEMIND_WIKI_WORKER and not the legacy DEEPLAKE_WIKI_WORKER", () => { + // HIVEMIND_WIKI_WORKER is the internal signal the wiki worker sets on + // itself; every hook must gate on it. The old DEEPLAKE_* fallback was + // pure back-compat noise for an internal flag and is removed. + if (!src.includes("HIVEMIND_WIKI_WORKER")) { + // Some bundles don't need the guard (e.g. pure utility bundles) — + // skip. Every bundle in this suite actually does gate, but be lenient. + return; + } + expect(src).not.toMatch(/DEEPLAKE_WIKI_WORKER/); + }); + + it("does not fall back to DEEPLAKE_CAPTURE for the capture-disabled guard", () => { + // The guard must read HIVEMIND_CAPTURE only. DEEPLAKE_CAPTURE is a + // pre-rename alias that would mask a user setting HIVEMIND_CAPTURE=false. + expect(src).not.toMatch(/DEEPLAKE_CAPTURE/); + }); +}); + +// ══ summary-state module is inlined into every bundle that needs it ════════ +describe("summary-state helpers are inlined into the hook bundles", () => { + // SessionEnd-style hooks only need tryAcquireLock (the worker itself + // releases the lock in its finally block). 
esbuild tree-shakes + // releaseLock out of those bundles, which is expected. + it.each(SESSION_END_HOOKS)("%s bundle inlines tryAcquireLock", (_label, path) => { + const src = readFileSync(path, "utf-8"); + expect(src).toMatch(/function tryAcquireLock/); + }); + + // Capture hooks need both: tryAcquireLock to gate the spawn, and + // releaseLock as the error-path fallback when spawn throws before the + // worker takes ownership of the lock. + it.each(CAPTURE_HOOKS)("%s bundle inlines tryAcquireLock + releaseLock", (_label, path) => { + const src = readFileSync(path, "utf-8"); + expect(src).toMatch(/function tryAcquireLock/); + expect(src).toMatch(/function releaseLock/); + }); +}); diff --git a/claude-code/tests/pre-tool-use-baseline-cloud.test.ts b/claude-code/tests/pre-tool-use-baseline-cloud.test.ts new file mode 100644 index 0000000..f07831a --- /dev/null +++ b/claude-code/tests/pre-tool-use-baseline-cloud.test.ts @@ -0,0 +1,411 @@ +/** + * Integration coverage for the three real LoCoMo QAs that the + * `locomo_benchmark/baseline` cloud baseline run got wrong before fix + * #1 landed. Each case exercises the Read/Bash entry points of + * `processPreToolUse` against a workspace snapshot that mirrors the + * real baseline workspace at the time of the regression: + * + * - `memory` table: empty (summaries have been dropped) + * - `sessions` table: 272 rows, one per LoCoMo session file + * + * The fix (commit 4271baf) taught `buildVirtualIndexContent` and the + * /index.md fallback in `readVirtualPathContents` to merge session rows + * alongside summary rows. Without that fix the synthesized index + * reported "0 sessions:" in this workspace and agents concluded memory + * was empty. These tests fail loudly if the regression returns. 
+ */ + +import { describe, expect, it, vi } from "vitest"; +import { existsSync, mkdtempSync, readFileSync, rmSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { processPreToolUse, writeReadCacheFile } from "../../src/hooks/pre-tool-use.js"; +import { + buildVirtualIndexContent, + readVirtualPathContents, +} from "../../src/hooks/virtual-table-query.js"; + +// ── Fixture: 272 session rows matching the real `locomo_benchmark/baseline` +// workspace shape — `/sessions/conv_<conv>_session_<n>.json` — spanning +// conv 0..9 with session counts matching the LoCoMo dataset. +const SESSION_COUNTS_PER_CONV: Record<string, number> = { + 0: 35, 1: 34, 2: 28, 3: 25, 4: 26, 5: 27, 6: 23, 7: 27, 8: 26, 9: 21, +}; + +function makeSessionRows(): Array<{ path: string; description: string }> { + const rows: Array<{ path: string; description: string }> = []; + for (const [conv, count] of Object.entries(SESSION_COUNTS_PER_CONV)) { + for (let s = 1; s <= count; s++) { + rows.push({ + path: `/sessions/conv_${conv}_session_${s}.json`, + description: `LoCoMo conv ${conv} session ${s}`, + }); + } + } + return rows; +} + +const SESSION_ROWS = makeSessionRows(); + +// Sanity-check the fixture shape so a bad edit fails here, not deep in a test. +if (SESSION_ROWS.length !== 272) { + throw new Error(`fixture should model 272 rows, got ${SESSION_ROWS.length}`); +} + +// ── Real QAs from `results/baseline_cloud/scored_baseline_cloud.jsonl` +// that baseline-local got right and baseline-cloud got wrong before the +// fix. Each row is verbatim from the scored JSONL except `session_file` +// which records the session we'd expect Claude to land on. 
+const REAL_QAS = [ + { + name: "qa_3: Caroline's research (fix #2 smoke — real run did Read x3)", + question: "What did Caroline research?", + gold_answer: "Adoption agencies", + expected_session_file: "/sessions/conv_0_session_1.json", + }, + { + name: "qa_6: Melanie's camping plans", + question: "When is Melanie planning on going camping?", + gold_answer: "June 2023", + expected_session_file: "/sessions/conv_0_session_2.json", + }, + { + name: "qa_25: Caroline's LGBTQ conference", + question: "When did Caroline go to the LGBTQ conference?", + gold_answer: "10 July 2023", + expected_session_file: "/sessions/conv_0_session_7.json", + }, + { + name: "qa_29: Melanie's pottery workshop", + question: "When did Melanie go to the pottery workshop?", + gold_answer: "The Friday before 15 July 2023", + expected_session_file: "/sessions/conv_0_session_7.json", + }, + { + name: "qa_46: Melanie as an ally", + question: "Would Melanie be considered an ally to the transgender community?", + gold_answer: "Yes, she is supportive", + expected_session_file: "/sessions/conv_0_session_10.json", + }, +] as const; + +const BASE_CONFIG = { + token: "test-token", + apiUrl: "https://api.test", + orgId: "locomo_benchmark", + workspaceId: "baseline", +}; + +/** Simulates the real baseline workspace: memory empty, sessions populated. */ +function makeBaselineWorkspaceApi(sessionRows = SESSION_ROWS) { + return { + query: vi.fn(async (sql: string) => { + // Memory-table queries return 0 rows (memory table dropped). + if (/FROM\s+"memory"/i.test(sql)) return []; + // Sessions-table fallback query for the virtual /index.md: + if (/FROM\s+"sessions".*\/sessions\/%/i.test(sql)) return sessionRows; + // Union query for exact-path reads of /index.md resolves to nothing — + // forces the fallback branch that builds the synthetic index. 
+ if (/UNION ALL/i.test(sql)) return []; + return []; + }), + } as any; +} + +describe("baseline_cloud 3-QA regression: sessions-only workspace", () => { + it("pure builder renders a real 272-row index without the old '0 sessions:' bug", () => { + const content = buildVirtualIndexContent([], SESSION_ROWS); + + expect(content).toContain("272 entries (0 summaries, 272 sessions):"); + expect(content).toContain("## Sessions"); + expect(content).not.toContain("## Summaries"); + // Bug guard: the old output had a lone "${n} sessions:" header with + // n taken from summary rows only. In this workspace that would be 0. + expect(content).not.toMatch(/^0 sessions:$/m); + expect(content).not.toContain("\n0 sessions:\n"); + + // Every real session path from the fixture must appear in the index. + for (const row of SESSION_ROWS) { + expect(content).toContain(row.path); + } + }); + + it("readVirtualPathContents fallback pulls sessions into /index.md for the baseline workspace", async () => { + const api = makeBaselineWorkspaceApi(); + const result = await readVirtualPathContents(api, "memory", "sessions", ["/index.md"]); + const indexContent = result.get("/index.md") ?? ""; + + expect(indexContent).toContain("272 entries (0 summaries, 272 sessions):"); + // Must land on the three sessions that carry answers for our 3 real QAs. 
+ for (const qa of REAL_QAS) { + expect(indexContent).toContain(qa.expected_session_file); + } + }); + + for (const qa of REAL_QAS) { + describe(qa.name, () => { + it("Read /home/.deeplake/memory/index.md intercept returns file_path (Read-tool shape) pointing to the real session listing", async () => { + const api = makeBaselineWorkspaceApi(); + const capturedReadFiles: Array<{ sessionId: string; virtualPath: string; content: string; returnedPath: string }> = []; + + const decision = await processPreToolUse( + { + session_id: `s-${qa.expected_session_file}`, + tool_name: "Read", + tool_input: { file_path: "~/.deeplake/memory/index.md" }, + tool_use_id: "tu-read-index", + }, + { + config: BASE_CONFIG, + createApi: vi.fn(() => api), + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + readCachedIndexContentFn: () => null, + writeCachedIndexContentFn: () => undefined, + writeReadCacheFileFn: ((sessionId: string, virtualPath: string, content: string) => { + const returnedPath = `/tmp/baseline-cloud-3qa-test-${sessionId.replace(/[^a-zA-Z0-9._-]/g, "_")}${virtualPath}`; + capturedReadFiles.push({ sessionId, virtualPath, content, returnedPath }); + return returnedPath; + }) as any, + }, + ); + + // Regression guard for bug #2: Read intercept MUST return a decision + // that causes main() to emit `updatedInput: {file_path}`. Today that + // means the decision carries `file_path`. If this asserts "undefined", + // Claude Code's Read tool will error with "path must be of type string". + expect(decision).not.toBeNull(); + expect(decision?.file_path).toBeDefined(); + expect(typeof decision?.file_path).toBe("string"); + + // Content must be materialized once, with the real index shape. + expect(capturedReadFiles).toHaveLength(1); + const materialized = capturedReadFiles[0]; + expect(materialized?.virtualPath).toBe("/index.md"); + expect(decision?.file_path).toBe(materialized?.returnedPath); + + const body = materialized?.content ?? 
""; + expect(body).toContain("# Memory Index"); + expect(body).toContain("272 entries (0 summaries, 272 sessions):"); + expect(body).toContain(qa.expected_session_file); + // Fix #1 regression guard (still important after fix #2): the old + // synthesized index reported sessions from the memory table only. + expect(body).not.toMatch(/\b0 sessions:/); + expect(body).not.toMatch(/\b1 sessions:/); + }); + + it("Bash cat index.md intercept returns the same listing via {command} (bash shape preserved)", async () => { + const api = makeBaselineWorkspaceApi(); + + const decision = await processPreToolUse( + { + session_id: `s-bash-${qa.expected_session_file}`, + tool_name: "Bash", + tool_input: { command: "cat ~/.deeplake/memory/index.md" }, + tool_use_id: "tu-cat-index", + }, + { + config: BASE_CONFIG, + createApi: vi.fn(() => api), + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + readCachedIndexContentFn: () => null, + writeCachedIndexContentFn: () => undefined, + }, + ); + + expect(decision).not.toBeNull(); + // Bash intercepts keep the historical {command, description} shape — + // Claude Code's Bash tool reads `command`. The content is inlined as + // an `echo "..."` payload so the virtual shell isn't needed here. + expect(decision?.file_path).toBeUndefined(); + const body = decision?.command ?? ""; + expect(body).toContain("272 entries (0 summaries, 272 sessions):"); + expect(body).toContain(qa.expected_session_file); + }); + }); + } + + // ── Regression coverage anchored in a real benchmark run ───────────── + // + // In `baseline_cloud_9qa_read_candidates_fix2` (2026-04-20), haiku chose + // to call the Read tool directly against session files — not just + // /index.md. Specifically, qa_3 did three Read calls including + // Read /home/.deeplake/memory/sessions/conv_0_session_1.json and + // Read /home/.deeplake/memory/sessions/conv_0_session_2.json, and all + // three succeeded (zero "path must be of type string" errors) after + // fix #2 landed. 
The previous run on the same workspace without the fix + // produced that error on every memory-path Read call. + // + // This test drives the same session-file Read through processPreToolUse + // and asserts the decision shape matches what Claude Code's Read tool + // expects — i.e. `updatedInput: {file_path}`, not `{command}`. + + it("Read /sessions/ intercept returns file_path pointing to the session content (qa_3 real-run path)", async () => { + const sessionJson = JSON.stringify({ + conversation_id: 0, + session_number: 1, + date_time: "8 May, 2023", + speakers: { speaker_a: "Caroline", speaker_b: "Melanie" }, + turns: [ + { speaker: "Caroline", dia_id: "D1:1", text: "Hey Mel! Good to see you!" }, + ], + }); + + const api = { + query: vi.fn(async (sql: string) => { + // Exact-path read hits the sessions table. + if (/FROM\s+"sessions"/i.test(sql) && /conv_0_session_1\.json/.test(sql)) { + return [{ path: "/sessions/conv_0_session_1.json", content: sessionJson, source_order: 1 }]; + } + if (/FROM\s+"memory"/i.test(sql)) return []; + return []; + }), + } as any; + const capturedReadFiles: Array<{ sessionId: string; virtualPath: string; content: string }> = []; + + const decision = await processPreToolUse( + { + session_id: "s-qa3-session-read", + tool_name: "Read", + tool_input: { file_path: "~/.deeplake/memory/sessions/conv_0_session_1.json" }, + tool_use_id: "tu-read-session-1", + }, + { + config: BASE_CONFIG, + createApi: vi.fn(() => api), + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + readCachedIndexContentFn: () => null, + writeCachedIndexContentFn: () => undefined, + writeReadCacheFileFn: ((sessionId: string, virtualPath: string, content: string) => { + capturedReadFiles.push({ sessionId, virtualPath, content }); + return `/tmp/test-${sessionId}${virtualPath}`; + }) as any, + }, + ); + + // Read-tool shape: decision must carry file_path, not just command. 
+ expect(decision).not.toBeNull(); + expect(decision?.file_path).toBe("/tmp/test-s-qa3-session-read/sessions/conv_0_session_1.json"); + + // Content materialized exactly once, at the right virtual path, with + // the real session payload Claude needs to answer qa_3. + expect(capturedReadFiles).toHaveLength(1); + expect(capturedReadFiles[0]?.virtualPath).toBe("/sessions/conv_0_session_1.json"); + expect(capturedReadFiles[0]?.content).toContain("Caroline"); + expect(capturedReadFiles[0]?.content).toContain("8 May, 2023"); + }); + + // ── writeReadCacheFile security guard ───────────────────────────────────── + // + // Claude Code's Read intercept materializes fetched content into + // ~/.deeplake/query-cache//read/. DB-derived + // virtualPaths are user-controlled (anyone with write access to the + // `sessions` / `memory` tables controls them), so `..` segments must not + // be allowed to escape the per-session cache dir. The PR #63 bot review + // flagged this. + + describe("writeReadCacheFile path-traversal guard", () => { + it("writes a well-formed virtualPath inside the per-session cache root", () => { + const cacheRoot = mkdtempSync(join(tmpdir(), "writeReadCache-ok-")); + try { + const abs = writeReadCacheFile("sess-1", "/sessions/conv_0_session_1.json", "hello", { cacheRoot }); + expect(abs).toBe(join(cacheRoot, "sess-1", "read", "sessions", "conv_0_session_1.json")); + expect(existsSync(abs)).toBe(true); + expect(readFileSync(abs, "utf-8")).toBe("hello"); + } finally { + rmSync(cacheRoot, { recursive: true, force: true }); + } + }); + + it("refuses a virtualPath that escapes the cache root via ../ segments", () => { + const cacheRoot = mkdtempSync(join(tmpdir(), "writeReadCache-trav-")); + try { + expect(() => + writeReadCacheFile("sess-2", "/sessions/../../../etc/passwd", "pwned", { cacheRoot }) + ).toThrow(/path escapes cache root/); + // Guard must fire BEFORE any write lands anywhere under cacheRoot. 
+ expect(existsSync(join(cacheRoot, "sess-2", "read", "sessions"))).toBe(false); + expect(existsSync(join(cacheRoot, "etc"))).toBe(false); + } finally { + rmSync(cacheRoot, { recursive: true, force: true }); + } + }); + + it("refuses traversal that lands outside the cache root entirely", () => { + const cacheRoot = mkdtempSync(join(tmpdir(), "writeReadCache-out-")); + try { + // Resolves to something like /tmp/writeReadCache-out-XXX/sess-3/read/../../../../../../etc/shadow + // → /etc/shadow — fully outside cacheRoot. + expect(() => + writeReadCacheFile("sess-3", "/../../../../../../etc/shadow", "x", { cacheRoot }) + ).toThrow(/path escapes cache root/); + } finally { + rmSync(cacheRoot, { recursive: true, force: true }); + } + }); + + it("accepts a path that normalizes back inside the cache root", () => { + const cacheRoot = mkdtempSync(join(tmpdir(), "writeReadCache-norm-")); + try { + // `/sessions/foo/../bar.json` → `/sessions/bar.json`, still inside. + const abs = writeReadCacheFile("sess-4", "/sessions/foo/../bar.json", "ok", { cacheRoot }); + expect(abs).toBe(join(cacheRoot, "sess-4", "read", "sessions", "bar.json")); + expect(readFileSync(abs, "utf-8")).toBe("ok"); + } finally { + rmSync(cacheRoot, { recursive: true, force: true }); + } + }); + }); + + // ── /index.md fallback lives in virtual-table-query.ts only ─────────────── + // + // An earlier draft of fix #1 duplicated the synthesized-index builder + // inside pre-tool-use.ts. The bot review flagged that duplicate as + // unreachable + using the old single-table SQL ("N sessions:" header, + // missing `## Sessions`). The duplicate has since been removed; this + // test locks in that removal — `processPreToolUse` must use the dual- + // table builder and never synthesize its own broken fallback. 
+ + it("index.md intercept never falls back to the single-table inline builder", async () => { + // readVirtualPathContentFn returns non-null for /index.md (fix #1 + // guarantee), so the old inline fallback is now unreachable. If + // somebody re-introduces it, this test fails because the bad string + // "${n} sessions:" would appear in the output instead of the dual- + // table "${total} entries (${s} summaries, ${n} sessions):" header. + const api = { query: vi.fn(async () => []) } as any; + const readVirtualPathContentFn = vi.fn(async () => "# Memory Index\n\n272 entries (0 summaries, 272 sessions):\n"); + let materialized: string | undefined; + + const decision = await processPreToolUse( + { + session_id: "s-index-fallback", + tool_name: "Read", + tool_input: { file_path: "~/.deeplake/memory/index.md" }, + tool_use_id: "tu-fallback", + }, + { + config: BASE_CONFIG, + createApi: vi.fn(() => api), + readVirtualPathContentFn: readVirtualPathContentFn as any, + readCachedIndexContentFn: () => null, + writeCachedIndexContentFn: () => undefined, + writeReadCacheFileFn: ((_sid: string, _vp: string, content: string) => { + materialized = content; + return "/tmp/fake-index-path"; + }) as any, + }, + ); + + expect(decision).not.toBeNull(); + expect(materialized).toBeDefined(); + // The dual-table builder's content was materialized, not the + // single-table "N sessions:" fallback. + expect(materialized).toContain("272 entries (0 summaries, 272 sessions):"); + expect(materialized).not.toMatch(/\n\d+ sessions:\n/); + // Production code must not issue its own fallback SELECT against + // memory for /index.md — it delegates entirely to readVirtualPath. 
+    const summariesOnlyFallback = api.query.mock.calls.find((call: any[]) =>
+      String(call[0] || "").includes(`FROM "memory" WHERE path LIKE '/summaries/%'`)
+    );
+    expect(summariesOnlyFallback).toBeUndefined();
+  });
+});
diff --git a/claude-code/tests/pre-tool-use-branches.test.ts b/claude-code/tests/pre-tool-use-branches.test.ts
new file mode 100644
index 0000000..cb3de12
--- /dev/null
+++ b/claude-code/tests/pre-tool-use-branches.test.ts
@@ -0,0 +1,640 @@
+/**
+ * Branch-coverage suite for `src/hooks/pre-tool-use.ts`.
+ *
+ * The PR already has an end-to-end regression suite in
+ * `pre-tool-use-baseline-cloud.test.ts`, but that file anchors to real
+ * LoCoMo QAs and only exercises the `/index.md` and `/sessions/*` Read
+ * paths plus one Bash `cat`. This file fills in the remaining branches
+ * that the hook supports — Glob, Grep, Bash ls/head/tail/wc/find, the
+ * unsafe-command guidance path, and the no-config fallback — so the
+ * whole file can stay above the 90% coverage bar.
+ */
+
+import { describe, expect, it, vi } from "vitest";
+import { homedir } from "node:os";
+import { join } from "node:path";
+import {
+  buildAllowDecision,
+  buildReadDecision,
+  extractGrepParams,
+  getShellCommand,
+  isSafe,
+  processPreToolUse,
+  rewritePaths,
+  touchesMemory,
+} from "../../src/hooks/pre-tool-use.js";
+
+// MEMORY_PATH is `${homedir()}/.deeplake/memory` — differs between CI
+// (`/home/runner/...`) and dev (`/home/<user>/...`), so any test that
+// asserts on the literal form has to build it from homedir() too.
+const MEM_ABS = join(homedir(), ".deeplake", "memory"); + +const BASE_CONFIG = { + token: "t", + apiUrl: "http://example", + orgId: "org", + orgName: "org", + userName: "u", + workspaceId: "default", + apiOrigin: "http://example", +}; + +function makeApi() { + return { query: vi.fn(async () => []) } as any; +} + +describe("pre-tool-use: pure helpers", () => { + it("buildAllowDecision returns a bash-shaped decision", () => { + expect(buildAllowDecision("echo hi", "d")).toEqual({ command: "echo hi", description: "d" }); + }); + + it("buildReadDecision returns a read-shaped decision with file_path set", () => { + const d = buildReadDecision("/tmp/x", "desc"); + expect(d.file_path).toBe("/tmp/x"); + expect(d.description).toBe("desc"); + }); + + it("rewritePaths collapses all memory-path forms to `/`", () => { + expect(rewritePaths(`${MEM_ABS}/sessions/a.json`)).toBe("/sessions/a.json"); + expect(rewritePaths("~/.deeplake/memory/index.md")).toBe("/index.md"); + expect(rewritePaths("$HOME/.deeplake/memory/foo")).toBe("/foo"); + }); + + it("touchesMemory detects any of the supported memory-path forms", () => { + expect(touchesMemory(`${MEM_ABS}/x`)).toBe(true); + expect(touchesMemory("~/.deeplake/memory/x")).toBe(true); + expect(touchesMemory("$HOME/.deeplake/memory/x")).toBe(true); + expect(touchesMemory("/var/log/foo")).toBe(false); + }); + + it("isSafe accepts shell pipelines built from the allowed builtins", () => { + expect(isSafe("cat /a | grep b | head -5")).toBe(true); + expect(isSafe("ls -la /x")).toBe(true); + }); + + it("isSafe rejects command substitution and unknown commands", () => { + expect(isSafe("rm -rf / ; curl evil")).toBe(false); + expect(isSafe("$(evil) foo")).toBe(false); + expect(isSafe("python -c pwn")).toBe(false); + }); +}); + +describe("getShellCommand: per-tool branches", () => { + it("Grep on a memory path builds `grep -r '' /` with -i/-n flags threaded through", () => { + const cmd = getShellCommand("Grep", { + path: "~/.deeplake/memory", + 
pattern: "Caroline", + "-i": true, + "-n": true, + }); + expect(cmd).toBe("grep -r -i -n 'Caroline' /"); + }); + + it("Grep on a non-memory path returns null", () => { + expect(getShellCommand("Grep", { path: "/etc", pattern: "x" })).toBeNull(); + }); + + it("Read on a memory file returns `cat `", () => { + expect(getShellCommand("Read", { file_path: "~/.deeplake/memory/sessions/conv_0_session_1.json" })) + .toBe("cat /sessions/conv_0_session_1.json"); + }); + + it("Read on a memory directory path returns `ls `", () => { + expect(getShellCommand("Read", { path: "~/.deeplake/memory/sessions" })).toBe("ls /sessions"); + }); + + it("Bash with a safe command is rewritten with memory paths collapsed", () => { + expect(getShellCommand("Bash", { command: "cat ~/.deeplake/memory/index.md" })) + .toBe("cat /index.md"); + }); + + it("Bash with an unsafe command is blocked (returns null)", () => { + expect(getShellCommand("Bash", { command: "curl ~/.deeplake/memory/x" })).toBeNull(); + }); + + it("Bash with a command that doesn't touch memory returns null", () => { + expect(getShellCommand("Bash", { command: "ls /tmp" })).toBeNull(); + }); + + it("Glob on a memory path returns `ls /`", () => { + expect(getShellCommand("Glob", { path: "~/.deeplake/memory/" })).toBe("ls /"); + }); + + it("Glob on a non-memory path returns null", () => { + expect(getShellCommand("Glob", { path: "/etc" })).toBeNull(); + }); + + it("Unknown tool returns null", () => { + expect(getShellCommand("Write", { file_path: "~/.deeplake/memory/x" })).toBeNull(); + }); +}); + +describe("extractGrepParams", () => { + it("Grep tool: passes output_mode → filesOnly / countOnly; honours -i and -n", () => { + const p = extractGrepParams("Grep", { + path: "~/.deeplake/memory", + pattern: "X", + output_mode: "count", + "-i": true, + "-n": true, + }, "grep -r 'X' /"); + expect(p).not.toBeNull(); + expect(p!.countOnly).toBe(true); + expect(p!.filesOnly).toBe(false); + expect(p!.ignoreCase).toBe(true); + 
expect(p!.lineNumber).toBe(true); + }); + + it("Grep tool: empty path defaults to `/`", () => { + const p = extractGrepParams("Grep", { pattern: "X" }, "grep -r 'X' /"); + expect(p!.targetPath).toBe("/"); + }); + + it("Bash grep: delegates to parseBashGrep", () => { + const p = extractGrepParams("Bash", {}, "grep -l needle /sessions/*.json"); + expect(p).not.toBeNull(); + expect(p!.pattern).toBe("needle"); + }); + + it("Bash non-grep: returns null", () => { + expect(extractGrepParams("Bash", {}, "cat /x")).toBeNull(); + }); + + it("Unknown tool: returns null", () => { + expect(extractGrepParams("Write", {}, "x")).toBeNull(); + }); +}); + +describe("processPreToolUse: non-memory / no-op paths", () => { + it("returns null when the command doesn't touch memory and there's no shellCmd", async () => { + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "ls /tmp" }, tool_use_id: "t" }, + { config: BASE_CONFIG as any }, + ); + expect(d).toBeNull(); + }); + + it("returns [RETRY REQUIRED] guidance when an unsupported command mentions the memory path", async () => { + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "curl ~/.deeplake/memory/x" }, tool_use_id: "t" }, + { config: BASE_CONFIG as any, logFn: vi.fn() }, + ); + expect(d?.command).toContain("[RETRY REQUIRED]"); + expect(d?.command).toContain("bash builtins"); + }); + + it("falls back to the shell bundle when no config is loaded", async () => { + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/index.md" }, tool_use_id: "t" }, + { config: null as any, shellBundle: "/SHELL" }, + ); + expect(d?.command).toContain(`node "/SHELL" -c`); + expect(d?.description).toContain("[DeepLake shell]"); + }); +}); + +describe("processPreToolUse: Glob / ls branches", () => { + it("Glob on memory routes through listVirtualPathRows and renders a directory listing", 
async () => { + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/sessions/conv_0_session_1.json", size_bytes: 100 }, + { path: "/sessions/conv_0_session_2.json", size_bytes: 200 }, + { path: "/summaries/alice/s1.md", size_bytes: 50 }, + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Glob", tool_input: { path: "~/.deeplake/memory/" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("sessions/"); + expect(d?.command).toContain("summaries/"); + expect(d?.description).toContain("[DeepLake direct] ls /"); + }); + + it("Bash `ls -la ` returns a long-format listing", async () => { + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/summaries/alice/s1.md", size_bytes: 42 }, + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "ls -la ~/.deeplake/memory/summaries" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("drwxr-xr-x"); + expect(d?.command).toContain("alice/"); + }); + + it("ls on an empty directory reports `(empty directory)` — not a bogus path listing", async () => { + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "ls ~/.deeplake/memory/nope" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn: vi.fn(async () => []) as any, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("(empty directory)"); + }); +}); + +describe("processPreToolUse: Bash read-shape intercepts", () => { + const makeApiWith = (content: string | null) => ({ + api: 
makeApi(), + readVirtualPathContentFn: vi.fn(async () => content) as any, + }); + + it("`cat ` returns the raw content", async () => { + const { api, readVirtualPathContentFn } = makeApiWith("line1\nline2\nline3"); + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => api), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("line1"); + expect(d?.description).toContain("[DeepLake direct] cat"); + }); + + it("`head -N ` limits to the first N lines", async () => { + const { api, readVirtualPathContentFn } = makeApiWith("l1\nl2\nl3\nl4"); + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "head -2 ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => api), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("l1\\nl2"); + expect(d?.command).not.toContain("l3"); + }); + + it("`tail -N ` limits to the last N lines", async () => { + const { api, readVirtualPathContentFn } = makeApiWith("l1\nl2\nl3\nl4"); + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "tail -2 ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => api), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("l3\\nl4"); + expect(d?.command).not.toContain("l1"); + }); + + it("`wc -l ` returns the line count with the virtual path", async () => { + const { api, readVirtualPathContentFn } = makeApiWith("a\nb\nc"); + const d = await processPreToolUse( + { session_id: "s", tool_name: 
"Bash", tool_input: { command: "wc -l ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => api), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("3 /sessions/a.json"); + expect(d?.description).toContain("wc -l"); + }); +}); + +describe("processPreToolUse: find / grep / fallback", () => { + it("Bash `find -name ''` lists matching paths", async () => { + const findVirtualPathsFn = vi.fn(async () => [ + "/sessions/conv_0_session_1.json", + "/sessions/conv_0_session_2.json", + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "find ~/.deeplake/memory/sessions -name '*.json'" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + findVirtualPathsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("/sessions/conv_0_session_1.json"); + expect(d?.description).toContain("[DeepLake direct] find"); + }); + + it("Bash `find … | wc -l` returns the count", async () => { + const findVirtualPathsFn = vi.fn(async () => ["/a.json", "/b.json", "/c.json"]) as any; + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "find ~/.deeplake/memory/sessions -name '*.json' | wc -l" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + findVirtualPathsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain('"3"'); + }); + + it("Grep tool: falls through to handleGrepDirect and returns the matches", async () => { + const handleGrepDirectFn = vi.fn(async () => "/sessions/a.json:match line") as any; + const d = await processPreToolUse( + { + session_id: "s", + tool_name: "Grep", + tool_input: { path: "~/.deeplake/memory", pattern: "match", output_mode: 
"content" }, + tool_use_id: "t", + }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + handleGrepDirectFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("match line"); + }); + + it("throws in direct-read path → falls back to the shell bundle", async () => { + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readVirtualPathContentFn: vi.fn(async () => { throw new Error("boom"); }) as any, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + shellBundle: "/SHELL", + logFn: vi.fn(), + }, + ); + expect(d?.command).toContain('node "/SHELL" -c'); + }); +}); + +describe("processPreToolUse: index cache short-circuit", () => { + // `readVirtualPathContentsWithCache` is an inline callback the hook + // passes to `executeCompiledBashCommand` so the compiled-segments path + // can reuse the already-fetched /index.md content without hitting SQL + // twice. The happy path is only exercised when the compiler actually + // invokes the callback — these tests simulate exactly that. + + it("returns the cached /index.md immediately without calling readVirtualPathContents", async () => { + const readVirtualPathContentsFn = vi.fn(async (_api, _m, _s, paths: string[]) => + new Map(paths.map(p => [p, `FETCHED:${p}`])), + ) as any; + const readCachedIndexContentFn = vi.fn(() => "CACHED INDEX"); + const writeCachedIndexContentFn = vi.fn(); + + const executeCompiledBashCommandFn = vi.fn(async (_api, _memory, _sessions, _cmd, deps) => { + // Mimic what the real compiler does when it needs /index.md content. 
+ const fetched = await deps.readVirtualPathContentsFn(_api, _memory, _sessions, ["/index.md", "/sessions/x.json"]); + return `idx=${fetched.get("/index.md")}\nx=${fetched.get("/sessions/x.json")}`; + }) as any; + + const d = await processPreToolUse( + { session_id: "s1", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/index.md && cat ~/.deeplake/memory/sessions/x.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readCachedIndexContentFn, + writeCachedIndexContentFn, + readVirtualPathContentsFn, + executeCompiledBashCommandFn, + }, + ); + + expect(d?.command).toContain("idx=CACHED INDEX"); + expect(d?.command).toContain("x=FETCHED:/sessions/x.json"); + // /index.md came from the per-session cache; only the /sessions/x.json + // path went to the API. + expect(readCachedIndexContentFn).toHaveBeenCalledWith("s1"); + expect(readVirtualPathContentsFn).toHaveBeenCalledWith( + expect.anything(), + expect.anything(), + expect.anything(), + ["/sessions/x.json"], + ); + // Cache re-write always fires when /index.md is in the result set — + // idempotent for the hit path (same content in, same content out). + expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s1", "CACHED INDEX"); + }); + + it("writes the freshly-fetched /index.md into the session cache when there's no hit", async () => { + const readVirtualPathContentsFn = vi.fn(async (_api, _m, _s, paths: string[]) => + new Map(paths.map(p => [p, p === "/index.md" ? 
"FRESH INDEX" : null])), + ) as any; + const readCachedIndexContentFn = vi.fn(() => null); + const writeCachedIndexContentFn = vi.fn(); + + const executeCompiledBashCommandFn = vi.fn(async (_api, _m, _s, _cmd, deps) => { + const fetched = await deps.readVirtualPathContentsFn(_api, _m, _s, ["/index.md"]); + return `out=${fetched.get("/index.md")}`; + }) as any; + + const d = await processPreToolUse( + { session_id: "s2", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/index.md" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readCachedIndexContentFn, + writeCachedIndexContentFn, + readVirtualPathContentsFn, + executeCompiledBashCommandFn, + }, + ); + + expect(d?.command).toContain("FRESH INDEX"); + expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s2", "FRESH INDEX"); + }); + + it("Read on the memory root (no extension in basename) routes to the ls directory branch", async () => { + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/sessions/conv_0_session_1.json", size_bytes: 100 }, + { path: "/summaries/alice/s1.md" /* no size_bytes → null branch */ }, + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Read", tool_input: { file_path: "~/.deeplake/memory/" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("sessions/"); + expect(d?.command).toContain("summaries/"); + }); + + it("Read on a directory with trailing slashes strips them before listing", async () => { + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/sessions/conv_0_session_1.json", size_bytes: 42 }, + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Read", tool_input: { file_path: "~/.deeplake/memory/sessions///" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + 
createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("conv_0_session_1.json"); + }); + + it("`head ` (no explicit -N) defaults to 10 lines", async () => { + const readVirtualPathContentFn = vi.fn(async () => + Array.from({ length: 20 }, (_, i) => `L${i}`).join("\n") + ) as any; + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "head ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("L0"); + expect(d?.command).toContain("L9"); + expect(d?.command).not.toContain("L10"); + }); + + it("`tail ` (no explicit -N) defaults to the last 10 lines", async () => { + const readVirtualPathContentFn = vi.fn(async () => + Array.from({ length: 20 }, (_, i) => `L${i}`).join("\n") + ) as any; + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "tail ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("L19"); + expect(d?.command).toContain("L10"); + expect(d?.command).not.toContain("L9"); + }); + + it("ls -la listing includes both file entries (-rw-) and directory entries (drwx)", async () => { + // A flat file directly under the listed dir → file entry (isDir=false). + // A nested path under a subdir → directory entry (isDir=true). 
+ const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/summaries/top-level.md", size_bytes: 42 }, + { path: "/summaries/alice/s1.md", size_bytes: 100 }, + { path: "/summaries/", size_bytes: 0 }, // empty suffix — skipped by `if (!name) continue` + ]) as any; + + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "ls -la ~/.deeplake/memory/summaries" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + // File entry → -rw-r--r-- prefix + expect(d?.command).toContain("-rw-r--r--"); + expect(d?.command).toContain("top-level.md"); + // Directory entry → drwxr-xr-x prefix + expect(d?.command).toContain("drwxr-xr-x"); + expect(d?.command).toContain("alice/"); + }); + + it("cat | head pipeline routes to the head fast-path", async () => { + const readVirtualPathContentFn = vi.fn(async () => + Array.from({ length: 30 }, (_, i) => `L${i}`).join("\n") + ) as any; + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/sessions/a.json | head -3" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readVirtualPathContentFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("L0"); + expect(d?.command).toContain("L2"); + expect(d?.command).not.toContain("L3"); + }); + + it("Grep whose handleGrepDirect returns null falls through — no decision from grep path", async () => { + const handleGrepDirectFn = vi.fn(async () => null) as any; + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/summaries/alice/s1.md", size_bytes: 100 }, + ]) as any; + // We send a Read on a directory so after grep-null fall-through the ls + // branch takes over with a real decision — proving the flow continues + // past the null grep 
result instead of erroring. + const d = await processPreToolUse( + { session_id: "s", tool_name: "Read", tool_input: { path: "~/.deeplake/memory/summaries" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + handleGrepDirectFn, + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).toContain("alice/"); + }); + + it("Bash `ls ` without -l uses short-format listing (no permissions prefix)", async () => { + const listVirtualPathRowsFn = vi.fn(async () => [ + { path: "/sessions/conv_0_session_1.json", size_bytes: 100 }, + ]) as any; + const d = await processPreToolUse( + { session_id: "s", tool_name: "Bash", tool_input: { command: "ls ~/.deeplake/memory/sessions" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + listVirtualPathRowsFn, + executeCompiledBashCommandFn: vi.fn(async () => null) as any, + }, + ); + expect(d?.command).not.toContain("drwxr-xr-x"); + expect(d?.command).toContain("conv_0_session_1.json"); + }); + + it("handles the no-paths edge case (empty cachePaths passed by the compiler)", async () => { + const readVirtualPathContentsFn = vi.fn(async () => new Map()) as any; + const readCachedIndexContentFn = vi.fn(() => null); + + const executeCompiledBashCommandFn = vi.fn(async (_api, _m, _s, _cmd, deps) => { + const result = await deps.readVirtualPathContentsFn(_api, _m, _s, []); + return `size=${result.size}`; + }) as any; + + const d = await processPreToolUse( + { session_id: "s3", tool_name: "Bash", tool_input: { command: "cat ~/.deeplake/memory/sessions/a.json" }, tool_use_id: "t" }, + { + config: BASE_CONFIG as any, + createApi: vi.fn(() => makeApi()), + readCachedIndexContentFn, + writeCachedIndexContentFn: vi.fn(), + readVirtualPathContentsFn, + executeCompiledBashCommandFn, + }, + ); + expect(d?.command).toContain("size=0"); + // Didn't touch SQL because paths were empty. 
+ expect(readVirtualPathContentsFn).not.toHaveBeenCalled(); + }); +}); diff --git a/claude-code/tests/session-end-hook.test.ts b/claude-code/tests/session-end-hook.test.ts new file mode 100644 index 0000000..aaf4cff --- /dev/null +++ b/claude-code/tests/session-end-hook.test.ts @@ -0,0 +1,165 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +/** + * Direct source-level tests for src/hooks/session-end.ts. The hook's + * `main()` runs at module import time, so each test resets the module + * registry, wires mocks, then dynamically imports the module and waits + * for the main promise chain to settle. + * + * Coverage target: every branch of the hook — the WIKI_WORKER / CAPTURE + * early-exits, empty session_id, missing config, lock held, happy path, + * and the outer catch for thrown errors. + * + * CLAUDE.md rule #2: mock only at the boundary. readStdin, loadConfig, + * spawnWikiWorker, wikiLog, and tryAcquireLock are the seams. The rest + * of the hook body runs for real. 
+ */ + +const stdinMock = vi.fn(); +const loadConfigMock = vi.fn(); +const spawnMock = vi.fn(); +const wikiLogMock = vi.fn(); +const tryAcquireLockMock = vi.fn(); +const releaseLockMock = vi.fn(); +const debugLogMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/hooks/spawn-wiki-worker.js", () => ({ + spawnWikiWorker: (...a: any[]) => spawnMock(...a), + wikiLog: (...a: any[]) => wikiLogMock(...a), + bundleDirFromImportMeta: () => "/fake/bundle", +})); +vi.mock("../../src/hooks/summary-state.js", () => ({ + tryAcquireLock: (...a: any[]) => tryAcquireLockMock(...a), + releaseLock: (...a: any[]) => releaseLockMock(...a), +})); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_tag: string, msg: string) => debugLogMock(msg), +})); + +async function runHook(): Promise { + vi.resetModules(); + await import("../../src/hooks/session-end.js"); + // main() is async and fires on import; give the microtask queue a + // chance to drain before we assert on the mocks. 
+ await new Promise(r => setImmediate(r)); +} + +const validConfig = { + token: "t", orgId: "o", orgName: "o", workspaceId: "default", + userName: "u", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +beforeEach(() => { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/proj" }); + loadConfigMock.mockReset().mockReturnValue(validConfig); + spawnMock.mockReset(); + wikiLogMock.mockReset(); + tryAcquireLockMock.mockReset().mockReturnValue(true); + releaseLockMock.mockReset(); + debugLogMock.mockReset(); +}); + +afterEach(() => { vi.restoreAllMocks(); }); + +describe("session-end hook", () => { + it("returns immediately when HIVEMIND_WIKI_WORKER=1 (nested worker invocation)", async () => { + process.env.HIVEMIND_WIKI_WORKER = "1"; + await runHook(); + expect(stdinMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(tryAcquireLockMock).not.toHaveBeenCalled(); + }); + + it("returns immediately when HIVEMIND_CAPTURE=false (opt-out)", async () => { + process.env.HIVEMIND_CAPTURE = "false"; + await runHook(); + expect(stdinMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("returns without spawning when session_id is missing", async () => { + stdinMock.mockResolvedValue({ session_id: "", cwd: "/proj" }); + await runHook(); + expect(loadConfigMock).not.toHaveBeenCalled(); + expect(tryAcquireLockMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("returns without spawning when loadConfig returns null (no credentials)", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(tryAcquireLockMock).not.toHaveBeenCalled(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith("no config"); + }); + + it("skips spawn with a wiki log line when the periodic worker holds the lock", async 
() => { + tryAcquireLockMock.mockReturnValue(false); + await runHook(); + expect(spawnMock).not.toHaveBeenCalled(); + expect(wikiLogMock).toHaveBeenCalledWith( + expect.stringContaining("periodic worker already running for sid-1, skipping"), + ); + }); + + it("spawns the wiki worker on the happy path and logs 'triggering summary'", async () => { + await runHook(); + expect(tryAcquireLockMock).toHaveBeenCalledWith("sid-1"); + expect(wikiLogMock).toHaveBeenCalledWith( + expect.stringContaining("triggering summary for sid-1"), + ); + expect(spawnMock).toHaveBeenCalledTimes(1); + const callArg = spawnMock.mock.calls[0][0]; + expect(callArg.sessionId).toBe("sid-1"); + expect(callArg.cwd).toBe("/proj"); + expect(callArg.reason).toBe("SessionEnd"); + expect(callArg.config).toBe(validConfig); + }); + + it("falls back to empty cwd when stdin omits the field", async () => { + stdinMock.mockResolvedValue({ session_id: "sid-2" }); + await runHook(); + expect(spawnMock).toHaveBeenCalledWith( + expect.objectContaining({ sessionId: "sid-2", cwd: "" }), + ); + }); + + it("catches and logs a fatal error from readStdin without crashing the process", async () => { + const boom = new Error("stdin boom"); + stdinMock.mockRejectedValue(boom); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + // Let the catch in `main().catch(...)` run. + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); + + it("releases the lock if spawnWikiWorker throws (no lock leak)", async () => { + spawnMock.mockImplementation(() => { throw new Error("spawn exploded"); }); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + // Let the outer main().catch run. 
+ await new Promise(r => setImmediate(r)); + expect(releaseLockMock).toHaveBeenCalledWith("sid-1"); + // The throw bubbles to main().catch and logs "fatal: ..." + expect(debugLogMock).toHaveBeenCalledWith("fatal: spawn exploded"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); + + it("still swallows release errors when spawn throws (no double-fault)", async () => { + spawnMock.mockImplementation(() => { throw new Error("spawn exploded"); }); + releaseLockMock.mockImplementation(() => { throw new Error("release also broken"); }); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + // Outer fatal is the ORIGINAL spawn failure, not the release failure + expect(debugLogMock).toHaveBeenCalledWith("fatal: spawn exploded"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); diff --git a/claude-code/tests/session-start-hook.test.ts b/claude-code/tests/session-start-hook.test.ts new file mode 100644 index 0000000..27b15c8 --- /dev/null +++ b/claude-code/tests/session-start-hook.test.ts @@ -0,0 +1,338 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { mkdtempSync, rmSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +/** + * Direct source-level tests for src/hooks/session-start.ts. The hook + * orchestrates: credential load, userName backfill, table+placeholder + * setup, version check + auto-update, and the additionalContext output. + * + * Mocks: readStdin, loadCredentials/saveCredentials, loadConfig, + * DeeplakeApi, global.fetch, child_process.execSync, and the two + * node:fs helpers used by the cache-cleanup path (readdirSync, rmSync). 
+ */ + +const stdinMock = vi.fn(); +const loadCredsMock = vi.fn(); +const saveCredsMock = vi.fn(); +const loginMock = vi.fn(); +const loadConfigMock = vi.fn(); +const debugLogMock = vi.fn(); +const ensureTableMock = vi.fn(); +const ensureSessionsTableMock = vi.fn(); +const queryMock = vi.fn(); +const execSyncMock = vi.fn(); +const readdirSyncMock = vi.fn(); +const rmSyncMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/commands/auth.js", () => ({ + loadCredentials: (...a: any[]) => loadCredsMock(...a), + saveCredentials: (...a: any[]) => saveCredsMock(...a), + login: (...a: any[]) => loginMock(...a), +})); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_t: string, msg: string) => debugLogMock(msg), + utcTimestamp: () => "2026-04-17 00:00:00 UTC", +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { + ensureTable() { return ensureTableMock(); } + ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); } + query(sql: string) { return queryMock(sql); } + }, +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) }; +}); +vi.mock("node:fs", async () => { + const actual = await vi.importActual("node:fs"); + return { + ...actual, + readdirSync: (...a: any[]) => readdirSyncMock(...a), + rmSync: (...a: any[]) => rmSyncMock(...a), + }; +}); + +const originalFetch = global.fetch; +const fetchMock = vi.fn(); + +let stdoutLines: string[] = []; +const stdoutSpy = vi.spyOn(process.stdout, "write"); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + delete process.env.HIVEMIND_CAPTURE; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else 
process.env[k] = v; + } + stdoutLines = []; + stdoutSpy.mockImplementation((chunk: any) => { stdoutLines.push(String(chunk)); return true; }); + vi.resetModules(); + // @ts-expect-error + global.fetch = fetchMock; + // Intercept console.log which session-start.ts uses for the JSON emit + const originalLog = console.log; + const collected: string[] = []; + console.log = (...args: any[]) => { collected.push(args.join(" ")); }; + try { + await import("../../src/hooks/session-start.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); + return collected.join("\n") || null; + } finally { + console.log = originalLog; + } +} + +const validConfig = { + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + userName: "alice", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +let cacheTmp: string; + +beforeEach(() => { + cacheTmp = mkdtempSync(join(tmpdir(), "session-start-test-")); + stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/workspaces/proj" }); + loadCredsMock.mockReset().mockReturnValue({ + token: "tok", orgId: "o", orgName: "acme", userName: "alice", workspaceId: "default", + }); + saveCredsMock.mockReset(); + loadConfigMock.mockReset().mockReturnValue(validConfig); + debugLogMock.mockReset(); + ensureTableMock.mockReset().mockResolvedValue(undefined); + ensureSessionsTableMock.mockReset().mockResolvedValue(undefined); + queryMock.mockReset().mockResolvedValue([]); // "no existing summary" + execSyncMock.mockReset(); + readdirSyncMock.mockReset().mockReturnValue([]); + rmSyncMock.mockReset(); + fetchMock.mockReset().mockResolvedValue({ + ok: true, + json: async () => ({ version: "0.0.1" }), // older-or-equal → no update + }); +}); + +afterEach(() => { + vi.restoreAllMocks(); + // @ts-expect-error + global.fetch = originalFetch; + try { rmSync(cacheTmp, { recursive: true, force: true }); } catch { /* ignore */ } +}); + +// ═══ Guard + credential branches 
═══════════════════════════════════════════ + +describe("session-start hook — guards", () => { + it("returns immediately when HIVEMIND_WIKI_WORKER=1", async () => { + const out = await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + expect(stdinMock).not.toHaveBeenCalled(); + expect(out).toBeNull(); + }); + + it("emits additionalContext with the not-logged-in warning when no creds", async () => { + loadCredsMock.mockReturnValue(null); + const out = await runHook(); + expect(out).not.toBeNull(); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("Not logged in to Deeplake"); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("no credentials found"), + ); + }); + + it("emits the logged-in context when creds are present", async () => { + const out = await runHook(); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake as org: acme"); + expect(parsed.hookSpecificOutput.additionalContext).toContain("workspace: default"); + }); + + it("falls back to orgId when orgName is missing", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "org-uuid", userName: "u", workspaceId: "default", + }); + const out = await runHook(); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("Logged in to Deeplake as org: org-uuid"); + }); + + it("backfills userName via node:os when credentials lack one", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + }); + await runHook(); + expect(saveCredsMock).toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringMatching(/^backfilled and persisted userName: /), + ); + }); +}); + +// ═══ Table setup + placeholder ═════════════════════════════════════════════ + +describe("session-start hook — placeholder branching", () => { + it("creates placeholder when summary does not 
exist (query returns [])", async () => { + await runHook(); + expect(ensureTableMock).toHaveBeenCalled(); + expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions"); + // 1 SELECT (existing check) + 1 INSERT = 2 queries. + expect(queryMock).toHaveBeenCalledTimes(2); + expect(queryMock.mock.calls[0][0]).toMatch(/^SELECT path FROM/); + expect(queryMock.mock.calls[1][0]).toMatch(/^INSERT INTO/); + expect(debugLogMock).toHaveBeenCalledWith("placeholder created"); + }); + + it("skips placeholder INSERT when summary already exists (resumed session)", async () => { + queryMock.mockResolvedValueOnce([{ path: "/summaries/alice/sid-1.md" }]); + await runHook(); + expect(queryMock).toHaveBeenCalledTimes(1); // only the SELECT + }); + + it("skips placeholder INSERT when HIVEMIND_CAPTURE=false but still ensures tables", async () => { + await runHook({ HIVEMIND_CAPTURE: "false" }); + expect(ensureTableMock).toHaveBeenCalled(); + expect(ensureSessionsTableMock).toHaveBeenCalled(); + expect(queryMock).not.toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + "placeholder skipped (HIVEMIND_CAPTURE=false)", + ); + }); + + it("swallows placeholder errors and logs via both loggers", async () => { + ensureTableMock.mockRejectedValue(new Error("table boom")); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("placeholder failed: table boom"), + ); + }); + + it("skips setup when loadConfig returns null", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); + + it("skips setup when session_id is empty", async () => { + stdinMock.mockResolvedValue({ session_id: "", cwd: "/x" }); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); +}); + +// ═══ Version check + autoupdate ═════════════════════════════════════════════ + +describe("session-start hook — version check", () => { + it("runs execSync and cleans old cache entries when a 
newer version is available", async () => { + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0" }), + }); + readdirSyncMock.mockReturnValue([ + { name: "0.0.1", isDirectory: () => true }, + { name: "999.0.0", isDirectory: () => true }, // latest, must NOT be removed + ]); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + const out = await runHook(); + expect(execSyncMock).toHaveBeenCalled(); + expect(rmSyncMock).toHaveBeenCalledTimes(1); + expect(rmSyncMock.mock.calls[0][0]).toContain("0.0.1"); + expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining("auto-updated")); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("auto-updated"); + }); + + it("falls back to manual-upgrade message when autoupdate is disabled", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "o", orgName: "acme", userName: "u", workspaceId: "default", + autoupdate: false, + }); + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("update available"), + ); + }); + + it("emits the 'auto-update failed' message when execSync throws", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) }); + execSyncMock.mockImplementation(() => { throw new Error("npm unreachable"); }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("Auto-update failed"), + ); + }); + + it("tolerates fetch failure (GitHub unreachable)", async () => { + fetchMock.mockRejectedValue(new Error("offline")); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("tolerates 
readdirSync throw during cache cleanup", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "999.0.0" }) }); + readdirSyncMock.mockImplementation(() => { throw new Error("readdir boom"); }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringContaining("cache cleanup failed: readdir boom"), + ); + }); + + it("emits 'up to date' context when latest == current", async () => { + // Real getInstalledVersion reads plugin.json from the real repo; we + // simulate "latest equals current" by returning the same version. + // Since we don't know the installed version at runtime, we use + // readFileSync-based indirection: fetchMock returns a version that + // is definitely older (0.0.1). The file read picks up the repo's + // real version → latest 0.0.1 is NOT newer → "up to date" branch. + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: "0.0.1" }) }); + const out = await runHook(); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("up to date"); + }); +}); + +// ═══ Fatal catch ════════════════════════════════════════════════════════════ + +describe("session-start hook — fatal catch", () => { + it("catches a stdin throw and exits 0", async () => { + stdinMock.mockRejectedValue(new Error("bad stdin")); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: bad stdin"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +// Additional branch coverage +describe("session-start hook — version helpers edge cases", () => { + it("fetch ok:false short-circuits getLatestVersion (no autoupdate)", async () => { + fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("GitHub response without a 
version field falls through to null", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("workspaceId missing on creds falls back to 'default' in context", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "o", orgName: "acme", userName: "alice", + // workspaceId omitted + }); + const out = await runHook(); + const parsed = JSON.parse(out!); + expect(parsed.hookSpecificOutput.additionalContext).toContain("workspace: default"); + }); +}); diff --git a/claude-code/tests/session-start-setup-hook.test.ts b/claude-code/tests/session-start-setup-hook.test.ts new file mode 100644 index 0000000..e3c9ca6 --- /dev/null +++ b/claude-code/tests/session-start-setup-hook.test.ts @@ -0,0 +1,262 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; + +/** + * Source-level tests for src/hooks/session-start-setup.ts. This hook + * handles three things on a fresh session: table setup, userName + * backfill, and version check + auto-update. Mocks the boundaries: + * readStdin, loadCredentials, saveCredentials, loadConfig, DeeplakeApi, + * global fetch (for the GitHub version lookup), and execSync (for the + * claude-plugin update call). 
+ */ + +const stdinMock = vi.fn(); +const loadCredsMock = vi.fn(); +const saveCredsMock = vi.fn(); +const loadConfigMock = vi.fn(); +const debugLogMock = vi.fn(); +const ensureTableMock = vi.fn(); +const ensureSessionsTableMock = vi.fn(); +const execSyncMock = vi.fn(); + +vi.mock("../../src/utils/stdin.js", () => ({ readStdin: (...a: any[]) => stdinMock(...a) })); +vi.mock("../../src/commands/auth.js", () => ({ + loadCredentials: (...a: any[]) => loadCredsMock(...a), + saveCredentials: (...a: any[]) => saveCredsMock(...a), +})); +vi.mock("../../src/config.js", () => ({ loadConfig: (...a: any[]) => loadConfigMock(...a) })); +vi.mock("../../src/utils/debug.js", () => ({ + log: (_t: string, msg: string) => debugLogMock(msg), + utcTimestamp: () => "2026-04-17 00:00:00 UTC", +})); +vi.mock("../../src/deeplake-api.js", () => ({ + DeeplakeApi: class { + ensureTable() { return ensureTableMock(); } + ensureSessionsTable(t: string) { return ensureSessionsTableMock(t); } + }, +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, execSync: (...a: any[]) => execSyncMock(...a) }; +}); + +// We also need to control global.fetch for the GitHub version lookup. 
+const originalFetch = global.fetch; +const fetchMock = vi.fn(); + +async function runHook(env: Record = {}): Promise { + delete process.env.HIVEMIND_WIKI_WORKER; + for (const [k, v] of Object.entries(env)) { + if (v === undefined) delete process.env[k]; + else process.env[k] = v; + } + vi.resetModules(); + // @ts-expect-error: replace global fetch for the GitHub lookup + global.fetch = fetchMock; + await import("../../src/hooks/session-start-setup.js"); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +const validConfig = { + token: "t", orgId: "o", orgName: "acme", workspaceId: "default", + userName: "alice", apiUrl: "http://example", tableName: "memory", + sessionsTableName: "sessions", +}; + +beforeEach(() => { + stdinMock.mockReset().mockResolvedValue({ session_id: "sid-1", cwd: "/x" }); + loadCredsMock.mockReset().mockReturnValue({ + token: "tok", orgId: "o", orgName: "acme", userName: "alice", + }); + saveCredsMock.mockReset(); + loadConfigMock.mockReset().mockReturnValue(validConfig); + debugLogMock.mockReset(); + ensureTableMock.mockReset().mockResolvedValue(undefined); + ensureSessionsTableMock.mockReset().mockResolvedValue(undefined); + execSyncMock.mockReset(); + fetchMock.mockReset().mockResolvedValue({ + ok: true, + json: async () => ({ version: "0.0.1" }), // same-as-current: no update + }); +}); + +afterEach(() => { + vi.restoreAllMocks(); + // @ts-expect-error + global.fetch = originalFetch; +}); + +describe("session-start-setup hook — guards", () => { + it("returns without reading stdin when HIVEMIND_WIKI_WORKER=1", async () => { + await runHook({ HIVEMIND_WIKI_WORKER: "1" }); + expect(stdinMock).not.toHaveBeenCalled(); + }); + + it("returns when no credentials are loaded", async () => { + loadCredsMock.mockReturnValue(null); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("no credentials"); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); + + it("returns when credentials have no 
token", async () => { + loadCredsMock.mockReturnValue({ token: "", userName: "alice" }); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("no credentials"); + }); +}); + +describe("session-start-setup hook — userName backfill", () => { + it("backfills userName via node:os when missing and saves creds", async () => { + loadCredsMock.mockReturnValue({ token: "tok", orgId: "o", orgName: "acme" }); + await runHook(); + expect(saveCredsMock).toHaveBeenCalled(); + expect(debugLogMock).toHaveBeenCalledWith( + expect.stringMatching(/^backfilled userName: /), + ); + }); + + it("does not call saveCredentials when userName already set", async () => { + // Default creds in beforeEach have userName=alice. + await runHook(); + expect(saveCredsMock).not.toHaveBeenCalled(); + }); +}); + +describe("session-start-setup hook — table setup", () => { + it("ensures both tables on the happy path", async () => { + await runHook(); + expect(ensureTableMock).toHaveBeenCalled(); + expect(ensureSessionsTableMock).toHaveBeenCalledWith("sessions"); + expect(debugLogMock).toHaveBeenCalledWith("setup complete"); + }); + + it("swallows setup errors and logs them", async () => { + ensureTableMock.mockRejectedValue(new Error("table boom")); + await runHook(); + expect(debugLogMock).toHaveBeenCalledWith("setup failed: table boom"); + }); + + it("skips setup entirely when session_id is empty", async () => { + stdinMock.mockResolvedValue({ session_id: "", cwd: "/x" }); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); + + it("skips setup when loadConfig returns null", async () => { + loadConfigMock.mockReturnValue(null); + await runHook(); + expect(ensureTableMock).not.toHaveBeenCalled(); + }); +}); + +describe("session-start-setup hook — version check + autoupdate", () => { + it("runs the autoupdate path when newer version is available", async () => { + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0" }), // clearly newer + }); 
+ const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(execSyncMock).toHaveBeenCalled(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("auto-updated"), + ); + }); + + it("emits a manual-upgrade message when autoupdate is disabled and newer exists", async () => { + loadCredsMock.mockReturnValue({ + token: "t", orgId: "o", orgName: "acme", userName: "alice", + autoupdate: false, + }); + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0" }), + }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("update available"), + ); + }); + + it("emits the 'auto-update failed' message when execSync throws", async () => { + fetchMock.mockResolvedValue({ + ok: true, + json: async () => ({ version: "999.0.0" }), + }); + execSyncMock.mockImplementation(() => { throw new Error("npm down"); }); + const stderrSpy = vi.spyOn(process.stderr, "write").mockReturnValue(true); + await runHook(); + expect(stderrSpy).toHaveBeenCalledWith( + expect.stringContaining("Auto-update failed"), + ); + }); + + it("logs 'up to date' when installed version matches latest", async () => { + // fetchMock default returns 0.0.1; getInstalledVersion reads plugin.json + // from the real filesystem, which will be 0.6.x. So we force the + // GitHub answer to match by returning ok=false → latest=null → + // falls through the else. + fetchMock.mockResolvedValue({ ok: false }); + await runHook(); + // The "version up to date" branch is reached when latest is non-null + // but not newer. Hard to hit deterministically without also mocking + // the file read; covering the fetch-error branch (ok=false → null) + // at least keeps the outer try from throwing. 
+ // Assert we did not log an autoupdate: + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("tolerates a fetch error (GitHub unreachable)", async () => { + fetchMock.mockRejectedValue(new Error("network down")); + await runHook(); + // Inner try/catch in getLatestVersion swallows; no autoupdate triggers. + expect(execSyncMock).not.toHaveBeenCalled(); + }); +}); + +describe("session-start-setup hook — fatal catch", () => { + it("catches a stdin throw and exits 0", async () => { + stdinMock.mockRejectedValue(new Error("stdin boom")); + const exitSpy = vi.spyOn(process, "exit").mockImplementation(() => undefined as never); + await runHook(); + await new Promise(r => setImmediate(r)); + expect(debugLogMock).toHaveBeenCalledWith("fatal: stdin boom"); + expect(exitSpy).toHaveBeenCalledWith(0); + }); +}); + +// Extra branch coverage: getLatestVersion edge cases + version-compare chain +describe("session-start-setup hook — version helpers edge cases", () => { + it("treats fetch with ok:false as no-new-version (line 61 branch)", async () => { + fetchMock.mockResolvedValue({ ok: false, json: async () => ({ version: "999.0.0" }) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("treats a response missing the 'version' field as null (?? null fallback)", async () => { + fetchMock.mockResolvedValue({ ok: true, json: async () => ({}) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); + + it("treats latest == current as 'up to date' (isNewer false)", async () => { + // Force current to be a version that fetchMock exactly matches. + // We can't change what getInstalledVersion reads from disk, but we + // can make fetch return the installed version. With equal strings, + // isNewer returns false and the else-branch fires. 
+ const pkg = JSON.parse( + require("node:fs").readFileSync( + require("node:path").join( + __dirname, "..", ".claude-plugin", "plugin.json", + ), + "utf-8", + ), + ); + fetchMock.mockResolvedValue({ ok: true, json: async () => ({ version: pkg.version }) }); + await runHook(); + expect(execSyncMock).not.toHaveBeenCalled(); + }); +}); diff --git a/claude-code/tests/session-start.test.ts b/claude-code/tests/session-start.test.ts index 858f544..0d311cf 100644 --- a/claude-code/tests/session-start.test.ts +++ b/claude-code/tests/session-start.test.ts @@ -137,17 +137,6 @@ describe("claude-code integration: session-start.js (sync hook)", () => { expect(ctx).toMatch(/Logged in to Deeplake|Not logged in to Deeplake/); }); - it("steers recall tasks toward index-first exact file reads", () => { - const raw = runHook("session-start.js", baseInput); - const parsed = JSON.parse(raw); - const ctx = parsed.hookSpecificOutput.additionalContext; - expect(ctx).toContain("Always read index.md first"); - expect(ctx).toContain("read that exact summary or session file directly"); - expect(ctx).toContain("Do NOT probe unrelated local paths"); - expect(ctx).toContain("answer with the smallest exact phrase supported by memory"); - expect(ctx).toContain("convert the final answer into an absolute month/date/year"); - }); - it("completes within 3s with no credentials (no server calls)", () => { const start = Date.now(); runHook("session-start.js", baseInput); diff --git a/claude-code/tests/shell-bundle-sql-trace-silence.test.ts b/claude-code/tests/shell-bundle-sql-trace-silence.test.ts new file mode 100644 index 0000000..2c55dd7 --- /dev/null +++ b/claude-code/tests/shell-bundle-sql-trace-silence.test.ts @@ -0,0 +1,86 @@ +/** + * Bundle-level regression guard for fix #3 — the shell bundle invoked by the + * pre-tool-use hook as `node shell-bundle -c "..."` must not leak + * `[deeplake-sql]` trace output onto stderr. 
Claude Code's Bash tool merges + * the child process's stderr into the tool_result string the model sees, so + * any trace line shows up as noise in Claude's view of the command output + * (observed in the original `baseline_cloud-100` transcripts, where 35+ + * lines of `[deeplake-sql]` noise polluted bash command results). + * + * The fix has two parts: + * 1. `traceSql` reads the HIVEMIND_TRACE_SQL / HIVEMIND_DEBUG env vars at + * call time (not at module load), so callers can turn tracing off after + * importing the SDK. + * 2. The shell bundle's one-shot entry point (`node ... -c "cmd"`) deletes + * those env vars before opening any SQL connection. + * + * This test spawns the shipped shell bundle with the trace vars set + * explicitly, runs a trivial command that's guaranteed not to touch the + * network (we point the SDK at an unreachable URL and expect the command to + * fail fast), and asserts that the combined stderr output contains zero + * `[deeplake-sql]` lines. If either fix is reverted, stderr fills with the + * trace messages and the test fails. + */ + +import { describe, expect, it } from "vitest"; +import { spawnSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { join, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const BUNDLE_PATH = join(__dirname, "..", "bundle", "shell", "deeplake-shell.js"); + +describe("shell bundle one-shot: SQL trace silence (fix #3)", () => { + it("does not write [deeplake-sql] to stderr even when trace env vars are set", () => { + if (!existsSync(BUNDLE_PATH)) { + throw new Error(`shell bundle missing at ${BUNDLE_PATH} — run 'npm run build' first`); + } + + // Drive the bundle through a path that DEFINITELY calls DeeplakeApi.query() + // (so traceSql fires). 
Fake creds are good enough — the API call will fail + // fast against an unreachable host, and if the trace silencer regresses, + // the first `[deeplake-sql] query start:` line hits stderr before the + // failure. Point at 127.0.0.1:1 (closed port) with a 200ms timeout so the + // test finishes in well under a second. + const cleanEnv: NodeJS.ProcessEnv = { + PATH: process.env.PATH, + HIVEMIND_TOKEN: "fake-token-for-trace-test", + HIVEMIND_ORG_ID: "fake-org", + HIVEMIND_WORKSPACE_ID: "fake-ws", + HIVEMIND_API_URL: "http://127.0.0.1:1", + HIVEMIND_QUERY_TIMEOUT_MS: "200", + // Pre-silenced env: our fix must keep these from leaking stderr. + HIVEMIND_TRACE_SQL: "1", + DEEPLAKE_TRACE_SQL: "1", + HIVEMIND_DEBUG: "1", + DEEPLAKE_DEBUG: "1", + }; + + const result = spawnSync(process.execPath, [BUNDLE_PATH, "-c", "echo hello"], { + env: cleanEnv, + encoding: "utf-8", + timeout: 15_000, + }); + + const combined = `${result.stdout ?? ""}\n${result.stderr ?? ""}`; + // With the one-shot silencer in place there must be zero SQL trace lines, + // even though the bundle issued SQL queries (that then failed against the + // unreachable host). If the fix regresses, expect lines like: + // "[deeplake-sql] query start: SELECT path, size_bytes ..." + expect(combined).not.toContain("[deeplake-sql]"); + }, 20_000); + + it("keeps interactive mode tracing available (env vars not deleted outside one-shot)", () => { + // Sanity check that the one-shot silencing is scoped: traceSql source + // still honours the env vars, so interactive usage (no -c) with + // HIVEMIND_TRACE_SQL=1 would still emit trace lines. We can't easily + // spawn the REPL here, so we just verify the condition in source — this + // guards against an over-eager fix that silences tracing globally. 
+ const { readFileSync } = require("node:fs"); + const apiSource = readFileSync(join(__dirname, "..", "..", "src", "deeplake-api.ts"), "utf-8"); + expect(apiSource).toMatch(/function traceSql\([^)]*\): void \{[\s\S]*process\.env\.HIVEMIND_TRACE_SQL/); + // Ensure the env read is inside the function (runtime), not a top-level const. + expect(apiSource).not.toMatch(/^const TRACE_SQL =/m); + }); +}); diff --git a/claude-code/tests/summary-state.test.ts b/claude-code/tests/summary-state.test.ts new file mode 100644 index 0000000..0c32a5d --- /dev/null +++ b/claude-code/tests/summary-state.test.ts @@ -0,0 +1,434 @@ +import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest"; +import { mkdtempSync, rmSync, writeFileSync, existsSync, mkdirSync, readFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join, dirname } from "node:path"; +import { spawn } from "node:child_process"; + +/** + * Functional tests for summary-state. The module computes STATE_DIR from + * homedir() at module-load time, so we redirect $HOME to a tmp dir BEFORE + * importing. Every test uses a unique session id so there is no cross-test + * contamination. 
+ * + * What these tests pin down: + * - bumpTotalCount seeds fresh state and increments existing state + * - shouldTrigger fires the first summary at 10 events, obeys msg/time + * cadence, and guards time-cadence with msgsSince > 0 + * - tryAcquireLock is mutually exclusive, reclaims stale locks, and rejects + * held locks + * - finalizeSummary advances lastSummaryCount and preserves the highest + * observed totalCount + * - loadTriggerConfig respects env overrides and falls back to defaults + */ + +let tmpHome: string; +let mod: typeof import("../../src/hooks/summary-state.js"); + +beforeAll(async () => { + tmpHome = mkdtempSync(join(tmpdir(), "summary-state-test-")); + process.env.HOME = tmpHome; + mod = await import("../../src/hooks/summary-state.js"); +}); + +afterAll(() => { + try { rmSync(tmpHome, { recursive: true, force: true }); } catch { /* ignore */ } +}); + +const newSessionId = () => `test-${crypto.randomUUID()}`; + +describe("bumpTotalCount", () => { + it("seeds fresh state with totalCount=1 and lastSummaryCount=0", () => { + const sid = newSessionId(); + const state = mod.bumpTotalCount(sid); + expect(state.totalCount).toBe(1); + expect(state.lastSummaryCount).toBe(0); + expect(typeof state.lastSummaryAt).toBe("number"); + }); + + it("increments existing totalCount and preserves lastSummaryAt/lastSummaryCount", () => { + const sid = newSessionId(); + const first = mod.bumpTotalCount(sid); + const second = mod.bumpTotalCount(sid); + const third = mod.bumpTotalCount(sid); + expect(second.totalCount).toBe(2); + expect(third.totalCount).toBe(3); + expect(second.lastSummaryAt).toBe(first.lastSummaryAt); + expect(third.lastSummaryCount).toBe(0); + }); +}); + +describe("shouldTrigger", () => { + const cfg = { everyNMessages: 50, everyHours: 2 }; + + it("does NOT fire before 10 events on a fresh session", () => { + const now = Date.now(); + for (let n = 1; n <= 9; n++) { + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 0, totalCount: n 
}, cfg, now, + )).toBe(false); + } + }); + + it("fires the first summary at exactly 10 events", () => { + const now = Date.now(); + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 0, totalCount: 10 }, cfg, now, + )).toBe(true); + }); + + it("fires when msgsSince reaches everyNMessages", () => { + const now = Date.now(); + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 59 }, cfg, now, + )).toBe(false); + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 60 }, cfg, now, + )).toBe(true); + }); + + it("fires when enough time has elapsed and there is at least one new event", () => { + const now = Date.now(); + const twoHoursAgo = now - 2 * 3600 * 1000; + expect(mod.shouldTrigger( + { lastSummaryAt: twoHoursAgo, lastSummaryCount: 10, totalCount: 11 }, cfg, now, + )).toBe(true); + }); + + it("does NOT fire on time alone when no new events have arrived", () => { + const now = Date.now(); + const twoHoursAgo = now - 2 * 3600 * 1000; + expect(mod.shouldTrigger( + { lastSummaryAt: twoHoursAgo, lastSummaryCount: 42, totalCount: 42 }, cfg, now, + )).toBe(false); + }); + + it("does NOT fire when below both thresholds", () => { + const now = Date.now(); + expect(mod.shouldTrigger( + { lastSummaryAt: now - 30 * 60 * 1000, lastSummaryCount: 10, totalCount: 30 }, cfg, now, + )).toBe(false); + }); +}); + +describe("tryAcquireLock", () => { + it("succeeds on a fresh session and blocks a second acquire", () => { + const sid = newSessionId(); + expect(mod.tryAcquireLock(sid)).toBe(true); + expect(mod.tryAcquireLock(sid)).toBe(false); + mod.releaseLock(sid); + }); + + it("reclaims a stale lock past maxAge", () => { + const sid = newSessionId(); + // Seed a stale lock file directly: timestamp well in the past. 
+ const p = mod.lockPath(sid); + mkdirSync(dirname(p), { recursive: true }); + writeFileSync(p, String(Date.now() - 11 * 60 * 1000)); + // 10-minute default maxAge: the stale lock must be reclaimed. + expect(mod.tryAcquireLock(sid)).toBe(true); + mod.releaseLock(sid); + }); + + it("honors a fresh lock younger than maxAge", () => { + const sid = newSessionId(); + expect(mod.tryAcquireLock(sid)).toBe(true); + // Second acquire must fail — lock timestamp is ~now, well inside maxAge. + expect(mod.tryAcquireLock(sid)).toBe(false); + mod.releaseLock(sid); + }); + + it("releaseLock on a non-existent lock is a no-op", () => { + const sid = newSessionId(); + expect(() => mod.releaseLock(sid)).not.toThrow(); + }); + + it("treats an unreadable lock (non-numeric contents) as stale", () => { + const sid = newSessionId(); + const p = mod.lockPath(sid); + mkdirSync(dirname(p), { recursive: true }); + writeFileSync(p, "garbage-not-a-number"); + expect(mod.tryAcquireLock(sid)).toBe(true); + mod.releaseLock(sid); + }); +}); + +describe("finalizeSummary", () => { + it("sets lastSummaryCount to the jsonl line count and advances lastSummaryAt", () => { + const sid = newSessionId(); + mod.bumpTotalCount(sid); + mod.bumpTotalCount(sid); + const before = Date.now(); + mod.finalizeSummary(sid, 2); + // Re-read: totalCount must be preserved (max of previous and jsonlLines) + const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8")); + expect(s.lastSummaryCount).toBe(2); + expect(s.totalCount).toBe(2); + expect(s.lastSummaryAt).toBeGreaterThanOrEqual(before); + }); + + it("preserves totalCount when jsonlLines is lower than totalCount", () => { + const sid = newSessionId(); + for (let i = 0; i < 5; i++) mod.bumpTotalCount(sid); + mod.finalizeSummary(sid, 3); + const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8")); + expect(s.lastSummaryCount).toBe(3); + expect(s.totalCount).toBe(5); + }); + + it("handles missing prior state (no earlier bumpTotalCount)", () => { + const sid = 
newSessionId(); + mod.finalizeSummary(sid, 4); + const s = JSON.parse(readFileSync(mod.statePath(sid), "utf-8")); + expect(s.lastSummaryCount).toBe(4); + expect(s.totalCount).toBe(4); + }); +}); + +describe("loadTriggerConfig", () => { + const origN = process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS; + const origH = process.env.HIVEMIND_SUMMARY_EVERY_HOURS; + + beforeEach(() => { + delete process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS; + delete process.env.HIVEMIND_SUMMARY_EVERY_HOURS; + }); + + afterAll(() => { + if (origN !== undefined) process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = origN; + if (origH !== undefined) process.env.HIVEMIND_SUMMARY_EVERY_HOURS = origH; + }); + + it("falls back to defaults when env vars are unset", () => { + const cfg = mod.loadTriggerConfig(); + expect(cfg.everyNMessages).toBe(50); + expect(cfg.everyHours).toBe(2); + }); + + it("respects valid env overrides", () => { + process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = "30"; + process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "1"; + const cfg = mod.loadTriggerConfig(); + expect(cfg.everyNMessages).toBe(30); + expect(cfg.everyHours).toBe(1); + }); + + it("ignores invalid values and uses defaults", () => { + process.env.HIVEMIND_SUMMARY_EVERY_N_MSGS = "not-a-number"; + process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "-5"; + const cfg = mod.loadTriggerConfig(); + expect(cfg.everyNMessages).toBe(50); + expect(cfg.everyHours).toBe(2); + }); + + it("accepts fractional hours", () => { + process.env.HIVEMIND_SUMMARY_EVERY_HOURS = "0.5"; + const cfg = mod.loadTriggerConfig(); + expect(cfg.everyHours).toBe(0.5); + }); +}); + +describe("state files live under $HOME/.claude/hooks/summary-state/", () => { + it("writeState creates the directory and writes JSON", () => { + const sid = newSessionId(); + mod.bumpTotalCount(sid); + const expected = join(tmpHome, ".claude", "hooks", "summary-state", `${sid}.json`); + expect(existsSync(expected)).toBe(true); + }); +}); + +// 
══════════════════════════════════════════════════════════════════════════════ +// Edge-case and integration tests — these pin down the full periodic-summary +// state machine and the bounds that the capture hook relies on. +// ══════════════════════════════════════════════════════════════════════════════ + +describe("shouldTrigger — boundary conditions", () => { + const cfg = { everyNMessages: 50, everyHours: 2 }; + + it("first-summary rule only applies while lastSummaryCount is 0", () => { + const now = Date.now(); + // lastSummaryCount > 0 means the first-summary path is no longer active: + // totalCount=15 with lastSummaryCount=10 is 5 new messages, well below 50. + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 15 }, cfg, now, + )).toBe(false); + }); + + it("time trigger fires exactly at the cadence boundary", () => { + const now = Date.now(); + const twoHoursExact = now - 2 * 3600 * 1000; + expect(mod.shouldTrigger( + { lastSummaryAt: twoHoursExact, lastSummaryCount: 10, totalCount: 11 }, cfg, now, + )).toBe(true); + }); + + it("time trigger does NOT fire just below the cadence boundary", () => { + const now = Date.now(); + const justUnder = now - (2 * 3600 * 1000 - 1); + expect(mod.shouldTrigger( + { lastSummaryAt: justUnder, lastSummaryCount: 10, totalCount: 11 }, cfg, now, + )).toBe(false); + }); + + it("msg trigger respects custom everyNMessages", () => { + const now = Date.now(); + const tightCfg = { everyNMessages: 3, everyHours: 999 }; + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 12 }, tightCfg, now, + )).toBe(false); + expect(mod.shouldTrigger( + { lastSummaryAt: now, lastSummaryCount: 10, totalCount: 13 }, tightCfg, now, + )).toBe(true); + }); +}); + +describe("tryAcquireLock — age boundaries and custom maxAge", () => { + it("honors a custom maxAgeMs (short TTL reclaims quickly)", async () => { + const sid = newSessionId(); + expect(mod.tryAcquireLock(sid, 50)).toBe(true); 
+ // With 50ms TTL, sleep past the window and try again from a "new process" + await new Promise(r => setTimeout(r, 80)); + // The existing lock must now look stale even though the current process + // holds it — a separate caller (simulated here) would reclaim it. + expect(mod.tryAcquireLock(sid, 50)).toBe(true); + mod.releaseLock(sid); + }); + + it("a lock timestamp of exactly Date.now() is considered fresh", () => { + const sid = newSessionId(); + const p = mod.lockPath(sid); + mkdirSync(dirname(p), { recursive: true }); + writeFileSync(p, String(Date.now())); + expect(mod.tryAcquireLock(sid)).toBe(false); + try { rmSync(p); } catch { /* ignore */ } + }); + + it("a lock timestamp from the future (clock skew) is treated as fresh", () => { + const sid = newSessionId(); + const p = mod.lockPath(sid); + mkdirSync(dirname(p), { recursive: true }); + writeFileSync(p, String(Date.now() + 60_000)); + // ageMs is negative (< maxAgeMs), so the lock is held. + expect(mod.tryAcquireLock(sid)).toBe(false); + try { rmSync(p); } catch { /* ignore */ } + }); +}); + +describe("full periodic-summary cycle", () => { + it("bump → trigger → acquire → finalize → next bump no longer triggers", () => { + const sid = newSessionId(); + const cfg = { everyNMessages: 50, everyHours: 24 }; + + // Bump 9 times — first-summary threshold is 10, so nothing yet. + for (let i = 0; i < 9; i++) { + const s = mod.bumpTotalCount(sid); + expect(mod.shouldTrigger(s, cfg)).toBe(false); + } + + // 10th bump crosses the first-summary threshold. + const tenth = mod.bumpTotalCount(sid); + expect(tenth.totalCount).toBe(10); + expect(mod.shouldTrigger(tenth, cfg)).toBe(true); + + // Acquire the lock so the capture hook would spawn exactly one worker. + expect(mod.tryAcquireLock(sid)).toBe(true); + // A second capture within the same window cannot acquire — this is what + // prevents duplicate workers when events arrive in quick succession. 
+ expect(mod.tryAcquireLock(sid)).toBe(false); + + // Worker finishes: finalize + release. + mod.finalizeSummary(sid, 10); + mod.releaseLock(sid); + + // Next bump: lastSummaryCount is now 10, msgsSince=1, well below 50. + const eleventh = mod.bumpTotalCount(sid); + expect(eleventh.lastSummaryCount).toBe(10); + expect(eleventh.totalCount).toBe(11); + expect(mod.shouldTrigger(eleventh, cfg)).toBe(false); + }); + + it("second summary fires after everyNMessages messages past lastSummaryCount", () => { + const sid = newSessionId(); + const cfg = { everyNMessages: 50, everyHours: 24 }; + + // Fast-forward state as if a first summary already landed at 10. + for (let i = 0; i < 10; i++) mod.bumpTotalCount(sid); + mod.finalizeSummary(sid, 10); + + // Bump 49 more times: msgsSince=49, still below 50. + for (let i = 0; i < 49; i++) { + const s = mod.bumpTotalCount(sid); + expect(mod.shouldTrigger(s, cfg)).toBe(false); + } + + // 50th bump past lastSummaryCount triggers. + const trigger = mod.bumpTotalCount(sid); + expect(trigger.totalCount).toBe(60); + expect(mod.shouldTrigger(trigger, cfg)).toBe(true); + }); + + it("releaseLock is idempotent across calls", () => { + const sid = newSessionId(); + mod.tryAcquireLock(sid); + mod.releaseLock(sid); + expect(() => mod.releaseLock(sid)).not.toThrow(); + expect(() => mod.releaseLock(sid)).not.toThrow(); + // After release, a fresh acquire must succeed again. + expect(mod.tryAcquireLock(sid)).toBe(true); + mod.releaseLock(sid); + }); +}); + +describe("cross-process concurrency", () => { + // Each subprocess imports summary-state with the same $HOME + a sessionId + // passed via env var. The file-based RMW lock is the ONLY thing preventing + // lost updates (bumpTotalCount) and preventing multiple winners + // (tryAcquireLock) across processes, so these tests are a real stress test + // of the lock. Session id comes via env (TEST_SID) because tsx's `-e` flag + // does not forward positional args reliably across node versions. 
+ const modPath = new URL("../../src/hooks/summary-state.ts", import.meta.url).pathname; + + const runParallel = async (code: string, N: number, sid: string): Promise<string[]> => { + const runs = Array.from({ length: N }, () => + new Promise<string>((resolve, reject) => { + const child = spawn("npx", ["tsx", "-e", code], { + env: { ...process.env, HOME: tmpHome, TEST_SID: sid }, + stdio: ["ignore", "pipe", "pipe"], + }); + let out = ""; + child.stdout.on("data", (d: Buffer) => { out += d.toString(); }); + child.on("exit", (c: number | null) => c === 0 ? resolve(out) : reject(new Error(`exit ${c}`))); + child.on("error", reject); + }), + ); + return Promise.all(runs); + }; + + it("N parallel subprocesses each bump once and the total equals N", async () => { + const sid = newSessionId(); + const N = 8; + const code = + `import("${modPath}").then(m => { ` + + ` const s = m.bumpTotalCount(process.env.TEST_SID); ` + + ` process.stdout.write(String(s.totalCount)); ` + + `});`; + + await runParallel(code, N, sid); + + const finalState = JSON.parse(readFileSync(mod.statePath(sid), "utf-8")); + expect(finalState.totalCount).toBe(N); + }, 30_000); + + it("N parallel subprocesses racing on tryAcquireLock — exactly one wins", async () => { + const sid = newSessionId(); + const N = 8; + const code = + `import("${modPath}").then(m => { ` + + ` process.stdout.write(m.tryAcquireLock(process.env.TEST_SID) ?
"1" : "0"); ` + + `});`; + + const results = await runParallel(code, N, sid); + const winners = results.filter(r => r === "1").length; + expect(winners).toBe(1); + mod.releaseLock(sid); + }, 30_000); +}); diff --git a/claude-code/tests/virtual-table-query.test.ts b/claude-code/tests/virtual-table-query.test.ts index bcace78..013c6c0 100644 --- a/claude-code/tests/virtual-table-query.test.ts +++ b/claude-code/tests/virtual-table-query.test.ts @@ -80,14 +80,16 @@ describe("virtual-table-query", () => { description: "session summary", creation_date: "2026-01-01T00:00:00.000Z", }, - ]), + ]) + .mockResolvedValueOnce([]), } as any; const content = await readVirtualPathContents(api, "memory", "sessions", ["/summaries/a.md", "/index.md"]); expect(content.get("/summaries/a.md")).toBe("summary body"); expect(content.get("/index.md")).toContain("# Memory Index"); - expect(api.query).toHaveBeenCalledTimes(2); + // 1 union query for exact paths + 2 parallel fallback queries (summaries + sessions) for /index.md + expect(api.query).toHaveBeenCalledTimes(3); }); it("ignores invalid exact-read rows before merging content", async () => { @@ -218,4 +220,140 @@ describe("virtual-table-query", () => { expect(String(api.query.mock.calls[0]?.[0])).toContain("path LIKE '/summaries/a/%'"); }); + + // ── Regression coverage: /index.md must list session files too ─────────── + // + // Bug: in workspaces where the `memory` table is empty or dropped (e.g. the + // sessions-only `locomo_benchmark/baseline` workspace), the synthesized + // /index.md used to report "0 sessions:" and list nothing, even when the + // `sessions` table held hundreds of rows. Agents reading that index + // concluded memory was empty and gave up on retrieval. 
+ + describe("buildVirtualIndexContent: sessions + summaries", () => { + it("renders both sections with a combined header when both tables have rows", () => { + const content = buildVirtualIndexContent( + [ + { + path: "/summaries/alice/s1.md", + project: "repo", + description: "summary one", + creation_date: "2026-01-01T00:00:00.000Z", + }, + ], + [ + { path: "/sessions/conv_0_session_1.json", description: "session one" }, + { path: "/sessions/conv_0_session_2.json", description: "session two" }, + ], + ); + + expect(content).toContain("3 entries (1 summaries, 2 sessions):"); + expect(content).toContain("## Summaries"); + expect(content).toContain("## Sessions"); + expect(content).toContain("/summaries/alice/s1.md"); + expect(content).toContain("/sessions/conv_0_session_1.json"); + expect(content).toContain("/sessions/conv_0_session_2.json"); + // Summaries section comes before Sessions section + expect(content.indexOf("## Summaries")).toBeLessThan(content.indexOf("## Sessions")); + }); + + it("renders only sessions when the memory table is empty (the baseline_cloud regression)", () => { + const content = buildVirtualIndexContent( + [], + [ + { path: "/sessions/conv_0_session_1.json", description: "first" }, + { path: "/sessions/conv_0_session_2.json", description: "second" }, + ], + ); + + expect(content).toContain("2 entries (0 summaries, 2 sessions):"); + expect(content).toContain("## Sessions"); + expect(content).not.toContain("## Summaries"); + expect(content).toContain("/sessions/conv_0_session_1.json"); + // Guard against the old bug: must not report "0 sessions:" as the total. 
+ expect(content).not.toMatch(/\n0 sessions:/); + }); + + it("stays backwards-compatible when called with only summary rows", () => { + const content = buildVirtualIndexContent([ + { + path: "/summaries/alice/s1.md", + project: "repo", + description: "summary only", + creation_date: "2026-01-01T00:00:00.000Z", + }, + ]); + + expect(content).toContain("1 entries (1 summaries, 0 sessions):"); + expect(content).toContain("/summaries/alice/s1.md"); + expect(content).not.toContain("## Sessions"); + }); + + it("produces a well-formed empty index when both tables are empty", () => { + const content = buildVirtualIndexContent([], []); + expect(content).toContain("# Memory Index"); + expect(content).toContain("0 entries (0 summaries, 0 sessions):"); + expect(content).not.toContain("## Summaries"); + expect(content).not.toContain("## Sessions"); + }); + }); + + describe("readVirtualPathContents: /index.md fallback queries both tables", () => { + it("queries both memory and sessions tables in parallel when /index.md has no physical row", async () => { + const api = { + query: vi.fn() + // 1. Union query for the exact-path read (no /index.md row present) + .mockResolvedValueOnce([]) + // 2. Parallel fallback: summaries from memory (empty — baseline_cloud case) + .mockResolvedValueOnce([]) + // 3. Parallel fallback: sessions table (272 rows) + .mockResolvedValueOnce([ + { path: "/sessions/conv_0_session_1.json", description: "conv 0 sess 1" }, + { path: "/sessions/conv_0_session_2.json", description: "conv 0 sess 2" }, + ]), + } as any; + + const result = await readVirtualPathContents(api, "memory", "sessions", ["/index.md"]); + const indexContent = result.get("/index.md") ?? ""; + + expect(api.query).toHaveBeenCalledTimes(3); + + const fallbackSqls = [ + String(api.query.mock.calls[1]?.[0] ?? ""), + String(api.query.mock.calls[2]?.[0] ?? ""), + ]; + const summarySql = fallbackSqls.find(sql => sql.includes("/summaries/%")) ?? 
""; + const sessionsSql = fallbackSqls.find(sql => sql.includes("/sessions/%")) ?? ""; + + expect(summarySql).toContain('FROM "memory"'); + expect(summarySql).toContain("path LIKE '/summaries/%'"); + expect(sessionsSql).toContain('FROM "sessions"'); + expect(sessionsSql).toContain("path LIKE '/sessions/%'"); + + expect(indexContent).toContain("2 entries (0 summaries, 2 sessions):"); + expect(indexContent).toContain("/sessions/conv_0_session_1.json"); + expect(indexContent).toContain("/sessions/conv_0_session_2.json"); + }); + + it("still produces an index when the sessions-table fallback query fails", async () => { + const api = { + query: vi.fn() + .mockResolvedValueOnce([]) // union query for exact paths + .mockResolvedValueOnce([ + { + path: "/summaries/alice/s1.md", + project: "repo", + description: "summary", + creation_date: "2026-01-01T00:00:00.000Z", + }, + ]) + .mockRejectedValueOnce(new Error("sessions table down")), + } as any; + + const result = await readVirtualPathContents(api, "memory", "sessions", ["/index.md"]); + const indexContent = result.get("/index.md") ?? ""; + + expect(indexContent).toContain("1 entries (1 summaries, 0 sessions):"); + expect(indexContent).toContain("/summaries/alice/s1.md"); + }); + }); }); diff --git a/claude-code/tests/wiki-worker.test.ts b/claude-code/tests/wiki-worker.test.ts new file mode 100644 index 0000000..f287cc1 --- /dev/null +++ b/claude-code/tests/wiki-worker.test.ts @@ -0,0 +1,422 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { mkdtempSync, rmSync, writeFileSync, readFileSync, existsSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +/** + * Direct source-level tests for src/hooks/wiki-worker.ts. The module + * reads its config JSON from process.argv[2] at module load, then + * runs main() immediately. 
Each scenario writes a fresh config file + * under a tmp dir, points process.argv[2] at it, wires the mocks, and + * dynamically imports the worker. + * + * Mocks: + * - global.fetch (the query() helper) + * - child_process.execFileSync (the claude -p invocation) + * - summary-state (finalizeSummary + releaseLock) + * - upload-summary (uploadSummary) + * + * fs stays real: the worker writes the reconstructed JSONL and the + * summary markdown to the tmp dir, and main() reads the summary back + * after claude -p has "written" it. The execFileSync mock simulates + * claude by writing the summary file directly, which is how the real + * binary behaves from the worker's perspective. + */ + +const finalizeSummaryMock = vi.fn(); +const releaseLockMock = vi.fn(); +const uploadSummaryMock = vi.fn(); +const execFileSyncMock = vi.fn(); + +vi.mock("../../src/hooks/summary-state.js", () => ({ + finalizeSummary: (...a: any[]) => finalizeSummaryMock(...a), + releaseLock: (...a: any[]) => releaseLockMock(...a), +})); +vi.mock("../../src/hooks/upload-summary.js", () => ({ + uploadSummary: (...a: any[]) => uploadSummaryMock(...a), +})); +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { ...actual, execFileSync: (...a: any[]) => execFileSyncMock(...a) }; +}); + +const originalFetch = global.fetch; +const fetchMock = vi.fn(); + +const originalArgv2 = process.argv[2]; + +let rootDir: string; // shared parent — NOT removed by the worker +let tmpDir: string; // worker's tmpDir, rmSync'd in cleanup() +let hooksDir: string; // wiki.log lives here; must outlive tmpDir +let configPath: string; + +const defaultConfig = () => ({ + apiUrl: "http://fake.local", + token: "tok", + orgId: "org", + workspaceId: "default", + memoryTable: "memory", + sessionsTable: "sessions", + sessionId: "sid-worker", + userName: "alice", + project: "proj", + tmpDir, + claudeBin: "/fake/claude", + wikiLog: join(hooksDir, "wiki.log"), + hooksDir, + 
promptTemplate: "JSONL=__JSONL__ SUMMARY=__SUMMARY__ SID=__SESSION_ID__ PROJ=__PROJECT__ OFFSET=__PREV_OFFSET__ LINES=__JSONL_LINES__ SRC=__JSONL_SERVER_PATH__", +}); + +function writeConfig(overrides: Partial<ReturnType<typeof defaultConfig>> = {}): void { + const cfg = { ...defaultConfig(), ...overrides }; + writeFileSync(configPath, JSON.stringify(cfg)); +} + +function jsonResp(body: unknown, ok = true, status = 200): Response { + return { + ok, + status, + json: async () => body, + text: async () => typeof body === "string" ? body : JSON.stringify(body), + } as Response; +} + +async function runWorker(): Promise<void> { + vi.resetModules(); + // @ts-expect-error + global.fetch = fetchMock; + await import("../../src/hooks/wiki-worker.js"); + // Let main() and all its awaits complete. + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); +} + +beforeEach(() => { + rootDir = mkdtempSync(join(tmpdir(), "wiki-worker-test-")); + tmpDir = join(rootDir, "tmp"); + hooksDir = join(rootDir, "hooks"); + // The worker will mkdir hooksDir lazily via wlog, but it needs tmpDir + // to exist for writeFileSync(tmpJsonl, ...). + require("node:fs").mkdirSync(tmpDir, { recursive: true }); + require("node:fs").mkdirSync(hooksDir, { recursive: true }); + configPath = join(rootDir, "config.json"); + writeConfig(); + process.argv[2] = configPath; + fetchMock.mockReset(); + finalizeSummaryMock.mockReset(); + releaseLockMock.mockReset(); + uploadSummaryMock.mockReset().mockResolvedValue({ path: "insert", summaryLength: 100, descLength: 20, sql: "..."
}); + execFileSyncMock.mockReset(); +}); + +afterEach(() => { + // @ts-expect-error + global.fetch = originalFetch; + process.argv[2] = originalArgv2; + try { rmSync(rootDir, { recursive: true, force: true }); } catch { /* ignore */ } + vi.restoreAllMocks(); +}); + +// ═══ early exit: zero events ═══════════════════════════════════════════════ + +describe("wiki-worker — no events", () => { + it("exits early when the sessions table has no rows for this session", async () => { + fetchMock.mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] })); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("no session events found — exiting"); + expect(execFileSyncMock).not.toHaveBeenCalled(); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(finalizeSummaryMock).not.toHaveBeenCalled(); + // The finally block must still release the lock. + expect(releaseLockMock).toHaveBeenCalledWith("sid-worker"); + }); + + it("treats a response with null rows/columns as empty", async () => { + fetchMock.mockResolvedValue(jsonResp({})); + await runWorker(); + expect(execFileSyncMock).not.toHaveBeenCalled(); + expect(releaseLockMock).toHaveBeenCalled(); + }); +}); + +// ═══ happy path: events + claude -p + upload ═══════════════════════════════ + +describe("wiki-worker — happy path", () => { + const eventRows = [ + { message: JSON.stringify({ type: "user_message", content: "hi" }), creation_date: "2026-04-20T00:00:00Z" }, + { message: JSON.stringify({ type: "assistant_message", content: "hello" }), creation_date: "2026-04-20T00:00:01Z" }, + ]; + + const mkFetch = (eventsCol: string[] = ["message", "creation_date"], pathRows = 1, hasSummary = false) => { + let call = 0; + return fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message, creation_date")) { + return jsonResp({ columns: eventsCol, rows: 
eventRows.map(r => [r.message, r.creation_date]) }); + } + if (sql.startsWith("SELECT DISTINCT path")) { + return jsonResp({ + columns: ["path"], + rows: pathRows > 0 ? [["/sessions/alice/alice_org_default_sid-worker.jsonl"]] : [], + }); + } + if (sql.startsWith("SELECT summary FROM")) { + if (hasSummary) { + return jsonResp({ columns: ["summary"], rows: [["# Session X\n- **JSONL offset**: 12\n\n## What Happened\nprior"]] }); + } + return jsonResp({ columns: ["summary"], rows: [] }); + } + call++; + throw new Error(`unexpected query (${call}): ${sql}`); + }); + }; + + it("fetches events, writes JSONL, runs claude -p, uploads, finalizes, releases", async () => { + mkFetch(); + let capturedJsonl: string | null = null; + // Simulate claude -p producing a summary file. We also snapshot the + // reconstructed JSONL here because cleanup() will rmSync tmpDir + // before the test can read it back from disk. + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const promptIdx = args.indexOf("-p") + 1; + const prompt = args[promptIdx]; + const jsonlPath = prompt.match(/JSONL=(\S+)/)![1]; + capturedJsonl = readFileSync(jsonlPath, "utf-8"); + const summaryPath = prompt.match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# Session sid-worker\n\n## What Happened\nStuff happened.\n"); + return Buffer.from(""); + }); + await runWorker(); + + // JSONL was written with the two events joined (captured before cleanup) + expect(capturedJsonl).not.toBeNull(); + expect(capturedJsonl!.split("\n")).toHaveLength(2); + + // claude -p was called with the prompt template expanded + expect(execFileSyncMock).toHaveBeenCalledTimes(1); + const calledArgs = execFileSyncMock.mock.calls[0][1] as string[]; + expect(calledArgs[0]).toBe("-p"); + expect(calledArgs).toContain("--no-session-persistence"); + expect(calledArgs).toContain("--model"); + expect(calledArgs).toContain("haiku"); + expect(calledArgs).toContain("--permission-mode"); + 
expect(calledArgs).toContain("bypassPermissions"); + + // Prompt template was expanded with real values + const prompt = calledArgs[1]; + expect(prompt).toContain("SID=sid-worker"); + expect(prompt).toContain("PROJ=proj"); + expect(prompt).toContain("LINES=2"); + expect(prompt).toContain("OFFSET=0"); + expect(prompt).toContain("SRC=/sessions/alice/alice_org_default_sid-worker.jsonl"); + + // env flags on execFileSync to prevent runaway recursion + const execOpts = execFileSyncMock.mock.calls[0][2]; + expect(execOpts.env.HIVEMIND_WIKI_WORKER).toBe("1"); + expect(execOpts.env.HIVEMIND_CAPTURE).toBe("false"); + + // upload was called with the full summary + expect(uploadSummaryMock).toHaveBeenCalledTimes(1); + const uploadParams = uploadSummaryMock.mock.calls[0][1]; + expect(uploadParams.tableName).toBe("memory"); + expect(uploadParams.agent).toBe("claude_code"); + expect(uploadParams.text).toContain("## What Happened"); + + // finalize + release + expect(finalizeSummaryMock).toHaveBeenCalledWith("sid-worker", 2); + expect(releaseLockMock).toHaveBeenCalledWith("sid-worker"); + }); + + it("parses JSONL offset from an existing summary on a resumed session", async () => { + mkFetch(undefined, 1, true); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# Session sid-worker\n\n## What Happened\ndone.\n"); + return Buffer.from(""); + }); + await runWorker(); + const prompt = execFileSyncMock.mock.calls[0][1][1] as string; + expect(prompt).toContain("OFFSET=12"); + // tmpSummary was pre-seeded with the existing summary so claude -p + // can merge on top. Verify the worker did write it. 
+ const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("existing summary found, offset=12"); + }); + + it("defaults to /sessions/unknown/ when the path SELECT returns no rows", async () => { + mkFetch(undefined, 0); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# Session\n\n## What Happened\nfallback.\n"); + return Buffer.from(""); + }); + await runWorker(); + const prompt = execFileSyncMock.mock.calls[0][1][1] as string; + expect(prompt).toContain("SRC=/sessions/unknown/sid-worker.jsonl"); + }); + + it("serializes event rows that arrive as objects (JSONB) instead of strings", async () => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message, creation_date")) { + return jsonResp({ + columns: ["message", "creation_date"], + rows: [ + [{ type: "user_message", content: "hi" }, "2026-04-20T00:00:00Z"], + [{ type: "tool_call", tool_name: "Bash" }, "2026-04-20T00:00:01Z"], + ], + }); + } + if (sql.startsWith("SELECT DISTINCT path")) { + return jsonResp({ columns: ["path"], rows: [["/sessions/alice/x.jsonl"]] }); + } + return jsonResp({ columns: ["summary"], rows: [] }); + }); + let capturedJsonl: string | null = null; + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const jsonlPath = args[1].match(/JSONL=(\S+)/)![1]; + capturedJsonl = readFileSync(jsonlPath, "utf-8"); + const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "x"); + return Buffer.from(""); + }); + await runWorker(); + expect(capturedJsonl).toContain('"type":"user_message"'); + expect(capturedJsonl).toContain('"type":"tool_call"'); + }); +}); + +// ═══ claude -p failure paths ═══════════════════════════════════════════════ + +describe("wiki-worker — claude -p failure", () => { + it("logs the claude 
exit code and skips the upload when no summary file lands", async () => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] }); + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/sessions/x.jsonl"]] }); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + const err: any = new Error("claude boom"); + err.status = 42; + execFileSyncMock.mockImplementation(() => { throw err; }); + await runWorker(); + + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("claude -p failed: 42"); + expect(log).toContain("no summary file generated"); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(finalizeSummaryMock).not.toHaveBeenCalled(); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("falls back to err.message when err.status is absent", async () => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message")) return jsonResp({ columns: ["message", "creation_date"], rows: [["{}", "t"]] }); + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] }); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + execFileSyncMock.mockImplementation(() => { throw new Error("no status"); }); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("claude -p failed: no status"); + }); +}); + +// ═══ query retry logic ═════════════════════════════════════════════════════ + +describe("wiki-worker — query retry logic", () => { + beforeEach(() => { + // Stub setTimeout so retries don't actually sleep. 
+ vi.spyOn(global, "setTimeout").mockImplementation(((cb: any) => { + cb(); + return 0 as any; + }) as any); + }); + + it("retries on 500 and eventually succeeds", async () => { + const responses = [ + jsonResp("server error", false, 500), + jsonResp("server error", false, 500), + jsonResp({ columns: ["message", "creation_date"], rows: [] }), + ]; + fetchMock.mockImplementation(async () => responses.shift()!); + await runWorker(); + // First query to sessions table was retried 2 times before success. + expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(3); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("retries on 401/403/429/502/503 (CloudFlare rate-limit class)", async () => { + for (const status of [401, 403, 429, 502, 503]) { + fetchMock.mockReset(); + fetchMock + .mockResolvedValueOnce(jsonResp("", false, status)) + .mockResolvedValue(jsonResp({ columns: ["message", "creation_date"], rows: [] })); + await runWorker(); + expect(fetchMock.mock.calls.length).toBeGreaterThanOrEqual(2); + } + }); + + it("throws (and main catches) on a non-retryable 400", async () => { + fetchMock.mockResolvedValue(jsonResp("bad request", false, 400)); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toMatch(/fatal: API 400/); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("gives up after exhausting retries on persistent 500", async () => { + fetchMock.mockResolvedValue(jsonResp("still down", false, 500)); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toMatch(/fatal: API 500/); + }); +}); + +// ═══ finalize + release edge cases ═════════════════════════════════════════ + +describe("wiki-worker — finalize + release edge cases", () => { + beforeEach(() => { + fetchMock.mockImplementation(async (_url: string, init: any) => { + const sql = JSON.parse(init.body).query as string; + if (sql.startsWith("SELECT message")) return jsonResp({ columns: 
["message", "creation_date"], rows: [["{}", "t"]] }); + if (sql.startsWith("SELECT DISTINCT path")) return jsonResp({ columns: ["path"], rows: [["/x.jsonl"]] }); + return jsonResp({ columns: ["summary"], rows: [] }); + }); + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, "# s\n## What Happened\nX\n"); + return Buffer.from(""); + }); + }); + + it("logs sidecar update failure but still releases the lock", async () => { + finalizeSummaryMock.mockImplementation(() => { throw new Error("sidecar boom"); }); + await runWorker(); + const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("sidecar update failed: sidecar boom"); + expect(releaseLockMock).toHaveBeenCalled(); + }); + + it("keeps going when releaseLock throws — the finally swallows it", async () => { + releaseLockMock.mockImplementation(() => { throw new Error("release boom"); }); + await runWorker(); + // Worker still completes; the failure is caught in the finally. 
+ const log = readFileSync(join(hooksDir, "wiki.log"), "utf-8"); + expect(log).toContain("done"); + }); + + it("does not upload when the summary file is present but empty", async () => { + execFileSyncMock.mockImplementation((_bin: string, args: string[]) => { + const summaryPath = args[1].match(/SUMMARY=(\S+)/)![1]; + writeFileSync(summaryPath, " \n"); + return Buffer.from(""); + }); + await runWorker(); + expect(uploadSummaryMock).not.toHaveBeenCalled(); + expect(finalizeSummaryMock).not.toHaveBeenCalled(); + }); +}); diff --git a/codex/bundle/capture.js b/codex/bundle/capture.js index 764460e..67b7919 100755 --- a/codex/bundle/capture.js +++ b/codex/bundle/capture.js @@ -2,13 +2,13 @@ // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -53,12 +53,21 @@ function loadConfig() { }; } +// dist/src/deeplake-api.js +import { randomUUID } from "node:crypto"; +import { existsSync as existsSync2, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "node:fs"; +import { join as join3 } from "node:path"; +import { tmpdir } from "node:os"; + // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; import { join as join2 } from "node:path"; import { homedir as homedir2 } from "node:os"; var DEBUG = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; var LOG = join2(homedir2(), ".deeplake", "hook-debug.log"); +function utcTimestamp(d = /* @__PURE__ */ new Date()) { + return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; +} function log(tag, msg) { if (!DEBUG) return; @@ -66,51 +75,364 @@ function log(tag, msg) { `); } -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; +// dist/src/utils/sql.js +function sqlStr(value) { + return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); +} + +// dist/src/deeplake-api.js +var log2 = (msg) => log("sdk", msg); +function summarizeSql(sql, maxLen = 220) { + const compact = sql.replace(/\s+/g, " ").trim(); + return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; +} +function traceSql(msg) { + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) + return; + process.stderr.write(`[deeplake-sql] ${msg} +`); + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) + log2(msg); +} +var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); +var MAX_RETRIES = 3; +var BASE_DELAY_MS = 500; +var MAX_CONCURRENCY = 5; +var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); +var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); +function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} +function isTimeoutError(error) { + const name = error instanceof Error ? 
error.name.toLowerCase() : ""; + const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase(); + return name.includes("timeout") || name === "aborterror" || message.includes("timeout") || message.includes("timed out"); +} +function isDuplicateIndexError(error) { + const message = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase(); + return message.includes("duplicate key value violates unique constraint") || message.includes("pg_class_relname_nsp_index") || message.includes("already exists"); +} +function isSessionInsertQuery(sql) { + return /^\s*insert\s+into\s+"[^"]+"\s*\(\s*id\s*,\s*path\s*,\s*filename\s*,\s*message\s*,/i.test(sql); +} +function isTransientHtml403(text) { + const body = text.toLowerCase(); + return body.includes(" this.waiting.push(resolve)); + } + release() { + this.active--; + const next = this.waiting.shift(); + if (next) { + this.active++; + next(); + } + } +}; +var DeeplakeApi = class { + token; + apiUrl; + orgId; + workspaceId; + tableName; + _pendingRows = []; + _sem = new Semaphore(MAX_CONCURRENCY); + _tablesCache = null; + constructor(token, apiUrl, orgId, workspaceId, tableName) { + this.token = token; + this.apiUrl = apiUrl; + this.orgId = orgId; + this.workspaceId = workspaceId; + this.tableName = tableName; + } + /** Execute SQL with retry on transient errors and bounded concurrency. */ + async query(sql) { + const startedAt = Date.now(); + const summary = summarizeSql(sql); + traceSql(`query start: ${summary}`); + await this._sem.acquire(); + try { + const rows = await this._queryWithRetry(sql); + traceSql(`query ok (${Date.now() - startedAt}ms, rows=${rows.length}): ${summary}`); + return rows; + } catch (e) { + const message = e instanceof Error ? 
e.message : String(e); + traceSql(`query fail (${Date.now() - startedAt}ms): ${summary} :: ${message}`); + throw e; + } finally { + this._sem.release(); + } + } + async _queryWithRetry(sql) { + let lastError; + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + let resp; + try { + const signal = AbortSignal.timeout(QUERY_TIMEOUT_MS); + resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables/query`, { + method: "POST", + headers: { + Authorization: `Bearer ${this.token}`, + "Content-Type": "application/json", + "X-Activeloop-Org-Id": this.orgId + }, + signal, + body: JSON.stringify({ query: sql }) + }); + } catch (e) { + if (isTimeoutError(e)) { + lastError = new Error(`Query timeout after ${QUERY_TIMEOUT_MS}ms`); + throw lastError; + } + lastError = e instanceof Error ? e : new Error(String(e)); + if (attempt < MAX_RETRIES) { + const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; + log2(`query retry ${attempt + 1}/${MAX_RETRIES} (fetch error: ${lastError.message}) in ${delay.toFixed(0)}ms`); + await sleep(delay); + continue; + } + throw lastError; + } + if (resp.ok) { + const raw = await resp.json(); + if (!raw?.rows || !raw?.columns) + return []; + return raw.rows.map((row) => Object.fromEntries(raw.columns.map((col, i) => [col, row[i]]))); + } + const text = await resp.text().catch(() => ""); + const retryable403 = isSessionInsertQuery(sql) && (resp.status === 401 || resp.status === 403 && (text.length === 0 || isTransientHtml403(text))); + if (attempt < MAX_RETRIES && (RETRYABLE_CODES.has(resp.status) || retryable403)) { + const delay = BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200; + log2(`query retry ${attempt + 1}/${MAX_RETRIES} (${resp.status}) in ${delay.toFixed(0)}ms`); + await sleep(delay); + continue; + } + throw new Error(`Query failed: ${resp.status}: ${text.slice(0, 200)}`); + } + throw lastError ?? 
new Error("Query failed: max retries exceeded"); + } + // ── Writes ────────────────────────────────────────────────────────────────── + /** Queue rows for writing. Call commit() to flush. */ + appendRows(rows) { + this._pendingRows.push(...rows); } + /** Flush pending rows via SQL. */ + async commit() { + if (this._pendingRows.length === 0) + return; + const rows = this._pendingRows; + this._pendingRows = []; + const CONCURRENCY = 10; + for (let i = 0; i < rows.length; i += CONCURRENCY) { + const chunk = rows.slice(i, i + CONCURRENCY); + await Promise.allSettled(chunk.map((r) => this.upsertRowSql(r))); + } + log2(`commit: ${rows.length} rows`); + } + async upsertRowSql(row) { + const ts = (/* @__PURE__ */ new Date()).toISOString(); + const cd = row.creationDate ?? ts; + const lud = row.lastUpdateDate ?? ts; + const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); + if (exists.length > 0) { + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + if (row.project !== void 0) + setClauses += `, project = '${sqlStr(row.project)}'`; + if (row.description !== void 0) + setClauses += `, description = '${sqlStr(row.description)}'`; + await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); + } else { + const id = randomUUID(); + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + if (row.project !== void 0) { + cols += ", project"; + vals += `, '${sqlStr(row.project)}'`; + } + if (row.description !== void 0) { + cols += ", description"; + vals += `, '${sqlStr(row.description)}'`; + } + await this.query(`INSERT INTO "${this.tableName}" (${cols}) VALUES 
(${vals})`); + } + } + /** Update specific columns on a row by path. */ + async updateColumns(path, columns) { + const setClauses = Object.entries(columns).map(([col, val]) => typeof val === "number" ? `${col} = ${val}` : `${col} = '${sqlStr(String(val))}'`).join(", "); + await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(path)}'`); + } + // ── Convenience ───────────────────────────────────────────────────────────── + /** Create a BM25 search index on a column. */ + async createIndex(column) { + await this.query(`CREATE INDEX IF NOT EXISTS idx_${sqlStr(column)}_bm25 ON "${this.tableName}" USING deeplake_index ("${column}")`); + } + buildLookupIndexName(table, suffix) { + return `idx_${table}_${suffix}`.replace(/[^a-zA-Z0-9_]/g, "_"); + } + getLookupIndexMarkerPath(table, suffix) { + const markerKey = [ + this.workspaceId, + this.orgId, + table, + suffix + ].join("__").replace(/[^a-zA-Z0-9_.-]/g, "_"); + return join3(getIndexMarkerDir(), `${markerKey}.json`); + } + hasFreshLookupIndexMarker(table, suffix) { + const markerPath = this.getLookupIndexMarkerPath(table, suffix); + if (!existsSync2(markerPath)) + return false; + try { + const raw = JSON.parse(readFileSync2(markerPath, "utf-8")); + const updatedAt = raw.updatedAt ? 
new Date(raw.updatedAt).getTime() : NaN; + if (!Number.isFinite(updatedAt) || Date.now() - updatedAt > INDEX_MARKER_TTL_MS) + return false; + return true; + } catch { + return false; + } + } + markLookupIndexReady(table, suffix) { + mkdirSync(getIndexMarkerDir(), { recursive: true }); + writeFileSync(this.getLookupIndexMarkerPath(table, suffix), JSON.stringify({ updatedAt: (/* @__PURE__ */ new Date()).toISOString() }), "utf-8"); + } + async ensureLookupIndex(table, suffix, columnsSql) { + if (this.hasFreshLookupIndexMarker(table, suffix)) + return; + const indexName = this.buildLookupIndexName(table, suffix); + try { + await this.query(`CREATE INDEX IF NOT EXISTS "${indexName}" ON "${table}" ${columnsSql}`); + this.markLookupIndexReady(table, suffix); + } catch (e) { + if (isDuplicateIndexError(e)) { + this.markLookupIndexReady(table, suffix); + return; + } + log2(`index "${indexName}" skipped: ${e.message}`); + } + } + /** List all tables in the workspace (with retry). */ + async listTables(forceRefresh = false) { + if (!forceRefresh && this._tablesCache) + return [...this._tablesCache]; + const { tables, cacheable } = await this._fetchTables(); + if (cacheable) + this._tablesCache = [...tables]; + return tables; + } + async _fetchTables() { + for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) { + try { + const resp = await fetch(`${this.apiUrl}/workspaces/${this.workspaceId}/tables`, { + headers: { + Authorization: `Bearer ${this.token}`, + "X-Activeloop-Org-Id": this.orgId + } + }); + if (resp.ok) { + const data = await resp.json(); + return { + tables: (data.tables ?? 
[]).map((t) => t.table_name), + cacheable: true + }; + } + if (attempt < MAX_RETRIES && RETRYABLE_CODES.has(resp.status)) { + await sleep(BASE_DELAY_MS * Math.pow(2, attempt) + Math.random() * 200); + continue; + } + return { tables: [], cacheable: false }; + } catch { + if (attempt < MAX_RETRIES) { + await sleep(BASE_DELAY_MS * Math.pow(2, attempt)); + continue; + } + return { tables: [], cacheable: false }; + } + } + return { tables: [], cacheable: false }; + } + /** Create the memory table if it doesn't already exist. Migrate columns on existing tables. */ + async ensureTable(name) { + const tbl = name ?? this.tableName; + const tables = await this.listTables(); + if (!tables.includes(tbl)) { + log2(`table "${tbl}" not found, creating`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + log2(`table "${tbl}" created`); + if (!tables.includes(tbl)) + this._tablesCache = [...tables, tbl]; + } + } + /** Create the sessions table (uses JSONB for message since every row is a JSON event). 
*/ + async ensureSessionsTable(name) { + const tables = await this.listTables(); + if (!tables.includes(name)) { + log2(`table "${name}" not found, creating`); + await this.query(`CREATE TABLE IF NOT EXISTS "${name}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', message JSONB, author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/json', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', agent TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + log2(`table "${name}" created`); + if (!tables.includes(name)) + this._tablesCache = [...tables, name]; + } + await this.ensureLookupIndex(name, "path_creation_date", `("path", "creation_date")`); + } +}; + +// dist/src/utils/session-path.js +function buildSessionPath(config, sessionId) { + const workspace = config.workspaceId ?? "default"; + return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`; } // dist/src/hooks/summary-state.js -import { readFileSync as readFileSync2, writeFileSync, writeSync, mkdirSync, renameSync, existsSync as existsSync2, unlinkSync, openSync, closeSync } from "node:fs"; +import { readFileSync as readFileSync3, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; import { homedir as homedir3 } from "node:os"; -import { join as join3 } from "node:path"; -var STATE_DIR = join3(homedir3(), ".claude", "hooks", "summary-state"); +import { join as join4 } from "node:path"; +var dlog = (msg) => log("summary-state", msg); +var STATE_DIR = join4(homedir3(), ".claude", "hooks", "summary-state"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); function statePath(sessionId) { - return join3(STATE_DIR, `${sessionId}.json`); + return 
join4(STATE_DIR, `${sessionId}.json`); } function lockPath(sessionId) { - return join3(STATE_DIR, `${sessionId}.lock`); + return join4(STATE_DIR, `${sessionId}.lock`); } function readState(sessionId) { const p = statePath(sessionId); - if (!existsSync2(p)) + if (!existsSync3(p)) return null; try { - return JSON.parse(readFileSync2(p, "utf-8")); + return JSON.parse(readFileSync3(p, "utf-8")); } catch { return null; } } function writeState(sessionId, state) { - mkdirSync(STATE_DIR, { recursive: true }); + mkdirSync2(STATE_DIR, { recursive: true }); const p = statePath(sessionId); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; - writeFileSync(tmp, JSON.stringify(state)); + writeFileSync2(tmp, JSON.stringify(state)); renameSync(tmp, p); } function withRmwLock(sessionId, fn) { - mkdirSync(STATE_DIR, { recursive: true }); + mkdirSync2(STATE_DIR, { recursive: true }); const rmwLock = statePath(sessionId) + ".rmw"; const deadline = Date.now() + 2e3; let fd = null; @@ -121,9 +443,11 @@ function withRmwLock(sessionId, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { + dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`); } continue; } @@ -136,7 +460,8 @@ function withRmwLock(sessionId, fn) { closeSync(fd); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`); } } } @@ -169,18 +494,20 @@ function shouldTrigger(state, cfg, now = Date.now()) { return false; } function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { - mkdirSync(STATE_DIR, { recursive: true }); + mkdirSync2(STATE_DIR, { recursive: true }); const p = lockPath(sessionId); - if (existsSync2(p)) { + if (existsSync3(p)) { try { - const ageMs = Date.now() - parseInt(readFileSync2(p, "utf-8"), 10); + const ageMs = Date.now() - 
parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; - } catch { + } catch (readErr) { + dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); } try { unlinkSync(p); - } catch { + } catch (unlinkErr) { + dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); return false; } } @@ -198,15 +525,45 @@ function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { throw e; } } +function releaseLock(sessionId) { + try { + unlinkSync(lockPath(sessionId)); + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); + } + } +} // dist/src/hooks/codex/spawn-wiki-worker.js import { spawn, execSync } from "node:child_process"; -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname, join as join4 } from "node:path"; -import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs"; -import { homedir as homedir4, tmpdir } from "node:os"; +import { fileURLToPath } from "node:url"; +import { dirname, join as join6 } from "node:path"; +import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4 } from "node:fs"; +import { homedir as homedir4, tmpdir as tmpdir2 } from "node:os"; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join5 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join5(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync3(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } + } + }; +} + +// dist/src/hooks/codex/spawn-wiki-worker.js var HOME = homedir4(); -var WIKI_LOG = join4(HOME, ".codex", "hooks", "deeplake-wiki.log"); +var wikiLogger = makeWikiLogger(join6(HOME, ".codex", "hooks")); +var WIKI_LOG = 
wikiLogger.path; var WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. SESSION JSONL path: __JSONL__ @@ -256,14 +613,7 @@ Format: **entity** (type) \u2014 what was done with it, its current state> IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. PRIVACY: Never include absolute filesystem paths in the summary. LENGTH LIMIT: Keep the total summary under 4000 characters.`; -function wikiLog(msg) { - try { - mkdirSync2(join4(HOME, ".codex", "hooks"), { recursive: true }); - appendFileSync2(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg} -`); - } catch { - } -} +var wikiLog = wikiLogger.log; function findCodexBin() { try { return execSync("which codex 2>/dev/null", { encoding: "utf-8" }).trim(); @@ -274,10 +624,10 @@ function findCodexBin() { function spawnCodexWikiWorker(opts) { const { config, sessionId, cwd, bundleDir, reason } = opts; const projectName = cwd.split("/").pop() || "unknown"; - const tmpDir = join4(tmpdir(), `deeplake-wiki-${sessionId}-${Date.now()}`); - mkdirSync2(tmpDir, { recursive: true }); - const configFile = join4(tmpDir, "config.json"); - writeFileSync2(configFile, JSON.stringify({ + const tmpDir = join6(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); + mkdirSync4(tmpDir, { recursive: true }); + const configFile = join6(tmpDir, "config.json"); + writeFileSync3(configFile, JSON.stringify({ apiUrl: config.apiUrl, token: config.token, orgId: config.orgId, @@ -290,11 +640,11 @@ function spawnCodexWikiWorker(opts) { tmpDir, codexBin: findCodexBin(), wikiLog: WIKI_LOG, - hooksDir: join4(HOME, ".codex", "hooks"), + hooksDir: join6(HOME, ".codex", "hooks"), promptTemplate: WIKI_PROMPT_TEMPLATE })); wikiLog(`${reason}: spawning summary worker for ${sessionId}`); - const workerPath = join4(bundleDir, 
"wiki-worker.js"); + const workerPath = join6(bundleDir, "wiki-worker.js"); spawn("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] @@ -302,72 +652,24 @@ function spawnCodexWikiWorker(opts) { wikiLog(`${reason}: spawned summary worker for ${sessionId}`); } function bundleDirFromImportMeta(importMetaUrl) { - return dirname(fileURLToPath2(importMetaUrl)); -} - -// dist/src/hooks/session-queue.js -import { appendFileSync as appendFileSync3, closeSync as closeSync2, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync as openSync2, readFileSync as readFileSync3, readdirSync, renameSync as renameSync2, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; -import { dirname as dirname2, join as join5 } from "node:path"; -import { homedir as homedir5 } from "node:os"; -var DEFAULT_QUEUE_DIR = join5(homedir5(), ".deeplake", "queue"); -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -function buildSessionPath(config, sessionId) { - return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`; -} -function buildQueuedSessionRow(args) { - return { - id: crypto.randomUUID(), - path: args.sessionPath, - filename: args.sessionPath.split("/").pop() ?? 
"", - message: args.line, - author: args.userName, - sizeBytes: Buffer.byteLength(args.line, "utf-8"), - project: args.projectName, - description: args.description, - agent: args.agent, - creationDate: args.timestamp, - lastUpdateDate: args.timestamp - }; -} -function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - const sessionId = extractSessionId(row.path); - const queuePath = getQueuePath(queueDir, sessionId); - appendFileSync3(queuePath, `${JSON.stringify(row)} -`); - return queuePath; -} -function getQueuePath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.jsonl`); -} -function extractSessionId(sessionPath) { - const filename = sessionPath.split("/").pop() ?? ""; - return filename.replace(/\.jsonl$/, "").split("_").pop() ?? filename; -} - -// dist/src/hooks/query-cache.js -import { mkdirSync as mkdirSync4, readFileSync as readFileSync4, rmSync as rmSync2, writeFileSync as writeFileSync4 } from "node:fs"; -import { join as join6 } from "node:path"; -import { homedir as homedir6 } from "node:os"; -var log2 = (msg) => log("query-cache", msg); -var DEFAULT_CACHE_ROOT = join6(homedir6(), ".deeplake", "query-cache"); -function getSessionQueryCacheDir(sessionId, deps = {}) { - const { cacheRoot = DEFAULT_CACHE_ROOT } = deps; - return join6(cacheRoot, sessionId); -} -function clearSessionQueryCache(sessionId, deps = {}) { - const { logFn = log2 } = deps; - try { - rmSync2(getSessionQueryCacheDir(sessionId, deps), { recursive: true, force: true }); - } catch (e) { - logFn(`clear failed for session=${sessionId}: ${e.message}`); - } + return dirname(fileURLToPath(importMetaUrl)); } // dist/src/hooks/codex/capture.js var log3 = (msg) => log("codex-capture", msg); -var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false"; -function buildCodexCaptureEntry(input, timestamp) { +var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; +async function main() { + if (!CAPTURE) + return; + const input = await readStdin(); + const config = loadConfig(); + if (!config) { + log3("no config"); + return; + } + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); + const ts = (/* @__PURE__ */ new Date()).toISOString(); const meta = { session_id: input.session_id, transcript_path: input.transcript_path, @@ -375,18 +677,20 @@ function buildCodexCaptureEntry(input, timestamp) { hook_event_name: input.hook_event_name, model: input.model, turn_id: input.turn_id, - timestamp + timestamp: ts }; + let entry; if (input.hook_event_name === "UserPromptSubmit" && input.prompt !== void 0) { - return { + log3(`user session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: input.prompt }; - } - if (input.hook_event_name === "PostToolUse" && input.tool_name !== void 0) { - return { + } else if (input.hook_event_name === "PostToolUse" && input.tool_name !== void 0) { + log3(`tool=${input.tool_name} session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "tool_call", @@ -395,83 +699,66 @@ function buildCodexCaptureEntry(input, timestamp) { tool_input: JSON.stringify(input.tool_input), tool_response: JSON.stringify(input.tool_response) }; + } else { + log3(`unknown event: ${input.hook_event_name}, skipping`); + return; + } + const sessionPath = buildSessionPath(config, input.session_id); + const line = JSON.stringify(entry); + log3(`writing to ${sessionPath}`); + const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; + const filename = sessionPath.split("/").pop() ?? 
""; + const jsonForSql = sqlStr(line); + const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'codex', '${ts}', '${ts}')`; + try { + await api.query(insertSql); + } catch (e) { + if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) { + log3("table missing, creating and retrying"); + await api.ensureSessionsTable(sessionsTable); + await api.query(insertSql); + } else { + throw e; + } } - return null; + log3("capture ok"); + maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? "", config); } -function maybeTriggerPeriodicSummary(sessionId, cwd, config, deps = {}) { - const { bundleDir = bundleDirFromImportMeta(import.meta.url), wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", logFn = log3, bumpTotalCountFn = bumpTotalCount, loadTriggerConfigFn = loadTriggerConfig, shouldTriggerFn = shouldTrigger, tryAcquireLockFn = tryAcquireLock, wikiLogFn = wikiLog, spawnCodexWikiWorkerFn = spawnCodexWikiWorker } = deps; - if (wikiWorker) +function maybeTriggerPeriodicSummary(sessionId, cwd, config) { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; try { - const state = bumpTotalCountFn(sessionId); - const cfg = loadTriggerConfigFn(); - if (!shouldTriggerFn(state, cfg)) + const state = bumpTotalCount(sessionId); + const cfg = loadTriggerConfig(); + if (!shouldTrigger(state, cfg)) return; - if (!tryAcquireLockFn(sessionId)) { - logFn(`periodic trigger suppressed (lock held) session=${sessionId}`); + if (!tryAcquireLock(sessionId)) { + log3(`periodic trigger suppressed (lock held) session=${sessionId}`); return; } - wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, 
since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); - spawnCodexWikiWorkerFn({ - config, - sessionId, - cwd, - bundleDir, - reason: "Periodic" - }); + wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); + try { + spawnCodexWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Periodic" + }); + } catch (e) { + log3(`periodic spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr) { + log3(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } catch (e) { - logFn(`periodic trigger error: ${e.message}`); + log3(`periodic trigger error: ${e.message}`); } } -async function runCodexCaptureHook(input, deps = {}) { - const { captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, clearSessionQueryCacheFn = clearSessionQueryCache, maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, logFn = log3 } = deps; - if (!captureEnabled) - return { status: "disabled" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } - const ts = now(); - const entry = buildCodexCaptureEntry(input, ts); - if (!entry) { - logFn(`unknown event: ${input.hook_event_name}, skipping`); - return { status: "ignored" }; - } - if (input.hook_event_name === "UserPromptSubmit") - logFn(`user session=${input.session_id}`); - else - logFn(`tool=${input.tool_name} session=${input.session_id}`); - if (input.hook_event_name === "UserPromptSubmit") { - clearSessionQueryCacheFn(input.session_id); - } - const sessionPath = buildSessionPath(config, input.session_id); - const line = JSON.stringify(entry); - const projectName = (input.cwd ?? 
"").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: input.hook_event_name ?? "", - agent: "codex", - timestamp: ts - })); - logFn(`queued ${input.hook_event_name} for ${sessionPath}`); - maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config); - return { status: "queued", entry }; -} -async function main() { - const input = await readStdin(); - await runCodexCaptureHook(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log3(`fatal: ${e.message}`); - process.exit(0); - }); -} -export { - buildCodexCaptureEntry, - maybeTriggerPeriodicSummary, - runCodexCaptureHook -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/codex/bundle/commands/auth-login.js b/codex/bundle/commands/auth-login.js index ff5e179..064f11e 100755 --- a/codex/bundle/commands/auth-login.js +++ b/codex/bundle/commands/auth-login.js @@ -263,18 +263,18 @@ function sqlStr(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); diff --git a/codex/bundle/pre-tool-use.js b/codex/bundle/pre-tool-use.js index a31916a..997faff 100755 --- a/codex/bundle/pre-tool-use.js +++ b/codex/bundle/pre-tool-use.js @@ -88,18 +88,18 @@ function sqlLike(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -616,13 +616,13 @@ function buildPathCondition(targetPath) { const clean = targetPath.replace(/\/+$/, ""); if (/[*?]/.test(clean)) { const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_"); - return `path LIKE '${likePattern}'`; + return `path LIKE '${likePattern}' ESCAPE '\\'`; } const base = clean.split("/").pop() ?? 
""; if (base.includes(".")) { return `path = '${sqlStr(clean)}'`; } - return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`; + return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%' ESCAPE '\\')`; } async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) { const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts; @@ -781,6 +781,42 @@ async function grepBothTables(api, memoryTable, sessionsTable, params, targetPat return refineGrepMatches(normalized, params); } +// dist/src/utils/output-cap.js +var CLAUDE_OUTPUT_CAP_BYTES = 8 * 1024; +function byteLen(str) { + return Buffer.byteLength(str, "utf8"); +} +function capOutputForClaude(output, options = {}) { + const maxBytes = options.maxBytes ?? CLAUDE_OUTPUT_CAP_BYTES; + if (byteLen(output) <= maxBytes) + return output; + const kind = options.kind ?? "output"; + const footerReserve = 220; + const budget = Math.max(1, maxBytes - footerReserve); + let running = 0; + const lines = output.split("\n"); + const keptLines = []; + for (const line of lines) { + const lineBytes = byteLen(line) + 1; + if (running + lineBytes > budget) + break; + keptLines.push(line); + running += lineBytes; + } + if (keptLines.length === 0) { + const slice = Buffer.from(output, "utf8").slice(0, budget).toString("utf8"); + const footer2 = ` +... [${kind} truncated: ${(byteLen(output) / 1024).toFixed(1)} KB total; refine with '| head -N' or a tighter pattern]`; + return slice + footer2; + } + const totalLines = lines.length - (lines[lines.length - 1] === "" ? 1 : 0); + const elidedLines = Math.max(0, totalLines - keptLines.length); + const elidedBytes = byteLen(output) - byteLen(keptLines.join("\n")); + const footer = ` +... 
[${kind} truncated: ${elidedLines} more lines (${(elidedBytes / 1024).toFixed(1)} KB) elided \u2014 refine with '| head -N' or a tighter pattern]`; + return keptLines.join("\n") + footer; +} + // dist/src/hooks/grep-direct.js function splitFirstPipelineStage(cmd) { const input = cmd.trim(); @@ -1020,21 +1056,40 @@ async function handleGrepDirect(api, table, sessionsTable, params) { fixedString: params.fixedString }; const output = await grepBothTables(api, table, sessionsTable, matchParams, params.targetPath); - return output.join("\n") || "(no matches)"; + const joined = output.join("\n") || "(no matches)"; + return capOutputForClaude(joined, { kind: "grep" }); } // dist/src/hooks/virtual-table-query.js function normalizeSessionPart(path, content) { return normalizeContent(path, content); } -function buildVirtualIndexContent(rows) { - const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""]; - for (const row of rows) { - const path = row["path"]; - const project = row["project"] || ""; - const description = (row["description"] || "").slice(0, 120); - const date = (row["creation_date"] || "").slice(0, 10); - lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`); +function buildVirtualIndexContent(summaryRows, sessionRows = []) { + const total = summaryRows.length + sessionRows.length; + const lines = [ + "# Memory Index", + "", + `${total} entries (${summaryRows.length} summaries, ${sessionRows.length} sessions):`, + "" + ]; + if (summaryRows.length > 0) { + lines.push("## Summaries", ""); + for (const row of summaryRows) { + const path = row["path"]; + const project = row["project"] || ""; + const description = (row["description"] || "").slice(0, 120); + const date = (row["creation_date"] || "").slice(0, 10); + lines.push(`- [${path}](${path}) ${date} ${project ? 
`[${project}]` : ""} ${description}`); + } + lines.push(""); + } + if (sessionRows.length > 0) { + lines.push("## Sessions", ""); + for (const row of sessionRows) { + const path = row["path"]; + const description = (row["description"] || "").slice(0, 120); + lines.push(`- [${path}](${path}) ${description}`); + } } return lines.join("\n"); } @@ -1048,7 +1103,7 @@ function buildDirFilter(dirs) { const cleaned = [...new Set(dirs.map((dir) => dir.replace(/\/+$/, "") || "/"))]; if (cleaned.length === 0 || cleaned.includes("/")) return ""; - const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%'`); + const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%' ESCAPE '\\'`); return ` WHERE ${clauses.join(" OR ")}`; } async function queryUnionRows(api, memoryQuery, sessionsQuery) { @@ -1097,8 +1152,11 @@ async function readVirtualPathContents(api, memoryTable, sessionsTable, virtualP } } if (result.get("/index.md") === null && uniquePaths.includes("/index.md")) { - const rows2 = await api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []); - result.set("/index.md", buildVirtualIndexContent(rows2)); + const [summaryRows, sessionRows] = await Promise.all([ + api.query(`SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC`).catch(() => []), + api.query(`SELECT path, description FROM "${sessionsTable}" WHERE path LIKE '/sessions/%' ORDER BY path`).catch(() => []) + ]); + result.set("/index.md", buildVirtualIndexContent(summaryRows, sessionRows)); } return result; } @@ -1135,7 +1193,7 @@ async function listVirtualPathRows(api, memoryTable, sessionsTable, dir) { async function findVirtualPaths(api, memoryTable, sessionsTable, dir, filenamePattern) { const normalizedDir = dir.replace(/\/+$/, "") || "/"; const likePath = `${sqlLike(normalizedDir === "/" ? 
"" : normalizedDir)}/%`; - const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`); + const rows = await queryUnionRows(api, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`, `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`); return [...new Set(rows.map((row) => row["path"]).filter((value) => typeof value === "string" && value.length > 0))]; } function dedupeRowsByPath(rows) { @@ -1614,7 +1672,7 @@ async function executeCompiledBashCommand(api, memoryTable, sessionsTable, cmd, continue; } } - return outputs.join("\n"); + return capOutputForClaude(outputs.join("\n"), { kind: "bash" }); } // dist/src/hooks/query-cache.js diff --git a/codex/bundle/session-start-setup.js b/codex/bundle/session-start-setup.js index e13a5e2..21609fa 100755 --- a/codex/bundle/session-start-setup.js +++ b/codex/bundle/session-start-setup.js @@ -1,11 +1,10 @@ #!/usr/bin/env node // dist/src/hooks/codex/session-start-setup.js -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname as dirname3, join as join7 } from "node:path"; -import { mkdirSync as mkdirSync5, appendFileSync as appendFileSync3 } from "node:fs"; +import { fileURLToPath } from "node:url"; +import { dirname as dirname2, join as join7 } from "node:path"; import { execSync as execSync2 } from 
"node:child_process"; -import { homedir as homedir6 } from "node:os"; +import { homedir as homedir4 } from "node:os"; // dist/src/commands/auth.js import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from "node:fs"; @@ -77,6 +76,9 @@ import { join as join3 } from "node:path"; import { homedir as homedir3 } from "node:os"; var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; var LOG = join3(homedir3(), ".deeplake", "hook-debug.log"); +function utcTimestamp(d = /* @__PURE__ */ new Date()) { + return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; +} function log(tag, msg) { if (!DEBUG) return; @@ -88,27 +90,21 @@ function log(tag, msg) { function sqlStr(value) { return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); } -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -118,7 +114,7 @@ var MAX_CONCURRENCY = 5; var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); function sleep(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); + return new Promise((resolve) => setTimeout(resolve, ms)); } function isTimeoutError(error) { const name = error instanceof Error ? error.name.toLowerCase() : ""; @@ -151,7 +147,7 @@ var Semaphore = class { this.active++; return; } - await new Promise((resolve2) => this.waiting.push(resolve2)); + await new Promise((resolve) => this.waiting.push(resolve)); } release() { this.active--; @@ -412,13 +408,13 @@ var DeeplakeApi = class { // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -427,410 +423,74 @@ function readStdin() { }); } -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } -} - -// dist/src/hooks/session-queue.js -import { appendFileSync as appendFileSync2, closeSync, existsSync as existsSync4, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync4, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; +// 
dist/src/utils/version-check.js +import { readFileSync as readFileSync4 } from "node:fs"; import { dirname, join as join5 } from "node:path"; -import { homedir as homedir4 } from "node:os"; -var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue"); -var DEFAULT_MAX_BATCH_ROWS = 50; -var DEFAULT_STALE_INFLIGHT_MS = 6e4; -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -var DEFAULT_DRAIN_LOCK_STALE_MS = 3e4; -var BUSY_WAIT_STEP_MS = 100; -var SessionWriteDisabledError = class extends Error { - constructor(message) { - super(message); - this.name = "SessionWriteDisabledError"; - } -}; -function buildSessionInsertSql(sessionsTable, rows) { - if (rows.length === 0) - throw new Error("buildSessionInsertSql: rows must not be empty"); - const table = sqlIdent(sessionsTable); - const values = rows.map((row) => { - const jsonForSql = sqlStr(coerceJsonbPayload(row.message)); - return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`; - }).join(", "); - return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`; -} -function coerceJsonbPayload(message) { - try { - return JSON.stringify(JSON.parse(message)); - } catch { - return JSON.stringify({ - type: "raw_message", - content: message - }); - } -} -async function flushSessionQueue(api, opts) { - const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR; - const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS; - const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS; - const waitIfBusyMs = opts.waitIfBusyMs ?? 0; - const drainAll = opts.drainAll ?? 
false; - mkdirSync3(queueDir, { recursive: true }); - const queuePath = getQueuePath(queueDir, opts.sessionId); - const inflightPath = getInflightPath(queueDir, opts.sessionId); - if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) { - return existsSync4(queuePath) || existsSync4(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 }; - } - let totalRows = 0; - let totalBatches = 0; - let flushedAny = false; - while (true) { - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - if (existsSync4(inflightPath)) { - if (waitIfBusyMs > 0) { - await waitForInflightToClear(inflightPath, waitIfBusyMs); - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - } - if (existsSync4(inflightPath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 }; - } - } - if (!existsSync4(queuePath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - try { - renameSync(queuePath, inflightPath); - } catch (e) { - if (e?.code === "ENOENT") { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - throw e; - } - try { - const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows); - totalRows += rows; - totalBatches += batches; - flushedAny = flushedAny || rows > 0; - } catch (e) { - requeueInflight(queuePath, inflightPath); - if (e instanceof SessionWriteDisabledError) { - return { status: "disabled", rows: totalRows, batches: totalBatches }; - } - throw e; - } - if (!drainAll) { - return { status: "flushed", rows: totalRows, batches: totalBatches }; - } - } -} -async function drainSessionQueues(api, opts) { - const queueDir = opts.queueDir ?? 
DEFAULT_QUEUE_DIR; - mkdirSync3(queueDir, { recursive: true }); - const sessionIds = listQueuedSessionIds(queueDir, opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS); - let flushedSessions = 0; - let rows = 0; - let batches = 0; - for (const sessionId of sessionIds) { - const result = await flushSessionQueue(api, { - sessionId, - sessionsTable: opts.sessionsTable, - queueDir, - maxBatchRows: opts.maxBatchRows, - allowStaleInflight: true, - staleInflightMs: opts.staleInflightMs, - drainAll: true - }); - if (result.status === "flushed") { - flushedSessions += 1; - rows += result.rows; - batches += result.batches; - } - } - return { - queuedSessions: sessionIds.length, - flushedSessions, - rows, - batches - }; -} -function tryAcquireSessionDrainLock(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, staleMs = DEFAULT_DRAIN_LOCK_STALE_MS) { - mkdirSync3(queueDir, { recursive: true }); - const lockPath = getSessionDrainLockPath(queueDir, sessionsTable); - for (let attempt = 0; attempt < 2; attempt++) { - try { - const fd = openSync(lockPath, "wx"); - closeSync(fd); - return () => rmSync(lockPath, { force: true }); - } catch (e) { - if (e?.code !== "EEXIST") - throw e; - if (existsSync4(lockPath) && isStale(lockPath, staleMs)) { - rmSync(lockPath, { force: true }); - continue; - } - return null; - } - } - return null; -} -function getQueuePath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.jsonl`); -} -function getInflightPath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.inflight`); -} -async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) { - const rows = readQueuedRows(inflightPath); - if (rows.length === 0) { - rmSync(inflightPath, { force: true }); - return { rows: 0, batches: 0 }; - } - let ensured = false; - let batches = 0; - const queueDir = dirname(inflightPath); - for (let i = 0; i < rows.length; i += maxBatchRows) { - const chunk = rows.slice(i, i + maxBatchRows); - const sql = 
buildSessionInsertSql(sessionsTable, chunk); - try { - await api.query(sql); - } catch (e) { - if (isSessionWriteAuthError(e)) { - markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir); - throw new SessionWriteDisabledError(errorMessage(e)); - } - if (!ensured && isEnsureSessionsTableRetryable(e)) { - try { - await api.ensureSessionsTable(sessionsTable); - } catch (ensureError) { - if (isSessionWriteAuthError(ensureError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir); - throw new SessionWriteDisabledError(errorMessage(ensureError)); - } - throw ensureError; - } - ensured = true; - try { - await api.query(sql); - } catch (retryError) { - if (isSessionWriteAuthError(retryError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir); - throw new SessionWriteDisabledError(errorMessage(retryError)); - } - throw retryError; - } - } else { - throw e; - } - } - batches += 1; - } - clearSessionWriteDisabled(sessionsTable, queueDir); - rmSync(inflightPath, { force: true }); - return { rows: rows.length, batches }; -} -function readQueuedRows(path) { - const raw = readFileSync4(path, "utf-8"); - return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line)); -} -function requeueInflight(queuePath, inflightPath) { - if (!existsSync4(inflightPath)) - return; - const inflight = readFileSync4(inflightPath, "utf-8"); - appendFileSync2(queuePath, inflight); - rmSync(inflightPath, { force: true }); -} -function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) { - if (!existsSync4(inflightPath) || !isStale(inflightPath, staleInflightMs)) - return; - requeueInflight(queuePath, inflightPath); -} -function isStale(path, staleInflightMs) { - return Date.now() - statSync(path).mtimeMs >= staleInflightMs; -} -function listQueuedSessionIds(queueDir, staleInflightMs) { - const sessionIds = /* @__PURE__ */ new Set(); - for (const name of readdirSync(queueDir)) { - if 
(name.endsWith(".jsonl")) { - sessionIds.add(name.slice(0, -".jsonl".length)); - } else if (name.endsWith(".inflight")) { - const path = join5(queueDir, name); - if (isStale(path, staleInflightMs)) { - sessionIds.add(name.slice(0, -".inflight".length)); - } - } - } - return [...sessionIds].sort(); -} -function isEnsureSessionsTableRetryable(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found"); -} -function isSessionWriteAuthError(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized"); -} -function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({ - disabledAt: (/* @__PURE__ */ new Date()).toISOString(), - reason, - sessionsTable - })); -} -function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) { - rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true }); -} -function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) { - const path = getSessionWriteDisabledPath(queueDir, sessionsTable); - if (!existsSync4(path)) - return false; - try { - const raw = readFileSync4(path, "utf-8"); - const state = JSON.parse(raw); - const ageMs = Date.now() - new Date(state.disabledAt).getTime(); - if (Number.isNaN(ageMs) || ageMs >= ttlMs) { - rmSync(path, { force: true }); - return false; - } - return true; - } catch { - rmSync(path, { force: true }); - return false; - } -} -function getSessionWriteDisabledPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.disabled.json`); -} -function 
getSessionDrainLockPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.drain.lock`); -} -function errorMessage(error) { - return error instanceof Error ? error.message : String(error); -} -async function waitForInflightToClear(inflightPath, waitIfBusyMs) { - const startedAt = Date.now(); - while (existsSync4(inflightPath) && Date.now() - startedAt < waitIfBusyMs) { - await sleep2(BUSY_WAIT_STEP_MS); - } -} -function sleep2(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); -} - -// dist/src/hooks/version-check.js -import { existsSync as existsSync5, mkdirSync as mkdirSync4, readFileSync as readFileSync5, writeFileSync as writeFileSync4 } from "node:fs"; -import { dirname as dirname2, join as join6 } from "node:path"; -import { homedir as homedir5 } from "node:os"; -var DEFAULT_VERSION_CACHE_PATH = join6(homedir5(), ".deeplake", ".version-check.json"); -var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3; +var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; function getInstalledVersion(bundleDir, pluginManifestDir) { try { - const pluginJson = join6(bundleDir, "..", pluginManifestDir, "plugin.json"); - const plugin = JSON.parse(readFileSync5(pluginJson, "utf-8")); + const pluginJson = join5(bundleDir, "..", pluginManifestDir, "plugin.json"); + const plugin = JSON.parse(readFileSync4(pluginJson, "utf-8")); if (plugin.version) return plugin.version; } catch { } let dir = bundleDir; for (let i = 0; i < 5; i++) { - const candidate = join6(dir, "package.json"); + const candidate = join5(dir, "package.json"); try { - const pkg = JSON.parse(readFileSync5(candidate, "utf-8")); + const pkg = JSON.parse(readFileSync4(candidate, "utf-8")); if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version) return pkg.version; } catch { } - const parent = dirname2(dir); + const parent = dirname(dir); if (parent === dir) break; dir = parent; } return null; } +async function 
getLatestVersion(timeoutMs = 3e3) { + try { + const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) }); + if (!res.ok) + return null; + const pkg = await res.json(); + return pkg.version ?? null; + } catch { + return null; + } +} function isNewer(latest, current) { - const parse = (v) => v.replace(/-.*$/, "").split(".").map(Number); + const parse = (v) => v.split(".").map(Number); const [la, lb, lc] = parse(latest); const [ca, cb, cc] = parse(current); return la > ca || la === ca && lb > cb || la === ca && lb === cb && lc > cc; } -function readVersionCache(cachePath = DEFAULT_VERSION_CACHE_PATH) { - if (!existsSync5(cachePath)) - return null; - try { - const parsed = JSON.parse(readFileSync5(cachePath, "utf-8")); - if (parsed && typeof parsed.checkedAt === "number" && typeof parsed.url === "string" && (typeof parsed.latest === "string" || parsed.latest === null)) { - return parsed; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync3, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join6 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join6(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync3(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } } - } catch { - } - return null; -} -function writeVersionCache(entry, cachePath = DEFAULT_VERSION_CACHE_PATH) { - mkdirSync4(dirname2(cachePath), { recursive: true }); - writeFileSync4(cachePath, JSON.stringify(entry)); -} -function readFreshCachedLatestVersion(url, ttlMs = DEFAULT_VERSION_CACHE_TTL_MS, cachePath = DEFAULT_VERSION_CACHE_PATH, nowMs = Date.now()) { - const cached = readVersionCache(cachePath); - if (!cached || cached.url !== url) - return void 0; - if (nowMs - cached.checkedAt > ttlMs) - return void 0; - return cached.latest; -} -async function getLatestVersionCached(opts) { - const ttlMs = opts.ttlMs ?? 
DEFAULT_VERSION_CACHE_TTL_MS; - const cachePath = opts.cachePath ?? DEFAULT_VERSION_CACHE_PATH; - const nowMs = opts.nowMs ?? Date.now(); - const fetchImpl = opts.fetchImpl ?? fetch; - const fresh = readFreshCachedLatestVersion(opts.url, ttlMs, cachePath, nowMs); - if (fresh !== void 0) - return fresh; - const stale = readVersionCache(cachePath); - try { - const res = await fetchImpl(opts.url, { signal: AbortSignal.timeout(opts.timeoutMs) }); - const latest = res.ok ? (await res.json()).version ?? null : stale?.latest ?? null; - writeVersionCache({ - checkedAt: nowMs, - latest, - url: opts.url - }, cachePath); - return latest; - } catch { - const latest = stale?.latest ?? null; - writeVersionCache({ - checkedAt: nowMs, - latest, - url: opts.url - }, cachePath); - return latest; - } + }; } // dist/src/hooks/codex/session-start-setup.js var log3 = (msg) => log("codex-session-setup", msg); -var __bundleDir = dirname3(fileURLToPath2(import.meta.url)); -var GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; -var VERSION_CHECK_TIMEOUT = 3e3; -var HOME = homedir6(); -var WIKI_LOG = join7(HOME, ".codex", "hooks", "deeplake-wiki.log"); -function wikiLog(msg) { - try { - mkdirSync5(join7(HOME, ".codex", "hooks"), { recursive: true }); - appendFileSync3(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg} -`); - } catch { - } -} +var __bundleDir = dirname2(fileURLToPath(import.meta.url)); +var { log: wikiLog } = makeWikiLogger(join7(homedir4(), ".codex", "hooks")); async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) { const summaryPath = `/summaries/${userName}/${sessionId}.md`; const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`); @@ -839,7 +499,7 @@ async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, return; } const now = (/* @__PURE__ */ new Date()).toISOString(); 
- const projectName = cwd.split("/").pop() || "unknown"; + const projectName = cwd.split("/").pop() ?? "unknown"; const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; const content = [ `# Session ${sessionId}`, @@ -853,114 +513,78 @@ async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'codex', '${now}', '${now}')`); wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`); } -async function runCodexSessionStartSetup(input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), saveCredentialsFn = saveCredentials, config = loadConfig(), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.tableName), captureEnabled = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false", drainSessionQueuesFn = drainSessionQueues, isSessionWriteDisabledFn = isSessionWriteDisabled, isSessionWriteAuthErrorFn = isSessionWriteAuthError, markSessionWriteDisabledFn = markSessionWriteDisabled, tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, createPlaceholderFn = createPlaceholder, getInstalledVersionFn = getInstalledVersion, getLatestVersionCachedFn = getLatestVersionCached, isNewerFn = isNewer, execSyncFn = execSync2, logFn = log3, wikiLogFn = wikiLog } = deps; - if (wikiWorker) - return { status: "skipped" }; +async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") + return; + const input = await readStdin(); + const creds = loadCredentials(); if (!creds?.token) { - logFn("no credentials"); - return { status: "no_credentials" }; + log3("no credentials"); + return; } if (!creds.userName) { try { const { userInfo: userInfo2 } = await import("node:os"); creds.userName = userInfo2().username ?? "unknown"; - saveCredentialsFn(creds); - logFn(`backfilled userName: ${creds.userName}`); + saveCredentials(creds); + log3(`backfilled userName: ${creds.userName}`); } catch { } } - if (input.session_id && config) { + const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false"; + if (input.session_id) { try { - const api = createApi(config); - await api.ensureTable(); - if (captureEnabled) { - if (isSessionWriteDisabledFn(config.sessionsTableName)) { - logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`); - } else { - const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName); - if (!releaseDrainLock) { - logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`); - } else { - try { - await api.ensureSessionsTable(config.sessionsTableName); - const drain = await drainSessionQueuesFn(api, { - sessionsTable: config.sessionsTableName - }); - if (drain.flushedSessions > 0) { - logFn(`drained 
${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`); - } - } catch (e) { - if (isSessionWriteAuthErrorFn(e)) { - markSessionWriteDisabledFn(config.sessionsTableName, e.message); - logFn(`sessions table unavailable, skipping setup: ${e.message}`); - } else { - throw e; - } - } finally { - releaseDrainLock(); - } - } + const config = loadConfig(); + if (config) { + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName); + await api.ensureTable(); + await api.ensureSessionsTable(config.sessionsTableName); + if (captureEnabled) { + await createPlaceholder(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId); } - await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId); + log3("setup complete"); } - logFn("setup complete"); } catch (e) { - logFn(`setup failed: ${e.message}`); - wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`); + log3(`setup failed: ${e.message}`); + wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`); } } const autoupdate = creds.autoupdate !== false; try { - const current = getInstalledVersionFn(__bundleDir, ".codex-plugin"); + const current = getInstalledVersion(__bundleDir, ".codex-plugin"); if (current) { - const latest = await getLatestVersionCachedFn({ - url: GITHUB_RAW_PKG, - timeoutMs: VERSION_CHECK_TIMEOUT - }); - if (latest && isNewerFn(latest, current)) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { if (autoupdate) { - logFn(`autoupdate: updating ${current} \u2192 ${latest}`); + log3(`autoupdate: updating ${current} \u2192 ${latest}`); try { const tag = `v${latest}`; if (!/^v\d+\.\d+\.\d+$/.test(tag)) throw new Error(`unsafe version tag: ${tag}`); const findCmd = `INSTALL_DIR=""; CACHE_DIR=$(find ~/.codex/plugins/cache -maxdepth 3 
-name "hivemind" -type d 2>/dev/null | head -1); if [ -n "$CACHE_DIR" ]; then INSTALL_DIR=$(ls -1d "$CACHE_DIR"/*/ 2>/dev/null | tail -1); elif [ -d ~/.codex/hivemind ]; then INSTALL_DIR=~/.codex/hivemind; fi; if [ -n "$INSTALL_DIR" ]; then TMPDIR=$(mktemp -d); git clone --depth 1 --branch ${tag} -q https://github.com/activeloopai/hivemind.git "$TMPDIR/hivemind" 2>/dev/null && cp -r "$TMPDIR/hivemind/codex/"* "$INSTALL_DIR/" 2>/dev/null; rm -rf "$TMPDIR"; fi`; - execSyncFn(findCmd, { stdio: "ignore", timeout: 6e4 }); + execSync2(findCmd, { stdio: "ignore", timeout: 6e4 }); process.stderr.write(`Hivemind auto-updated: ${current} \u2192 ${latest}. Restart Codex to apply. `); - logFn(`autoupdate succeeded: ${current} \u2192 ${latest} (tag: ${tag})`); + log3(`autoupdate succeeded: ${current} \u2192 ${latest} (tag: ${tag})`); } catch (e) { process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}. Auto-update failed. `); - logFn(`autoupdate failed: ${e.message}`); + log3(`autoupdate failed: ${e.message}`); } } else { process.stderr.write(`Hivemind update available: ${current} \u2192 ${latest}. 
`); - logFn(`update available (autoupdate off): ${current} \u2192 ${latest}`); + log3(`update available (autoupdate off): ${current} \u2192 ${latest}`); } } else { - logFn(`version up to date: ${current}`); + log3(`version up to date: ${current}`); } } } catch (e) { - logFn(`version check failed: ${e.message}`); + log3(`version check failed: ${e.message}`); } - return { status: "complete" }; -} -async function main() { - const input = await readStdin(); - await runCodexSessionStartSetup(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log3(`fatal: ${e.message}`); - process.exit(0); - }); } -export { - createPlaceholder, - runCodexSessionStartSetup, - wikiLog -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/codex/bundle/session-start.js b/codex/bundle/session-start.js index bb3ebd0..fe5cfe1 100755 --- a/codex/bundle/session-start.js +++ b/codex/bundle/session-start.js @@ -2,7 +2,7 @@ // dist/src/hooks/codex/session-start.js import { spawn } from "node:child_process"; -import { fileURLToPath as fileURLToPath2 } from "node:url"; +import { fileURLToPath } from "node:url"; import { dirname as dirname2, join as join4 } from "node:path"; // dist/src/commands/auth.js @@ -24,13 +24,13 @@ function loadCredentials() { // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -52,26 +52,9 @@ function log(tag, msg) { `); } -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return 
resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } -} - -// dist/src/hooks/version-check.js -import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "node:fs"; +// dist/src/utils/version-check.js +import { readFileSync as readFileSync2 } from "node:fs"; import { dirname, join as join3 } from "node:path"; -import { homedir as homedir3 } from "node:os"; -var DEFAULT_VERSION_CACHE_PATH = join3(homedir3(), ".deeplake", ".version-check.json"); -var DEFAULT_VERSION_CACHE_TTL_MS = 60 * 60 * 1e3; function getInstalledVersion(bundleDir, pluginManifestDir) { try { const pluginJson = join3(bundleDir, "..", pluginManifestDir, "plugin.json"); @@ -99,41 +82,27 @@ function getInstalledVersion(bundleDir, pluginManifestDir) { // dist/src/hooks/codex/session-start.js var log2 = (msg) => log("codex-session-start", msg); -var __bundleDir = dirname2(fileURLToPath2(import.meta.url)); +var __bundleDir = dirname2(fileURLToPath(import.meta.url)); var AUTH_CMD = join4(__bundleDir, "commands", "auth-login.js"); -var CODEX_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents. +var context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents. -Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/{author}/* (last resort). Do NOT jump straight to raw session files. -When index.md identifies a likely match, read that exact summary or session path directly before broader grep variants. -If index.md already points to likely candidate files, open those exact files before broader synonym greps or wide exploratory scans. -Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots for Deeplake recall tasks. 
-TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date. -TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding. -ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging. -NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files. -NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent. -SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases. -RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found." +Structure: index.md (start here) \u2192 summaries/*.md \u2192 sessions/*.jsonl (last resort). Do NOT jump straight to JSONL. 
Search: grep -r "keyword" ~/.deeplake/memory/ IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, sed, awk, etc.) to interact with ~/.deeplake/memory/. Do NOT use python, python3, node, curl, or other interpreters \u2014 they are not available in the memory filesystem. Do NOT spawn subagents to read deeplake memory.`; -function buildCodexSessionStartContext(args) { - const versionNotice = args.currentVersion ? ` -Hivemind v${args.currentVersion}` : ""; - return args.creds?.token ? `${CODEX_SESSION_START_CONTEXT} -Logged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${versionNotice}` : `${CODEX_SESSION_START_CONTEXT} -Not logged in to Deeplake. Run: node "${args.authCommand}" login${versionNotice}`; -} -async function runCodexSessionStartHook(input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", creds = loadCredentials(), spawnFn = spawn, currentVersion = getInstalledVersion(__bundleDir, ".codex-plugin"), authCommand = AUTH_CMD, setupScript = join4(__bundleDir, "session-start-setup.js"), logFn = log2 } = deps; - if (wikiWorker) - return null; - if (!creds?.token) - logFn("no credentials found \u2014 run auth login to authenticate"); - else - logFn(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); +async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") + return; + const input = await readStdin(); + const creds = loadCredentials(); + if (!creds?.token) { + log2("no credentials found \u2014 run auth login to authenticate"); + } else { + log2(`credentials loaded: org=${creds.orgName ?? 
creds.orgId}`); + } if (creds?.token) { - const child = spawnFn("node", [setupScript], { + const setupScript = join4(__bundleDir, "session-start-setup.js"); + const child = spawn("node", [setupScript], { detached: true, stdio: ["pipe", "ignore", "ignore"], env: { ...process.env } @@ -141,28 +110,20 @@ async function runCodexSessionStartHook(input, deps = {}) { child.stdin?.write(JSON.stringify(input)); child.stdin?.end(); child.unref(); - logFn("spawned async setup process"); + log2("spawned async setup process"); } - return buildCodexSessionStartContext({ - creds, - currentVersion, - authCommand - }); -} -async function main() { - const input = await readStdin(); - const output = await runCodexSessionStartHook(input); - if (output) - console.log(output); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log2(`fatal: ${e.message}`); - process.exit(0); - }); + let versionNotice = ""; + const current = getInstalledVersion(__bundleDir, ".codex-plugin"); + if (current) { + versionNotice = ` +Hivemind v${current}`; + } + const additionalContext = creds?.token ? `${context} +Logged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${versionNotice}` : `${context} +Not logged in to Deeplake. Run: node "${AUTH_CMD}" login${versionNotice}`; + console.log(additionalContext); } -export { - CODEX_SESSION_START_CONTEXT, - buildCodexSessionStartContext, - runCodexSessionStartHook -}; +main().catch((e) => { + log2(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/codex/bundle/shell/deeplake-shell.js b/codex/bundle/shell/deeplake-shell.js index 5872059..0793149 100755 --- a/codex/bundle/shell/deeplake-shell.js +++ b/codex/bundle/shell/deeplake-shell.js @@ -66785,18 +66785,18 @@ function sqlLike(value) { // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -67317,13 +67317,13 @@ function buildPathCondition(targetPath) { const clean = targetPath.replace(/\/+$/, ""); if (/[*?]/.test(clean)) { const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_"); - return `path LIKE '${likePattern}'`; + return `path LIKE '${likePattern}' ESCAPE '\\'`; } const base = clean.split("/").pop() ?? 
""; if (base.includes(".")) { return `path = '${sqlStr(clean)}'`; } - return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`; + return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%' ESCAPE '\\')`; } async function searchDeeplakeTables(api, memoryTable, sessionsTable, opts) { const { pathFilter, contentScanOnly, likeOp, escapedPattern, prefilterPattern, prefilterPatterns } = opts; @@ -69147,6 +69147,13 @@ function createGrepCommand(client, fs3, table, sessionsTable) { // dist/src/shell/deeplake-shell.js async function main() { + const isOneShot = process.argv.includes("-c"); + if (isOneShot) { + delete process.env["HIVEMIND_TRACE_SQL"]; + delete process.env["DEEPLAKE_TRACE_SQL"]; + delete process.env["HIVEMIND_DEBUG"]; + delete process.env["DEEPLAKE_DEBUG"]; + } const config = loadConfig(); if (!config) { process.stderr.write("Deeplake credentials not found.\nSet HIVEMIND_TOKEN + HIVEMIND_ORG_ID in environment, or create ~/.deeplake/credentials.json\n"); @@ -69155,7 +69162,6 @@ async function main() { const table = process.env["HIVEMIND_TABLE"] ?? "memory"; const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions"; const mount = process.env["HIVEMIND_MOUNT"] ?? "/"; - const isOneShot = process.argv.includes("-c"); const client = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table); if (!isOneShot) { process.stderr.write(`Connecting to deeplake://${config.workspaceId}/${table} ... 
diff --git a/codex/bundle/stop.js b/codex/bundle/stop.js index b2da8a8..e6081b5 100755 --- a/codex/bundle/stop.js +++ b/codex/bundle/stop.js @@ -5,13 +5,13 @@ import { readFileSync as readFileSync4, existsSync as existsSync4 } from "node:f // dist/src/utils/stdin.js function readStdin() { - return new Promise((resolve2, reject) => { + return new Promise((resolve, reject) => { let data = ""; process.stdin.setEncoding("utf-8"); process.stdin.on("data", (chunk) => data += chunk); process.stdin.on("end", () => { try { - resolve2(JSON.parse(data)); + resolve(JSON.parse(data)); } catch (err) { reject(new Error(`Failed to parse hook input: ${err}`)); } @@ -68,6 +68,9 @@ import { join as join2 } from "node:path"; import { homedir as homedir2 } from "node:os"; var DEBUG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; var LOG = join2(homedir2(), ".deeplake", "hook-debug.log"); +function utcTimestamp(d = /* @__PURE__ */ new Date()) { + return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; +} function log(tag, msg) { if (!DEBUG) return; @@ -79,27 +82,21 @@ function log(tag, msg) { function sqlStr(value) { return value.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/\0/g, "").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); } -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} // dist/src/deeplake-api.js var log2 = (msg) => log("sdk", msg); -var TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -var DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql, maxLen = 220) { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? 
`${compact.slice(0, maxLen)}...` : compact; } function traceSql(msg) { - if (!TRACE_SQL) + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg} `); - if (DEBUG_FILE_LOG) + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log2(msg); } var RETRYABLE_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]); @@ -109,7 +106,7 @@ var MAX_CONCURRENCY = 5; var QUERY_TIMEOUT_MS = Number(process.env["HIVEMIND_QUERY_TIMEOUT_MS"] ?? process.env["DEEPLAKE_QUERY_TIMEOUT_MS"] ?? 1e4); var INDEX_MARKER_TTL_MS = Number(process.env["HIVEMIND_INDEX_MARKER_TTL_MS"] ?? 6 * 60 * 6e4); function sleep(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); + return new Promise((resolve) => setTimeout(resolve, ms)); } function isTimeoutError(error) { const name = error instanceof Error ? 
error.name.toLowerCase() : ""; @@ -142,7 +139,7 @@ var Semaphore = class { this.active++; return; } - await new Promise((resolve2) => this.waiting.push(resolve2)); + await new Promise((resolve) => this.waiting.push(resolve)); } release() { this.active--; @@ -401,28 +398,35 @@ var DeeplakeApi = class { } }; -// dist/src/utils/direct-run.js -import { resolve } from "node:path"; +// dist/src/hooks/codex/spawn-wiki-worker.js +import { spawn, execSync } from "node:child_process"; import { fileURLToPath } from "node:url"; -function isDirectRun(metaUrl) { - const entry = process.argv[1]; - if (!entry) - return false; - try { - return resolve(fileURLToPath(metaUrl)) === resolve(entry); - } catch { - return false; - } +import { dirname, join as join5 } from "node:path"; +import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync3 } from "node:fs"; +import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os"; + +// dist/src/utils/wiki-log.js +import { mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs"; +import { join as join4 } from "node:path"; +function makeWikiLogger(hooksDir, filename = "deeplake-wiki.log") { + const path = join4(hooksDir, filename); + return { + path, + log(msg) { + try { + mkdirSync2(hooksDir, { recursive: true }); + appendFileSync2(path, `[${utcTimestamp()}] ${msg} +`); + } catch { + } + } + }; } // dist/src/hooks/codex/spawn-wiki-worker.js -import { spawn, execSync } from "node:child_process"; -import { fileURLToPath as fileURLToPath2 } from "node:url"; -import { dirname, join as join4 } from "node:path"; -import { writeFileSync as writeFileSync2, mkdirSync as mkdirSync2, appendFileSync as appendFileSync2 } from "node:fs"; -import { homedir as homedir3, tmpdir as tmpdir2 } from "node:os"; var HOME = homedir3(); -var WIKI_LOG = join4(HOME, ".codex", "hooks", "deeplake-wiki.log"); +var wikiLogger = makeWikiLogger(join5(HOME, ".codex", "hooks")); +var WIKI_LOG = wikiLogger.path; var WIKI_PROMPT_TEMPLATE = `You are 
building a personal wiki from a coding session. Your goal is to extract every piece of knowledge \u2014 entities, decisions, relationships, and facts \u2014 into a structured, searchable wiki entry. SESSION JSONL path: __JSONL__ @@ -472,14 +476,7 @@ Format: **entity** (type) \u2014 what was done with it, its current state> IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. PRIVACY: Never include absolute filesystem paths in the summary. LENGTH LIMIT: Keep the total summary under 4000 characters.`; -function wikiLog(msg) { - try { - mkdirSync2(join4(HOME, ".codex", "hooks"), { recursive: true }); - appendFileSync2(WIKI_LOG, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] ${msg} -`); - } catch { - } -} +var wikiLog = wikiLogger.log; function findCodexBin() { try { return execSync("which codex 2>/dev/null", { encoding: "utf-8" }).trim(); @@ -490,9 +487,9 @@ function findCodexBin() { function spawnCodexWikiWorker(opts) { const { config, sessionId, cwd, bundleDir, reason } = opts; const projectName = cwd.split("/").pop() || "unknown"; - const tmpDir = join4(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); - mkdirSync2(tmpDir, { recursive: true }); - const configFile = join4(tmpDir, "config.json"); + const tmpDir = join5(tmpdir2(), `deeplake-wiki-${sessionId}-${Date.now()}`); + mkdirSync3(tmpDir, { recursive: true }); + const configFile = join5(tmpDir, "config.json"); writeFileSync2(configFile, JSON.stringify({ apiUrl: config.apiUrl, token: config.token, @@ -506,11 +503,11 @@ function spawnCodexWikiWorker(opts) { tmpDir, codexBin: findCodexBin(), wikiLog: WIKI_LOG, - hooksDir: join4(HOME, ".codex", "hooks"), + hooksDir: join5(HOME, ".codex", "hooks"), promptTemplate: WIKI_PROMPT_TEMPLATE })); wikiLog(`${reason}: spawning summary worker for ${sessionId}`); - const workerPath = join4(bundleDir, "wiki-worker.js"); + const workerPath = join5(bundleDir, "wiki-worker.js"); spawn("nohup", ["node", workerPath, configFile], 
{ detached: true, stdio: ["ignore", "ignore", "ignore"] @@ -518,363 +515,164 @@ function spawnCodexWikiWorker(opts) { wikiLog(`${reason}: spawned summary worker for ${sessionId}`); } function bundleDirFromImportMeta(importMetaUrl) { - return dirname(fileURLToPath2(importMetaUrl)); + return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/hooks/session-queue.js -import { appendFileSync as appendFileSync3, closeSync, existsSync as existsSync3, mkdirSync as mkdirSync3, openSync, readFileSync as readFileSync3, readdirSync, renameSync, rmSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; -import { dirname as dirname2, join as join5 } from "node:path"; +// dist/src/hooks/summary-state.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, writeSync, mkdirSync as mkdirSync4, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; import { homedir as homedir4 } from "node:os"; -var DEFAULT_QUEUE_DIR = join5(homedir4(), ".deeplake", "queue"); -var DEFAULT_MAX_BATCH_ROWS = 50; -var DEFAULT_STALE_INFLIGHT_MS = 6e4; -var DEFAULT_AUTH_FAILURE_TTL_MS = 5 * 6e4; -var BUSY_WAIT_STEP_MS = 100; -var SessionWriteDisabledError = class extends Error { - constructor(message) { - super(message); - this.name = "SessionWriteDisabledError"; - } -}; -function buildSessionPath(config, sessionId) { - return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${config.workspaceId}_${sessionId}.jsonl`; -} -function buildQueuedSessionRow(args) { - return { - id: crypto.randomUUID(), - path: args.sessionPath, - filename: args.sessionPath.split("/").pop() ?? 
"", - message: args.line, - author: args.userName, - sizeBytes: Buffer.byteLength(args.line, "utf-8"), - project: args.projectName, - description: args.description, - agent: args.agent, - creationDate: args.timestamp, - lastUpdateDate: args.timestamp - }; -} -function appendQueuedSessionRow(row, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - const sessionId = extractSessionId(row.path); - const queuePath = getQueuePath(queueDir, sessionId); - appendFileSync3(queuePath, `${JSON.stringify(row)} -`); - return queuePath; -} -function buildSessionInsertSql(sessionsTable, rows) { - if (rows.length === 0) - throw new Error("buildSessionInsertSql: rows must not be empty"); - const table = sqlIdent(sessionsTable); - const values = rows.map((row) => { - const jsonForSql = sqlStr(coerceJsonbPayload(row.message)); - return `('${sqlStr(row.id)}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', '${jsonForSql}'::jsonb, '${sqlStr(row.author)}', ${row.sizeBytes}, '${sqlStr(row.project)}', '${sqlStr(row.description)}', '${sqlStr(row.agent)}', '${sqlStr(row.creationDate)}', '${sqlStr(row.lastUpdateDate)}')`; - }).join(", "); - return `INSERT INTO "${table}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ${values}`; -} -function coerceJsonbPayload(message) { - try { - return JSON.stringify(JSON.parse(message)); - } catch { - return JSON.stringify({ - type: "raw_message", - content: message - }); - } -} -async function flushSessionQueue(api, opts) { - const queueDir = opts.queueDir ?? DEFAULT_QUEUE_DIR; - const maxBatchRows = opts.maxBatchRows ?? DEFAULT_MAX_BATCH_ROWS; - const staleInflightMs = opts.staleInflightMs ?? DEFAULT_STALE_INFLIGHT_MS; - const waitIfBusyMs = opts.waitIfBusyMs ?? 0; - const drainAll = opts.drainAll ?? 
false; - mkdirSync3(queueDir, { recursive: true }); - const queuePath = getQueuePath(queueDir, opts.sessionId); - const inflightPath = getInflightPath(queueDir, opts.sessionId); - if (isSessionWriteDisabled(opts.sessionsTable, queueDir)) { - return existsSync3(queuePath) || existsSync3(inflightPath) ? { status: "disabled", rows: 0, batches: 0 } : { status: "empty", rows: 0, batches: 0 }; - } - let totalRows = 0; - let totalBatches = 0; - let flushedAny = false; - while (true) { - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - if (existsSync3(inflightPath)) { - if (waitIfBusyMs > 0) { - await waitForInflightToClear(inflightPath, waitIfBusyMs); - if (opts.allowStaleInflight) - recoverStaleInflight(queuePath, inflightPath, staleInflightMs); - } - if (existsSync3(inflightPath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "busy", rows: 0, batches: 0 }; - } - } - if (!existsSync3(queuePath)) { - return flushedAny ? { status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } +import { join as join6 } from "node:path"; +var dlog = (msg) => log("summary-state", msg); +var STATE_DIR = join6(homedir4(), ".claude", "hooks", "summary-state"); +var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); +function lockPath(sessionId) { + return join6(STATE_DIR, `${sessionId}.lock`); +} +function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { + mkdirSync4(STATE_DIR, { recursive: true }); + const p = lockPath(sessionId); + if (existsSync3(p)) { try { - renameSync(queuePath, inflightPath); - } catch (e) { - if (e?.code === "ENOENT") { - return flushedAny ? 
{ status: "flushed", rows: totalRows, batches: totalBatches } : { status: "empty", rows: 0, batches: 0 }; - } - throw e; + const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); + if (Number.isFinite(ageMs) && ageMs < maxAgeMs) + return false; + } catch (readErr) { + dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); } try { - const { rows, batches } = await flushInflightFile(api, opts.sessionsTable, inflightPath, maxBatchRows); - totalRows += rows; - totalBatches += batches; - flushedAny = flushedAny || rows > 0; - } catch (e) { - requeueInflight(queuePath, inflightPath); - if (e instanceof SessionWriteDisabledError) { - return { status: "disabled", rows: totalRows, batches: totalBatches }; - } - throw e; - } - if (!drainAll) { - return { status: "flushed", rows: totalRows, batches: totalBatches }; + unlinkSync(p); + } catch (unlinkErr) { + dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); + return false; } } -} -function getQueuePath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.jsonl`); -} -function getInflightPath(queueDir, sessionId) { - return join5(queueDir, `${sessionId}.inflight`); -} -function extractSessionId(sessionPath) { - const filename = sessionPath.split("/").pop() ?? ""; - return filename.replace(/\.jsonl$/, "").split("_").pop() ?? 
filename; -} -async function flushInflightFile(api, sessionsTable, inflightPath, maxBatchRows) { - const rows = readQueuedRows(inflightPath); - if (rows.length === 0) { - rmSync(inflightPath, { force: true }); - return { rows: 0, batches: 0 }; - } - let ensured = false; - let batches = 0; - const queueDir = dirname2(inflightPath); - for (let i = 0; i < rows.length; i += maxBatchRows) { - const chunk = rows.slice(i, i + maxBatchRows); - const sql = buildSessionInsertSql(sessionsTable, chunk); + try { + const fd = openSync(p, "wx"); try { - await api.query(sql); - } catch (e) { - if (isSessionWriteAuthError(e)) { - markSessionWriteDisabled(sessionsTable, errorMessage(e), queueDir); - throw new SessionWriteDisabledError(errorMessage(e)); - } - if (!ensured && isEnsureSessionsTableRetryable(e)) { - try { - await api.ensureSessionsTable(sessionsTable); - } catch (ensureError) { - if (isSessionWriteAuthError(ensureError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(ensureError), queueDir); - throw new SessionWriteDisabledError(errorMessage(ensureError)); - } - throw ensureError; - } - ensured = true; - try { - await api.query(sql); - } catch (retryError) { - if (isSessionWriteAuthError(retryError)) { - markSessionWriteDisabled(sessionsTable, errorMessage(retryError), queueDir); - throw new SessionWriteDisabledError(errorMessage(retryError)); - } - throw retryError; - } - } else { - throw e; - } + writeSync(fd, String(Date.now())); + } finally { + closeSync(fd); } - batches += 1; + return true; + } catch (e) { + if (e.code === "EEXIST") + return false; + throw e; } - clearSessionWriteDisabled(sessionsTable, queueDir); - rmSync(inflightPath, { force: true }); - return { rows: rows.length, batches }; -} -function readQueuedRows(path) { - const raw = readFileSync3(path, "utf-8"); - return raw.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line)); -} -function requeueInflight(queuePath, inflightPath) { - if 
(!existsSync3(inflightPath)) - return; - const inflight = readFileSync3(inflightPath, "utf-8"); - appendFileSync3(queuePath, inflight); - rmSync(inflightPath, { force: true }); -} -function recoverStaleInflight(queuePath, inflightPath, staleInflightMs) { - if (!existsSync3(inflightPath) || !isStale(inflightPath, staleInflightMs)) - return; - requeueInflight(queuePath, inflightPath); -} -function isStale(path, staleInflightMs) { - return Date.now() - statSync(path).mtimeMs >= staleInflightMs; -} -function isEnsureSessionsTableRetryable(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("does not exist") || message.includes("doesn't exist") || message.includes("relation") || message.includes("not found"); -} -function isSessionWriteAuthError(error) { - const message = errorMessage(error).toLowerCase(); - return message.includes("403") || message.includes("401") || message.includes("forbidden") || message.includes("unauthorized"); } -function markSessionWriteDisabled(sessionsTable, reason, queueDir = DEFAULT_QUEUE_DIR) { - mkdirSync3(queueDir, { recursive: true }); - writeFileSync3(getSessionWriteDisabledPath(queueDir, sessionsTable), JSON.stringify({ - disabledAt: (/* @__PURE__ */ new Date()).toISOString(), - reason, - sessionsTable - })); -} -function clearSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR) { - rmSync(getSessionWriteDisabledPath(queueDir, sessionsTable), { force: true }); -} -function isSessionWriteDisabled(sessionsTable, queueDir = DEFAULT_QUEUE_DIR, ttlMs = DEFAULT_AUTH_FAILURE_TTL_MS) { - const path = getSessionWriteDisabledPath(queueDir, sessionsTable); - if (!existsSync3(path)) - return false; +function releaseLock(sessionId) { try { - const raw = readFileSync3(path, "utf-8"); - const state = JSON.parse(raw); - const ageMs = Date.now() - new Date(state.disabledAt).getTime(); - if (Number.isNaN(ageMs) || ageMs >= ttlMs) { - rmSync(path, { force: true }); - return false; + 
unlinkSync(lockPath(sessionId)); + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); } - return true; - } catch { - rmSync(path, { force: true }); - return false; } } -function getSessionWriteDisabledPath(queueDir, sessionsTable) { - return join5(queueDir, `.${sessionsTable}.disabled.json`); -} -function errorMessage(error) { - return error instanceof Error ? error.message : String(error); -} -async function waitForInflightToClear(inflightPath, waitIfBusyMs) { - const startedAt = Date.now(); - while (existsSync3(inflightPath) && Date.now() - startedAt < waitIfBusyMs) { - await sleep2(BUSY_WAIT_STEP_MS); - } -} -function sleep2(ms) { - return new Promise((resolve2) => setTimeout(resolve2, ms)); + +// dist/src/utils/session-path.js +function buildSessionPath(config, sessionId) { + const workspace = config.workspaceId ?? "default"; + return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`; } // dist/src/hooks/codex/stop.js var log3 = (msg) => log("codex-stop", msg); -var CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false"; -function extractLastAssistantMessage(transcript) { - const lines = transcript.trim().split("\n").reverse(); - for (const line of lines) { - try { - const entry = JSON.parse(line); - const msg = entry.payload ?? entry; - if (msg.role === "assistant" && msg.content) { - const content = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? 
msg.content.filter((b) => b.type === "output_text" || b.type === "text").map((b) => b.text).join("\n") : ""; - if (content) - return content.slice(0, 4e3); - } - } catch { - } - } - return ""; -} -function buildCodexStopEntry(input, timestamp, lastAssistantMessage) { - return { - id: crypto.randomUUID(), - session_id: input.session_id, - transcript_path: input.transcript_path, - cwd: input.cwd, - hook_event_name: input.hook_event_name, - model: input.model, - timestamp, - type: lastAssistantMessage ? "assistant_message" : "assistant_stop", - content: lastAssistantMessage - }; -} -async function runCodexStopHook(input, deps = {}) { - const { wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", captureEnabled = CAPTURE, config = loadConfig(), now = () => (/* @__PURE__ */ new Date()).toISOString(), transcriptExists = existsSync4, readTranscript = (path) => readFileSync4(path, "utf-8"), createApi = (activeConfig) => new DeeplakeApi(activeConfig.token, activeConfig.apiUrl, activeConfig.orgId, activeConfig.workspaceId, activeConfig.sessionsTableName), appendQueuedSessionRowFn = appendQueuedSessionRow, buildQueuedSessionRowFn = buildQueuedSessionRow, flushSessionQueueFn = flushSessionQueue, spawnCodexWikiWorkerFn = spawnCodexWikiWorker, wikiLogFn = wikiLog, bundleDir = bundleDirFromImportMeta(import.meta.url), logFn = log3 } = deps; - if (wikiWorker || !input.session_id) - return { status: "skipped" }; +var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; +async function main() { + if (process.env.HIVEMIND_WIKI_WORKER === "1") + return; + const input = await readStdin(); + const sessionId = input.session_id; + if (!sessionId) + return; + const config = loadConfig(); if (!config) { - logFn("no config"); - return { status: "no_config" }; + log3("no config"); + return; } - let entry; - let flushStatus; - if (captureEnabled) { + if (CAPTURE) { try { - const ts = now(); + const sessionsTable = config.sessionsTableName; + const api = 
new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); + const ts = (/* @__PURE__ */ new Date()).toISOString(); let lastAssistantMessage = ""; if (input.transcript_path) { try { - if (transcriptExists(input.transcript_path)) { - lastAssistantMessage = extractLastAssistantMessage(readTranscript(input.transcript_path)); - if (lastAssistantMessage) { - logFn(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`); + const transcriptPath = input.transcript_path; + if (existsSync4(transcriptPath)) { + const transcript = readFileSync4(transcriptPath, "utf-8"); + const lines = transcript.trim().split("\n").reverse(); + for (const line2 of lines) { + try { + const entry2 = JSON.parse(line2); + const msg = entry2.payload ?? entry2; + if (msg.role === "assistant" && msg.content) { + const content = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.filter((b) => b.type === "output_text" || b.type === "text").map((b) => b.text).join("\n") : ""; + if (content) { + lastAssistantMessage = content.slice(0, 4e3); + break; + } + } + } catch { + } } + if (lastAssistantMessage) + log3(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`); } } catch (e) { - logFn(`transcript read failed: ${e.message}`); + log3(`transcript read failed: ${e.message}`); } } - entry = buildCodexStopEntry(input, ts, lastAssistantMessage); + const entry = { + id: crypto.randomUUID(), + session_id: sessionId, + transcript_path: input.transcript_path, + cwd: input.cwd, + hook_event_name: input.hook_event_name, + model: input.model, + timestamp: ts, + type: lastAssistantMessage ? "assistant_message" : "assistant_stop", + content: lastAssistantMessage + }; const line = JSON.stringify(entry); - const sessionPath = buildSessionPath(config, input.session_id); + const sessionPath = buildSessionPath(config, sessionId); const projectName = (input.cwd ?? 
"").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: "Stop", - agent: "codex", - timestamp: ts - })); - const flush = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - drainAll: true - }); - flushStatus = flush.status; - logFn(`stop flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`); + const filename = sessionPath.split("/").pop() ?? ""; + const jsonForSql = sqlStr(line); + const insertSql = `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', 'Stop', 'codex', '${ts}', '${ts}')`; + await api.query(insertSql); + log3("stop event captured"); } catch (e) { - logFn(`capture failed: ${e.message}`); + log3(`capture failed: ${e.message}`); } } - if (!captureEnabled) - return { status: "complete", entry }; - wikiLogFn(`Stop: triggering summary for ${input.session_id}`); - spawnCodexWikiWorkerFn({ - config, - sessionId: input.session_id, - cwd: input.cwd ?? "", - bundleDir, - reason: "Stop" - }); - return { status: "complete", flushStatus, entry }; -} -async function main() { - const input = await readStdin(); - await runCodexStopHook(input); -} -if (isDirectRun(import.meta.url)) { - main().catch((e) => { - log3(`fatal: ${e.message}`); - process.exit(0); - }); + if (!CAPTURE) + return; + if (!tryAcquireLock(sessionId)) { + wikiLog(`Stop: periodic worker already running for ${sessionId}, skipping`); + return; + } + wikiLog(`Stop: triggering summary for ${sessionId}`); + try { + spawnCodexWikiWorker({ + config, + sessionId, + cwd: input.cwd ?? 
"", + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Stop" + }); + } catch (e) { + log3(`spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr) { + log3(`releaseLock after spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } -export { - buildCodexStopEntry, - extractLastAssistantMessage, - runCodexStopHook -}; +main().catch((e) => { + log3(`fatal: ${e.message}`); + process.exit(0); +}); diff --git a/codex/bundle/wiki-worker.js b/codex/bundle/wiki-worker.js index 1b596aa..913c279 100755 --- a/codex/bundle/wiki-worker.js +++ b/codex/bundle/wiki-worker.js @@ -1,21 +1,37 @@ #!/usr/bin/env node // dist/src/hooks/codex/wiki-worker.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, appendFileSync, mkdirSync as mkdirSync2, rmSync } from "node:fs"; +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, existsSync as existsSync2, appendFileSync as appendFileSync2, mkdirSync as mkdirSync2, rmSync } from "node:fs"; import { execFileSync } from "node:child_process"; -import { join as join2 } from "node:path"; +import { join as join3 } from "node:path"; // dist/src/hooks/summary-state.js import { readFileSync, writeFileSync, writeSync, mkdirSync, renameSync, existsSync, unlinkSync, openSync, closeSync } from "node:fs"; -import { homedir } from "node:os"; +import { homedir as homedir2 } from "node:os"; +import { join as join2 } from "node:path"; + +// dist/src/utils/debug.js +import { appendFileSync } from "node:fs"; import { join } from "node:path"; -var STATE_DIR = join(homedir(), ".claude", "hooks", "summary-state"); +import { homedir } from "node:os"; +var DEBUG = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; +var LOG = join(homedir(), ".deeplake", "hook-debug.log"); +function log(tag, msg) { + if (!DEBUG) + return; + appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg} +`); +} + +// dist/src/hooks/summary-state.js +var dlog = (msg) => log("summary-state", msg); +var STATE_DIR = join2(homedir2(), ".claude", "hooks", "summary-state"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); function statePath(sessionId) { - return join(STATE_DIR, `${sessionId}.json`); + return join2(STATE_DIR, `${sessionId}.json`); } function lockPath(sessionId) { - return join(STATE_DIR, `${sessionId}.lock`); + return join2(STATE_DIR, `${sessionId}.lock`); } function readState(sessionId) { const p = statePath(sessionId); @@ -46,9 +62,11 @@ function withRmwLock(sessionId, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { + dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`); } continue; } @@ -61,7 +79,8 @@ function withRmwLock(sessionId, fn) { closeSync(fd); try { unlinkSync(rmwLock); - } catch { + } catch (unlinkErr) { + dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`); } } } @@ -78,7 +97,10 @@ function finalizeSummary(sessionId, jsonlLines) { function releaseLock(sessionId) { try { unlinkSync(lockPath(sessionId)); - } catch { + } catch (e) { + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); + } } } @@ -108,14 +130,15 @@ async function uploadSummary(query2, params) { } // dist/src/hooks/codex/wiki-worker.js +var dlog2 = (msg) => log("codex-wiki-worker", msg); var cfg = JSON.parse(readFileSync2(process.argv[2], "utf-8")); var tmpDir = cfg.tmpDir; -var tmpJsonl = join2(tmpDir, "session.jsonl"); -var tmpSummary = join2(tmpDir, "summary.md"); +var tmpJsonl = join3(tmpDir, 
"session.jsonl"); +var tmpSummary = join3(tmpDir, "summary.md"); function wlog(msg) { try { mkdirSync2(cfg.hooksDir, { recursive: true }); - appendFileSync(cfg.wikiLog, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] wiki-worker(${cfg.sessionId}): ${msg} + appendFileSync2(cfg.wikiLog, `[${(/* @__PURE__ */ new Date()).toISOString().replace("T", " ").slice(0, 19)}] wiki-worker(${cfg.sessionId}): ${msg} `); } catch { } @@ -155,7 +178,8 @@ async function query(sql, retries = 4) { function cleanup() { try { rmSync(tmpDir, { recursive: true, force: true }); - } catch { + } catch (cleanupErr) { + dlog2(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`); } } async function main() { @@ -234,7 +258,8 @@ async function main() { cleanup(); try { releaseLock(cfg.sessionId); - } catch { + } catch (releaseErr) { + dlog2(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`); } } } diff --git a/codex/package.json b/codex/package.json index 377c98a..0a42990 100644 --- a/codex/package.json +++ b/codex/package.json @@ -1,6 +1,6 @@ { "name": "hivemind-codex", - "version": "0.6.37", + "version": "0.6.38", "description": "Cloud-backed persistent shared memory for OpenAI Codex CLI powered by Deeplake", "type": "module" } diff --git a/codex/tests/codex-integration.test.ts b/codex/tests/codex-integration.test.ts index 44b41dd..d399a9d 100644 --- a/codex/tests/codex-integration.test.ts +++ b/codex/tests/codex-integration.test.ts @@ -106,27 +106,14 @@ describe("codex integration: session-start", () => { expect(raw).toContain("Do NOT spawn subagents"); }); - it("context includes raw session file warning", () => { + it("context includes JSONL warning", () => { const raw = runHook("session-start.js", { session_id: "test-session-004", cwd: "/tmp", hook_event_name: "SessionStart", model: "gpt-5.2", }); - expect(raw).toContain("Do NOT jump straight to raw session files"); - }); - - it("context steers recall tasks to index-first exact 
file reads", () => { - const raw = runHook("session-start.js", { - session_id: "test-session-004b", - cwd: "/tmp", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }); - expect(raw).toContain("read that exact summary or session path directly"); - expect(raw).toContain("Do NOT probe unrelated local paths"); - expect(raw).toContain("answer with the smallest exact phrase supported by memory"); - expect(raw).toContain("convert the final answer into an absolute month/date/year"); + expect(raw).toContain("Do NOT jump straight to JSONL"); }); }); diff --git a/codex/tests/codex-source-hooks.test.ts b/codex/tests/codex-source-hooks.test.ts deleted file mode 100644 index 263a473..0000000 --- a/codex/tests/codex-source-hooks.test.ts +++ /dev/null @@ -1,1126 +0,0 @@ -import { afterEach, describe, expect, it, vi } from "vitest"; -import type { Config } from "../../src/config.js"; -import type { Credentials } from "../../src/commands/auth.js"; -import { - buildCodexCaptureEntry, - maybeTriggerPeriodicSummary, - runCodexCaptureHook, -} from "../../src/hooks/codex/capture.js"; -import { - buildUnsupportedGuidance, - isSafe, - processCodexPreToolUse, - rewritePaths, - touchesMemory, -} from "../../src/hooks/codex/pre-tool-use.js"; -import { - buildCodexSessionStartContext, - runCodexSessionStartHook, -} from "../../src/hooks/codex/session-start.js"; -import { - createPlaceholder, - runCodexSessionStartSetup, -} from "../../src/hooks/codex/session-start-setup.js"; -import { - buildCodexStopEntry, - extractLastAssistantMessage, - runCodexStopHook, -} from "../../src/hooks/codex/stop.js"; - -const baseConfig: Config = { - token: "token", - orgId: "org-1", - orgName: "Acme", - userName: "alice", - workspaceId: "default", - apiUrl: "https://api.example.com", - tableName: "memory", - sessionsTableName: "sessions", - memoryPath: "/tmp/.deeplake/memory", -}; - -const baseCreds: Credentials = { - token: "token", - orgId: "org-1", - orgName: "Acme", - userName: "alice", - 
workspaceId: "default", - apiUrl: "https://api.example.com", - savedAt: "2026-01-01T00:00:00.000Z", -}; - -afterEach(() => { - vi.restoreAllMocks(); -}); - -describe("codex capture source", () => { - it("builds user/tool entries and ignores unsupported events", () => { - const user = buildCodexCaptureEntry({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "UserPromptSubmit", - model: "gpt-5.2", - prompt: "hello", - }, "2026-01-01T00:00:00.000Z"); - const tool = buildCodexCaptureEntry({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "PostToolUse", - model: "gpt-5.2", - tool_name: "Bash", - tool_use_id: "tu-1", - tool_input: { command: "ls" }, - tool_response: { stdout: "ok" }, - }, "2026-01-01T00:00:01.000Z"); - - expect(user?.type).toBe("user_message"); - expect(tool?.type).toBe("tool_call"); - expect(buildCodexCaptureEntry({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, "2026-01-01T00:00:02.000Z")).toBeNull(); - }); - - it("triggers periodic summaries and queues capture rows", async () => { - const spawn = vi.fn(); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => ({ totalCount: 10, lastSummaryCount: 4 })) as any, - loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any, - shouldTriggerFn: vi.fn(() => true) as any, - tryAcquireLockFn: vi.fn(() => true) as any, - spawnCodexWikiWorkerFn: spawn as any, - wikiLogFn: vi.fn() as any, - bundleDir: "/tmp/bundle", - }); - expect(spawn).toHaveBeenCalledTimes(1); - - const append = vi.fn(); - const clear = vi.fn(); - const queued = await runCodexCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "PostToolUse", - model: "gpt-5.2", - tool_name: "Bash", - tool_use_id: "tu-1", - tool_input: { command: "ls" }, - tool_response: { stdout: "ok" }, - }, { - config: baseConfig, - appendQueuedSessionRowFn: append as any, - clearSessionQueryCacheFn: clear as any, - }); - 
expect(queued.status).toBe("queued"); - expect(append).toHaveBeenCalledTimes(1); - expect(clear).not.toHaveBeenCalled(); - - await runCodexCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "UserPromptSubmit", - model: "gpt-5.2", - prompt: "hi", - }, { - config: baseConfig, - appendQueuedSessionRowFn: vi.fn() as any, - clearSessionQueryCacheFn: clear as any, - }); - expect(clear).toHaveBeenCalledWith("s1"); - }); - - it("returns disabled, no_config, and ignored states", async () => { - expect(await runCodexCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "UserPromptSubmit", - model: "gpt-5.2", - prompt: "hi", - }, { - captureEnabled: false, - config: baseConfig, - })).toEqual({ status: "disabled" }); - - expect(await runCodexCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "UserPromptSubmit", - model: "gpt-5.2", - prompt: "hi", - }, { - config: null, - })).toEqual({ status: "no_config" }); - - expect(await runCodexCaptureHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Unknown", - model: "gpt-5.2", - }, { - config: baseConfig, - })).toEqual({ status: "ignored" }); - }); - - it("suppresses periodic summaries when skipped or when the helper throws", () => { - const spawn = vi.fn(); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - wikiWorker: true, - spawnCodexWikiWorkerFn: spawn as any, - }); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => { throw new Error("boom"); }) as any, - spawnCodexWikiWorkerFn: spawn as any, - logFn: vi.fn(), - }); - maybeTriggerPeriodicSummary("s1", "/repo", baseConfig, { - bumpTotalCountFn: vi.fn(() => ({ totalCount: 1, lastSummaryCount: 1 })) as any, - loadTriggerConfigFn: vi.fn(() => ({ everyNMessages: 5, everyHours: 24 })) as any, - shouldTriggerFn: vi.fn(() => false) as any, - spawnCodexWikiWorkerFn: spawn as any, - }); - expect(spawn).not.toHaveBeenCalled(); - }); -}); - -describe("codex pre-tool source", () => 
{ - it("detects, rewrites, and validates memory commands", () => { - expect(touchesMemory("cat ~/.deeplake/memory/index.md")).toBe(true); - expect(rewritePaths("cat $HOME/.deeplake/memory/index.md")).toBe("cat /index.md"); - expect(isSafe("grep -r needle /")).toBe(true); - expect(isSafe("node -e '1' /")).toBe(false); - expect(isSafe("echo $(uname)")).toBe(false); - expect(buildUnsupportedGuidance()).toContain("Do NOT use python"); - }); - - it("passes through non-memory commands and guides unsafe ones", async () => { - expect(await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-1", - tool_input: { command: "ls -la /tmp" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - })).toEqual({ action: "pass" }); - - const guidance = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-2", - tool_input: { command: "python3 -c 'print(1)' ~/.deeplake/memory" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - }); - expect(guidance.action).toBe("guide"); - expect(guidance.output).toContain("Only bash builtins"); - }); - - it("uses direct read, direct grep, and shell fallback", async () => { - const api = { - query: vi.fn(async () => [ - { - path: "/summaries/alice/s1.md", - project: "repo", - description: "session summary", - creation_date: "2026-01-01T00:00:00.000Z", - }, - ]), - }; - const readDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-1", - tool_input: { command: "cat ~/.deeplake/memory/index.md | head -20" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - createApi: vi.fn(() => api as any), - readVirtualPathContentFn: vi.fn(async () => null) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(readDecision.action).toBe("block"); - expect(readDecision.output).toContain("# Memory 
Index"); - - const grepDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-2", - tool_input: { command: "grep -r needle ~/.deeplake/memory/" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - handleGrepDirectFn: vi.fn(async () => "/index.md:needle") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(grepDecision.output).toContain("/index.md:needle"); - - const fallback = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-3", - tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: null, - runVirtualShellFn: vi.fn(() => "ok") as any, - }); - expect(fallback).toEqual({ - action: "block", - output: "ok", - rewrittenCommand: "echo hi > /test.md", - }); - }); - - it("supports head, tail, wc -l, find counts, missing ls paths, and default empty-shell output", async () => { - const contentReader = vi.fn(async () => "line1\nline2\nline3"); - - const headDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-4", - tool_input: { command: "head -2 ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(headDecision.output).toBe("line1\nline2"); - - const tailDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-5", - tool_input: { command: "tail -2 ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => 
null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(tailDecision.output).toBe("line2\nline3"); - - const wcDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-6", - tool_input: { command: "wc -l ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: contentReader as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(wcDecision.output).toBe("3 /index.md"); - - const findDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-7", - tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md' | wc -l" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - findVirtualPathsFn: vi.fn(async () => ["/summaries/alice/s1.md", "/summaries/alice/s2.md"]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(findDecision.output).toBe("2"); - - const missingLs = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-8", - tool_input: { command: "ls ~/.deeplake/memory/missing" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - listVirtualPathRowsFn: vi.fn(async () => []) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(missingLs.output).toContain("No such file or directory"); - - const emptyShell = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-9", - tool_input: { command: "echo hi > ~/.deeplake/memory/test.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - 
model: "gpt-5.2", - }, { - config: baseConfig, - runVirtualShellFn: vi.fn(() => "") as any, - }); - expect(emptyShell.output).toContain("Command returned empty"); - }); - - it("returns compiled output when the bash compiler can satisfy the command directly", async () => { - const decision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-10", - tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - executeCompiledBashCommandFn: vi.fn(async () => "compiled output") as any, - }); - - expect(decision).toEqual({ - action: "block", - output: "compiled output", - rewrittenCommand: "cat /index.md && ls /summaries", - }); - }); - - it("reuses cached /index.md content for direct and compiled reads within a session", async () => { - const readVirtualPathContentFn = vi.fn(async () => "fresh index"); - const readVirtualPathContentsFn = vi.fn(async (_api, _memory, _sessions, paths: string[]) => new Map( - paths.map((path) => [path, path === "/index.md" ? 
"fresh index" : null]), - )) as any; - const readCachedIndexContentFn = vi.fn(() => "cached index"); - const writeCachedIndexContentFn = vi.fn(); - - const directDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-cache-1", - tool_input: { command: "cat ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: readCachedIndexContentFn as any, - writeCachedIndexContentFn: writeCachedIndexContentFn as any, - readVirtualPathContentFn: readVirtualPathContentFn as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(directDecision.output).toBe("cached index"); - expect(readVirtualPathContentFn).not.toHaveBeenCalled(); - expect(writeCachedIndexContentFn).toHaveBeenCalledWith("s1", "cached index"); - - const compiledDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-cache-2", - tool_input: { command: "cat ~/.deeplake/memory/index.md && ls ~/.deeplake/memory/summaries" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: readCachedIndexContentFn as any, - writeCachedIndexContentFn: writeCachedIndexContentFn as any, - readVirtualPathContentsFn, - executeCompiledBashCommandFn: vi.fn(async (_api, _table, _sessions, _cmd, deps) => { - const map = await deps.readVirtualPathContentsFn(_api, _table, _sessions, ["/index.md"]); - return map.get("/index.md") ?? 
null; - }) as any, - }); - expect(compiledDecision.output).toBe("cached index"); - expect(readVirtualPathContentsFn).not.toHaveBeenCalled(); - }); - - it("covers plain cat, directory listings, non-count find, grep fallback, and direct-query exceptions", async () => { - const plainCat = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-11", - tool_input: { command: "cat ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: vi.fn(async () => "line1\nline2") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(plainCat).toEqual({ - action: "block", - output: "line1\nline2", - rewrittenCommand: "cat /index.md", - }); - - const listed = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-12", - tool_input: { command: "ls ~/.deeplake/memory/summaries" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - listVirtualPathRowsFn: vi.fn(async () => [ - { path: "/other/place.md", size_bytes: 1 }, - { path: "/summaries/", size_bytes: 0 }, - { path: "/summaries/alice/s1.md", size_bytes: 10 }, - { path: "/summaries/bob/nested/file.md", size_bytes: 20 }, - ]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(listed.output).toContain("alice/"); - expect(listed.output).toContain("bob/"); - expect(listed.output).not.toContain("other"); - - const rootLs = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-13", - tool_input: { command: "ls ~/.deeplake/memory" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - listVirtualPathRowsFn: vi.fn(async () => [ - { path: "/", size_bytes: 0 }, - { path: 
"/root.md", size_bytes: 5 }, - { path: "/summaries/alice/s1.md", size_bytes: 10 }, - ]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(rootLs.output).toContain("root.md"); - expect(rootLs.output).toContain("summaries/"); - - const findNoMatches = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-14", - tool_input: { command: "find ~/.deeplake/memory/summaries -name '*.md'" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - findVirtualPathsFn: vi.fn(async () => []) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(findNoMatches.output).toBe("(no matches)"); - - const findRoot = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-14b", - tool_input: { command: "find ~/.deeplake/memory -name '*.md'" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - findVirtualPathsFn: vi.fn(async () => ["/summaries/a.md", "/notes.md"]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(findRoot.output).toContain("/summaries/a.md"); - expect(findRoot.output).toContain("/notes.md"); - - const grepFallback = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-15", - tool_input: { command: "grep needle ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - handleGrepDirectFn: vi.fn(async () => null) as any, - runVirtualShellFn: vi.fn(() => "shell fallback") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(grepFallback.output).toBe("shell fallback"); - - const errorFallback = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-16", - tool_input: { command: "cat ~/.deeplake/memory/index.md" }, - cwd: "/repo", 
- hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - executeCompiledBashCommandFn: vi.fn(async () => { throw new Error("boom"); }) as any, - runVirtualShellFn: vi.fn(() => "fallback after error") as any, - }); - expect(errorFallback.output).toBe("fallback after error"); - }); - - it("covers default head/tail forms, synthetic index rows, and long ls formatting", async () => { - const headDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-17", - tool_input: { command: "head ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: vi.fn(async () => "a\nb\nc") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(headDecision.output).toBe("a\nb\nc"); - - const tailDecision = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-18", - tool_input: { command: "tail ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - readCachedIndexContentFn: vi.fn(() => null) as any, - writeCachedIndexContentFn: vi.fn() as any, - readVirtualPathContentFn: vi.fn(async () => "a\nb\nc") as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(tailDecision.output).toBe("a\nb\nc"); - - const api = { - query: vi.fn(async () => [{ path: "/summaries/alice/s1.md" }]), - }; - const syntheticIndex = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-19", - tool_input: { command: "cat ~/.deeplake/memory/index.md" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - createApi: vi.fn(() => api as any), - readVirtualPathContentFn: vi.fn(async () => null) as 
any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(syntheticIndex.output).toContain("# Memory Index"); - - const longLs = await processCodexPreToolUse({ - session_id: "s1", - tool_name: "Bash", - tool_use_id: "tu-20", - tool_input: { command: "ls -l ~/.deeplake/memory/summaries" }, - cwd: "/repo", - hook_event_name: "PreToolUse", - model: "gpt-5.2", - }, { - config: baseConfig, - listVirtualPathRowsFn: vi.fn(async () => [ - { path: "/summaries/alice/file.md" }, - { path: "/summaries/alice/another.md", size_bytes: 3 }, - { path: "/summaries/team/nested/file.md", size_bytes: 5 }, - ]) as any, - executeCompiledBashCommandFn: vi.fn(async () => null) as any, - }); - expect(longLs.output).toContain("alice/"); - expect(longLs.output).toContain("team/"); - expect(longLs.output).toContain("drwxr-xr-x"); - }); -}); - -describe("codex session start source", () => { - it("builds logged-in and logged-out context", () => { - const loggedIn = buildCodexSessionStartContext({ - creds: baseCreds, - currentVersion: "0.6.0", - authCommand: "/tmp/auth-login.js", - }); - const loggedOut = buildCodexSessionStartContext({ - creds: null, - currentVersion: "0.6.0", - authCommand: "/tmp/auth-login.js", - }); - - expect(loggedIn).toContain("Logged in to Deeplake"); - expect(loggedIn).toContain("Hivemind v0.6.0"); - expect(loggedIn).toContain("resolve it against that session's own date/date_time metadata"); - expect(loggedIn).toContain("convert the final answer into an absolute month/date/year"); - expect(loggedIn).toContain("answer with the smallest exact phrase supported by memory"); - expect(loggedIn).toContain('Do NOT answer "not found"'); - expect(loggedOut).toContain('Run: node "/tmp/auth-login.js" login'); - }); - - it("skips in wiki-worker mode and spawns async setup when authenticated", async () => { - expect(await runCodexSessionStartHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - wikiWorker: 
true, - })).toBeNull(); - - const write = vi.fn(); - const end = vi.fn(); - const unref = vi.fn(); - const spawnFn = vi.fn(() => ({ - stdin: { write, end }, - unref, - }) as any); - const result = await runCodexSessionStartHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - currentVersion: "0.6.0", - spawnFn: spawnFn as any, - setupScript: "/tmp/session-start-setup.js", - authCommand: "/tmp/auth-login.js", - }); - - expect(result).toContain("Logged in to Deeplake"); - expect(spawnFn).toHaveBeenCalledTimes(1); - expect(write).toHaveBeenCalled(); - expect(end).toHaveBeenCalled(); - expect(unref).toHaveBeenCalled(); - }); - - it("returns logged-out context without spawning setup when unauthenticated", async () => { - const spawnFn = vi.fn(); - const result = await runCodexSessionStartHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: null, - spawnFn: spawnFn as any, - currentVersion: null, - authCommand: "/tmp/auth-login.js", - }); - - expect(result).toContain("Not logged in to Deeplake"); - expect(spawnFn).not.toHaveBeenCalled(); - }); - - it("falls back to org id and default workspace when names are missing", () => { - const context = buildCodexSessionStartContext({ - creds: { ...baseCreds, orgName: undefined, workspaceId: undefined } as any, - currentVersion: null, - authCommand: "/tmp/auth-login.js", - }); - expect(context).toContain("org-1"); - expect(context).toContain("workspace: default"); - expect(context).not.toContain("Hivemind v"); - }); -}); - -describe("codex session start setup source", () => { - it("creates placeholders only when summaries do not already exist", async () => { - const query = vi.fn(async () => []); - const api = { query } as any; - await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default"); - expect(query).toHaveBeenCalledTimes(2); - 
expect(String(query.mock.calls[1]?.[0])).toContain('INSERT INTO "memory"'); - - query.mockReset(); - query.mockResolvedValueOnce([{ path: "/summaries/alice/s1.md" }]); - await createPlaceholder(api, "memory", "s1", "/repo", "alice", "Acme", "default"); - expect(query).toHaveBeenCalledTimes(1); - }); - - it("handles no credentials, disabled session writes, and update notices", async () => { - expect(await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: null, - })).toEqual({ status: "no_credentials" }); - - const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any); - const placeholder = vi.fn(async () => undefined); - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: { ...baseCreds, autoupdate: false }, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => undefined), - query: vi.fn(async () => []), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => true) as any, - createPlaceholderFn: placeholder as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any, - }); - expect(placeholder).toHaveBeenCalledTimes(1); - expect(stderr).toHaveBeenCalledWith(expect.stringContaining("update available")); - }); - - it("skips in wiki-worker mode and logs setup/version failures", async () => { - expect(await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - wikiWorker: true, - })).toEqual({ status: "skipped" }); - - const logFn = vi.fn(); - const wikiLogFn = vi.fn(); - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - config: baseConfig, - createApi: 
vi.fn(() => ({ - ensureTable: vi.fn(async () => { throw new Error("boom"); }), - }) as any), - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => { throw new Error("offline"); }) as any, - logFn, - wikiLogFn, - }); - - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("setup failed: boom")); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("version check failed: offline")); - expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom")); - }); - - it("handles capture-disabled and successful autoupdate flows", async () => { - const placeholder = vi.fn(); - const stderr = vi.spyOn(process.stderr, "write").mockImplementation(() => true as any); - const execSyncFn = vi.fn(); - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - config: baseConfig, - captureEnabled: false, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - }) as any), - createPlaceholderFn: placeholder as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.7.0") as any, - execSyncFn: execSyncFn as any, - }); - expect(placeholder).not.toHaveBeenCalled(); - expect(execSyncFn).toHaveBeenCalledTimes(1); - expect(stderr).toHaveBeenCalledWith(expect.stringContaining("auto-updated")); - }); - - it("handles non-auth setup errors and skips setup when session metadata is absent", async () => { - const wikiLogFn = vi.fn(); - const createPlaceholderFn = vi.fn(); - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => { throw new Error("boom"); }), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, 
- isSessionWriteAuthErrorFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: createPlaceholderFn as any, - getInstalledVersionFn: vi.fn(() => null) as any, - wikiLogFn, - }); - expect(createPlaceholderFn).not.toHaveBeenCalled(); - expect(wikiLogFn).toHaveBeenCalledWith(expect.stringContaining("failed for s1: boom")); - - await expect(runCodexSessionStartSetup({ - session_id: "", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - config: baseConfig, - getInstalledVersionFn: vi.fn(() => null) as any, - })).resolves.toEqual({ status: "complete" }); - }); - - it("backfills missing usernames, handles auth-disabled session writes, and treats missing cwd as unknown", async () => { - const save = vi.fn(); - const placeholder = vi.fn(async () => undefined); - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: undefined as any, - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: { ...baseCreds, userName: undefined }, - saveCredentialsFn: save as any, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable: vi.fn(async () => { throw new Error("403 Forbidden"); }), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, - isSessionWriteAuthErrorFn: vi.fn(() => true) as any, - markSessionWriteDisabledFn: vi.fn() as any, - tryAcquireSessionDrainLockFn: vi.fn(() => (() => undefined)) as any, - createPlaceholderFn: placeholder as any, - getInstalledVersionFn: vi.fn(() => "0.6.0") as any, - getLatestVersionCachedFn: vi.fn(async () => "0.6.0") as any, - }); - expect(save).toHaveBeenCalledTimes(1); - expect(placeholder).toHaveBeenCalledWith(expect.anything(), "memory", "s1", "", "alice", "Acme", "default"); - - const query = vi.fn(async () => []); - await createPlaceholder({ query } as any, "memory", "s2", "", "alice", "Acme", "default"); - 
expect(String(query.mock.calls[1]?.[0])).toContain("'unknown'"); - }); - - it("skips duplicate queue drains while another codex session-start setup is already handling sessions", async () => { - const logFn = vi.fn(); - const createPlaceholderFn = vi.fn(async () => undefined); - const ensureSessionsTable = vi.fn(async () => undefined); - const drainSessionQueuesFn = vi.fn(async () => ({ - queuedSessions: 1, - flushedSessions: 1, - rows: 1, - batches: 1, - })); - - await runCodexSessionStartSetup({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "SessionStart", - model: "gpt-5.2", - }, { - creds: baseCreds, - config: baseConfig, - createApi: vi.fn(() => ({ - ensureTable: vi.fn(async () => undefined), - ensureSessionsTable, - query: vi.fn(async () => []), - }) as any), - isSessionWriteDisabledFn: vi.fn(() => false) as any, - tryAcquireSessionDrainLockFn: vi.fn(() => null) as any, - drainSessionQueuesFn: drainSessionQueuesFn as any, - createPlaceholderFn: createPlaceholderFn as any, - getInstalledVersionFn: vi.fn(() => null) as any, - logFn, - }); - - expect(ensureSessionsTable).not.toHaveBeenCalled(); - expect(drainSessionQueuesFn).not.toHaveBeenCalled(); - expect(createPlaceholderFn).toHaveBeenCalledTimes(1); - expect(logFn).toHaveBeenCalledWith(expect.stringContaining("sessions drain already in progress")); - }); -}); - -describe("codex stop source", () => { - it("extracts assistant messages from string and block transcripts", () => { - expect(extractLastAssistantMessage([ - '{"role":"assistant","content":"done"}', - ].join("\n"))).toBe("done"); - - expect(extractLastAssistantMessage([ - '{"payload":{"role":"assistant","content":[{"type":"output_text","text":"first"},{"type":"text","text":"second"}]}}', - ].join("\n"))).toBe("first\nsecond"); - - expect(extractLastAssistantMessage("not json")).toBe(""); - }); - - it("builds stop entries for assistant messages and assistant stops", () => { - const message = buildCodexStopEntry({ - session_id: "s1", - 
transcript_path: "/tmp/t.jsonl", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, "2026-01-01T00:00:00.000Z", "done"); - const stop = buildCodexStopEntry({ - session_id: "s1", - transcript_path: null, - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, "2026-01-01T00:00:01.000Z", ""); - - expect(message.type).toBe("assistant_message"); - expect(stop.type).toBe("assistant_stop"); - }); - - it("skips, returns no_config, and flushes plus spawns summaries", async () => { - expect(await runCodexStopHook({ - session_id: "", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: baseConfig, - })).toEqual({ status: "skipped" }); - - expect(await runCodexStopHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: null, - })).toEqual({ status: "no_config" }); - - const flush = vi.fn(async () => ({ status: "flushed", rows: 2, batches: 1 })); - const spawn = vi.fn(); - const result = await runCodexStopHook({ - session_id: "s1", - transcript_path: "/tmp/t.jsonl", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: baseConfig, - transcriptExists: vi.fn(() => true) as any, - readTranscript: vi.fn(() => '{"role":"assistant","content":"done"}') as any, - appendQueuedSessionRowFn: vi.fn() as any, - flushSessionQueueFn: flush as any, - spawnCodexWikiWorkerFn: spawn as any, - wikiLogFn: vi.fn() as any, - bundleDir: "/tmp/bundle", - }); - - expect(result).toMatchObject({ status: "complete", flushStatus: "flushed" }); - expect(flush).toHaveBeenCalledTimes(1); - expect(spawn).toHaveBeenCalledWith({ - config: baseConfig, - sessionId: "s1", - cwd: "/repo", - bundleDir: "/tmp/bundle", - reason: "Stop", - }); - - const noCapture = await runCodexStopHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: baseConfig, - captureEnabled: false, - }); - expect(noCapture).toEqual({ status: "complete", entry: 
undefined }); - }); - - it("continues when transcript reads fail and when wiki-worker mode is active", async () => { - expect(await runCodexStopHook({ - session_id: "s1", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - wikiWorker: true, - config: baseConfig, - })).toEqual({ status: "skipped" }); - - const flush = vi.fn(async () => ({ status: "flushed", rows: 1, batches: 1 })); - const result = await runCodexStopHook({ - session_id: "s1", - transcript_path: "/tmp/t.jsonl", - cwd: "/repo", - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: baseConfig, - transcriptExists: vi.fn(() => true) as any, - readTranscript: vi.fn(() => { throw new Error("boom"); }) as any, - appendQueuedSessionRowFn: vi.fn() as any, - flushSessionQueueFn: flush as any, - spawnCodexWikiWorkerFn: vi.fn() as any, - wikiLogFn: vi.fn() as any, - bundleDir: "/tmp/bundle", - }); - - expect(result.flushStatus).toBe("flushed"); - expect(flush).toHaveBeenCalledTimes(1); - }); - - it("returns empty when assistant blocks have no text and keeps going after capture failures", async () => { - expect(extractLastAssistantMessage([ - "{\"role\":\"assistant\",\"content\":[{\"type\":\"image\",\"url\":\"x\"}]}", - "{\"role\":\"user\",\"content\":\"hi\"}", - ].join("\n"))).toBe(""); - - const spawn = vi.fn(); - const logFn = vi.fn(); - const result = await runCodexStopHook({ - session_id: "s1", - transcript_path: "/tmp/missing.jsonl", - cwd: undefined as any, - hook_event_name: "Stop", - model: "gpt-5.2", - }, { - config: baseConfig, - transcriptExists: vi.fn(() => false) as any, - appendQueuedSessionRowFn: vi.fn() as any, - flushSessionQueueFn: vi.fn(async () => { throw new Error("flush boom"); }) as any, - spawnCodexWikiWorkerFn: spawn as any, - wikiLogFn: vi.fn() as any, - logFn, - bundleDir: "/tmp/bundle", - }); - - expect(result).toMatchObject({ - status: "complete", - entry: expect.objectContaining({ type: "assistant_stop" }), - }); - 
expect(logFn).toHaveBeenCalledWith(expect.stringContaining("capture failed: flush boom")); - expect(spawn).toHaveBeenCalledWith({ - config: baseConfig, - sessionId: "s1", - cwd: "", - bundleDir: "/tmp/bundle", - reason: "Stop", - }); - }); -}); diff --git a/openclaw/openclaw.plugin.json b/openclaw/openclaw.plugin.json index 485df8d..04cdf6c 100644 --- a/openclaw/openclaw.plugin.json +++ b/openclaw/openclaw.plugin.json @@ -23,5 +23,5 @@ } } }, - "version": "0.6.37" + "version": "0.6.38" } diff --git a/openclaw/package.json b/openclaw/package.json index 712bffd..31161cb 100644 --- a/openclaw/package.json +++ b/openclaw/package.json @@ -1,6 +1,6 @@ { "name": "hivemind", - "version": "0.6.37", + "version": "0.6.38", "type": "module", "description": "Hivemind — cloud-backed persistent shared memory for AI agents, powered by DeepLake", "license": "Apache-2.0", diff --git a/package-lock.json b/package-lock.json index 7ec599d..f0ebfcc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "hivemind", - "version": "0.6.37", + "version": "0.6.38", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "hivemind", - "version": "0.6.37", + "version": "0.6.38", "dependencies": { "deeplake": "^0.3.30", "just-bash": "^2.14.0", @@ -21,6 +21,7 @@ "@vitest/coverage-v8": "^4.1.3", "esbuild": "^0.28.0", "husky": "^9.1.7", + "jscpd": "^4.0.9", "lint-staged": "^16.4.0", "tsx": "^4.7.0", "typescript": "^6.0.0", @@ -1057,6 +1058,17 @@ "url": "https://github.com/sponsors/Borewit" } }, + "node_modules/@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, "node_modules/@emnapi/core": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.1.tgz", 
@@ -1080,6 +1092,16 @@ "tslib": "^2.4.0" } }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.28.0", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.28.0.tgz", @@ -2058,6 +2080,71 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@jscpd/badge-reporter": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@jscpd/badge-reporter/-/badge-reporter-4.0.5.tgz", + "integrity": "sha512-SLVhP00R9lkQ//Ivaanfm7k0L9sewpBven670kk1uGec2SWUOa7MVQcuad/TV59KEZ73UIC1lXvi6O9hAnbpUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "badgen": "^3.2.3", + "colors": "^1.4.0", + "fs-extra": "^11.2.0" + } + }, + "node_modules/@jscpd/core": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@jscpd/core/-/core-4.0.5.tgz", + "integrity": "sha512-Udvym21nWzxjYRVXwwpYNBqZ6b50QV2zHN3fFNzOPPg4cfQVYOZerILB7xNDUsXHC1PCr/N52Tq3q7AElvjWWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1" + } + }, + "node_modules/@jscpd/finder": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@jscpd/finder/-/finder-4.0.5.tgz", + "integrity": "sha512-/2VkRoVrrfya+51sitZo5I9MdwsRaPKB8X3L3khAYoHFXk4L/mUuG81RmGazDHjUIGg22ItlkQtwzorNZ2+aPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jscpd/core": "4.0.5", + "@jscpd/tokenizer": "4.0.5", + "blamer": "^1.0.6", + "bytes": "^3.1.2", + "cli-table3": "^0.6.5", + "colors": "^1.4.0", + "fast-glob": "^3.3.2", + "fs-extra": "^11.2.0", + "markdown-table": "^2.0.0", + "pug": "^3.0.3" + } + }, + "node_modules/@jscpd/html-reporter": { + "version": "4.0.5", + "resolved": 
"https://registry.npmjs.org/@jscpd/html-reporter/-/html-reporter-4.0.5.tgz", + "integrity": "sha512-drK2J8KyPIW9wvaElSIobZFp4dBO9GA++JW4gx3oihvLdDSp8qSo/CNqH47Dw0XkjQTxND3j/+Wz5JWvYRBgFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "colors": "1.4.0", + "fs-extra": "^11.2.0", + "pug": "^3.0.3" + } + }, + "node_modules/@jscpd/tokenizer": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@jscpd/tokenizer/-/tokenizer-4.0.5.tgz", + "integrity": "sha512-WzRujQtN5WedxZVDKuoanxmKAFrxcLrHpcA6kaM4z8AhGtWXZ325yseqgL5TZ8OK7Auwu7kQLlqhfk05fGYG7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jscpd/core": "4.0.5", + "reprism": "^0.0.11", + "spark-md5": "^3.0.2" + } + }, "node_modules/@mixmark-io/domino": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@mixmark-io/domino/-/domino-2.2.0.tgz", @@ -2098,6 +2185,44 @@ "@emnapi/runtime": "^1.7.1" } }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": 
"2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/@oxc-project/types": { "version": "0.123.0", "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.123.0.tgz", @@ -3223,11 +3348,17 @@ "integrity": "sha512-tO4ZIRKNC+MDWV4qKVZe3Ql/woTnmHDr5JD8UI5hn2pwBrHEwOEMZK7WlNb5RKB6EoJ02gwmQS9OrjuFnZYdpg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~7.18.0" } }, + "node_modules/@types/sarif": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz", + "integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/yargs-parser": { "version": "21.0.3", "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", @@ -3241,7 +3372,6 @@ "integrity": "sha512-/MBdrkA8t6hbdCWFKs09dPik774xvs4Z6L4bycdCxYNLHM8oZuRyosumQMG19LUlBsB6GeVpL1q4kFFazvyKGA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@bcoe/v8-coverage": "^1.0.2", "@vitest/utils": "4.1.3", @@ -3380,6 +3510,19 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/amdefine": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", @@ -3431,6 +3574,20 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/assert-never": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.4.0.tgz", + "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==", + "dev": true, + "license": "MIT" + }, "node_modules/assertion-error": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", @@ -3453,6 +3610,26 @@ "js-tokens": "^10.0.0" } }, + "node_modules/babel-walk": { + "version": "3.0.0-canary-5", + "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz", + "integrity": "sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.9.6" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/badgen": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/badgen/-/badgen-3.2.3.tgz", + "integrity": "sha512-svDuwkc63E/z0ky3drpUppB83s/nlgDciH9m+STwwQoWyq7yCgew1qEfJ+9axkKdNq7MskByptWUN9j1PGMwFA==", + "dev": true, + "license": "MIT" + }, "node_modules/balanced-match": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", @@ -3495,6 +3672,20 @@ "readable-stream": "^3.4.0" } }, + "node_modules/blamer": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/blamer/-/blamer-1.0.7.tgz", + "integrity": "sha512-GbBStl/EVlSWkiJQBZps3H1iARBrC7vt++Jb/TTmCNu/jZ04VW7tSN1nScbFXBUy1AN+jzeL7Zep9sbQxLhXKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^4.0.0", + "which": "^2.0.2" + }, + "engines": { + "node": ">=8.9" + } + }, "node_modules/bowser": { "version": "2.14.1", "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.14.1.tgz", @@ -3514,6 +3705,19 @@ "node": "18 || 20 || >=22" } }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", @@ -3539,6 +3743,47 @@ "ieee754": "^1.1.13" } }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/chai": { "version": "6.2.2", "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", @@ -3549,6 +3794,16 @@ "node": ">=18" } }, + "node_modules/character-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", + "integrity": 
"sha512-+UqJQjFEFaTAs3bNsF2j2kEN1baG/zghZbdqoYEDxGZtJo9LBzl1A+m0D4n3qKx8N2FNv8/Xp6yV9mQmBuptaw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-regex": "^1.0.3" + } + }, "node_modules/chownr": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", @@ -3572,6 +3827,77 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cli-table3": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz", + "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + "@colors/colors": "1.5.0" + } + }, + "node_modules/cli-table3/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/cli-table3/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3/node_modules/string-width": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/cli-truncate": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.2.0.tgz", @@ -3596,6 +3922,16 @@ "dev": true, "license": "MIT" }, + "node_modules/colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.1.90" + } + }, "node_modules/commander": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", @@ -3621,6 +3957,17 @@ "compressjs": "bin/compressjs" } }, + "node_modules/constantinople": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz", + "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.6.0", + "@babel/types": "^7.6.1" + } + }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -3628,6 +3975,21 @@ 
"dev": true, "license": "MIT" }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", @@ -3705,6 +4067,28 @@ "node": ">=0.3.1" } }, + "node_modules/doctypes": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", + "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/emoji-regex": { "version": "10.6.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", @@ -3716,8 +4100,8 @@ "version": "1.4.5", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "devOptional": true, "license": "MIT", - "optional": true, "dependencies": { "once": "^1.4.0" } @@ -3735,6 +4119,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-module-lexer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", @@ -3742,6 +4146,19 @@ "dev": true, "license": "MIT" }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/esbuild": { "version": "0.28.0", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.28.0.tgz", @@ -3749,7 +4166,6 @@ "dev": true, "hasInstallScript": true, "license": "MIT", - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -3802,6 +4218,53 @@ "dev": true, "license": "MIT" }, + "node_modules/execa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", + 
"strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/execa/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -3822,8 +4285,25 @@ "node": ">=12.0.0" } }, - "node_modules/fast-xml-builder": { - "version": "1.1.4", + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-xml-builder": { + "version": "1.1.4", "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz", "integrity": "sha512-f2jhpN4Eccy0/Uz9csxh3Nu6q4ErKxf0XIsasomfOihuSUa3/xw6w8dnOtCDgEItQFJG8KyXPzQXzcODDrrbOg==", "funding": [ @@ -3857,6 +4337,16 @@ "fxparser": "src/cli/cli.js" } }, + "node_modules/fastq": { + 
"version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/fdir": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", @@ -3893,6 +4383,19 @@ "url": "https://github.com/sindresorhus/file-type?sponsor=1" } }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -3900,6 +4403,21 @@ "license": "MIT", "optional": true }, + "node_modules/fs-extra": { + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3915,6 +4433,16 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, "node_modules/get-east-asian-width": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.5.0.tgz", @@ -3928,6 +4456,61 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/get-tsconfig": { "version": "4.13.7", "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz", @@ -3948,6 +4531,39 @@ "license": "MIT", 
"optional": true }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, "node_modules/graceful-readlink": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", @@ -3964,6 +4580,48 @@ "node": ">=8" } }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + 
"node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.3.tgz", + "integrity": "sha512-ej4AhfhfL2Q2zpMmLo7U1Uv9+PyhIZpgQLGT1F9miIGmiCJIoCgSmczFdrc97mWT4kVY72KA+WnnhJ5pghSvSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -3971,6 +4629,16 @@ "dev": true, "license": "MIT" }, + "node_modules/human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8.12.0" + } + }, "node_modules/husky": { "version": "9.1.7", "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz", @@ -4023,6 +4691,43 @@ "node": "^20.17.0 || >=22.9.0" } }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-expression": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz", + "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^7.1.1", + "object-assign": "^4.1.1" + } + }, + "node_modules/is-extglob": { 
+ "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-fullwidth-code-point": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz", @@ -4039,6 +4744,75 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-promise": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", + "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", @@ -4078,6 +4852,13 @@ "node": ">=8" } }, + "node_modules/js-stringify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", + "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==", + "dev": true, + "license": "MIT" + }, "node_modules/js-tokens": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-10.0.0.tgz", @@ -4085,6 +4866,73 @@ "dev": true, "license": "MIT" }, + "node_modules/jscpd": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/jscpd/-/jscpd-4.0.9.tgz", + "integrity": "sha512-fp6Sh42W3mIPoQgZmgYmKDLQzEDnnX2vaGlTN4haILkB2vsi+ewcCHEtWR/2CR/QbsBvAvsNo8U5Sa+p9aHiGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jscpd/badge-reporter": "4.0.5", + "@jscpd/core": "4.0.5", + "@jscpd/finder": "4.0.5", + "@jscpd/html-reporter": "4.0.5", + "@jscpd/tokenizer": "4.0.5", + "colors": "^1.4.0", + "commander": "^5.0.0", + "fs-extra": "^11.2.0", + "jscpd-sarif-reporter": "4.0.7" + }, + "bin": { + "jscpd": "bin/jscpd" + } + }, + 
"node_modules/jscpd-sarif-reporter": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/jscpd-sarif-reporter/-/jscpd-sarif-reporter-4.0.7.tgz", + "integrity": "sha512-Q/VlfTI/Nbjc8dZ/2pDVIf1aRi2bM2CTYujcAoeYr7brRnS4o5ZeW86W8q7MM7cQu40gezlNckl+E9wKFSMFiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "colors": "^1.4.0", + "fs-extra": "^11.2.0", + "node-sarif-builder": "^3.4.0" + } + }, + "node_modules/jscpd/node_modules/commander": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/jsonfile": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.1.tgz", + "integrity": "sha512-zwOTdL3rFQ/lRdBnntKVOX6k5cKJwEc1HdilT71BWEu7J41gXIB2MRp+vxduPSwZJPWBxEzv4yH1wYLJGUHX4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jstransformer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", + "integrity": "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-promise": "^2.0.0", + "promise": "^7.0.1" + } + }, "node_modules/just-bash": { "version": "2.14.0", "resolved": "https://registry.npmjs.org/just-bash/-/just-bash-2.14.0.tgz", @@ -4504,6 +5352,84 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/markdown-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", + "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "repeat-string": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/mimic-function": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", @@ -4653,11 +5579,48 @@ "nxz": "lib/cli/nxz.js" }, "engines": { - "node": ">=16.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/oorabona" + "node": ">=16.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/oorabona" + } + }, + "node_modules/node-sarif-builder": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/node-sarif-builder/-/node-sarif-builder-3.4.0.tgz", + "integrity": "sha512-tGnJW6OKRii9u/b2WiUViTJS+h7Apxx17qsMUjsUeNDiMMX5ZFf8F8Fcz7PAQ6omvOxHZtvDTmOYKJQwmfpjeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/sarif": "^2.1.7", + "fs-extra": "^11.1.1" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" } }, "node_modules/obug": { @@ -4675,8 +5638,8 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": 
"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "devOptional": true, "license": "ISC", - "optional": true, "dependencies": { "wrappy": "1" } @@ -4718,6 +5681,23 @@ "node": ">=14.0.0" } }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, "node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", @@ -4725,6 +5705,34 @@ "dev": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", + "license": "MIT", + "optional": true, + "dependencies": { + "pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, "node_modules/pg-cloudflare": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", @@ -4913,17 +5921,184 @@ "node": ">=10" } }, + "node_modules/promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": 
"sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "asap": "~2.0.3" + } + }, + "node_modules/pug": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.4.tgz", + "integrity": "sha512-kFfq5mMzrS7+wrl5pLJzZEzemx34OQ0w4SARfhy/3yxTlhbstsudDwJzhf1hP02yHzbjoVMSXUj/Sz6RNfMyXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "pug-code-gen": "^3.0.4", + "pug-filters": "^4.0.0", + "pug-lexer": "^5.0.1", + "pug-linker": "^4.0.0", + "pug-load": "^3.0.0", + "pug-parser": "^6.0.0", + "pug-runtime": "^3.0.1", + "pug-strip-comments": "^2.0.0" + } + }, + "node_modules/pug-attrs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz", + "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==", + "dev": true, + "license": "MIT", + "dependencies": { + "constantinople": "^4.0.1", + "js-stringify": "^1.0.2", + "pug-runtime": "^3.0.0" + } + }, + "node_modules/pug-code-gen": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.4.tgz", + "integrity": "sha512-6okWYIKdasTyXICyEtvobmTZAVX57JkzgzIi4iRJlin8kmhG+Xry2dsus+Mun/nGCn6F2U49haHI5mkELXB14g==", + "dev": true, + "license": "MIT", + "dependencies": { + "constantinople": "^4.0.1", + "doctypes": "^1.1.0", + "js-stringify": "^1.0.2", + "pug-attrs": "^3.0.0", + "pug-error": "^2.1.0", + "pug-runtime": "^3.0.1", + "void-elements": "^3.1.0", + "with": "^7.0.0" + } + }, + "node_modules/pug-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz", + "integrity": "sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==", + "dev": true, + "license": "MIT" + }, + "node_modules/pug-filters": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz", + "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "constantinople": "^4.0.1", + "jstransformer": "1.0.0", + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0", + "resolve": "^1.15.1" + } + }, + "node_modules/pug-lexer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz", + "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==", + "dev": true, + "license": "MIT", + "dependencies": { + "character-parser": "^2.2.0", + "is-expression": "^4.0.0", + "pug-error": "^2.0.0" + } + }, + "node_modules/pug-linker": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz", + "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==", + "dev": true, + "license": "MIT", + "dependencies": { + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0" + } + }, + "node_modules/pug-load": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz", + "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "object-assign": "^4.1.1", + "pug-walk": "^2.0.0" + } + }, + "node_modules/pug-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz", + "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "pug-error": "^2.0.0", + "token-stream": "1.0.0" + } + }, + "node_modules/pug-runtime": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz", + "integrity": 
"sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==", + "dev": true, + "license": "MIT" + }, + "node_modules/pug-strip-comments": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz", + "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "pug-error": "^2.0.0" + } + }, + "node_modules/pug-walk": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz", + "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==", + "dev": true, + "license": "MIT" + }, "node_modules/pump": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.4.tgz", "integrity": "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==", + "devOptional": true, "license": "MIT", - "optional": true, "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/quickjs-emscripten": { "version": "0.32.0", "resolved": "https://registry.npmjs.org/quickjs-emscripten/-/quickjs-emscripten-0.32.0.tgz", @@ -4993,6 +6168,45 @@ "node": ">= 6" } }, + "node_modules/repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + 
"integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/reprism": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/reprism/-/reprism-0.0.11.tgz", + "integrity": "sha512-VsxDR5QxZo08M/3nRypNlScw5r3rKeSOPdU/QhDmu3Ai3BJxHn/qgfXGWQp/tAxUtzwYNo9W6997JZR0tPLZsA==", + "dev": true, + "license": "MIT" + }, + "node_modules/resolve": { + "version": "1.22.12", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.12.tgz", + "integrity": "sha512-TyeJ1zif53BPfHootBGwPRYT1RUt6oGWsaQr8UyZW/eAm9bKoijtvruSDEmZHm92CwS9nj7/fWttqPCgzep8CA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/resolve-pkg-maps": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", @@ -5020,6 +6234,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, "node_modules/rfdc": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", @@ -5061,6 +6286,30 @@ "@rolldown/binding-win32-x64-msvc": "1.0.0-rc.13" } }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -5140,6 +6389,29 @@ "@img/sharp-win32-x64": "0.34.5" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/siginfo": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", @@ -5246,6 +6518,13 @@ "node": ">=0.10.0" } }, + "node_modules/spark-md5": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz", + "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==", + "dev": true, + "license": "(WTFPL OR MIT)" + }, "node_modules/split2": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", @@ -5335,6 +6614,16 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, + 
"node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -5386,6 +6675,19 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/tar-fs": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", @@ -5460,6 +6762,26 @@ "node": ">=14.0.0" } }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/token-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", + "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==", + "dev": true, + "license": "MIT" + }, "node_modules/token-types": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.2.tgz", @@ -6048,6 +7370,16 @@ "dev": true, "license": 
"MIT" }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -6061,7 +7393,6 @@ "integrity": "sha512-P1PbweD+2/udplnThz3btF4cf6AgPky7kk23RtHUkJIU5BIxwPprhRGmOAHs6FTI7UiGbTNrgNP6jSYD6JaRnw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "lightningcss": "^1.32.0", "picomatch": "^4.0.4", @@ -6140,7 +7471,6 @@ "integrity": "sha512-DBc4Tx0MPNsqb9isoyOq00lHftVx/KIU44QOm2q59npZyLUkENn8TMFsuzuO+4U2FUa9rgbbPt3udrP25GcjXw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/expect": "4.1.3", "@vitest/mocker": "4.1.3", @@ -6225,6 +7555,32 @@ } } }, + "node_modules/void-elements": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/why-is-node-running": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", @@ -6242,6 +7598,22 @@ "node": ">=8" } }, + "node_modules/with": { + "version": "7.0.2", + "resolved": 
"https://registry.npmjs.org/with/-/with-7.0.2.tgz", + "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.9.6", + "@babel/types": "^7.9.6", + "assert-never": "^1.2.1", + "babel-walk": "3.0.0-canary-5" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/wrap-ansi": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", @@ -6282,8 +7654,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "license": "ISC", - "optional": true + "devOptional": true, + "license": "ISC" }, "node_modules/xtend": { "version": "4.0.2", diff --git a/package.json b/package.json index 81f2b6c..c503dd2 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "hivemind", - "version": "0.6.37", + "version": "0.6.38", "description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake", "type": "module", "bin": { @@ -13,7 +13,8 @@ "shell": "tsx src/shell/deeplake-shell.ts", "test": "vitest run", "typecheck": "tsc --noEmit", - "ci": "npm run typecheck && npm test", + "dup": "jscpd src", + "ci": "npm run typecheck && npm run dup && npm test", "prepare": "husky" }, "lint-staged": { @@ -33,6 +34,7 @@ "@vitest/coverage-v8": "^4.1.3", "esbuild": "^0.28.0", "husky": "^9.1.7", + "jscpd": "^4.0.9", "lint-staged": "^16.4.0", "tsx": "^4.7.0", "typescript": "^6.0.0", diff --git a/src/deeplake-api.ts b/src/deeplake-api.ts index 4b1dfed..a003b04 100644 --- a/src/deeplake-api.ts +++ b/src/deeplake-api.ts @@ -6,18 +6,25 @@ import { log as _log } from "./utils/debug.js"; import { sqlStr } from "./utils/sql.js"; const log = (msg: string) => _log("sdk", msg); -const TRACE_SQL = (process.env.HIVEMIND_TRACE_SQL ?? 
process.env.DEEPLAKE_TRACE_SQL) === "1" || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; -const DEBUG_FILE_LOG = (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; function summarizeSql(sql: string, maxLen = 220): string { const compact = sql.replace(/\s+/g, " ").trim(); return compact.length > maxLen ? `${compact.slice(0, maxLen)}...` : compact; } +/** + * SQL tracing is opt-in and evaluated on every call so callers can flip the + * env vars after module load (e.g. the one-shot shell bundle silences + * `[deeplake-sql]` stderr writes so they don't land in Claude Code's + * Bash-tool result — Claude Code merges child stderr into tool_result). + */ function traceSql(msg: string): void { - if (!TRACE_SQL) return; + const traceEnabled = (process.env.HIVEMIND_TRACE_SQL ?? process.env.DEEPLAKE_TRACE_SQL) === "1" + || (process.env.HIVEMIND_DEBUG ?? process.env.DEEPLAKE_DEBUG) === "1"; + if (!traceEnabled) return; process.stderr.write(`[deeplake-sql] ${msg}\n`); - if (DEBUG_FILE_LOG) log(msg); + const debugFileLog = (process.env.HIVEMIND_DEBUG ?? 
process.env.DEEPLAKE_DEBUG) === "1"; + if (debugFileLog) log(msg); } // ── Retry & concurrency primitives ────────────────────────────────────────── diff --git a/src/hooks/bash-command-compiler.ts b/src/hooks/bash-command-compiler.ts index 4bf6ce0..68e1534 100644 --- a/src/hooks/bash-command-compiler.ts +++ b/src/hooks/bash-command-compiler.ts @@ -2,6 +2,7 @@ import type { DeeplakeApi } from "../deeplake-api.js"; import { sqlLike } from "../utils/sql.js"; import { type GrepParams, handleGrepDirect, parseBashGrep } from "./grep-direct.js"; import { normalizeContent, refineGrepMatches } from "../shell/grep-core.js"; +import { capOutputForClaude } from "../utils/output-cap.js"; import { listVirtualPathRowsForDirs, readVirtualPathContents, @@ -520,5 +521,5 @@ export async function executeCompiledBashCommand( } } - return outputs.join("\n"); + return capOutputForClaude(outputs.join("\n"), { kind: "bash" }); } diff --git a/src/hooks/capture.ts b/src/hooks/capture.ts index ae90ad8..81c8385 100644 --- a/src/hooks/capture.ts +++ b/src/hooks/capture.ts @@ -1,8 +1,8 @@ #!/usr/bin/env node /** - * Capture hook — appends session events to a local queue on the hot path. - * Stop/SubagentStop flush that queue to the sessions table in batched INSERTs. + * Capture hook — writes each session event as a separate row in the sessions table. + * One INSERT per event, no concat, no race conditions. 
* * Used by: UserPromptSubmit, PostToolUse (async), Stop, SubagentStop */ @@ -10,26 +10,20 @@ import { readStdin } from "../utils/stdin.js"; import { loadConfig, type Config } from "../config.js"; import { DeeplakeApi } from "../deeplake-api.js"; +import { sqlStr } from "../utils/sql.js"; import { log as _log } from "../utils/debug.js"; -import { isDirectRun } from "../utils/direct-run.js"; +import { buildSessionPath } from "../utils/session-path.js"; import { bumpTotalCount, loadTriggerConfig, shouldTrigger, tryAcquireLock, + releaseLock, } from "./summary-state.js"; import { bundleDirFromImportMeta, spawnWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { - appendQueuedSessionRow, - buildQueuedSessionRow, - buildSessionPath, - flushSessionQueue, -} from "./session-queue.js"; -import { clearSessionQueryCache } from "./query-cache.js"; - const log = (msg: string) => _log("capture", msg); -export interface HookInput { +interface HookInput { session_id: string; transcript_path?: string; cwd?: string; @@ -37,19 +31,32 @@ export interface HookInput { hook_event_name?: string; agent_id?: string; agent_type?: string; + // UserPromptSubmit prompt?: string; + // PostToolUse tool_name?: string; tool_input?: Record; tool_response?: Record; tool_use_id?: string; + // Stop / SubagentStop last_assistant_message?: string; stop_hook_active?: boolean; agent_transcript_path?: string; } -const CAPTURE = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false"; +const CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; + +async function main(): Promise { + if (!CAPTURE) return; + const input = await readStdin(); + const config = loadConfig(); + if (!config) { log("no config"); return; } + + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); -export function buildCaptureEntry(input: HookInput, timestamp: string): Record | null { + // Build the event entry + const ts = new Date().toISOString(); const meta = { session_id: input.session_id, transcript_path: input.transcript_path, @@ -58,20 +65,22 @@ export function buildCaptureEntry(input: HookInput, timestamp: string): Record; + if (input.prompt !== undefined) { - return { + log(`user session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: input.prompt, }; - } - - if (input.tool_name !== undefined) { - return { + } else if (input.tool_name !== undefined) { + log(`tool=${input.tool_name} session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "tool_call", @@ -80,165 +89,91 @@ export function buildCaptureEntry(input: HookInput, timestamp: string): Record void; - bumpTotalCountFn?: typeof bumpTotalCount; - loadTriggerConfigFn?: typeof loadTriggerConfig; - shouldTriggerFn?: typeof shouldTrigger; - tryAcquireLockFn?: typeof tryAcquireLock; - wikiLogFn?: typeof wikiLog; - spawnWikiWorkerFn?: typeof spawnWikiWorker; -} + const sessionPath = buildSessionPath(config, input.session_id); + const line = JSON.stringify(entry); + log(`writing to ${sessionPath}`); -export function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config, deps: PeriodicSummaryDeps = {}): void { - const { - bundleDir = bundleDirFromImportMeta(import.meta.url), - wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", - logFn = log, - bumpTotalCountFn = bumpTotalCount, 
- loadTriggerConfigFn = loadTriggerConfig, - shouldTriggerFn = shouldTrigger, - tryAcquireLockFn = tryAcquireLock, - wikiLogFn = wikiLog, - spawnWikiWorkerFn = spawnWikiWorker, - } = deps; - - if (wikiWorker) return; + // Simple INSERT — one row per event, no concat, no race conditions. + const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; + const filename = sessionPath.split("/").pop() ?? ""; - try { - const state = bumpTotalCountFn(sessionId); - const cfg = loadTriggerConfigFn(); - if (!shouldTriggerFn(state, cfg)) return; + // For JSONB: only escape single quotes for the SQL literal, keep JSON structure intact. + // sqlStr() would also escape backslashes and strip control chars, corrupting the JSON. + const jsonForSql = line.replace(/'/g, "''"); - if (!tryAcquireLockFn(sessionId)) { - logFn(`periodic trigger suppressed (lock held) session=${sessionId}`); - return; - } + const insertSql = + `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) ` + + `VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ` + + `${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? "")}', 'claude_code', '${ts}', '${ts}')`; - wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); - spawnWikiWorkerFn({ - config, - sessionId, - cwd, - bundleDir, - reason: "Periodic", - }); + try { + await api.query(insertSql); } catch (e: any) { - logFn(`periodic trigger error: ${e.message}`); + // Fallback: table might not exist (session-start failed or org switched mid-session). + // Create it and retry once. 
+ if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) { + log("table missing, creating and retrying"); + await api.ensureSessionsTable(sessionsTable); + await api.query(insertSql); + } else { + throw e; + } } -} -interface CaptureHookDeps { - captureEnabled?: boolean; - config?: Config | null; - now?: () => string; - createApi?: (config: Config) => DeeplakeApi; - appendQueuedSessionRowFn?: typeof appendQueuedSessionRow; - buildQueuedSessionRowFn?: typeof buildQueuedSessionRow; - flushSessionQueueFn?: typeof flushSessionQueue; - clearSessionQueryCacheFn?: typeof clearSessionQueryCache; - maybeTriggerPeriodicSummaryFn?: typeof maybeTriggerPeriodicSummary; - logFn?: (msg: string) => void; -} + log("capture ok → cloud"); -export async function runCaptureHook(input: HookInput, deps: CaptureHookDeps = {}): Promise<{ - status: "disabled" | "no_config" | "ignored" | "queued"; - entry?: Record; - flushStatus?: string; -}> { - const { - captureEnabled = CAPTURE, - config = loadConfig(), - now = () => new Date().toISOString(), - createApi = (activeConfig) => new DeeplakeApi( - activeConfig.token, - activeConfig.apiUrl, - activeConfig.orgId, - activeConfig.workspaceId, - activeConfig.sessionsTableName, - ), - appendQueuedSessionRowFn = appendQueuedSessionRow, - buildQueuedSessionRowFn = buildQueuedSessionRow, - flushSessionQueueFn = flushSessionQueue, - clearSessionQueryCacheFn = clearSessionQueryCache, - maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, - logFn = log, - } = deps; - - if (!captureEnabled) return { status: "disabled" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } + maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? 
"", config); +} - const ts = now(); - const entry = buildCaptureEntry(input, ts); - if (!entry) { - logFn("unknown event, skipping"); - return { status: "ignored" }; - } +/** Increment the event counter and, if the threshold is crossed, spawn a background wiki worker. */ +function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config): void { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; - if (input.prompt !== undefined) logFn(`user session=${input.session_id}`); - else if (input.tool_name !== undefined) logFn(`tool=${input.tool_name} session=${input.session_id}`); - else logFn(`assistant session=${input.session_id}`); + try { + const state = bumpTotalCount(sessionId); + const cfg = loadTriggerConfig(); + if (!shouldTrigger(state, cfg)) return; - if (input.hook_event_name === "UserPromptSubmit") { - clearSessionQueryCacheFn(input.session_id); - } + if (!tryAcquireLock(sessionId)) { + log(`periodic trigger suppressed (lock held) session=${sessionId}`); + return; + } - const sessionPath = buildSessionPath(config, input.session_id); - const line = JSON.stringify(entry); - const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: input.hook_event_name ?? "", - agent: "claude_code", - timestamp: ts, - })); - logFn(`queued ${input.hook_event_name ?? "event"} for ${sessionPath}`); - - maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? 
"", config); - - if (input.hook_event_name === "Stop" || input.hook_event_name === "SubagentStop") { - const result = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - drainAll: true, - }); - logFn(`flush ${result.status}: rows=${result.rows} batches=${result.batches}`); - return { status: "queued", entry, flushStatus: result.status }; + wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); + try { + spawnWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Periodic", + }); + } catch (e: any) { + log(`periodic spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr: any) { + log(`releaseLock after periodic spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } + } catch (e: any) { + log(`periodic trigger error: ${e.message}`); } - - return { status: "queued", entry }; } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runCaptureHook(input); -} - -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/codex/capture.ts b/src/hooks/codex/capture.ts index 615b72d..0c80802 100644 --- a/src/hooks/codex/capture.ts +++ b/src/hooks/codex/capture.ts @@ -1,48 +1,61 @@ #!/usr/bin/env node /** - * Codex Capture hook — appends session events to a local queue on the hot path. + * Codex Capture hook — writes each session event as a row in the sessions table. 
* * Used by: UserPromptSubmit, PostToolUse + * + * Codex input fields: + * All events: session_id, transcript_path, cwd, hook_event_name, model + * UserPromptSubmit: prompt (user text) + * PostToolUse: tool_name, tool_use_id, tool_input, tool_response + * Stop: (no extra fields — Codex has no last_assistant_message equivalent) */ import { readStdin } from "../../utils/stdin.js"; import { loadConfig, type Config } from "../../config.js"; +import { DeeplakeApi } from "../../deeplake-api.js"; +import { sqlStr } from "../../utils/sql.js"; import { log as _log } from "../../utils/debug.js"; -import { isDirectRun } from "../../utils/direct-run.js"; +import { buildSessionPath } from "../../utils/session-path.js"; import { bumpTotalCount, loadTriggerConfig, shouldTrigger, tryAcquireLock, + releaseLock, } from "../summary-state.js"; import { bundleDirFromImportMeta, spawnCodexWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { - appendQueuedSessionRow, - buildQueuedSessionRow, - buildSessionPath, -} from "../session-queue.js"; -import { clearSessionQueryCache } from "../query-cache.js"; - const log = (msg: string) => _log("codex-capture", msg); -export interface CodexHookInput { +interface CodexHookInput { session_id: string; transcript_path?: string | null; cwd: string; hook_event_name: string; model: string; turn_id?: string; + // UserPromptSubmit prompt?: string; + // PostToolUse (Bash only in Codex) tool_name?: string; tool_use_id?: string; tool_input?: { command?: string }; tool_response?: Record; } -const CAPTURE = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false"; +const CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; + +async function main(): Promise { + if (!CAPTURE) return; + const input = await readStdin(); + const config = loadConfig(); + if (!config) { log("no config"); return; } -export function buildCodexCaptureEntry(input: CodexHookInput, timestamp: string): Record | null { + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); + + const ts = new Date().toISOString(); const meta = { session_id: input.session_id, transcript_path: input.transcript_path, @@ -50,20 +63,22 @@ export function buildCodexCaptureEntry(input: CodexHookInput, timestamp: string) hook_event_name: input.hook_event_name, model: input.model, turn_id: input.turn_id, - timestamp, + timestamp: ts, }; + let entry: Record; + if (input.hook_event_name === "UserPromptSubmit" && input.prompt !== undefined) { - return { + log(`user session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "user_message", content: input.prompt, }; - } - - if (input.hook_event_name === "PostToolUse" && input.tool_name !== undefined) { - return { + } else if (input.hook_event_name === "PostToolUse" && input.tool_name !== undefined) { + log(`tool=${input.tool_name} session=${input.session_id}`); + entry = { id: crypto.randomUUID(), ...meta, type: "tool_call", @@ -72,132 +87,75 @@ export function buildCodexCaptureEntry(input: CodexHookInput, timestamp: string) tool_input: JSON.stringify(input.tool_input), tool_response: JSON.stringify(input.tool_response), }; + } else { + log(`unknown event: ${input.hook_event_name}, skipping`); + return; } - return null; -} + const sessionPath = buildSessionPath(config, input.session_id); + const line = JSON.stringify(entry); + log(`writing to ${sessionPath}`); -interface PeriodicSummaryDeps { - bundleDir?: string; - wikiWorker?: boolean; - logFn?: (msg: string) => void; 
- bumpTotalCountFn?: typeof bumpTotalCount; - loadTriggerConfigFn?: typeof loadTriggerConfig; - shouldTriggerFn?: typeof shouldTrigger; - tryAcquireLockFn?: typeof tryAcquireLock; - wikiLogFn?: typeof wikiLog; - spawnCodexWikiWorkerFn?: typeof spawnCodexWikiWorker; -} + const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; + const filename = sessionPath.split("/").pop() ?? ""; + const jsonForSql = sqlStr(line); -export function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config, deps: PeriodicSummaryDeps = {}): void { - const { - bundleDir = bundleDirFromImportMeta(import.meta.url), - wikiWorker = process.env.HIVEMIND_WIKI_WORKER === "1", - logFn = log, - bumpTotalCountFn = bumpTotalCount, - loadTriggerConfigFn = loadTriggerConfig, - shouldTriggerFn = shouldTrigger, - tryAcquireLockFn = tryAcquireLock, - wikiLogFn = wikiLog, - spawnCodexWikiWorkerFn = spawnCodexWikiWorker, - } = deps; - - if (wikiWorker) return; + const insertSql = + `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) ` + + `VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ` + + `${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', '${sqlStr(input.hook_event_name ?? 
"")}', 'codex', '${ts}', '${ts}')`; try { - const state = bumpTotalCountFn(sessionId); - const cfg = loadTriggerConfigFn(); - if (!shouldTriggerFn(state, cfg)) return; - - if (!tryAcquireLockFn(sessionId)) { - logFn(`periodic trigger suppressed (lock held) session=${sessionId}`); - return; - } - - wikiLogFn(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); - spawnCodexWikiWorkerFn({ - config, - sessionId, - cwd, - bundleDir, - reason: "Periodic", - }); + await api.query(insertSql); } catch (e: any) { - logFn(`periodic trigger error: ${e.message}`); + if (e.message?.includes("permission denied") || e.message?.includes("does not exist")) { + log("table missing, creating and retrying"); + await api.ensureSessionsTable(sessionsTable); + await api.query(insertSql); + } else { + throw e; + } } -} -interface CodexCaptureDeps { - captureEnabled?: boolean; - config?: Config | null; - now?: () => string; - appendQueuedSessionRowFn?: typeof appendQueuedSessionRow; - buildQueuedSessionRowFn?: typeof buildQueuedSessionRow; - clearSessionQueryCacheFn?: typeof clearSessionQueryCache; - maybeTriggerPeriodicSummaryFn?: typeof maybeTriggerPeriodicSummary; - logFn?: (msg: string) => void; + log("capture ok"); + + maybeTriggerPeriodicSummary(input.session_id, input.cwd ?? 
"", config); } -export async function runCodexCaptureHook(input: CodexHookInput, deps: CodexCaptureDeps = {}): Promise<{ - status: "disabled" | "no_config" | "ignored" | "queued"; - entry?: Record; -}> { - const { - captureEnabled = CAPTURE, - config = loadConfig(), - now = () => new Date().toISOString(), - appendQueuedSessionRowFn = appendQueuedSessionRow, - buildQueuedSessionRowFn = buildQueuedSessionRow, - clearSessionQueryCacheFn = clearSessionQueryCache, - maybeTriggerPeriodicSummaryFn = maybeTriggerPeriodicSummary, - logFn = log, - } = deps; - - if (!captureEnabled) return { status: "disabled" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } +function maybeTriggerPeriodicSummary(sessionId: string, cwd: string, config: Config): void { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; - const ts = now(); - const entry = buildCodexCaptureEntry(input, ts); - if (!entry) { - logFn(`unknown event: ${input.hook_event_name}, skipping`); - return { status: "ignored" }; - } + try { + const state = bumpTotalCount(sessionId); + const cfg = loadTriggerConfig(); + if (!shouldTrigger(state, cfg)) return; - if (input.hook_event_name === "UserPromptSubmit") logFn(`user session=${input.session_id}`); - else logFn(`tool=${input.tool_name} session=${input.session_id}`); + if (!tryAcquireLock(sessionId)) { + log(`periodic trigger suppressed (lock held) session=${sessionId}`); + return; + } - if (input.hook_event_name === "UserPromptSubmit") { - clearSessionQueryCacheFn(input.session_id); + wikiLog(`Periodic: threshold hit (total=${state.totalCount}, since=${state.totalCount - state.lastSummaryCount}, N=${cfg.everyNMessages}, hours=${cfg.everyHours})`); + try { + spawnCodexWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Periodic", + }); + } catch (e: any) { + log(`periodic spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr: any) { + log(`releaseLock 
after periodic spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } + } catch (e: any) { + log(`periodic trigger error: ${e.message}`); } - - const sessionPath = buildSessionPath(config, input.session_id); - const line = JSON.stringify(entry); - const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: input.hook_event_name ?? "", - agent: "codex", - timestamp: ts, - })); - logFn(`queued ${input.hook_event_name} for ${sessionPath}`); - - maybeTriggerPeriodicSummaryFn(input.session_id, input.cwd ?? "", config); - return { status: "queued", entry }; } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runCodexCaptureHook(input); -} - -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/codex/session-start-setup.ts b/src/hooks/codex/session-start-setup.ts index 8645d98..8dfb984 100644 --- a/src/hooks/codex/session-start-setup.ts +++ b/src/hooks/codex/session-start-setup.ts @@ -8,7 +8,6 @@ import { fileURLToPath } from "node:url"; import { dirname, join } from "node:path"; -import { mkdirSync, appendFileSync } from "node:fs"; import { execSync } from "node:child_process"; import { homedir } from "node:os"; import { loadCredentials, saveCredentials } from "../../commands/auth.js"; @@ -17,54 +16,15 @@ import { DeeplakeApi } from "../../deeplake-api.js"; import { sqlStr } from "../../utils/sql.js"; import { readStdin } from "../../utils/stdin.js"; import { log as _log } from "../../utils/debug.js"; -import { isDirectRun } from "../../utils/direct-run.js"; -import { - drainSessionQueues, - isSessionWriteAuthError, - isSessionWriteDisabled, - markSessionWriteDisabled, - 
tryAcquireSessionDrainLock, -} from "../session-queue.js"; -import { - getInstalledVersion, - getLatestVersionCached, - isNewer, -} from "../version-check.js"; - +import { getInstalledVersion, getLatestVersion, isNewer } from "../../utils/version-check.js"; +import { makeWikiLogger } from "../../utils/wiki-log.js"; const log = (msg: string) => _log("codex-session-setup", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); -const GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; -const VERSION_CHECK_TIMEOUT = 3000; - -const HOME = homedir(); -const WIKI_LOG = join(HOME, ".codex", "hooks", "deeplake-wiki.log"); - -export function wikiLog(msg: string): void { - try { - mkdirSync(join(HOME, ".codex", "hooks"), { recursive: true }); - appendFileSync(WIKI_LOG, `[${new Date().toISOString().replace("T", " ").slice(0, 19)}] ${msg}\n`); - } catch { /* ignore */ } -} +const { log: wikiLog } = makeWikiLogger(join(homedir(), ".codex", "hooks")); -export interface CodexSessionStartInput { - session_id: string; - transcript_path?: string | null; - cwd: string; - hook_event_name: string; - model: string; - source?: string; -} - -export async function createPlaceholder( - api: DeeplakeApi, - table: string, - sessionId: string, - cwd: string, - userName: string, - orgName: string, - workspaceId: string, -): Promise { +/** Create a placeholder summary via direct SQL INSERT. */ +async function createPlaceholder(api: DeeplakeApi, table: string, sessionId: string, cwd: string, userName: string, orgName: string, workspaceId: string): Promise { const summaryPath = `/summaries/${userName}/${sessionId}.md`; const existing = await api.query( @@ -76,7 +36,7 @@ export async function createPlaceholder( } const now = new Date().toISOString(); - const projectName = cwd.split("/").pop() || "unknown"; + const projectName = cwd.split("/").pop() ?? 
"unknown"; const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; const content = [ `# Session ${sessionId}`, @@ -97,124 +57,61 @@ export async function createPlaceholder( wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`); } -interface CodexSessionStartSetupDeps { - wikiWorker?: boolean; - creds?: ReturnType; - saveCredentialsFn?: typeof saveCredentials; - config?: ReturnType; - createApi?: (config: NonNullable>) => DeeplakeApi; - captureEnabled?: boolean; - drainSessionQueuesFn?: typeof drainSessionQueues; - isSessionWriteDisabledFn?: typeof isSessionWriteDisabled; - isSessionWriteAuthErrorFn?: typeof isSessionWriteAuthError; - markSessionWriteDisabledFn?: typeof markSessionWriteDisabled; - tryAcquireSessionDrainLockFn?: typeof tryAcquireSessionDrainLock; - createPlaceholderFn?: typeof createPlaceholder; - getInstalledVersionFn?: typeof getInstalledVersion; - getLatestVersionCachedFn?: typeof getLatestVersionCached; - isNewerFn?: typeof isNewer; - execSyncFn?: typeof execSync; - logFn?: (msg: string) => void; - wikiLogFn?: typeof wikiLog; +interface CodexSessionStartInput { + session_id: string; + transcript_path?: string | null; + cwd: string; + hook_event_name: string; + model: string; + source?: string; } -export async function runCodexSessionStartSetup(input: CodexSessionStartInput, deps: CodexSessionStartSetupDeps = {}): Promise<{ - status: "skipped" | "no_credentials" | "complete"; -}> { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", - creds = loadCredentials(), - saveCredentialsFn = saveCredentials, - config = loadConfig(), - createApi = (activeConfig) => new DeeplakeApi( - activeConfig.token, - activeConfig.apiUrl, - activeConfig.orgId, - activeConfig.workspaceId, - activeConfig.tableName, - ), - captureEnabled = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false", - drainSessionQueuesFn = drainSessionQueues, - isSessionWriteDisabledFn = isSessionWriteDisabled, - isSessionWriteAuthErrorFn = isSessionWriteAuthError, - markSessionWriteDisabledFn = markSessionWriteDisabled, - tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, - createPlaceholderFn = createPlaceholder, - getInstalledVersionFn = getInstalledVersion, - getLatestVersionCachedFn = getLatestVersionCached, - isNewerFn = isNewer, - execSyncFn = execSync, - logFn = log, - wikiLogFn = wikiLog, - } = deps; +async function main(): Promise { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; - if (wikiWorker) return { status: "skipped" }; - if (!creds?.token) { - logFn("no credentials"); - return { status: "no_credentials" }; - } + const input = await readStdin(); + const creds = loadCredentials(); + if (!creds?.token) { log("no credentials"); return; } + // Backfill userName if missing if (!creds.userName) { try { const { userInfo } = await import("node:os"); creds.userName = userInfo().username ?? 
"unknown"; - saveCredentialsFn(creds); - logFn(`backfilled userName: ${creds.userName}`); + saveCredentials(creds); + log(`backfilled userName: ${creds.userName}`); } catch { /* non-fatal */ } } - if (input.session_id && config) { + // Table setup + sync — always sync, only skip placeholder when capture disabled + const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false"; + if (input.session_id) { try { - const api = createApi(config); - await api.ensureTable(); - if (captureEnabled) { - if (isSessionWriteDisabledFn(config.sessionsTableName)) { - logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`); - } else { - const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName); - if (!releaseDrainLock) { - logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`); - } else { - try { - await api.ensureSessionsTable(config.sessionsTableName); - const drain = await drainSessionQueuesFn(api, { - sessionsTable: config.sessionsTableName, - }); - if (drain.flushedSessions > 0) { - logFn(`drained ${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`); - } - } catch (e: any) { - if (isSessionWriteAuthErrorFn(e)) { - markSessionWriteDisabledFn(config.sessionsTableName, e.message); - logFn(`sessions table unavailable, skipping setup: ${e.message}`); - } else { - throw e; - } - } finally { - releaseDrainLock(); - } - } + const config = loadConfig(); + if (config) { + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName); + await api.ensureTable(); + await api.ensureSessionsTable(config.sessionsTableName); + if (captureEnabled) { + await createPlaceholder(api, config.tableName, input.session_id, input.cwd ?? "", config.userName, config.orgName, config.workspaceId); } - await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? 
"", config.userName, config.orgName, config.workspaceId); + log("setup complete"); } - logFn("setup complete"); } catch (e: any) { - logFn(`setup failed: ${e.message}`); - wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`); + log(`setup failed: ${e.message}`); + wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`); } } + // Version check + auto-update const autoupdate = creds.autoupdate !== false; try { - const current = getInstalledVersionFn(__bundleDir, ".codex-plugin"); + const current = getInstalledVersion(__bundleDir, ".codex-plugin"); if (current) { - const latest = await getLatestVersionCachedFn({ - url: GITHUB_RAW_PKG, - timeoutMs: VERSION_CHECK_TIMEOUT, - }); - if (latest && isNewerFn(latest, current)) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { if (autoupdate) { - logFn(`autoupdate: updating ${current} → ${latest}`); + log(`autoupdate: updating ${current} → ${latest}`); try { const tag = `v${latest}`; if (!/^v\d+\.\d+\.\d+$/.test(tag)) throw new Error(`unsafe version tag: ${tag}`); @@ -227,35 +124,24 @@ export async function runCodexSessionStartSetup(input: CodexSessionStartInput, d `git clone --depth 1 --branch ${tag} -q https://github.com/activeloopai/hivemind.git "$TMPDIR/hivemind" 2>/dev/null && ` + `cp -r "$TMPDIR/hivemind/codex/"* "$INSTALL_DIR/" 2>/dev/null; ` + `rm -rf "$TMPDIR"; fi`; - execSyncFn(findCmd, { stdio: "ignore", timeout: 60_000 }); + execSync(findCmd, { stdio: "ignore", timeout: 60_000 }); process.stderr.write(`Hivemind auto-updated: ${current} → ${latest}. Restart Codex to apply.\n`); - logFn(`autoupdate succeeded: ${current} → ${latest} (tag: ${tag})`); + log(`autoupdate succeeded: ${current} → ${latest} (tag: ${tag})`); } catch (e: any) { process.stderr.write(`Hivemind update available: ${current} → ${latest}. 
Auto-update failed.\n`); - logFn(`autoupdate failed: ${e.message}`); + log(`autoupdate failed: ${e.message}`); } } else { process.stderr.write(`Hivemind update available: ${current} → ${latest}.\n`); - logFn(`update available (autoupdate off): ${current} → ${latest}`); + log(`update available (autoupdate off): ${current} → ${latest}`); } } else { - logFn(`version up to date: ${current}`); + log(`version up to date: ${current}`); } } } catch (e: any) { - logFn(`version check failed: ${e.message}`); + log(`version check failed: ${e.message}`); } - - return { status: "complete" }; } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runCodexSessionStartSetup(input); -} - -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/codex/session-start.ts b/src/hooks/codex/session-start.ts index 3873a3b..81d25e4 100644 --- a/src/hooks/codex/session-start.ts +++ b/src/hooks/codex/session-start.ts @@ -5,6 +5,9 @@ * Only reads local credentials and injects context into Codex's developer prompt. * All server calls (table setup, placeholder, version check) are handled by * session-start-setup.js which runs as a separate async hook. 
+ * + * Codex input: { session_id, transcript_path, cwd, hook_event_name, model, source } + * Codex output: plain text on stdout (added as developer context) */ import { spawn } from "node:child_process"; @@ -13,32 +16,20 @@ import { dirname, join } from "node:path"; import { loadCredentials } from "../../commands/auth.js"; import { readStdin } from "../../utils/stdin.js"; import { log as _log } from "../../utils/debug.js"; -import { isDirectRun } from "../../utils/direct-run.js"; -import { getInstalledVersion } from "../version-check.js"; - +import { getInstalledVersion } from "../../utils/version-check.js"; const log = (msg: string) => _log("codex-session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); const AUTH_CMD = join(__bundleDir, "commands", "auth-login.js"); -export const CODEX_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents. +const context = `DEEPLAKE MEMORY: Persistent memory at ~/.deeplake/memory/ shared across sessions, users, and agents. -Structure: index.md (start here) → summaries/*.md → sessions/{author}/* (last resort). Do NOT jump straight to raw session files. -When index.md identifies a likely match, read that exact summary or session path directly before broader grep variants. -If index.md already points to likely candidate files, open those exact files before broader synonym greps or wide exploratory scans. -Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots for Deeplake recall tasks. -TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date. 
-TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding. -ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging. -NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files. -NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent. -SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases. -RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found." +Structure: index.md (start here) → summaries/*.md → sessions/*.jsonl (last resort). Do NOT jump straight to JSONL. Search: grep -r "keyword" ~/.deeplake/memory/ IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, sed, awk, etc.) to interact with ~/.deeplake/memory/. 
Do NOT use python, python3, node, curl, or other interpreters — they are not available in the memory filesystem. Do NOT spawn subagents to read deeplake memory.`; -export interface CodexSessionStartInput { +interface CodexSessionStartInput { session_id: string; transcript_path?: string | null; cwd: string; @@ -47,70 +38,48 @@ export interface CodexSessionStartInput { source?: string; } -export function buildCodexSessionStartContext(args: { - creds: ReturnType; - currentVersion: string | null; - authCommand: string; -}): string { - const versionNotice = args.currentVersion ? `\nHivemind v${args.currentVersion}` : ""; - return args.creds?.token - ? `${CODEX_SESSION_START_CONTEXT}\nLogged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${versionNotice}` - : `${CODEX_SESSION_START_CONTEXT}\nNot logged in to Deeplake. Run: node "${args.authCommand}" login${versionNotice}`; -} - -interface CodexSessionStartDeps { - wikiWorker?: boolean; - creds?: ReturnType; - spawnFn?: typeof spawn; - currentVersion?: string | null; - authCommand?: string; - setupScript?: string; - logFn?: (msg: string) => void; -} +async function main(): Promise { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; -export async function runCodexSessionStartHook(input: CodexSessionStartInput, deps: CodexSessionStartDeps = {}): Promise { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", - creds = loadCredentials(), - spawnFn = spawn, - currentVersion = getInstalledVersion(__bundleDir, ".codex-plugin"), - authCommand = AUTH_CMD, - setupScript = join(__bundleDir, "session-start-setup.js"), - logFn = log, - } = deps; + const input = await readStdin(); - if (wikiWorker) return null; + const creds = loadCredentials(); - if (!creds?.token) logFn("no credentials found — run auth login to authenticate"); - else logFn(`credentials loaded: org=${creds.orgName ?? 
creds.orgId}`); + if (!creds?.token) { + log("no credentials found — run auth login to authenticate"); + } else { + log(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); + } + // Spawn async setup (table creation, placeholder, version check) as detached process. + // Codex doesn't support async hooks, so we use the same pattern as the wiki worker. if (creds?.token) { - const child = spawnFn("node", [setupScript], { + const setupScript = join(__bundleDir, "session-start-setup.js"); + const child = spawn("node", [setupScript], { detached: true, stdio: ["pipe", "ignore", "ignore"], env: { ...process.env }, }); + // Feed the same stdin input to the setup process child.stdin?.write(JSON.stringify(input)); child.stdin?.end(); child.unref(); - logFn("spawned async setup process"); + log("spawned async setup process"); } - return buildCodexSessionStartContext({ - creds, - currentVersion, - authCommand, - }); -} + let versionNotice = ""; + const current = getInstalledVersion(__bundleDir, ".codex-plugin"); + if (current) { + versionNotice = `\nHivemind v${current}`; + } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - const output = await runCodexSessionStartHook(input); - if (output) console.log(output); -} + const additionalContext = creds?.token + ? `${context}\nLogged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${versionNotice}` + : `${context}\nNot logged in to Deeplake. Run: node "${AUTH_CMD}" login${versionNotice}`; -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); + // Codex SessionStart: plain text on stdout is added as developer context. + // JSON { additionalContext } format is rejected by Codex 0.118.0. 
+ console.log(additionalContext); } -/* c8 ignore stop */ + +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/codex/spawn-wiki-worker.ts b/src/hooks/codex/spawn-wiki-worker.ts index d7c57e1..06bc89e 100644 --- a/src/hooks/codex/spawn-wiki-worker.ts +++ b/src/hooks/codex/spawn-wiki-worker.ts @@ -6,12 +6,14 @@ import { spawn, execSync } from "node:child_process"; import { fileURLToPath } from "node:url"; import { dirname, join } from "node:path"; -import { writeFileSync, mkdirSync, appendFileSync } from "node:fs"; +import { writeFileSync, mkdirSync } from "node:fs"; import { homedir, tmpdir } from "node:os"; import type { Config } from "../../config.js"; +import { makeWikiLogger } from "../../utils/wiki-log.js"; const HOME = homedir(); -export const WIKI_LOG = join(HOME, ".codex", "hooks", "deeplake-wiki.log"); +const wikiLogger = makeWikiLogger(join(HOME, ".codex", "hooks")); +export const WIKI_LOG = wikiLogger.path; export const WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge — entities, decisions, relationships, and facts — into a structured, searchable wiki entry. @@ -63,12 +65,7 @@ IMPORTANT: Be exhaustive. Extract EVERY entity, decision, and fact. PRIVACY: Never include absolute filesystem paths in the summary. 
LENGTH LIMIT: Keep the total summary under 4000 characters.`; -export function wikiLog(msg: string): void { - try { - mkdirSync(join(HOME, ".codex", "hooks"), { recursive: true }); - appendFileSync(WIKI_LOG, `[${new Date().toISOString().replace("T", " ").slice(0, 19)}] ${msg}\n`); - } catch { /* ignore */ } -} +export const wikiLog = wikiLogger.log; export function findCodexBin(): string { try { diff --git a/src/hooks/codex/stop.ts b/src/hooks/codex/stop.ts index 9118f97..39eb330 100644 --- a/src/hooks/codex/stop.ts +++ b/src/hooks/codex/stop.ts @@ -4,27 +4,26 @@ * Codex Stop hook — handles both capture and session-end (wiki summary spawn). * * Codex has no SessionEnd event, so this hook does double duty: - * 1. Captures the stop event to the sessions table - * 2. Spawns the wiki worker to generate the session summary + * 1. Captures the stop event to the sessions table (like capture.ts) + * 2. Spawns the wiki worker to generate the session summary (like session-end.ts) + * + * Codex input: { session_id, transcript_path, cwd, hook_event_name, model } + * Codex output: JSON with optional { decision: "block", reason: "..." 
} to continue */ import { readFileSync, existsSync } from "node:fs"; import { readStdin } from "../../utils/stdin.js"; -import { loadConfig, type Config } from "../../config.js"; +import { loadConfig } from "../../config.js"; import { DeeplakeApi } from "../../deeplake-api.js"; +import { sqlStr } from "../../utils/sql.js"; import { log as _log } from "../../utils/debug.js"; -import { isDirectRun } from "../../utils/direct-run.js"; import { bundleDirFromImportMeta, spawnCodexWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { - appendQueuedSessionRow, - buildQueuedSessionRow, - buildSessionPath, - flushSessionQueue, -} from "../session-queue.js"; +import { tryAcquireLock, releaseLock } from "../summary-state.js"; +import { buildSessionPath } from "../../utils/session-path.js"; const log = (msg: string) => _log("codex-stop", msg); -export interface CodexStopInput { +interface CodexStopInput { session_id: string; transcript_path?: string | null; cwd: string; @@ -32,162 +31,122 @@ export interface CodexStopInput { model: string; } -const CAPTURE = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false"; +const CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; -export function extractLastAssistantMessage(transcript: string): string { - const lines = transcript.trim().split("\n").reverse(); - for (const line of lines) { - try { - const entry = JSON.parse(line); - const msg = entry.payload ?? entry; - if (msg.role === "assistant" && msg.content) { - const content = typeof msg.content === "string" - ? msg.content - : Array.isArray(msg.content) - ? 
msg.content - .filter((b: any) => b.type === "output_text" || b.type === "text") - .map((b: any) => b.text) - .join("\n") - : ""; - if (content) return content.slice(0, 4000); - } - } catch { /* skip malformed line */ } - } - return ""; -} - -export function buildCodexStopEntry(input: CodexStopInput, timestamp: string, lastAssistantMessage: string): Record { - return { - id: crypto.randomUUID(), - session_id: input.session_id, - transcript_path: input.transcript_path, - cwd: input.cwd, - hook_event_name: input.hook_event_name, - model: input.model, - timestamp, - type: lastAssistantMessage ? "assistant_message" : "assistant_stop", - content: lastAssistantMessage, - }; -} - -interface CodexStopDeps { - wikiWorker?: boolean; - captureEnabled?: boolean; - config?: Config | null; - now?: () => string; - transcriptExists?: (path: string) => boolean; - readTranscript?: (path: string) => string; - createApi?: (config: Config) => DeeplakeApi; - appendQueuedSessionRowFn?: typeof appendQueuedSessionRow; - buildQueuedSessionRowFn?: typeof buildQueuedSessionRow; - flushSessionQueueFn?: typeof flushSessionQueue; - spawnCodexWikiWorkerFn?: typeof spawnCodexWikiWorker; - wikiLogFn?: typeof wikiLog; - bundleDir?: string; - logFn?: (msg: string) => void; -} +async function main(): Promise { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; -export async function runCodexStopHook(input: CodexStopInput, deps: CodexStopDeps = {}): Promise<{ - status: "skipped" | "no_config" | "complete"; - flushStatus?: string; - entry?: Record; -}> { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? 
process.env.DEEPLAKE_WIKI_WORKER) === "1", - captureEnabled = CAPTURE, - config = loadConfig(), - now = () => new Date().toISOString(), - transcriptExists = existsSync, - readTranscript = (path) => readFileSync(path, "utf-8"), - createApi = (activeConfig) => new DeeplakeApi( - activeConfig.token, - activeConfig.apiUrl, - activeConfig.orgId, - activeConfig.workspaceId, - activeConfig.sessionsTableName, - ), - appendQueuedSessionRowFn = appendQueuedSessionRow, - buildQueuedSessionRowFn = buildQueuedSessionRow, - flushSessionQueueFn = flushSessionQueue, - spawnCodexWikiWorkerFn = spawnCodexWikiWorker, - wikiLogFn = wikiLog, - bundleDir = bundleDirFromImportMeta(import.meta.url), - logFn = log, - } = deps; - - if (wikiWorker || !input.session_id) return { status: "skipped" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } + const input = await readStdin(); + const sessionId = input.session_id; + if (!sessionId) return; - let entry: Record | undefined; - let flushStatus: string | undefined; + const config = loadConfig(); + if (!config) { log("no config"); return; } - if (captureEnabled) { + // 1. Capture the stop event (try to extract last assistant message from transcript) + if (CAPTURE) { try { - const ts = now(); + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, sessionsTable); + const ts = new Date().toISOString(); + + // Codex Stop doesn't include last_assistant_message, but it provides + // transcript_path. Try to extract the last assistant message from it. 
let lastAssistantMessage = ""; if (input.transcript_path) { try { - if (transcriptExists(input.transcript_path)) { - lastAssistantMessage = extractLastAssistantMessage(readTranscript(input.transcript_path)); - if (lastAssistantMessage) { - logFn(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`); + const transcriptPath = input.transcript_path; + if (existsSync(transcriptPath)) { + const transcript = readFileSync(transcriptPath, "utf-8"); + // Codex transcript is JSONL with format: + // {"type":"response_item","payload":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"..."}]}} + const lines = transcript.trim().split("\n").reverse(); + for (const line of lines) { + try { + const entry = JSON.parse(line); + // Codex nests the message inside payload + const msg = entry.payload ?? entry; + if (msg.role === "assistant" && msg.content) { + const content = typeof msg.content === "string" + ? msg.content + : Array.isArray(msg.content) + ? msg.content.filter((b: any) => b.type === "output_text" || b.type === "text").map((b: any) => b.text).join("\n") + : ""; + if (content) { + lastAssistantMessage = content.slice(0, 4000); + break; + } + } + } catch { /* skip malformed line */ } } + if (lastAssistantMessage) log(`extracted assistant message from transcript (${lastAssistantMessage.length} chars)`); } } catch (e: any) { - logFn(`transcript read failed: ${e.message}`); + log(`transcript read failed: ${e.message}`); } } - entry = buildCodexStopEntry(input, ts, lastAssistantMessage); + const entry = { + id: crypto.randomUUID(), + session_id: sessionId, + transcript_path: input.transcript_path, + cwd: input.cwd, + hook_event_name: input.hook_event_name, + model: input.model, + timestamp: ts, + type: lastAssistantMessage ? 
"assistant_message" : "assistant_stop", + content: lastAssistantMessage, + }; const line = JSON.stringify(entry); - const sessionPath = buildSessionPath(config, input.session_id); + const sessionPath = buildSessionPath(config, sessionId); const projectName = (input.cwd ?? "").split("/").pop() || "unknown"; - appendQueuedSessionRowFn(buildQueuedSessionRowFn({ - sessionPath, - line, - userName: config.userName, - projectName, - description: "Stop", - agent: "codex", - timestamp: ts, - })); - - const flush = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - drainAll: true, - }); - flushStatus = flush.status; - logFn(`stop flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`); + const filename = sessionPath.split("/").pop() ?? ""; + const jsonForSql = sqlStr(line); + + const insertSql = + `INSERT INTO "${sessionsTable}" (id, path, filename, message, author, size_bytes, project, description, agent, creation_date, last_update_date) ` + + `VALUES ('${crypto.randomUUID()}', '${sqlStr(sessionPath)}', '${sqlStr(filename)}', '${jsonForSql}'::jsonb, '${sqlStr(config.userName)}', ` + + `${Buffer.byteLength(line, "utf-8")}, '${sqlStr(projectName)}', 'Stop', 'codex', '${ts}', '${ts}')`; + + await api.query(insertSql); + log("stop event captured"); } catch (e: any) { - logFn(`capture failed: ${e.message}`); + log(`capture failed: ${e.message}`); } } - if (!captureEnabled) return { status: "complete", entry }; + // 2. Spawn wiki worker — skip when capture disabled + if (!CAPTURE) return; - wikiLogFn(`Stop: triggering summary for ${input.session_id}`); - spawnCodexWikiWorkerFn({ - config, - sessionId: input.session_id, - cwd: input.cwd ?? "", - bundleDir, - reason: "Stop", - }); - - return { status: "complete", flushStatus, entry }; -} + // Coordinate with the periodic worker: if one is already running for this + // session, skip. 
Two workers writing the same summary row trip the + // Deeplake UPDATE-coalescing quirk (see CLAUDE.md) and drop one write. + if (!tryAcquireLock(sessionId)) { + wikiLog(`Stop: periodic worker already running for ${sessionId}, skipping`); + return; + } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runCodexStopHook(input); + wikiLog(`Stop: triggering summary for ${sessionId}`); + try { + spawnCodexWikiWorker({ + config, + sessionId, + cwd: input.cwd ?? "", + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "Stop", + }); + } catch (e: any) { + // Spawn threw before the worker took ownership of the lock: release + // it here so a --resume can retrigger periodic summaries without + // waiting for the 10-minute stale reclaim. + log(`spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr: any) { + log(`releaseLock after spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/codex/wiki-worker.ts b/src/hooks/codex/wiki-worker.ts index a7c50f8..7d74f75 100644 --- a/src/hooks/codex/wiki-worker.ts +++ b/src/hooks/codex/wiki-worker.ts @@ -12,6 +12,9 @@ import { execFileSync } from "node:child_process"; import { join } from "node:path"; import { finalizeSummary, releaseLock } from "../summary-state.js"; import { uploadSummary } from "../upload-summary.js"; +import { log as _log } from "../../utils/debug.js"; + +const dlog = (msg: string) => _log("codex-wiki-worker", msg); interface WorkerConfig { apiUrl: string; @@ -88,7 +91,11 @@ async function query(sql: string, retries = 4): Promise[ } function cleanup(): void { - try { rmSync(tmpDir, { recursive: true, force: true }); } catch { /* ignore */ } + try { + rmSync(tmpDir, { recursive: 
true, force: true }); + } catch (cleanupErr: any) { + dlog(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`); + } } async function main(): Promise { @@ -196,7 +203,11 @@ async function main(): Promise { wlog(`fatal: ${e.message}`); } finally { cleanup(); - try { releaseLock(cfg.sessionId); } catch { /* ignore */ } + try { + releaseLock(cfg.sessionId); + } catch (releaseErr: any) { + dlog(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`); + } } } diff --git a/src/hooks/grep-direct.ts b/src/hooks/grep-direct.ts index 77427bf..95e15d9 100644 --- a/src/hooks/grep-direct.ts +++ b/src/hooks/grep-direct.ts @@ -7,6 +7,7 @@ import type { DeeplakeApi } from "../deeplake-api.js"; import { grepBothTables, type GrepMatchParams } from "../shell/grep-core.js"; +import { capOutputForClaude } from "../utils/output-cap.js"; export interface GrepParams { pattern: string; @@ -229,5 +230,6 @@ export async function handleGrepDirect( }; const output = await grepBothTables(api, table, sessionsTable, matchParams, params.targetPath); - return output.join("\n") || "(no matches)"; + const joined = output.join("\n") || "(no matches)"; + return capOutputForClaude(joined, { kind: "grep" }); } diff --git a/src/hooks/pre-tool-use.ts b/src/hooks/pre-tool-use.ts index 2dc6498..34c45db 100644 --- a/src/hooks/pre-tool-use.ts +++ b/src/hooks/pre-tool-use.ts @@ -1,7 +1,8 @@ #!/usr/bin/env node -import { existsSync } from "node:fs"; -import { join, dirname } from "node:path"; +import { existsSync, mkdirSync, writeFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { join, dirname, sep } from "node:path"; import { fileURLToPath } from "node:url"; import { readStdin } from "../utils/stdin.js"; import { loadConfig } from "../config.js"; @@ -22,6 +23,7 @@ import { writeCachedIndexContent, } from "./query-cache.js"; import { isSafe, touchesMemory, rewritePaths } from "./memory-path-utils.js"; +import { capOutputForClaude } from "../utils/output-cap.js"; 
export { isSafe, touchesMemory, rewritePaths }; @@ -42,6 +44,50 @@ export interface PreToolUseInput { export interface ClaudePreToolDecision { command: string; description: string; + /** + * When set, main() emits the hook response as `updatedInput: {file_path}` + * instead of `updatedInput: {command, description}`. This is required for + * Read-tool intercepts: Claude Code's Read implementation reads + * `updatedInput.file_path` and errors with "path must be of type string, + * got undefined" if the hook hands it the Bash-shaped input. + */ + file_path?: string; +} + +const READ_CACHE_ROOT = join(homedir(), ".deeplake", "query-cache"); + +/** + * Materialize fetched content for a Read intercept into a real file on disk + * so Claude Code's Read tool can read it via `updatedInput.file_path`. The + * file lives under `~/.deeplake/query-cache//read/` and mirrors + * the virtual path structure (e.g. `/sessions/conv_0_session_1.json` → + * `.../read/sessions/conv_0_session_1.json`). Per-session dirs are cleaned + * alongside the index cache at session end. + */ +export function writeReadCacheFile( + sessionId: string, + virtualPath: string, + content: string, + deps: { cacheRoot?: string } = {}, +): string { + const { cacheRoot = READ_CACHE_ROOT } = deps; + const safeSessionId = sessionId.replace(/[^a-zA-Z0-9._-]/g, "_") || "unknown"; + const rel = virtualPath.replace(/^\/+/, "") || "content"; + const expectedRoot = join(cacheRoot, safeSessionId, "read"); + const absPath = join(expectedRoot, rel); + // Containment guard: if the DB-derived virtualPath contains `..` segments, + // `join` resolves them and absPath can escape the per-session cache dir. + // Refuse the write rather than silently writing outside the sandbox. 
+ if (absPath !== expectedRoot && !absPath.startsWith(expectedRoot + sep)) { + throw new Error(`writeReadCacheFile: path escapes cache root: ${absPath}`); + } + mkdirSync(dirname(absPath), { recursive: true }); + writeFileSync(absPath, content, "utf-8"); + return absPath; +} + +export function buildReadDecision(file_path: string, description: string): ClaudePreToolDecision { + return { command: "", description, file_path }; } function getReadTargetPath(toolInput: Record): string | null { @@ -141,6 +187,7 @@ interface ClaudePreToolDeps { findVirtualPathsFn?: typeof findVirtualPaths; readCachedIndexContentFn?: typeof readCachedIndexContent; writeCachedIndexContentFn?: typeof writeCachedIndexContent; + writeReadCacheFileFn?: typeof writeReadCacheFile; shellBundle?: string; logFn?: (msg: string) => void; } @@ -163,6 +210,7 @@ export async function processPreToolUse(input: PreToolUseInput, deps: ClaudePreT findVirtualPathsFn = findVirtualPaths, readCachedIndexContentFn = readCachedIndexContent, writeCachedIndexContentFn = writeCachedIndexContent, + writeReadCacheFileFn = writeReadCacheFile, shellBundle = SHELL_BUNDLE, logFn = log, } = deps; @@ -288,22 +336,12 @@ export async function processPreToolUse(input: PreToolUseInput, deps: ClaudePreT : null; if (content === null) { + // `/index.md` goes through the dual-table builder inside + // `readVirtualPathContents` (fix #1). Other paths fall back to the + // same helper which returns null when neither table has a row, at + // which point we let the shell bundle handle the miss below. 
content = await readVirtualPathContentFn(api, table, sessionsTable, virtualPath); } - if (content === null && virtualPath === "/index.md") { - const idxRows = await api.query( - `SELECT path, project, description, creation_date FROM "${table}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC` - ); - const lines = ["# Memory Index", "", `${idxRows.length} sessions:`, ""]; - for (const r of idxRows) { - const p = r["path"] as string; - const proj = r["project"] as string || ""; - const desc = (r["description"] as string || "").slice(0, 120); - const date = (r["creation_date"] as string || "").slice(0, 10); - lines.push(`- [${p}](${p}) ${date} ${proj ? `[${proj}]` : ""} ${desc}`); - } - content = lines.join("\n"); - } if (content !== null) { if (virtualPath === "/index.md") { writeCachedIndexContentFn(input.session_id, content); @@ -314,7 +352,15 @@ export async function processPreToolUse(input: PreToolUseInput, deps: ClaudePreT content = fromEnd ? lines.slice(-lineLimit).join("\n") : lines.slice(0, lineLimit).join("\n"); } const label = lineLimit > 0 ? (fromEnd ? `tail -${lineLimit}` : `head -${lineLimit}`) : "cat"; - return buildAllowDecision(`echo ${JSON.stringify(content)}`, `[DeepLake direct] ${label} ${virtualPath}`); + // Read tool writes content to disk and Claude Code reads the file directly, + // so no size pressure; keep full content. Bash intercepts flow through + // Claude Code's 16 KB tool_result threshold so we cap before reaching it. + if (input.tool_name === "Read") { + const file_path = writeReadCacheFileFn(input.session_id, virtualPath, content); + return buildReadDecision(file_path, `[DeepLake direct] ${label} ${virtualPath}`); + } + const capped = capOutputForClaude(content, { kind: label }); + return buildAllowDecision(`echo ${JSON.stringify(capped)}`, `[DeepLake direct] ${label} ${virtualPath}`); } } @@ -358,7 +404,8 @@ export async function processPreToolUse(input: PreToolUseInput, deps: ClaudePreT lines.push(name + (info.isDir ? 
"/" : "")); } } - return buildAllowDecision(`echo ${JSON.stringify(lines.join("\n") || "(empty directory)")}`, `[DeepLake direct] ls ${dir}`); + const lsOutput = capOutputForClaude(lines.join("\n") || "(empty directory)", { kind: "ls" }); + return buildAllowDecision(`echo ${JSON.stringify(lsOutput)}`, `[DeepLake direct] ls ${dir}`); } if (input.tool_name === "Bash") { @@ -370,7 +417,8 @@ export async function processPreToolUse(input: PreToolUseInput, deps: ClaudePreT const paths = await findVirtualPathsFn(api, table, sessionsTable, dir, namePattern); let result = paths.join("\n") || ""; if (/\|\s*wc\s+-l\s*$/.test(shellCmd)) result = String(paths.length); - return buildAllowDecision(`echo ${JSON.stringify(result || "(no matches)")}`, `[DeepLake direct] find ${dir}`); + const capped = capOutputForClaude(result || "(no matches)", { kind: "find" }); + return buildAllowDecision(`echo ${JSON.stringify(capped)}`, `[DeepLake direct] find ${dir}`); } } } catch (e: any) { @@ -385,11 +433,14 @@ async function main(): Promise { const input = await readStdin(); const decision = await processPreToolUse(input); if (!decision) return; + const updatedInput: Record = decision.file_path !== undefined + ? { file_path: decision.file_path } + : { command: decision.command, description: decision.description }; console.log(JSON.stringify({ hookSpecificOutput: { hookEventName: "PreToolUse", permissionDecision: "allow", - updatedInput: decision, + updatedInput, }, })); } diff --git a/src/hooks/session-end.ts b/src/hooks/session-end.ts index 6b163a6..751669a 100644 --- a/src/hooks/session-end.ts +++ b/src/hooks/session-end.ts @@ -1,95 +1,68 @@ #!/usr/bin/env node /** - * SessionEnd hook — flushes any queued session rows, then spawns the summary worker. + * SessionEnd hook — spawns a background worker that builds the session summary. * - * The queue flush is synchronous so the worker sees the latest turn. - * All heavy summary work happens in the detached wiki-worker process. 
+ * The hook writes a config file and spawns the bundled wiki-worker.js process. + * It exits immediately — no API calls, no timeout risk. + * All heavy work (fetching events, running claude -p, uploading) happens in the worker. */ import { readStdin } from "../utils/stdin.js"; -import { loadConfig, type Config } from "../config.js"; -import { DeeplakeApi } from "../deeplake-api.js"; +import { loadConfig } from "../config.js"; import { log as _log } from "../utils/debug.js"; -import { isDirectRun } from "../utils/direct-run.js"; import { bundleDirFromImportMeta, spawnWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { flushSessionQueue } from "./session-queue.js"; +import { tryAcquireLock, releaseLock } from "./summary-state.js"; const log = (msg: string) => _log("session-end", msg); -export interface StopInput { +interface StopInput { session_id: string; cwd?: string; hook_event_name?: string; } -interface SessionEndDeps { - wikiWorker?: boolean; - captureEnabled?: boolean; - config?: Config | null; - createApi?: (config: Config) => DeeplakeApi; - flushSessionQueueFn?: typeof flushSessionQueue; - spawnWikiWorkerFn?: typeof spawnWikiWorker; - wikiLogFn?: typeof wikiLog; - bundleDir?: string; - logFn?: (msg: string) => void; -} - -export async function runSessionEndHook(input: StopInput, deps: SessionEndDeps = {}): Promise<{ - status: "skipped" | "no_config" | "flushed"; - flushStatus?: string; -}> { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", - captureEnabled = (process.env.HIVEMIND_CAPTURE ?? 
process.env.DEEPLAKE_CAPTURE) !== "false", - config = loadConfig(), - createApi = (activeConfig) => new DeeplakeApi( - activeConfig.token, - activeConfig.apiUrl, - activeConfig.orgId, - activeConfig.workspaceId, - activeConfig.sessionsTableName, - ), - flushSessionQueueFn = flushSessionQueue, - spawnWikiWorkerFn = spawnWikiWorker, - wikiLogFn = wikiLog, - bundleDir = bundleDirFromImportMeta(import.meta.url), - logFn = log, - } = deps; - - if (wikiWorker || !captureEnabled || !input.session_id) return { status: "skipped" }; - if (!config) { - logFn("no config"); - return { status: "no_config" }; - } +async function main(): Promise { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; + if (process.env.HIVEMIND_CAPTURE === "false") return; - const flush = await flushSessionQueueFn(createApi(config), { - sessionId: input.session_id, - sessionsTable: config.sessionsTableName, - waitIfBusyMs: 5000, - drainAll: true, - }); - logFn(`flush ${flush.status}: rows=${flush.rows} batches=${flush.batches}`); + const input = await readStdin(); + const sessionId = input.session_id; + const cwd = input.cwd ?? ""; + if (!sessionId) return; - wikiLogFn(`SessionEnd: triggering summary for ${input.session_id}`); - spawnWikiWorkerFn({ - config, - sessionId: input.session_id, - cwd: input.cwd ?? "", - bundleDir, - reason: "SessionEnd", - }); + const config = loadConfig(); + if (!config) { log("no config"); return; } - return { status: "flushed", flushStatus: flush.status }; -} + // Coordinate with the periodic worker: if one is already running for this + // session, skip. Two workers writing the same summary row trip the + // Deeplake UPDATE-coalescing quirk (see CLAUDE.md) and drop one write. 
+ if (!tryAcquireLock(sessionId)) { + wikiLog(`SessionEnd: periodic worker already running for ${sessionId}, skipping`); + return; + } -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runSessionEndHook(input); + wikiLog(`SessionEnd: triggering summary for ${sessionId}`); + try { + spawnWikiWorker({ + config, + sessionId, + cwd, + bundleDir: bundleDirFromImportMeta(import.meta.url), + reason: "SessionEnd", + }); + } catch (e: any) { + // Spawn threw before the worker took ownership of the lock: release + // it here so a --resume can retrigger periodic summaries without + // waiting for the 10-minute stale reclaim. + log(`spawn failed: ${e.message}`); + try { + releaseLock(sessionId); + } catch (releaseErr: any) { + log(`releaseLock after spawn failure also failed: ${releaseErr.message}`); + } + throw e; + } } -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/session-start-setup.ts b/src/hooks/session-start-setup.ts index a1cb722..f78ceb0 100644 --- a/src/hooks/session-start-setup.ts +++ b/src/hooks/session-start-setup.ts @@ -8,243 +8,89 @@ import { fileURLToPath } from "node:url"; import { dirname, join } from "node:path"; -import { mkdirSync, appendFileSync } from "node:fs"; import { execSync } from "node:child_process"; import { homedir } from "node:os"; import { loadCredentials, saveCredentials } from "../commands/auth.js"; import { loadConfig } from "../config.js"; import { DeeplakeApi } from "../deeplake-api.js"; -import { sqlStr } from "../utils/sql.js"; import { readStdin } from "../utils/stdin.js"; -import { log as _log, utcTimestamp } from "../utils/debug.js"; -import { isDirectRun } from "../utils/direct-run.js"; -import { - drainSessionQueues, - isSessionWriteAuthError, - isSessionWriteDisabled, - markSessionWriteDisabled, - 
tryAcquireSessionDrainLock, -} from "./session-queue.js"; -import { - getInstalledVersion, - getLatestVersionCached, - isNewer, -} from "./version-check.js"; - +import { log as _log } from "../utils/debug.js"; +import { getInstalledVersion, getLatestVersion, isNewer } from "../utils/version-check.js"; +import { makeWikiLogger } from "../utils/wiki-log.js"; const log = (msg: string) => _log("session-setup", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); -const GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; -const VERSION_CHECK_TIMEOUT = 3000; - -const HOME = homedir(); -const WIKI_LOG = join(HOME, ".claude", "hooks", "deeplake-wiki.log"); +const { log: wikiLog } = makeWikiLogger(join(homedir(), ".claude", "hooks")); -export function wikiLog(msg: string): void { - try { - mkdirSync(join(HOME, ".claude", "hooks"), { recursive: true }); - appendFileSync(WIKI_LOG, `[${utcTimestamp()}] ${msg}\n`); - } catch { /* ignore */ } -} - -export interface SessionStartInput { +interface SessionStartInput { session_id: string; cwd?: string; } -export async function createPlaceholder( - api: DeeplakeApi, - table: string, - sessionId: string, - cwd: string, - userName: string, - orgName: string, - workspaceId: string, -): Promise { - const summaryPath = `/summaries/${userName}/${sessionId}.md`; - - const existing = await api.query( - `SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1` - ); - if (existing.length > 0) { - wikiLog(`SessionSetup: summary exists for ${sessionId} (resumed)`); - return; - } - - const now = new Date().toISOString(); - const projectName = cwd.split("/").pop() || "unknown"; - const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; - const content = [ - `# Session ${sessionId}`, - `- **Source**: ${sessionSource}`, - `- **Started**: ${now}`, - `- **Project**: ${projectName}`, - `- **Status**: in-progress`, - "", - 
].join("\n"); - const filename = `${sessionId}.md`; - - await api.query( - `INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) ` + - `VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ` + - `${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')` - ); - - wikiLog(`SessionSetup: created placeholder for ${sessionId} (${cwd})`); -} - -interface SessionStartSetupDeps { - wikiWorker?: boolean; - creds?: ReturnType; - saveCredentialsFn?: typeof saveCredentials; - config?: ReturnType; - createApi?: (config: NonNullable>) => DeeplakeApi; - captureEnabled?: boolean; - drainSessionQueuesFn?: typeof drainSessionQueues; - isSessionWriteDisabledFn?: typeof isSessionWriteDisabled; - isSessionWriteAuthErrorFn?: typeof isSessionWriteAuthError; - markSessionWriteDisabledFn?: typeof markSessionWriteDisabled; - tryAcquireSessionDrainLockFn?: typeof tryAcquireSessionDrainLock; - createPlaceholderFn?: typeof createPlaceholder; - getInstalledVersionFn?: typeof getInstalledVersion; - getLatestVersionCachedFn?: typeof getLatestVersionCached; - isNewerFn?: typeof isNewer; - execSyncFn?: typeof execSync; - logFn?: (msg: string) => void; - wikiLogFn?: typeof wikiLog; -} - -export async function runSessionStartSetup(input: SessionStartInput, deps: SessionStartSetupDeps = {}): Promise<{ - status: "skipped" | "no_credentials" | "complete"; -}> { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? 
process.env.DEEPLAKE_WIKI_WORKER) === "1", - creds = loadCredentials(), - saveCredentialsFn = saveCredentials, - config = loadConfig(), - createApi = (activeConfig) => new DeeplakeApi( - activeConfig.token, - activeConfig.apiUrl, - activeConfig.orgId, - activeConfig.workspaceId, - activeConfig.tableName, - ), - captureEnabled = (process.env.HIVEMIND_CAPTURE ?? process.env.DEEPLAKE_CAPTURE) !== "false", - drainSessionQueuesFn = drainSessionQueues, - isSessionWriteDisabledFn = isSessionWriteDisabled, - isSessionWriteAuthErrorFn = isSessionWriteAuthError, - markSessionWriteDisabledFn = markSessionWriteDisabled, - tryAcquireSessionDrainLockFn = tryAcquireSessionDrainLock, - createPlaceholderFn = createPlaceholder, - getInstalledVersionFn = getInstalledVersion, - getLatestVersionCachedFn = getLatestVersionCached, - isNewerFn = isNewer, - execSyncFn = execSync, - logFn = log, - wikiLogFn = wikiLog, - } = deps; +async function main(): Promise { + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; - if (wikiWorker) return { status: "skipped" }; - if (!creds?.token) { - logFn("no credentials"); - return { status: "no_credentials" }; - } + const input = await readStdin(); + const creds = loadCredentials(); + if (!creds?.token) { log("no credentials"); return; } + // Backfill userName if missing if (!creds.userName) { try { const { userInfo } = await import("node:os"); creds.userName = userInfo().username ?? 
"unknown"; - saveCredentialsFn(creds); - logFn(`backfilled userName: ${creds.userName}`); + saveCredentials(creds); + log(`backfilled userName: ${creds.userName}`); } catch { /* non-fatal */ } } - if (input.session_id && config) { + if (input.session_id) { try { - const api = createApi(config); - await api.ensureTable(); - if (captureEnabled) { - if (isSessionWriteDisabledFn(config.sessionsTableName)) { - logFn(`sessions table disabled, skipping setup for "${config.sessionsTableName}"`); - } else { - const releaseDrainLock = tryAcquireSessionDrainLockFn(config.sessionsTableName); - if (!releaseDrainLock) { - logFn(`sessions drain already in progress, skipping duplicate setup for "${config.sessionsTableName}"`); - } else { - try { - await api.ensureSessionsTable(config.sessionsTableName); - const drain = await drainSessionQueuesFn(api, { - sessionsTable: config.sessionsTableName, - }); - if (drain.flushedSessions > 0) { - logFn(`drained ${drain.flushedSessions} queued session(s), rows=${drain.rows}, batches=${drain.batches}`); - } - } catch (e: any) { - if (isSessionWriteAuthErrorFn(e)) { - markSessionWriteDisabledFn(config.sessionsTableName, e.message); - logFn(`sessions table unavailable, skipping setup: ${e.message}`); - } else { - throw e; - } - } finally { - releaseDrainLock(); - } - } - } - await createPlaceholderFn(api, config.tableName, input.session_id, input.cwd ?? 
"", config.userName, config.orgName, config.workspaceId); + const config = loadConfig(); + if (config) { + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, config.tableName); + await api.ensureTable(); + await api.ensureSessionsTable(config.sessionsTableName); + log("setup complete"); } - logFn("setup complete"); } catch (e: any) { - logFn(`setup failed: ${e.message}`); - wikiLogFn(`SessionSetup: failed for ${input.session_id}: ${e.message}`); + log(`setup failed: ${e.message}`); + wikiLog(`SessionSetup: failed for ${input.session_id}: ${e.message}`); } } + // Version check + auto-update const autoupdate = creds.autoupdate !== false; try { - const current = getInstalledVersionFn(__bundleDir, ".claude-plugin"); + const current = getInstalledVersion(__bundleDir, ".claude-plugin"); if (current) { - const latest = await getLatestVersionCachedFn({ - url: GITHUB_RAW_PKG, - timeoutMs: VERSION_CHECK_TIMEOUT, - }); - if (latest && isNewerFn(latest, current)) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { if (autoupdate) { - logFn(`autoupdate: updating ${current} → ${latest}`); + log(`autoupdate: updating ${current} → ${latest}`); try { const scopes = ["user", "project", "local", "managed"]; const cmd = scopes .map(s => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null`) .join("; "); - execSyncFn(cmd, { stdio: "ignore", timeout: 60_000 }); + execSync(cmd, { stdio: "ignore", timeout: 60_000 }); process.stderr.write(`✅ Hivemind auto-updated: ${current} → ${latest}. Run /reload-plugins to apply.\n`); - logFn(`autoupdate succeeded: ${current} → ${latest}`); + log(`autoupdate succeeded: ${current} → ${latest}`); } catch (e: any) { process.stderr.write(`⬆️ Hivemind update available: ${current} → ${latest}. 
Auto-update failed — run /hivemind:update to upgrade manually.\n`); - logFn(`autoupdate failed: ${e.message}`); + log(`autoupdate failed: ${e.message}`); } } else { process.stderr.write(`⬆️ Hivemind update available: ${current} → ${latest}. Run /hivemind:update to upgrade.\n`); - logFn(`update available (autoupdate off): ${current} → ${latest}`); + log(`update available (autoupdate off): ${current} → ${latest}`); } } else { - logFn(`version up to date: ${current}`); + log(`version up to date: ${current}`); } } } catch (e: any) { - logFn(`version check failed: ${e.message}`); + log(`version check failed: ${e.message}`); } - - return { status: "complete" }; -} - -/* c8 ignore start */ -async function main(): Promise { - const input = await readStdin(); - await runSessionStartSetup(input); } -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/session-start.ts b/src/hooks/session-start.ts index e3ec180..60e402b 100644 --- a/src/hooks/session-start.ts +++ b/src/hooks/session-start.ts @@ -4,30 +4,27 @@ * SessionStart hook: * 1. If no credentials → run device flow login (opens browser) * 2. Inject Deeplake memory instructions into Claude's context - * - * This sync hook stays local-only. All network work (table setup, placeholder, - * queue drain, version refresh, auto-update) runs in session-start-setup.ts. 
*/ import { fileURLToPath } from "node:url"; import { dirname, join } from "node:path"; -import { loadCredentials, saveCredentials } from "../commands/auth.js"; +import { readdirSync, rmSync } from "node:fs"; +import { execSync } from "node:child_process"; +import { homedir } from "node:os"; +import { loadCredentials, saveCredentials, login } from "../commands/auth.js"; +import { loadConfig } from "../config.js"; +import { DeeplakeApi } from "../deeplake-api.js"; +import { sqlStr } from "../utils/sql.js"; import { readStdin } from "../utils/stdin.js"; import { log as _log } from "../utils/debug.js"; -import { isDirectRun } from "../utils/direct-run.js"; -import { - DEFAULT_VERSION_CACHE_TTL_MS, - getInstalledVersion, - isNewer, - readFreshCachedLatestVersion, -} from "./version-check.js"; - +import { getInstalledVersion, getLatestVersion, isNewer } from "../utils/version-check.js"; +import { makeWikiLogger } from "../utils/wiki-log.js"; const log = (msg: string) => _log("session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); const AUTH_CMD = join(__bundleDir, "commands", "auth-login.js"); -export const CLAUDE_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information: +const context = `DEEPLAKE MEMORY: You have TWO memory sources. ALWAYS check BOTH when the user asks you to recall, remember, or look up ANY information: 1. Your built-in memory (~/.claude/) — personal per-project notes 2. 
Deeplake global memory (~/.deeplake/memory/) — global memory shared across all sessions, users, and agents in the org @@ -35,19 +32,9 @@ export const CLAUDE_SESSION_START_CONTEXT = `DEEPLAKE MEMORY: You have TWO memor Deeplake memory structure: - ~/.deeplake/memory/index.md — START HERE, table of all sessions - ~/.deeplake/memory/summaries/username/*.md — AI-generated wiki summaries per session -- ~/.deeplake/memory/sessions/{author}/* — raw session data (last resort) - -SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw session files if summaries don't have enough detail. Do NOT jump straight to raw session files. -When index.md points to a likely match, read that exact summary or session file directly before trying broader grep variants. -If index.md already points to likely candidate files, open those exact files before broadening into synonym greps or wide exploratory scans. -Do NOT probe unrelated local paths such as ~/.claude/projects/, arbitrary home directories, or guessed summary roots when the question is about Deeplake memory. -TEMPORAL GROUNDING: If a summary or transcript uses relative time like "last year", "last week", or "next month", resolve it against that session's own date/date_time metadata, not today's date. -TEMPORAL FOLLOW-THROUGH: If a summary only gives a relative time, open the linked source session and use its date/date_time to convert the final answer into an absolute month/date/year or explicit range before responding. -ANSWER SHAPE: Once you have enough evidence, answer with the smallest exact phrase supported by memory. For identity or relationship questions, use just the noun phrase. For education questions, answer with the likely field or credential directly, not the broader life story. For "when" questions, prefer absolute dates/months/years over relative phrases. Avoid extra biography, explanation, or hedging. 
-NOT-FOUND BAR: Do NOT answer "not found" until you have checked index.md plus at least one likely summary or raw session file for the named person. If keyword grep is empty, grep the person's name alone and inspect the candidate files. -NEGATIVE-EVIDENCE QUESTIONS: For identity, relationship status, and research-topic questions, summaries may omit the exact phrase. If likely summaries are ambiguous, read the candidate raw session transcript and look for positive clues before concluding the answer is absent. -SELF-LABEL PRIORITY: For identity questions, prefer the person's own explicit self-label from the transcript over broader category descriptions or paraphrases. -RELATIONSHIP STATUS INFERENCE: For relationship-status questions, treat explicit self-descriptions about partnership, dating, marriage, or parenting plans as status evidence. If the transcript strongly supports an unpartnered status, answer with the concise status phrase instead of "not found." +- ~/.deeplake/memory/sessions/username/*.jsonl — raw session data (last resort) + +SEARCH STRATEGY: Always read index.md first. Then read specific summaries. Only read raw JSONL if summaries don't have enough detail. Do NOT jump straight to JSONL files. Search command: Grep pattern="keyword" path="~/.deeplake/memory" @@ -68,96 +55,161 @@ LIMITS: Do NOT spawn subagents to read deeplake memory. 
If a file returns empty Debugging: Set HIVEMIND_DEBUG=1 to enable verbose logging to ~/.deeplake/hook-debug.log`; -const GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; +const HOME = homedir(); +const { log: wikiLog } = makeWikiLogger(join(HOME, ".claude", "hooks")); -export function buildSessionStartAdditionalContext(args: { - authCommand: string; - creds: ReturnType; - currentVersion: string | null; - latestVersion: string | null; -}): string { - const resolvedContext = CLAUDE_SESSION_START_CONTEXT.replace(/HIVEMIND_AUTH_CMD/g, args.authCommand); +/** Create a placeholder summary via direct SQL INSERT (no DeeplakeFs bootstrap needed). */ +async function createPlaceholder(api: DeeplakeApi, table: string, sessionId: string, cwd: string, userName: string, orgName: string, workspaceId: string): Promise { + const summaryPath = `/summaries/${userName}/${sessionId}.md`; - let updateNotice = ""; - if (args.currentVersion) { - if (args.latestVersion && isNewer(args.latestVersion, args.currentVersion)) { - updateNotice = `\n\n⬆️ Hivemind update available: ${args.currentVersion} → ${args.latestVersion}.`; - } else { - updateNotice = `\n\n✅ Hivemind v${args.currentVersion}`; - } + const existing = await api.query( + `SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1` + ); + if (existing.length > 0) { + wikiLog(`SessionStart: summary exists for ${sessionId} (resumed)`); + return; } - return args.creds?.token - ? `${resolvedContext}\n\nLogged in to Deeplake as org: ${args.creds.orgName ?? args.creds.orgId} (workspace: ${args.creds.workspaceId ?? "default"})${updateNotice}` - : `${resolvedContext}\n\n⚠️ Not logged in to Deeplake. Memory search will not work. Ask the user to run /hivemind:login to authenticate.${updateNotice}`; + const now = new Date().toISOString(); + const projectName = cwd.split("/").pop() ?? 
"unknown"; + const sessionSource = `/sessions/${userName}/${userName}_${orgName}_${workspaceId}_${sessionId}.jsonl`; + const content = [ + `# Session ${sessionId}`, + `- **Source**: ${sessionSource}`, + `- **Started**: ${now}`, + `- **Project**: ${projectName}`, + `- **Status**: in-progress`, + "", + ].join("\n"); + const filename = `${sessionId}.md`; + + await api.query( + `INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, agent, creation_date, last_update_date) ` + + `VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ` + + `${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', 'claude_code', '${now}', '${now}')` + ); + + wikiLog(`SessionStart: created placeholder for ${sessionId} (${cwd})`); } -interface SessionStartHookDeps { - wikiWorker?: boolean; - creds?: ReturnType; - saveCredentialsFn?: typeof saveCredentials; - currentVersion?: string | null; - latestVersion?: string | null; - authCommand?: string; - bundleDir?: string; - logFn?: (msg: string) => void; +interface SessionStartInput { + session_id: string; + cwd?: string; } -export async function runSessionStartHook(_input: Record, deps: SessionStartHookDeps = {}): Promise<{ - hookSpecificOutput: { - hookEventName: "SessionStart"; - additionalContext: string; - }; -} | null> { - const { - wikiWorker = (process.env.HIVEMIND_WIKI_WORKER ?? process.env.DEEPLAKE_WIKI_WORKER) === "1", - creds = loadCredentials(), - saveCredentialsFn = saveCredentials, - currentVersion = getInstalledVersion(__bundleDir, ".claude-plugin"), - latestVersion = currentVersion - ? readFreshCachedLatestVersion(GITHUB_RAW_PKG, DEFAULT_VERSION_CACHE_TTL_MS) ?? 
null - : null, - authCommand = AUTH_CMD, - logFn = log, - } = deps; - - if (wikiWorker) return null; +async function main(): Promise { + // Skip if this is a sub-session spawned by the wiki worker + if (process.env.HIVEMIND_WIKI_WORKER === "1") return; + + const input = await readStdin(); + + let creds = loadCredentials(); if (!creds?.token) { - logFn("no credentials found — run /hivemind:login to authenticate"); + log("no credentials found — run /hivemind:login to authenticate"); } else { - logFn(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); + log(`credentials loaded: org=${creds.orgName ?? creds.orgId}`); + // Backfill userName if missing (for users who logged in before this field was added) if (creds.token && !creds.userName) { try { const { userInfo } = await import("node:os"); creds.userName = userInfo().username ?? "unknown"; - saveCredentialsFn(creds); - logFn(`backfilled and persisted userName: ${creds.userName}`); + saveCredentials(creds); + log(`backfilled and persisted userName: ${creds.userName}`); } catch { /* non-fatal */ } } } - return { + // Ensure tables exist and (when capture is enabled) create the placeholder + // summary via direct SQL. Tables must always be synced so queries return + // fresh data — only the placeholder INSERT is skipped when HIVEMIND_CAPTURE=false + // (benchmark runs, explicit opt-out). Mirrors the guard already in + // session-start-setup.ts / session-end.ts / codex hooks. + const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false"; + if (input.session_id && creds?.token) { + try { + const config = loadConfig(); + if (config) { + const table = config.tableName; + const sessionsTable = config.sessionsTableName; + const api = new DeeplakeApi(config.token, config.apiUrl, config.orgId, config.workspaceId, table); + await api.ensureTable(); + await api.ensureSessionsTable(sessionsTable); + if (captureEnabled) { + await createPlaceholder(api, table, input.session_id, input.cwd ?? 
"", config.userName, config.orgName, config.workspaceId); + log("placeholder created"); + } else { + log("placeholder skipped (HIVEMIND_CAPTURE=false)"); + } + } + } catch (e: any) { + log(`placeholder failed: ${e.message}`); + wikiLog(`SessionStart: placeholder failed for ${input.session_id}: ${e.message}`); + } + } + + // Version check (non-blocking — failures are silently ignored) + const autoupdate = creds?.autoupdate !== false; // default: true + let updateNotice = ""; + try { + const current = getInstalledVersion(__bundleDir, ".claude-plugin"); + if (current) { + const latest = await getLatestVersion(); + if (latest && isNewer(latest, current)) { + if (autoupdate) { + log(`autoupdate: updating ${current} → ${latest}`); + try { + const scopes = ["user", "project", "local", "managed"]; + const cmd = scopes + .map(s => `claude plugin update hivemind@hivemind --scope ${s} 2>/dev/null || true`) + .join("; "); + execSync(cmd, { stdio: "ignore", timeout: 60_000 }); + // Clean up old cached versions, keep only the latest + try { + const cacheParent = join(homedir(), ".claude", "plugins", "cache", "hivemind", "hivemind"); + const entries = readdirSync(cacheParent, { withFileTypes: true }); + for (const e of entries) { + if (e.isDirectory() && e.name !== latest) { + rmSync(join(cacheParent, e.name), { recursive: true, force: true }); + log(`cache cleanup: removed old version ${e.name}`); + } + } + } catch (e: any) { + log(`cache cleanup failed: ${e.message}`); + } + updateNotice = `\n\n✅ Hivemind auto-updated: ${current} → ${latest}. Run /reload-plugins to apply.`; + process.stderr.write(`✅ Hivemind auto-updated: ${current} → ${latest}. Run /reload-plugins to apply.\n`); + log(`autoupdate succeeded: ${current} → ${latest}`); + } catch (e: any) { + updateNotice = `\n\n⬆️ Hivemind update available: ${current} → ${latest}. Auto-update failed — run /hivemind:update to upgrade manually.`; + process.stderr.write(`⬆️ Hivemind update available: ${current} → ${latest}. 
Auto-update failed — run /hivemind:update to upgrade manually.\n`); + log(`autoupdate failed: ${e.message}`); + } + } else { + updateNotice = `\n\n⬆️ Hivemind update available: ${current} → ${latest}. Run /hivemind:update to upgrade.`; + process.stderr.write(`⬆️ Hivemind update available: ${current} → ${latest}. Run /hivemind:update to upgrade.\n`); + log(`update available (autoupdate off): ${current} → ${latest}`); + } + } else { + log(`version up to date: ${current}`); + updateNotice = `\n\n✅ Hivemind v${current} (up to date)`; + } + } + } catch (e: any) { + log(`version check failed: ${e.message}`); + } + + const resolvedContext = context.replace(/HIVEMIND_AUTH_CMD/g, AUTH_CMD); + const additionalContext = creds?.token + ? `${resolvedContext}\n\nLogged in to Deeplake as org: ${creds.orgName ?? creds.orgId} (workspace: ${creds.workspaceId ?? "default"})${updateNotice}` + : `${resolvedContext}\n\n⚠️ Not logged in to Deeplake. Memory search will not work. Ask the user to run /hivemind:login to authenticate.${updateNotice}`; + + console.log(JSON.stringify({ hookSpecificOutput: { hookEventName: "SessionStart", - additionalContext: buildSessionStartAdditionalContext({ - authCommand, - creds, - currentVersion, - latestVersion, - }), + additionalContext, }, - }; + })); } -/* c8 ignore start */ -async function main(): Promise { - await readStdin>(); - const result = await runSessionStartHook({}); - if (result) console.log(JSON.stringify(result)); -} - -if (isDirectRun(import.meta.url)) { - main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); -} -/* c8 ignore stop */ +main().catch((e) => { log(`fatal: ${e.message}`); process.exit(0); }); diff --git a/src/hooks/spawn-wiki-worker.ts b/src/hooks/spawn-wiki-worker.ts index a2440b6..b870bb4 100644 --- a/src/hooks/spawn-wiki-worker.ts +++ b/src/hooks/spawn-wiki-worker.ts @@ -6,13 +6,14 @@ import { spawn, execSync } from "node:child_process"; import { fileURLToPath } from "node:url"; import { dirname, join } 
from "node:path"; -import { writeFileSync, mkdirSync, appendFileSync } from "node:fs"; +import { writeFileSync, mkdirSync } from "node:fs"; import { homedir, tmpdir } from "node:os"; import type { Config } from "../config.js"; -import { utcTimestamp } from "../utils/debug.js"; +import { makeWikiLogger } from "../utils/wiki-log.js"; const HOME = homedir(); -export const WIKI_LOG = join(HOME, ".claude", "hooks", "deeplake-wiki.log"); +const wikiLogger = makeWikiLogger(join(HOME, ".claude", "hooks")); +export const WIKI_LOG = wikiLogger.path; export const WIKI_PROMPT_TEMPLATE = `You are building a personal wiki from a coding session. Your goal is to extract every piece of knowledge — entities, decisions, relationships, and facts — into a structured, searchable wiki entry. Think of this as building a knowledge graph, not writing a summary. @@ -67,12 +68,7 @@ PRIVACY: Never include absolute filesystem paths (e.g. /home/user/..., /Users/.. LENGTH LIMIT: Keep the total summary under 4000 characters. Be dense and concise — prioritize facts over prose. 
If a session is short, the summary should be short too.`; -export function wikiLog(msg: string): void { - try { - mkdirSync(join(HOME, ".claude", "hooks"), { recursive: true }); - appendFileSync(WIKI_LOG, `[${utcTimestamp()}] ${msg}\n`); - } catch { /* ignore */ } -} +export const wikiLog = wikiLogger.log; export function findClaudeBin(): string { try { diff --git a/src/hooks/summary-state.ts b/src/hooks/summary-state.ts index b10ba3e..9873e4f 100644 --- a/src/hooks/summary-state.ts +++ b/src/hooks/summary-state.ts @@ -14,6 +14,9 @@ import { } from "node:fs"; import { homedir } from "node:os"; import { join } from "node:path"; +import { log as _log } from "../utils/debug.js"; + +const dlog = (msg: string) => _log("summary-state", msg); export interface SummaryState { lastSummaryAt: number; @@ -61,7 +64,12 @@ export function withRmwLock(sessionId: string, fn: () => T): T { } catch (e: any) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - try { unlinkSync(rmwLock); } catch { /* ignore */ } + dlog(`rmw lock deadline exceeded for ${sessionId}, reclaiming stale lock`); + try { + unlinkSync(rmwLock); + } catch (unlinkErr: any) { + dlog(`stale rmw lock unlink failed for ${sessionId}: ${unlinkErr.message}`); + } continue; } Atomics.wait(YIELD_BUF, 0, 0, 10); @@ -71,7 +79,11 @@ export function withRmwLock(sessionId: string, fn: () => T): T { return fn(); } finally { closeSync(fd); - try { unlinkSync(rmwLock); } catch { /* ignore */ } + try { + unlinkSync(rmwLock); + } catch (unlinkErr: any) { + dlog(`rmw lock cleanup failed for ${sessionId}: ${unlinkErr.message}`); + } } } @@ -129,8 +141,17 @@ export function tryAcquireLock(sessionId: string, maxAgeMs = 10 * 60 * 1000): bo try { const ageMs = Date.now() - parseInt(readFileSync(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; - } catch { /* treat unreadable as stale */ } - try { unlinkSync(p); } catch { return false; } + } catch (readErr: any) { + // Unreadable lock content: 
treat as stale and log for visibility + // (HIVEMIND_DEBUG-gated) so we know why stale reclaim fired. + dlog(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); + } + try { + unlinkSync(p); + } catch (unlinkErr: any) { + dlog(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); + return false; + } } try { const fd = openSync(p, "wx"); @@ -145,5 +166,11 @@ export function tryAcquireLock(sessionId: string, maxAgeMs = 10 * 60 * 1000): bo export function releaseLock(sessionId: string): void { try { unlinkSync(lockPath(sessionId)); - } catch { /* ignore */ } + } catch (e: any) { + // ENOENT is normal (lock wasn't held); everything else is worth + // seeing in debug mode. + if (e?.code !== "ENOENT") { + dlog(`releaseLock unlink failed for ${sessionId}: ${e.message}`); + } + } } diff --git a/src/hooks/virtual-table-query.ts b/src/hooks/virtual-table-query.ts index 34f0bf6..a430a35 100644 --- a/src/hooks/virtual-table-query.ts +++ b/src/hooks/virtual-table-query.ts @@ -8,14 +8,32 @@ function normalizeSessionPart(path: string, content: string): string { return normalizeContent(path, content); } -export function buildVirtualIndexContent(rows: Row[]): string { - const lines = ["# Memory Index", "", `${rows.length} sessions:`, ""]; - for (const row of rows) { - const path = row["path"] as string; - const project = row["project"] as string || ""; - const description = (row["description"] as string || "").slice(0, 120); - const date = (row["creation_date"] as string || "").slice(0, 10); - lines.push(`- [${path}](${path}) ${date} ${project ? 
`[${project}]` : ""} ${description}`); +export function buildVirtualIndexContent(summaryRows: Row[], sessionRows: Row[] = []): string { + const total = summaryRows.length + sessionRows.length; + const lines = [ + "# Memory Index", + "", + `${total} entries (${summaryRows.length} summaries, ${sessionRows.length} sessions):`, + "", + ]; + if (summaryRows.length > 0) { + lines.push("## Summaries", ""); + for (const row of summaryRows) { + const path = row["path"] as string; + const project = row["project"] as string || ""; + const description = (row["description"] as string || "").slice(0, 120); + const date = (row["creation_date"] as string || "").slice(0, 10); + lines.push(`- [${path}](${path}) ${date} ${project ? `[${project}]` : ""} ${description}`); + } + lines.push(""); + } + if (sessionRows.length > 0) { + lines.push("## Sessions", ""); + for (const row of sessionRows) { + const path = row["path"] as string; + const description = (row["description"] as string || "").slice(0, 120); + lines.push(`- [${path}](${path}) ${description}`); + } } return lines.join("\n"); } @@ -35,7 +53,7 @@ function buildInList(paths: string[]): string { function buildDirFilter(dirs: string[]): string { const cleaned = [...new Set(dirs.map(dir => dir.replace(/\/+$/, "") || "/"))]; if (cleaned.length === 0 || cleaned.includes("/")) return ""; - const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%'`); + const clauses = cleaned.map((dir) => `path LIKE '${sqlLike(dir)}/%' ESCAPE '\\'`); return ` WHERE ${clauses.join(" OR ")}`; } @@ -101,10 +119,15 @@ export async function readVirtualPathContents( } if (result.get("/index.md") === null && uniquePaths.includes("/index.md")) { - const rows = await api.query( - `SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC` - ).catch(() => []); - result.set("/index.md", buildVirtualIndexContent(rows)); + const [summaryRows, sessionRows] = await Promise.all([ + 
api.query( + `SELECT path, project, description, creation_date FROM "${memoryTable}" WHERE path LIKE '/summaries/%' ORDER BY creation_date DESC` + ).catch(() => [] as Row[]), + api.query( + `SELECT path, description FROM "${sessionsTable}" WHERE path LIKE '/sessions/%' ORDER BY path` + ).catch(() => [] as Row[]), + ]); + result.set("/index.md", buildVirtualIndexContent(summaryRows, sessionRows)); } return result; @@ -173,8 +196,8 @@ export async function findVirtualPaths( const likePath = `${sqlLike(normalizedDir === "/" ? "" : normalizedDir)}/%`; const rows = await queryUnionRows( api, - `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, - `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' AND filename LIKE '${filenamePattern}'`, + `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 0 AS source_order FROM "${memoryTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`, + `SELECT path, NULL::text AS content, NULL::bigint AS size_bytes, '' AS creation_date, 1 AS source_order FROM "${sessionsTable}" WHERE path LIKE '${likePath}' ESCAPE '\\' AND filename LIKE '${filenamePattern}' ESCAPE '\\'`, ); return [...new Set( diff --git a/src/hooks/wiki-worker.ts b/src/hooks/wiki-worker.ts index 6e12445..2359ea0 100644 --- a/src/hooks/wiki-worker.ts +++ b/src/hooks/wiki-worker.ts @@ -10,7 +10,9 @@ import { readFileSync, writeFileSync, existsSync, appendFileSync, mkdirSync, rmSync } from "node:fs"; import { execFileSync } from "node:child_process"; import { join } from "node:path"; -import { utcTimestamp } from "../utils/debug.js"; +import { utcTimestamp, log as _log } from "../utils/debug.js"; + +const dlog = (msg: string) => _log("wiki-worker", 
msg); import { finalizeSummary, releaseLock } from "./summary-state.js"; import { uploadSummary } from "./upload-summary.js"; @@ -91,7 +93,11 @@ async function query(sql: string, retries = 4): Promise[ } function cleanup(): void { - try { rmSync(tmpDir, { recursive: true, force: true }); } catch { /* ignore */ } + try { + rmSync(tmpDir, { recursive: true, force: true }); + } catch (cleanupErr: any) { + dlog(`cleanup failed to remove ${tmpDir}: ${cleanupErr.message}`); + } } async function main(): Promise { @@ -202,7 +208,13 @@ async function main(): Promise { wlog(`fatal: ${e.message}`); } finally { cleanup(); - try { releaseLock(cfg.sessionId); } catch { /* ignore */ } + try { + releaseLock(cfg.sessionId); + } catch (releaseErr: any) { + // Gated on HIVEMIND_DEBUG — we don't want a release failure at + // worker shutdown to pollute the wiki log every run. + dlog(`releaseLock failed in finally for ${cfg.sessionId}: ${releaseErr.message}`); + } } } diff --git a/src/shell/deeplake-shell.ts b/src/shell/deeplake-shell.ts index dcdbfa5..e58dfb8 100644 --- a/src/shell/deeplake-shell.ts +++ b/src/shell/deeplake-shell.ts @@ -29,6 +29,20 @@ import { DeeplakeFs } from "./deeplake-fs.js"; import { createGrepCommand } from "./grep-interceptor.js"; async function main(): Promise { + const isOneShot = process.argv.includes("-c"); + + // One-shot mode is what the pre-tool-use hook invokes via `node shell-bundle -c "..."` + // to execute compound bash commands. Claude Code's Bash tool merges the child's + // stderr into the tool_result string Claude sees, so any `[deeplake-sql]` trace + // written to stderr here pollutes the model's view of the command output. + // Silence trace env vars regardless of how the caller set them. 
+ if (isOneShot) { + delete process.env["HIVEMIND_TRACE_SQL"]; + delete process.env["DEEPLAKE_TRACE_SQL"]; + delete process.env["HIVEMIND_DEBUG"]; + delete process.env["DEEPLAKE_DEBUG"]; + } + const config = loadConfig(); if (!config) { process.stderr.write( @@ -42,8 +56,6 @@ async function main(): Promise { const sessionsTable = process.env["HIVEMIND_SESSIONS_TABLE"] ?? "sessions"; const mount = process.env["HIVEMIND_MOUNT"] ?? "/"; - const isOneShot = process.argv.includes("-c"); - const client = new DeeplakeApi( config.token, config.apiUrl, config.orgId, config.workspaceId, table ); diff --git a/src/shell/grep-core.ts b/src/shell/grep-core.ts index abad499..6e93c5b 100644 --- a/src/shell/grep-core.ts +++ b/src/shell/grep-core.ts @@ -234,13 +234,13 @@ function buildPathCondition(targetPath: string): string { const clean = targetPath.replace(/\/+$/, ""); if (/[*?]/.test(clean)) { const likePattern = sqlLike(clean).replace(/\*/g, "%").replace(/\?/g, "_"); - return `path LIKE '${likePattern}'`; + return `path LIKE '${likePattern}' ESCAPE '\\'`; } const base = clean.split("/").pop() ?? ""; if (base.includes(".")) { return `path = '${sqlStr(clean)}'`; } - return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%')`; + return `(path = '${sqlStr(clean)}' OR path LIKE '${sqlLike(clean)}/%' ESCAPE '\\')`; } /** diff --git a/src/utils/output-cap.ts b/src/utils/output-cap.ts new file mode 100644 index 0000000..1b620a7 --- /dev/null +++ b/src/utils/output-cap.ts @@ -0,0 +1,75 @@ +/** + * Cap large tool outputs before they reach Claude Code. + * + * Claude Code's Bash tool silently persists any tool_result larger than + * ~16 KB to disk and replaces it with a 2 KB "preview" + a path to the + * persisted file. In the locomo `baseline_cloud_100qa_fix123` run, 11 + * out of 14 losing QAs that hit this path NEVER recovered — the model + * saw a 2 KB slice of grep output and gave up instead of reading the + * persisted file. 
For our workload 8 KB of meaningful content is + * consistently more useful to the model than 2 KB + a dangling file + * pointer, so we cap the plugin-returned output below that threshold + * and replace the tail with a footer that tells the model how to + * narrow the next call. + * + * The cap is applied at line boundaries to keep grep / cat output + * structure intact. A short footer indicates how many lines / bytes + * were elided and suggests refinements ("pipe to | head -N" or + * "tighten the pattern"). + */ + +export const CLAUDE_OUTPUT_CAP_BYTES = 8 * 1024; + +function byteLen(str: string): number { + return Buffer.byteLength(str, "utf8"); +} + +export interface CapOutputOptions { + /** Hint shown in the footer. Examples: "grep", "cat", "for-loop". */ + kind?: string; + /** Override the cap size (bytes). Defaults to CLAUDE_OUTPUT_CAP_BYTES. */ + maxBytes?: number; +} + +/** + * If `output` fits in the cap, return it unchanged. Otherwise truncate + * at the last newline that keeps the total (including footer) under the + * cap, and append a footer describing what was elided. + */ +export function capOutputForClaude(output: string, options: CapOutputOptions = {}): string { + const maxBytes = options.maxBytes ?? CLAUDE_OUTPUT_CAP_BYTES; + if (byteLen(output) <= maxBytes) return output; + + const kind = options.kind ?? "output"; + // Reserve ~200 bytes for the footer so it always fits within maxBytes. + const footerReserve = 220; + const budget = Math.max(1, maxBytes - footerReserve); + + // Find the last newline before the byte budget. Walk forward building + // the slice so the byte boundary stays valid even for multibyte UTF-8. 
+ let running = 0; + const lines = output.split("\n"); + const keptLines: string[] = []; + for (const line of lines) { + const lineBytes = byteLen(line) + 1; // +1 for the newline + if (running + lineBytes > budget) break; + keptLines.push(line); + running += lineBytes; + } + + if (keptLines.length === 0) { + // A single line is already over budget — take a prefix and mark it. + const slice = Buffer.from(output, "utf8").slice(0, budget).toString("utf8"); + const footer = `\n... [${kind} truncated: ${(byteLen(output) / 1024).toFixed(1)} KB total; refine with '| head -N' or a tighter pattern]`; + return slice + footer; + } + + // `split("\n")` on `"a\nb\n"` produces `["a", "b", ""]` — the trailing + // empty entry is a newline terminator, not a real extra line. Counting + // it would over-report the elided-line tally in the footer. + const totalLines = lines.length - (lines[lines.length - 1] === "" ? 1 : 0); + const elidedLines = Math.max(0, totalLines - keptLines.length); + const elidedBytes = byteLen(output) - byteLen(keptLines.join("\n")); + const footer = `\n... [${kind} truncated: ${elidedLines} more lines (${(elidedBytes / 1024).toFixed(1)} KB) elided — refine with '| head -N' or a tighter pattern]`; + return keptLines.join("\n") + footer; +} diff --git a/src/utils/session-path.ts b/src/utils/session-path.ts new file mode 100644 index 0000000..e4b8e5b --- /dev/null +++ b/src/utils/session-path.ts @@ -0,0 +1,13 @@ +/** + * Canonical session JSONL path. Used by every capture hook (CC + Codex) + * and by the placeholder / summary paths in session-start. Keeping it + * in one place prevents the 4-tuple `{userName, orgName, workspaceId, + * sessionId}` from ever being re-assembled in the wrong order. + */ +export function buildSessionPath( + config: { userName: string; orgName: string; workspaceId: string }, + sessionId: string, +): string { + const workspace = config.workspaceId ?? 
"default"; + return `/sessions/${config.userName}/${config.userName}_${config.orgName}_${workspace}_${sessionId}.jsonl`; +} diff --git a/src/utils/version-check.ts b/src/utils/version-check.ts new file mode 100644 index 0000000..e8e14cb --- /dev/null +++ b/src/utils/version-check.ts @@ -0,0 +1,67 @@ +/** + * Shared install-version / latest-version / version-compare helpers. + * Used by both the CC and Codex session-start hooks. Each side differs + * only in the path of its plugin manifest: + * - claude-code → /../.claude-plugin/plugin.json + * - codex → /../.codex-plugin/plugin.json + * Callers pass the plugin-manifest name explicitly. + */ + +import { readFileSync } from "node:fs"; +import { dirname, join } from "node:path"; + +const GITHUB_RAW_PKG = "https://raw.githubusercontent.com/activeloopai/hivemind/main/package.json"; + +/** + * Read the installed plugin version. + * + * Tries `/../plugin.json` first (both the + * cache layout and the marketplace layout pin the version there), then + * walks up from the bundle dir looking for a `package.json` whose name + * is `hivemind` or `hivemind-codex`. Returns null if nothing is found + * — callers treat that as "skip the update check". 
+ */ +export function getInstalledVersion(bundleDir: string, pluginManifestDir: string): string | null { + try { + const pluginJson = join(bundleDir, "..", pluginManifestDir, "plugin.json"); + const plugin = JSON.parse(readFileSync(pluginJson, "utf-8")); + if (plugin.version) return plugin.version; + } catch { /* fall through */ } + let dir = bundleDir; + for (let i = 0; i < 5; i++) { + const candidate = join(dir, "package.json"); + try { + const pkg = JSON.parse(readFileSync(candidate, "utf-8")); + if ((pkg.name === "hivemind" || pkg.name === "hivemind-codex") && pkg.version) return pkg.version; + } catch { /* not here, keep looking */ } + const parent = dirname(dir); + if (parent === dir) break; + dir = parent; + } + return null; +} + +/** + * Fetch the latest version from GitHub (main branch package.json). + * Returns null on any failure — session-start hooks must never block + * on GitHub being reachable, and their callers treat null as "no + * update available". + */ +export async function getLatestVersion(timeoutMs = 3000): Promise { + try { + const res = await fetch(GITHUB_RAW_PKG, { signal: AbortSignal.timeout(timeoutMs) }); + if (!res.ok) return null; + const pkg = await res.json(); + return pkg.version ?? null; + } catch { + return null; + } +} + +/** Strict semantic "latest is greater than current" for dotted x.y.z strings. */ +export function isNewer(latest: string, current: string): boolean { + const parse = (v: string) => v.split(".").map(Number); + const [la, lb, lc] = parse(latest); + const [ca, cb, cc] = parse(current); + return la > ca || (la === ca && lb > cb) || (la === ca && lb === cb && lc > cc); +} diff --git a/src/utils/wiki-log.ts b/src/utils/wiki-log.ts new file mode 100644 index 0000000..a154afb --- /dev/null +++ b/src/utils/wiki-log.ts @@ -0,0 +1,32 @@ +/** + * wikiLog writer factory. Produces a unconditional append-line logger + * that targets a user-visible wiki-log file. 
Each plugin variant has + * its own path (CC: ~/.claude/hooks/..., Codex: ~/.codex/hooks/...), + * so the caller constructs the logger once by passing HOOKS_DIR. + * + * This is the *user-visible* log — entries like "SessionEnd: + * triggering summary for " land here regardless of HIVEMIND_DEBUG. + * For debug-gated diagnostics use `_log` from src/utils/debug.ts. + */ + +import { mkdirSync, appendFileSync } from "node:fs"; +import { join } from "node:path"; +import { utcTimestamp } from "./debug.js"; + +export interface WikiLogger { + log: (msg: string) => void; + path: string; +} + +export function makeWikiLogger(hooksDir: string, filename = "deeplake-wiki.log"): WikiLogger { + const path = join(hooksDir, filename); + return { + path, + log(msg: string): void { + try { + mkdirSync(hooksDir, { recursive: true }); + appendFileSync(path, `[${utcTimestamp()}] ${msg}\n`); + } catch { /* ignore — a log failure must never crash the hook */ } + }, + }; +} diff --git a/vitest.config.ts b/vitest.config.ts index 70df29d..dccf756 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -65,6 +65,35 @@ export default defineConfig({ functions: 80, lines: 80, }, + // fix/index-md-include-sessions — 5-fix PR stacked on PR #61. + // output-cap.ts is new in this PR (fix #5); virtual-table-query.ts was + // heavily modified by fix #1 (index.md builder / fallback) and fix #4 + // (ESCAPE '\' on LIKE clauses). Held at 90 to match the rest of the + // plugin-hot-path files already at that bar. + "src/utils/output-cap.ts": { + statements: 90, + branches: 90, + functions: 90, + lines: 90, + }, + "src/hooks/virtual-table-query.ts": { + statements: 90, + branches: 90, + functions: 90, + lines: 90, + }, + "src/hooks/pre-tool-use.ts": { + statements: 90, + branches: 90, + functions: 90, + lines: 90, + }, + "src/hooks/memory-path-utils.ts": { + statements: 90, + branches: 90, + functions: 90, + lines: 90, + }, }, }, },