diff --git a/package-lock.json b/package-lock.json index 1d65735..0d5ac08 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,11 +18,12 @@ "tough-cookie": "^4.1.4" }, "bin": { + "git-remote-overleaf": "dist/git-helper.js", "olcli": "dist/cli.js" }, "devDependencies": { "@types/adm-zip": "^0.5.7", - "@types/node": "^22.0.0", + "@types/node": "^22.19.17", "@types/tough-cookie": "^4.0.5", "tsx": "^4.7.0", "typescript": "^5.4.0" @@ -484,9 +485,9 @@ } }, "node_modules/@types/node": { - "version": "22.19.7", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.7.tgz", - "integrity": "sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw==", + "version": "22.19.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.17.tgz", + "integrity": "sha512-wGdMcf+vPYM6jikpS/qhg6WiqSV/OhG+jeeHT/KlVqxYfD40iYJf9/AE1uQxVWFvU7MipKRkRv8NSHiCGgPr8Q==", "dev": true, "license": "MIT", "dependencies": { diff --git a/package.json b/package.json index 6f88c18..251b7c0 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,8 @@ "description": "Command-line interface for Overleaf — Sync, manage, and compile LaTeX projects from your terminal", "type": "module", "bin": { - "olcli": "dist/cli.js" + "olcli": "dist/cli.js", + "git-remote-overleaf": "dist/git-helper.js" }, "scripts": { "build": "tsc", @@ -54,7 +55,7 @@ }, "devDependencies": { "@types/adm-zip": "^0.5.7", - "@types/node": "^22.0.0", + "@types/node": "^22.19.17", "@types/tough-cookie": "^4.0.5", "tsx": "^4.7.0", "typescript": "^5.4.0" diff --git a/src/cli.ts b/src/cli.ts index 473a13b..b0a5db0 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -12,7 +12,7 @@ import ora from 'ora'; import { writeFileSync, readFileSync, existsSync, mkdirSync } from 'node:fs'; import { join, dirname, basename } from 'node:path'; import { fileURLToPath } from 'node:url'; -import { OverleafClient } from './client.js'; +import { OverleafClient, getClient } from './client.js'; // Read 
version from package.json const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -35,28 +35,12 @@ import { const program = new Command(); program - .name('olcli') - .description('Overleaf CLI - interact with Overleaf projects from the command line') - .version(VERSION) - .option('--base-url ', 'Overleaf instance base URL (overrides OVERLEAF_BASE_URL and config)') - .option('--cookie-name ', 'Session cookie name (default: overleaf_session2, use overleaf.sid for older instances)'); +.name('olcli') +.description('Overleaf CLI - interact with Overleaf projects from the command line') +.version(VERSION) +.option('--base-url ', 'Overleaf instance base URL (overrides OVERLEAF_BASE_URL and config)') +.option('--cookie-name ', 'Session cookie name (default: overleaf_session2, use overleaf.sid for older instances)'); -/** - * Helper to get authenticated client - */ -async function getClient(cookieOpt?: string, baseUrlOpt?: string): Promise { - const cookie = cookieOpt || getSessionCookie(); - if (!cookie) { - console.error(chalk.red('No session cookie found.')); - console.error('Set one with: olcli auth --cookie '); - console.error('Or set OVERLEAF_SESSION environment variable'); - console.error('Or create .olauth file in current directory'); - process.exit(1); - } - const baseUrl = baseUrlOpt || (program.opts().baseUrl as string | undefined) || getBaseUrl(); - const cookieName = (program.opts().cookieName as string | undefined) || getSessionCookieName(); - return OverleafClient.fromSessionCookie(cookie, baseUrl, cookieName); -} /** * Resolve project from argument or .olcli.json in current directory @@ -78,7 +62,7 @@ async function resolveProject( // Trust the ID, use a placeholder name (will be overwritten on next list) return { id: projectArg, name: projectArg }; } - + // Otherwise, look up by name let proj = await client.getProject(projectArg); if (!proj) { @@ -105,164 +89,164 @@ async function resolveProject( // 
───────────────────────────────────────────────────────────────────────────── program - .command('auth') - .description('Authenticate with Overleaf using session cookie') - .option('--cookie ', 'Session cookie (overleaf_session2 value)') - .option('--save-local', 'Save to .olauth in current directory') - .action(async (options) => { - if (!options.cookie) { - console.log(chalk.yellow('To authenticate, provide your session cookie:')); - console.log(); - console.log('1. Log into overleaf.com in your browser'); - console.log('2. Open Developer Tools (F12) → Application → Cookies'); - console.log('3. Find the cookie named "overleaf_session2"'); - console.log('4. Copy its value and run:'); - console.log(); - console.log(chalk.cyan(' olcli auth --cookie "your_session_cookie_value"')); - console.log(); - console.log('Or set OVERLEAF_SESSION environment variable'); - return; - } - - const spinner = ora('Verifying session...').start(); - try { - const baseUrl = (program.opts().baseUrl as string | undefined) || getBaseUrl(); - const cookieName = (program.opts().cookieName as string | undefined) || getSessionCookieName(); - const client = await OverleafClient.fromSessionCookie(options.cookie, baseUrl, cookieName); - const projects = await client.listProjects(); +.command('auth') +.description('Authenticate with Overleaf using session cookie') +.option('--cookie ', 'Session cookie (overleaf_session2 value)') +.option('--save-local', 'Save to .olauth in current directory') +.action(async (options) => { + if (!options.cookie) { + console.log(chalk.yellow('To authenticate, provide your session cookie:')); + console.log(); + console.log('1. Log into overleaf.com in your browser'); + console.log('2. Open Developer Tools (F12) → Application → Cookies'); + console.log('3. Find the cookie named "overleaf_session2"'); + console.log('4. 
Copy its value and run:'); + console.log(); + console.log(chalk.cyan(' olcli auth --cookie "your_session_cookie_value"')); + console.log(); + console.log('Or set OVERLEAF_SESSION environment variable'); + return; + } - setSessionCookie(options.cookie); + const spinner = ora('Verifying session...').start(); + try { + const baseUrl = (program.opts().baseUrl as string | undefined) || getBaseUrl(); + const cookieName = (program.opts().cookieName as string | undefined) || getSessionCookieName(); + const client = await OverleafClient.fromSessionCookie(options.cookie, baseUrl, cookieName); + const projects = await client.listProjects(); - if (options.saveLocal) { - saveOlAuth(options.cookie); - spinner.succeed(`Authenticated! Found ${projects.length} projects. Saved to .olauth`); - } else { - spinner.succeed(`Authenticated! Found ${projects.length} projects.`); - } + setSessionCookie(options.cookie); - console.log(chalk.dim(`Config saved to: ${getConfigPath()}`)); - } catch (error: any) { - spinner.fail(`Authentication failed: ${error.message}`); - process.exit(1); + if (options.saveLocal) { + saveOlAuth(options.cookie); + spinner.succeed(`Authenticated! Found ${projects.length} projects. Saved to .olauth`); + } else { + spinner.succeed(`Authenticated! 
Found ${projects.length} projects.`); } - }); + + console.log(chalk.dim(`Config saved to: ${getConfigPath()}`)); + } catch (error: any) { + spinner.fail(`Authentication failed: ${error.message}`); + process.exit(1); + } +}); program - .command('whoami') - .description('Show current authentication status') - .action(async () => { - const cookie = getSessionCookie(); - if (!cookie) { - console.log(chalk.yellow('Not authenticated')); - return; - } +.command('whoami') +.description('Show current authentication status') +.action(async () => { + const cookie = getSessionCookie(); + if (!cookie) { + console.log(chalk.yellow('Not authenticated')); + return; + } - const spinner = ora('Checking session...').start(); - try { - const baseUrl = (program.opts().baseUrl as string | undefined) || getBaseUrl(); - const cookieName = (program.opts().cookieName as string | undefined) || getSessionCookieName(); - const client = await OverleafClient.fromSessionCookie(cookie, baseUrl, cookieName); - const projects = await client.listProjects(); - spinner.succeed(`Authenticated with access to ${projects.length} projects`); - } catch (error: any) { - spinner.fail(`Session invalid: ${error.message}`); - } - }); + const spinner = ora('Checking session...').start(); + try { + const baseUrl = (program.opts().baseUrl as string | undefined) || getBaseUrl(); + const cookieName = (program.opts().cookieName as string | undefined) || getSessionCookieName(); + const client = await OverleafClient.fromSessionCookie(cookie, baseUrl, cookieName); + const projects = await client.listProjects(); + spinner.succeed(`Authenticated with access to ${projects.length} projects`); + } catch (error: any) { + spinner.fail(`Session invalid: ${error.message}`); + } +}); program - .command('logout') - .description('Clear stored credentials') - .action(() => { - clearConfig(); - console.log(chalk.green('Credentials cleared')); - }); +.command('logout') +.description('Clear stored credentials') +.action(() => { + 
clearConfig(); + console.log(chalk.green('Credentials cleared')); +}); // ───────────────────────────────────────────────────────────────────────────── // PROJECT COMMANDS // ───────────────────────────────────────────────────────────────────────────── program - .command('list') - .alias('ls') - .description('List all projects') - .option('--json', 'Output as JSON') - .option('-n, --limit ', 'Limit number of results', parseInt) - .option('--cookie ', 'Session cookie override') - .action(async (options) => { - const spinner = ora('Fetching projects...').start(); - try { - const client = await getClient(options.cookie); - let projects = await client.listProjects(); - - if (options.limit) { - projects = projects.slice(0, options.limit); - } +.command('list') +.alias('ls') +.description('List all projects') +.option('--json', 'Output as JSON') +.option('-n, --limit ', 'Limit number of results', parseInt) +.option('--cookie ', 'Session cookie override') +.action(async (options) => { + const spinner = ora('Fetching projects...').start(); + try { + const client = await getClient(options.cookie); + let projects = await client.listProjects(); + + if (options.limit) { + projects = projects.slice(0, options.limit); + } - spinner.stop(); + spinner.stop(); - if (options.json) { - console.log(JSON.stringify(projects, null, 2)); - return; - } + if (options.json) { + console.log(JSON.stringify(projects, null, 2)); + return; + } - if (projects.length === 0) { - console.log(chalk.yellow('No projects found')); - return; - } + if (projects.length === 0) { + console.log(chalk.yellow('No projects found')); + return; + } - console.log(chalk.bold(`Found ${projects.length} project(s):\n`)); - for (const p of projects) { - const date = new Date(p.lastUpdated).toLocaleDateString(); - console.log(` ${chalk.cyan(p.id)} - ${chalk.bold(p.name)}`); - console.log(` ${chalk.dim(`Last updated: ${date}`)}`); - } - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); + 
console.log(chalk.bold(`Found ${projects.length} project(s):\n`)); + for (const p of projects) { + const date = new Date(p.lastUpdated).toLocaleDateString(); + console.log(` ${chalk.cyan(p.id)} - ${chalk.bold(p.name)}`); + console.log(` ${chalk.dim(`Last updated: ${date}`)}`); } - }); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); program - .command('info [project]') - .description('Show project details (by name or ID)') - .option('--json', 'Output as JSON') - .option('--cookie ', 'Session cookie override') - .action(async (project, options) => { - const spinner = ora('Fetching project info...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - // Get entities (works without parsing HTML) - const entities = await client.getEntities(proj.id); - spinner.stop(); +.command('info [project]') +.description('Show project details (by name or ID)') +.option('--json', 'Output as JSON') +.option('--cookie ', 'Session cookie override') +.action(async (project, options) => { + const spinner = ora('Fetching project info...').start(); + try { + const client = await getClient(options.cookie); + const proj = await resolveProject(client, project); + + // Get entities (works without parsing HTML) + const entities = await client.getEntities(proj.id); + spinner.stop(); + + if (options.json) { + console.log(JSON.stringify({ project: proj, entities }, null, 2)); + return; + } - if (options.json) { - console.log(JSON.stringify({ project: proj, entities }, null, 2)); - return; - } + console.log(chalk.bold(`Project: ${proj.name}`)); + console.log(` ID: ${chalk.cyan(proj.id)}`); + console.log(); - console.log(chalk.bold(`Project: ${proj.name}`)); - console.log(` ID: ${chalk.cyan(proj.id)}`); - console.log(); + // Print file list grouped by folder + console.log(chalk.bold('Files:')); - // Print file list grouped by folder - console.log(chalk.bold('Files:')); - - // Sort 
entities by path for nice display - const sorted = entities.sort((a, b) => a.path.localeCompare(b.path)); - - for (const entity of sorted) { - const icon = entity.type === 'doc' ? '📄' : '📎'; - console.log(` ${icon} ${entity.path}`); - } + // Sort entities by path for nice display + const sorted = entities.sort((a, b) => a.path.localeCompare(b.path)); - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); + for (const entity of sorted) { + const icon = entity.type === 'doc' ? '📄' : '📎'; + console.log(` ${icon} ${entity.path}`); } - }); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); function printFolder(folder: any, indent: string): void { // Print subfolders @@ -287,186 +271,186 @@ function printFolder(folder: any, indent: string): void { // ───────────────────────────────────────────────────────────────────────────── program - .command('download [project]') - .description('Download a single file from project') - .option('-o, --output ', 'Output path (default: same as file name)') - .option('--cookie ', 'Session cookie override') - .action(async (file, project, options) => { - const spinner = ora('Downloading file...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - const content = await client.downloadByPath(proj.id, file); - const outputPath = options.output || basename(file); - - writeFileSync(outputPath, content); - spinner.succeed(`Downloaded: ${outputPath} (${(content.length / 1024).toFixed(1)} KB)`); - - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); - } - }); +.command('download [project]') +.description('Download a single file from project') +.option('-o, --output ', 'Output path (default: same as file name)') +.option('--cookie ', 'Session cookie override') +.action(async (file, 
project, options) => { + const spinner = ora('Downloading file...').start(); + try { + const client = await getClient(options.cookie); + const proj = await resolveProject(client, project); + + const content = await client.downloadByPath(proj.id, file); + const outputPath = options.output || basename(file); + + writeFileSync(outputPath, content); + spinner.succeed(`Downloaded: ${outputPath} (${(content.length / 1024).toFixed(1)} KB)`); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); program - .command('zip [project]') - .description('Download project as zip archive') - .option('-o, --output ', 'Output path (default: .zip)') - .option('--cookie ', 'Session cookie override') - .action(async (project, options) => { - const spinner = ora('Downloading project...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - const zip = await client.downloadProject(proj.id); - const outputPath = options.output || `${proj.name.replace(/[^a-zA-Z0-9-_]/g, '_')}.zip`; - - writeFileSync(outputPath, zip); - spinner.succeed(`Downloaded: ${outputPath} (${(zip.length / 1024).toFixed(1)} KB)`); - - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); - } - }); +.command('zip [project]') +.description('Download project as zip archive') +.option('-o, --output ', 'Output path (default: .zip)') +.option('--cookie ', 'Session cookie override') +.action(async (project, options) => { + const spinner = ora('Downloading project...').start(); + try { + const client = await getClient(options.cookie); + const proj = await resolveProject(client, project); + + const zip = await client.downloadProject(proj.id); + const outputPath = options.output || `${proj.name.replace(/[^a-zA-Z0-9-_]/g, '_')}.zip`; + + writeFileSync(outputPath, zip); + spinner.succeed(`Downloaded: ${outputPath} (${(zip.length / 
1024).toFixed(1)} KB)`); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); program - .command('pdf [project]') - .description('Compile and download PDF') - .option('-o, --output ', 'Output path (default: .pdf)') - .option('--cookie ', 'Session cookie override') - .action(async (project, options) => { - const spinner = ora('Compiling project...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - spinner.text = 'Compiling...'; - const pdf = await client.downloadPdf(proj.id); - const outputPath = options.output || `${proj.name.replace(/[^a-zA-Z0-9-_]/g, '_')}.pdf`; - - writeFileSync(outputPath, pdf); - spinner.succeed(`Downloaded PDF: ${outputPath} (${(pdf.length / 1024).toFixed(1)} KB)`); - - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); - } - }); +.command('pdf [project]') +.description('Compile and download PDF') +.option('-o, --output ', 'Output path (default: .pdf)') +.option('--cookie ', 'Session cookie override') +.action(async (project, options) => { + const spinner = ora('Compiling project...').start(); + try { + const client = await getClient(options.cookie); + const proj = await resolveProject(client, project); + + spinner.text = 'Compiling...'; + const pdf = await client.downloadPdf(proj.id); + const outputPath = options.output || `${proj.name.replace(/[^a-zA-Z0-9-_]/g, '_')}.pdf`; + + writeFileSync(outputPath, pdf); + spinner.succeed(`Downloaded PDF: ${outputPath} (${(pdf.length / 1024).toFixed(1)} KB)`); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); program - .command('output [type]') - .description('Download compile output files (bbl, log, aux, etc.)') - .option('-o, --output ', 'Output path') - .option('--list', 'List available output files') - 
.option('--project ', 'Project name or ID') - .option('--cookie ', 'Session cookie override') - .action(async (type, options) => { - const spinner = ora('Compiling project...').start(); - try { - const client = await getClient(options.cookie); - - // If type looks like a project name (contains spaces or is in project list), treat it as project - let actualType = type; - let projectArg = options.project; - - if (type && !projectArg && !['bbl', 'log', 'aux', 'blg', 'pdf', 'out', 'fls', 'fdb_latexmk', 'stderr', 'pdfxref', 'chktex'].includes(type)) { - // Type might actually be a project name - const projects = await client.listProjects(); - const matchedProject = projects.find(p => p.name === type || p.id === type); - if (matchedProject) { - projectArg = type; - actualType = undefined; - } +.command('output [type]') +.description('Download compile output files (bbl, log, aux, etc.)') +.option('-o, --output ', 'Output path') +.option('--list', 'List available output files') +.option('--project ', 'Project name or ID') +.option('--cookie ', 'Session cookie override') +.action(async (type, options) => { + const spinner = ora('Compiling project...').start(); + try { + const client = await getClient(options.cookie); + + // If type looks like a project name (contains spaces or is in project list), treat it as project + let actualType = type; + let projectArg = options.project; + + if (type && !projectArg && !['bbl', 'log', 'aux', 'blg', 'pdf', 'out', 'fls', 'fdb_latexmk', 'stderr', 'pdfxref', 'chktex'].includes(type)) { + // Type might actually be a project name + const projects = await client.listProjects(); + const matchedProject = projects.find(p => p.name === type || p.id === type); + if (matchedProject) { + projectArg = type; + actualType = undefined; } + } - const proj = await resolveProject(client, projectArg); - const result = await client.compileWithOutputs(proj.id); + const proj = await resolveProject(client, projectArg); + const result = await 
client.compileWithOutputs(proj.id); - if (result.status !== 'success') { - spinner.warn(`Compilation ${result.status}, but output files may still be available`); - } + if (result.status !== 'success') { + spinner.warn(`Compilation ${result.status}, but output files may still be available`); + } - if (options.list || !actualType) { - spinner.stop(); - console.log(chalk.bold('Available output files:')); - for (const file of result.outputFiles) { - console.log(` ${chalk.cyan(file.type.padEnd(12))} ${file.path}`); - } - console.log(); - console.log(chalk.dim('Usage: olcli output ')); - console.log(chalk.dim('Example: olcli output bbl')); - return; + if (options.list || !actualType) { + spinner.stop(); + console.log(chalk.bold('Available output files:')); + for (const file of result.outputFiles) { + console.log(` ${chalk.cyan(file.type.padEnd(12))} ${file.path}`); } + console.log(); + console.log(chalk.dim('Usage: olcli output ')); + console.log(chalk.dim('Example: olcli output bbl')); + return; + } - // Find matching output file - const outputFile = result.outputFiles.find(f => f.type === actualType || f.path.endsWith(`.${actualType}`)); - if (!outputFile) { - spinner.fail(`Output file not found: ${actualType}`); - console.log(chalk.dim('Use --list to see available files')); - process.exit(1); - } + // Find matching output file + const outputFile = result.outputFiles.find(f => f.type === actualType || f.path.endsWith(`.${actualType}`)); + if (!outputFile) { + spinner.fail(`Output file not found: ${actualType}`); + console.log(chalk.dim('Use --list to see available files')); + process.exit(1); + } - spinner.text = `Downloading ${outputFile.path}...`; - const content = await client.downloadOutputFile(outputFile.url); - const outputPath = options.output || outputFile.path.replace('output.', ''); + spinner.text = `Downloading ${outputFile.path}...`; + const content = await client.downloadOutputFile(outputFile.url); + const outputPath = options.output || 
outputFile.path.replace('output.', ''); - writeFileSync(outputPath, content); - spinner.succeed(`Downloaded: ${outputPath} (${(content.length / 1024).toFixed(1)} KB)`); + writeFileSync(outputPath, content); + spinner.succeed(`Downloaded: ${outputPath} (${(content.length / 1024).toFixed(1)} KB)`); - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); - } - }); + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); // ───────────────────────────────────────────────────────────────────────────── // UPLOAD COMMANDS // ───────────────────────────────────────────────────────────────────────────── program - .command('upload [project]') - .description('Upload a file to a project') - .option('--folder ', 'Target folder ID (default: root)') - .option('--cookie ', 'Session cookie override') - .action(async (file, project, options) => { - const spinner = ora('Uploading file...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - if (!existsSync(file)) { - spinner.fail(`File not found: ${file}`); - process.exit(1); - } - - const content = readFileSync(file); - const fileName = basename(file); +.command('upload [project]') +.description('Upload a file to a project') +.option('--folder ', 'Target folder ID (default: root)') +.option('--cookie ', 'Session cookie override') +.action(async (file, project, options) => { + const spinner = ora('Uploading file...').start(); + try { + const client = await getClient(options.cookie); + const proj = await resolveProject(client, project); + + if (!existsSync(file)) { + spinner.fail(`File not found: ${file}`); + process.exit(1); + } - // Pass folder ID or null for root folder (client will compute it) - const folderId = options.folder || null; + const content = readFileSync(file); + const fileName = basename(file); - const result = await 
client.uploadFile(proj.id, folderId, fileName, content); + // Pass folder ID or null for root folder (client will compute it) + const folderId = options.folder || null; - if (result.success) { - spinner.succeed(`Uploaded: ${fileName} → "${proj.name}"`); - } else { - spinner.fail(`Upload failed for: ${fileName}`); - process.exit(1); - } + const result = await client.uploadFile(proj.id, folderId, fileName, content); - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); + if (result.success) { + spinner.succeed(`Uploaded: ${fileName} → "${proj.name}"`); + } else { + spinner.fail(`Upload failed for: ${fileName}`); process.exit(1); } - }); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); // NOTE: delete and rename commands are disabled - they require entity IDs // which are not exposed via the current Overleaf API without Socket.IO. @@ -515,586 +499,627 @@ program // ───────────────────────────────────────────────────────────────────────────── program - .command('compile [project]') - .description('Compile a project (trigger PDF generation)') - .option('--cookie ', 'Session cookie override') - .action(async (project, options) => { - const spinner = ora('Compiling...').start(); - try { - const client = await getClient(options.cookie); - const proj = await resolveProject(client, project); - - const result = await client.compileProject(proj.id); - spinner.succeed(`Compiled "${proj.name}"`); - console.log(chalk.dim(`PDF URL: ${result.pdfUrl}`)); - - setLastProject(proj.id); - } catch (error: any) { - spinner.fail(`Compilation failed: ${error.message}`); - process.exit(1); - } - }); +.command('compile [project]') +.description('Compile a project (trigger PDF generation)') +.option('--cookie ', 'Session cookie override') +.action(async (project, options) => { + const spinner = ora('Compiling...').start(); + try { + const client = await 
getClient(options.cookie); + const proj = await resolveProject(client, project); + + const result = await client.compileProject(proj.id); + spinner.succeed(`Compiled "${proj.name}"`); + console.log(chalk.dim(`PDF URL: ${result.pdfUrl}`)); + + setLastProject(proj.id); + } catch (error: any) { + spinner.fail(`Compilation failed: ${error.message}`); + process.exit(1); + } +}); // ───────────────────────────────────────────────────────────────────────────── // SYNC COMMANDS // ───────────────────────────────────────────────────────────────────────────── program - .command('pull [project] [dir]') - .description('Download project files to local directory') - .option('--force', 'Overwrite local files even if newer') - .option('--cookie ', 'Session cookie override') - .action(async (project, dir, options) => { - let targetDir = dir || '.'; - let projectId: string | undefined; - let projectName: string | undefined; - - // Check for existing .olcli.json if no project specified - const metaPath = join(targetDir, '.olcli.json'); - if (!project && existsSync(metaPath)) { - const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - projectId = meta.projectId; - projectName = meta.projectName; - } else if (!project) { - console.error(chalk.red('No project specified.')); - console.error('Usage: olcli pull [dir]'); - console.error('Or run from a directory with .olcli.json'); - process.exit(1); - } +.command('pull [project] [dir]') +.description('Download project files to local directory') +.option('--force', 'Overwrite local files even if newer') +.option('--cookie ', 'Session cookie override') +.action(async (project, dir, options) => { + let targetDir = dir || '.'; + let projectId: string | undefined; + let projectName: string | undefined; + + // Check for existing .olcli.json if no project specified + const metaPath = join(targetDir, '.olcli.json'); + if (!project && existsSync(metaPath)) { + const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); + projectId = 
meta.projectId; + projectName = meta.projectName; + } else if (!project) { + console.error(chalk.red('No project specified.')); + console.error('Usage: olcli pull [dir]'); + console.error('Or run from a directory with .olcli.json'); + process.exit(1); + } - const spinner = ora('Fetching project...').start(); - try { - const client = await getClient(options.cookie); + const spinner = ora('Fetching project...').start(); + try { + const client = await getClient(options.cookie); - // Resolve project if needed - if (!projectId) { - let proj = await client.getProjectById(project!); - if (!proj) { - proj = await client.getProject(project!); - } - if (!proj) { - spinner.fail(`Project not found: ${project}`); - process.exit(1); - } - projectId = proj.id; - projectName = proj.name; - // Default directory is project name (sanitized) if not specified - if (!dir) { - targetDir = proj.name.replace(/[^a-zA-Z0-9-_]/g, '_'); - } + // Resolve project if needed + if (!projectId) { + let proj = await client.getProjectById(project!); + if (!proj) { + proj = await client.getProject(project!); } + if (!proj) { + spinner.fail(`Project not found: ${project}`); + process.exit(1); + } + projectId = proj.id; + projectName = proj.name; + // Default directory is project name (sanitized) if not specified + if (!dir) { + targetDir = proj.name.replace(/[^a-zA-Z0-9-_]/g, '_'); + } + } - spinner.text = 'Downloading project...'; - const zipBuffer = await client.downloadProject(projectId); + spinner.text = 'Downloading project...'; + const zipBuffer = await client.downloadProject(projectId); - // Extract zip - spinner.text = 'Extracting files...'; - const AdmZip = (await import('adm-zip')).default; - const zip = new AdmZip(zipBuffer); + // Extract zip + spinner.text = 'Extracting files...'; + const AdmZip = (await import('adm-zip')).default; + const zip = new AdmZip(zipBuffer); - // Create target directory - if (!existsSync(targetDir)) { - mkdirSync(targetDir, { recursive: true }); - } + // Create 
target directory + if (!existsSync(targetDir)) { + mkdirSync(targetDir, { recursive: true }); + } - // Get local file modification times for safety check - const { statSync } = await import('node:fs'); - const localMetaPath = join(targetDir, '.olcli.json'); - let lastPull: Date | undefined; - if (existsSync(localMetaPath)) { - const meta = JSON.parse(readFileSync(localMetaPath, 'utf-8')); - lastPull = meta.lastPull ? new Date(meta.lastPull) : undefined; - } + // Get local file modification times for safety check + const { statSync } = await import('node:fs'); + const localMetaPath = join(targetDir, '.olcli.json'); + let lastPull: Date | undefined; + if (existsSync(localMetaPath)) { + const meta = JSON.parse(readFileSync(localMetaPath, 'utf-8')); + lastPull = meta.lastPull ? new Date(meta.lastPull) : undefined; + } - // Extract files with safety check - const entries = zip.getEntries(); - let fileCount = 0; - let skippedCount = 0; - const skippedFiles: string[] = []; + // Extract files with safety check + const entries = zip.getEntries(); + let fileCount = 0; + let skippedCount = 0; + const skippedFiles: string[] = []; - for (const entry of entries) { - if (!entry.isDirectory) { - const filePath = join(targetDir, entry.entryName); - const fileDir = dirname(filePath); - - // Check if local file exists and is newer than last pull - if (!options.force && existsSync(filePath) && lastPull) { - try { - const stats = statSync(filePath); - if (stats.mtime > lastPull) { - // Local file is newer - skip unless --force - skippedCount++; - skippedFiles.push(entry.entryName); - continue; - } - } catch (e) { - // File doesn't exist or can't stat, proceed with download - } - } + for (const entry of entries) { + if (!entry.isDirectory) { + const filePath = join(targetDir, entry.entryName); + const fileDir = dirname(filePath); - if (!existsSync(fileDir)) { - mkdirSync(fileDir, { recursive: true }); + // Check if local file exists and is newer than last pull + if (!options.force && 
existsSync(filePath) && lastPull) { + try { + const stats = statSync(filePath); + if (stats.mtime > lastPull) { + // Local file is newer - skip unless --force + skippedCount++; + skippedFiles.push(entry.entryName); + continue; + } + } catch (e) { + // File doesn't exist or can't stat, proceed with download } - writeFileSync(filePath, entry.getData()); - fileCount++; } - } - // Save project metadata - writeFileSync(join(targetDir, '.olcli.json'), JSON.stringify({ - projectId, - projectName, - lastPull: new Date().toISOString() - }, null, 2)); - - if (skippedCount > 0) { - spinner.warn(`Downloaded ${fileCount} files, skipped ${skippedCount} locally modified files`); - console.log(chalk.yellow(' Skipped (local is newer):')); - for (const f of skippedFiles.slice(0, 5)) { - console.log(chalk.dim(` ${f}`)); - } - if (skippedFiles.length > 5) { - console.log(chalk.dim(` ... and ${skippedFiles.length - 5} more`)); + if (!existsSync(fileDir)) { + mkdirSync(fileDir, { recursive: true }); } - console.log(chalk.dim(' Use --force to overwrite')); - } else { - spinner.succeed(`Downloaded ${fileCount} files to ${targetDir}/`); + writeFileSync(filePath, entry.getData()); + fileCount++; } + } - setLastProject(projectId); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); + // Save project metadata + writeFileSync(join(targetDir, '.olcli.json'), JSON.stringify({ + projectId, + projectName, + lastPull: new Date().toISOString() + }, null, 2)); + + if (skippedCount > 0) { + spinner.warn(`Downloaded ${fileCount} files, skipped ${skippedCount} locally modified files`); + console.log(chalk.yellow(' Skipped (local is newer):')); + for (const f of skippedFiles.slice(0, 5)) { + console.log(chalk.dim(` ${f}`)); + } + if (skippedFiles.length > 5) { + console.log(chalk.dim(` ... 
and ${skippedFiles.length - 5} more`)); + } + console.log(chalk.dim(' Use --force to overwrite')); + } else { + spinner.succeed(`Downloaded ${fileCount} files to ${targetDir}/`); } - }); + + setLastProject(projectId); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); + program - .command('push [dir]') - .description('Upload local changes to Overleaf project') - .option('--project ', 'Project name or ID (overrides .olcli.json)') - .option('--all', 'Upload all files (not just changed)') - .option('--dry-run', 'Show what would be uploaded without uploading') - .option('--probe-folder', 'Probe for correct folder ID (use if uploads fail with folder_not_found)') - .option('--cookie ', 'Session cookie override') - .action(async (dir, options) => { - const targetDir = dir || '.'; - const metaPath = join(targetDir, '.olcli.json'); - - // Check for project metadata - let projectId: string | undefined; - let projectName: string | undefined; - let lastPull: Date | undefined; - let rootFolderId: string | undefined; +.command('push [dir]') +.description('Upload local changes to Overleaf project') +.option('--project ', 'Project name or ID (overrides .olcli.json)') +.option('--all', 'Upload all files (not just changed)') +.option('--dry-run', 'Show what would be uploaded/deleted without changing anything') +.option('--probe-folder', 'Probe for correct folder ID') +.option('--cookie ', 'Session cookie override') +.action(async (dir, options) => { + const targetDir = dir || '.'; + const metaPath = join(targetDir, '.olcli.json'); + + // Check for project metadata + let projectId: string | undefined; + let projectName: string | undefined; + let lastPull: Date | undefined; + let rootFolderId: string | undefined; - if (existsSync(metaPath)) { - const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - projectId = meta.projectId; - projectName = meta.projectName; - lastPull = meta.lastPull ? 
new Date(meta.lastPull) : undefined; - rootFolderId = meta.rootFolderId; - } + if (existsSync(metaPath)) { + const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); + projectId = meta.projectId; + projectName = meta.projectName; + lastPull = meta.lastPull ? new Date(meta.lastPull) : undefined; + rootFolderId = meta.rootFolderId; + } - if (options.project) { - // Override with command line option - projectId = undefined; - projectName = options.project; - } + if (options.project) { + projectId = undefined; + projectName = options.project; + } - if (!projectId && !projectName) { - console.error(chalk.red('No project specified.')); - console.error('Either run from a directory with .olcli.json or use --project'); - process.exit(1); - } + if (!projectId && !projectName) { + console.error(chalk.red('No project specified.')); + console.error('Either run from a directory with .olcli.json or use --project'); + process.exit(1); + } - const spinner = ora('Connecting...').start(); - try { - const client = await getClient(options.cookie); + const spinner = ora('Connecting...').start(); + try { + const client = await getClient(options.cookie); - // Resolve project if needed - if (!projectId) { - let proj = await client.getProjectById(projectName!); - if (!proj) { - proj = await client.getProject(projectName!); - } - if (!proj) { - spinner.fail(`Project not found: ${projectName}`); - process.exit(1); - } - projectId = proj.id; - projectName = proj.name; + // Resolve project if needed + if (!projectId) { + let proj = await client.getProjectById(projectName!); + if (!proj) { + proj = await client.getProject(projectName!); } + if (!proj) { + spinner.fail(`Project not found: ${projectName}`); + process.exit(1); + } + projectId = proj.id; + projectName = proj.name; + } - spinner.text = 'Scanning files...'; + spinner.text = 'Scanning files...'; - // Get list of files to upload - const { readdirSync, statSync } = await import('node:fs'); + // Get list of files to upload + const { 
readdirSync, statSync } = await import('node:fs'); - const filesToUpload: { path: string; relativePath: string }[] = []; + const filesToUpload: { path: string; relativePath: string }[] = []; + const allLocalPaths = new Set(); - function scanDir(currentDir: string, relativeBase: string = '') { - const entries = readdirSync(currentDir, { withFileTypes: true }); - for (const entry of entries) { - // Skip hidden files and .olcli.json - if (entry.name.startsWith('.')) continue; + function scanDir(currentDir: string, relativeBase: string = '') { + const entries = readdirSync(currentDir, { withFileTypes: true }); + for (const entry of entries) { + // Skip hidden files and .olcli.json + if (entry.name.startsWith('.')) continue; - const fullPath = join(currentDir, entry.name); - const relativePath = relativeBase ? `${relativeBase}/${entry.name}` : entry.name; + const fullPath = join(currentDir, entry.name); + const relativePath = relativeBase ? `${relativeBase}/${entry.name}` : entry.name; - if (!entry.isDirectory() && entry.name === 'output.pdf') continue; + if (!entry.isDirectory() && entry.name === 'output.pdf') continue; - if (entry.isDirectory()) { - scanDir(fullPath, relativePath); + if (entry.isDirectory()) { + scanDir(fullPath, relativePath); + } else { + allLocalPaths.add(relativePath); + // Check if file is newer than last pull (unless --all) + if (options.all || !lastPull) { + filesToUpload.push({ path: fullPath, relativePath }); } else { - // Check if file is newer than last pull (unless --all) - if (options.all || !lastPull) { + const stats = statSync(fullPath); + if (stats.mtime > lastPull) { filesToUpload.push({ path: fullPath, relativePath }); - } else { - const stats = statSync(fullPath); - if (stats.mtime > lastPull) { - filesToUpload.push({ path: fullPath, relativePath }); - } } } } } + } - scanDir(targetDir); + scanDir(targetDir); - if (filesToUpload.length === 0) { - spinner.info('No files to upload'); - return; - } + // 
========================================== + // THE DELETION LOGIC + // ========================================== + const filesToDelete: { id: string; type: 'doc' | 'file' | 'folder' ; path: string }[] = []; - if (options.dryRun) { - spinner.stop(); - console.log(chalk.bold(`Would upload ${filesToUpload.length} file(s) to "${projectName}":`)); - for (const f of filesToUpload) { - console.log(` ${chalk.cyan(f.relativePath)}`); - } - return; - } + const projectInfo = await client.getProjectInfo(projectId); + if (projectInfo && projectInfo.rootFolder && projectInfo.rootFolder[0]) { - // If --probe-folder is set, or if we don't have a cached rootFolderId, try probing - if (options.probeFolder && !rootFolderId) { - spinner.text = 'Probing for correct folder ID...'; - rootFolderId = await client.probeRootFolderId(projectId!) ?? undefined; - if (rootFolderId) { - // Save the discovered folder ID - if (existsSync(metaPath)) { - const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - meta.rootFolderId = rootFolderId; - writeFileSync(metaPath, JSON.stringify(meta, null, 2)); - } - spinner.succeed(`Found root folder ID: ${rootFolderId}`); - spinner.start(`Uploading ${filesToUpload.length} file(s)...`); - } else { - spinner.fail('Could not find valid root folder ID'); - console.log(chalk.yellow('Try manually specifying rootFolderId in .olcli.json')); - process.exit(1); + // Helper function to flatten Overleaf's nested tree + function flattenRemoteTree(folder: any, currentPath: string = '') { + // Text files + for (const doc of folder.docs || []) { + const docPath = currentPath ? `${currentPath}/${doc.name}` : doc.name; + if (!allLocalPaths.has(docPath)) filesToDelete.push({ id: doc._id, type: 'doc', path: docPath }); + } + // Binary files (images, pdfs) + for (const file of folder.fileRefs || []) { + const filePath = currentPath ? 
`${currentPath}/${file.name}` : file.name; + if (!allLocalPaths.has(filePath)) filesToDelete.push({ id: file._id, type: 'file', path: filePath }); + } + // Subfolders + for (const sub of folder.folders || []) { + const subPath = currentPath ? `${currentPath}/${sub.name}` : sub.name; + flattenRemoteTree(sub, subPath); } } - // Fetch folder tree once so uploads go into correct subfolders - spinner.text = 'Resolving folder structure...'; - let folderTree = await client.getFolderTreeFromSocket(projectId!); - if (!folderTree) { - // Fallback: build minimal tree with just root - const resolvedRootId = rootFolderId || await client.getRootFolderId(projectId!); - folderTree = { '': resolvedRootId }; - } + flattenRemoteTree(projectInfo.rootFolder[0]); + } - spinner.text = `Uploading ${filesToUpload.length} file(s)...`; + // Early out + if (filesToUpload.length === 0 && filesToDelete.length === 0){ + spinner.succeed('No local changes to upload.'); + return; + } - let uploaded = 0; - let failed = 0; - let folderNotFoundCount = 0; + // Handle Dry Run + if (options.dryRun) { + spinner.stop(); + console.log(chalk.bold(`Would upload ${filesToUpload.length} file(s):`)); + filesToUpload.forEach(f => console.log(` ${chalk.green('+ ' + f.relativePath)}`)); - for (const file of filesToUpload) { - try { - const content = readFileSync(file.path); - await client.uploadFile(projectId!, rootFolderId || null, file.relativePath, content, folderTree); - uploaded++; - spinner.text = `Uploading... 
(${uploaded}/${filesToUpload.length})`; - } catch (error: any) { - console.error(chalk.yellow(`\n Warning: Failed to upload ${file.relativePath}: ${error.message}`)); - failed++; - if (error.message.includes('folder_not_found')) { - folderNotFoundCount++; - } + console.log(chalk.bold(`Would delete ${filesToDelete.length} remote file(s):`)); + filesToDelete.forEach(f => console.log(` ${chalk.red('- ' + f.path)}`)); + return; + } + + let deleted = 0; + let failed = 0; + let folderNotFoundCount = 0; + + // Execute Deletions + spinner.text = `Deleting ${filesToDelete.length} orphan files...`; + for (const file of filesToDelete) { + try { + await client.deleteEntity(projectId!, file.id, file.type); + deleted++; + spinner.text = `Deleting... (${deleted}/${filesToDelete.length})`; + } catch (error: any) { + console.error(chalk.yellow(`\nWarning: Failed to delete ${file.path}: ${error.message}`)); + failed++; + if (error.message.includes('folder_not_found')) { + folderNotFoundCount++; } } + } + // ========================================== - // Update last push time - if (existsSync(metaPath)) { - const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - meta.lastPush = new Date().toISOString(); - writeFileSync(metaPath, JSON.stringify(meta, null, 2)); - } - if (failed > 0) { - spinner.warn(`Uploaded ${uploaded} file(s), ${failed} failed`); - if (folderNotFoundCount > 0 && !rootFolderId) { - console.log(chalk.yellow(' Tip: Try running with --probe-folder to find the correct folder ID')); + // Fetch folder tree once so uploads go into correct subfolders + spinner.text = 'Resolving folder structure...'; + let folderTree = await client.getFolderTreeFromSocket(projectId!); + if (!folderTree) { + // Fallback: build minimal tree with just root + const resolvedRootId = rootFolderId || await client.getRootFolderId(projectId!); + folderTree = { '': resolvedRootId }; + } + + spinner.text = `Uploading ${filesToUpload.length} file(s)...`; + + let uploaded = 0; + + for (const file of 
filesToUpload) { + try { + const content = readFileSync(file.path); + await client.uploadFile(projectId!, rootFolderId || null, file.relativePath, content, folderTree); + uploaded++; + spinner.text = `Uploading... (${uploaded}/${filesToUpload.length})`; + } catch (error: any) { + console.error(chalk.yellow(`\n Warning: Failed to upload ${file.relativePath}: ${error.message}`)); + failed++; + if (error.message.includes('folder_not_found')) { + folderNotFoundCount++; } - } else { - spinner.succeed(`Uploaded ${uploaded} file(s) to "${projectName}"`); } - - setLastProject(projectId!); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); } - }); - -program - .command('sync [dir]') - .description('Pull then push (bidirectional sync)') - .option('--project ', 'Project name or ID') - .option('--verbose', 'Show detailed file operations') - .option('--cookie ', 'Session cookie override') - .action(async (dir, options) => { - const targetDir = dir || '.'; - - // Check if this is an existing project directory - const metaPath = join(targetDir, '.olcli.json'); - let projectId: string | undefined; - let projectName: string | undefined; + // Update last push time if (existsSync(metaPath)) { const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - projectId = meta.projectId; - projectName = meta.projectName; + meta.lastPush = new Date().toISOString(); + writeFileSync(metaPath, JSON.stringify(meta, null, 2)); } - if (options.project) { - projectName = options.project; - projectId = undefined; + if (failed > 0) { + spinner.warn(`Uploaded ${uploaded} file(s), deleted ${deleted} and ${failed} failed`); + if (folderNotFoundCount > 0 && !rootFolderId) { + console.log(chalk.yellow(' Tip: Try running with --probe-folder to find the correct folder ID')); + } + } else { + if(deleted ==0) { + spinner.succeed(`Uploaded ${uploaded} file(s) to "${projectName}"`); + }else if(uploaded ==0){ + spinner.succeed(`Deleted ${deleted} file(s) from 
"${projectName}"`); + }else{ + spinner.succeed(`Uploaded ${uploaded} file(s) to and deleted ${deleted} file(s)`); + } } - if (!projectId && !projectName) { - console.error(chalk.red('No project specified.')); - console.error('Either run from a directory with .olcli.json or use --project'); - process.exit(1); - } + setLastProject(projectId!); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); - const spinner = ora('Connecting...').start(); - try { - const client = await getClient(options.cookie); +program +.command('sync [dir]') +.description('Pull then push (bidirectional sync)') +.option('--project ', 'Project name or ID') +.option('--verbose', 'Show detailed file operations') +.option('--cookie ', 'Session cookie override') +.action(async (dir, options) => { + const targetDir = dir || '.'; + + // Check if this is an existing project directory + const metaPath = join(targetDir, '.olcli.json'); + let projectId: string | undefined; + let projectName: string | undefined; - // Resolve project - if (!projectId) { - let proj = await client.getProjectById(projectName!); - if (!proj) { - proj = await client.getProject(projectName!); - } - if (!proj) { - spinner.fail(`Project not found: ${projectName}`); - process.exit(1); - } - projectId = proj.id; - projectName = proj.name; - } + if (existsSync(metaPath)) { + const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); + projectId = meta.projectId; + projectName = meta.projectName; + } - // Step 1: Download current state - spinner.text = 'Downloading project...'; - const zipBuffer = await client.downloadProject(projectId); + if (options.project) { + projectName = options.project; + projectId = undefined; + } - const AdmZip = (await import('adm-zip')).default; - const zip = new AdmZip(zipBuffer); + if (!projectId && !projectName) { + console.error(chalk.red('No project specified.')); + console.error('Either run from a directory with .olcli.json or use --project'); + 
process.exit(1); + } - // Create target directory - if (!existsSync(targetDir)) { - mkdirSync(targetDir, { recursive: true }); - } + const spinner = ora('Connecting...').start(); + try { + const client = await getClient(options.cookie); - // Track local modifications - const localFiles = new Map(); - const { readdirSync, statSync } = await import('node:fs'); - - function scanLocalFiles(currentDir: string, relativeBase: string = '') { - if (!existsSync(currentDir)) return; - const entries = readdirSync(currentDir, { withFileTypes: true }); - for (const entry of entries) { - if (entry.name.startsWith('.')) continue; - const fullPath = join(currentDir, entry.name); - const relativePath = relativeBase ? `${relativeBase}/${entry.name}` : entry.name; - if (entry.isDirectory()) { - scanLocalFiles(fullPath, relativePath); - } else { - const stats = statSync(fullPath); - localFiles.set(relativePath, { - mtime: stats.mtime, - content: readFileSync(fullPath) - }); - } - } + // Resolve project + if (!projectId) { + let proj = await client.getProjectById(projectName!); + if (!proj) { + proj = await client.getProject(projectName!); } - - // Read local files before overwriting - if (existsSync(metaPath)) { - scanLocalFiles(targetDir); + if (!proj) { + spinner.fail(`Project not found: ${projectName}`); + process.exit(1); } + projectId = proj.id; + projectName = proj.name; + } + + // Step 1: Download current state + spinner.text = 'Downloading project...'; + const zipBuffer = await client.downloadProject(projectId); + + const AdmZip = (await import('adm-zip')).default; + const zip = new AdmZip(zipBuffer); + + // Create target directory + if (!existsSync(targetDir)) { + mkdirSync(targetDir, { recursive: true }); + } - // Extract remote files - const remoteFiles = new Map(); - for (const entry of zip.getEntries()) { - if (!entry.isDirectory) { - remoteFiles.set(entry.entryName, entry.getData()); + // Track local modifications + const localFiles = new Map(); + const { readdirSync, 
statSync } = await import('node:fs'); + + function scanLocalFiles(currentDir: string, relativeBase: string = '') { + if (!existsSync(currentDir)) return; + const entries = readdirSync(currentDir, { withFileTypes: true }); + for (const entry of entries) { + if (entry.name.startsWith('.')) continue; + const fullPath = join(currentDir, entry.name); + const relativePath = relativeBase ? `${relativeBase}/${entry.name}` : entry.name; + if (entry.isDirectory()) { + scanLocalFiles(fullPath, relativePath); + } else { + const stats = statSync(fullPath); + localFiles.set(relativePath, { + mtime: stats.mtime, + content: readFileSync(fullPath) + }); } } + } - // Merge: local changes take precedence for files modified after last pull - let lastPull: Date | undefined; - if (existsSync(metaPath)) { - const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); - lastPull = meta.lastPull ? new Date(meta.lastPull) : undefined; + // Read local files before overwriting + if (existsSync(metaPath)) { + scanLocalFiles(targetDir); + } + + // Extract remote files + const remoteFiles = new Map(); + for (const entry of zip.getEntries()) { + if (!entry.isDirectory) { + remoteFiles.set(entry.entryName, entry.getData()); } + } - const filesToUpload: { path: string; content: Buffer }[] = []; - const filesUpdatedLocally: string[] = []; - const filesKeptLocal: string[] = []; - const filesNewLocal: string[] = []; + // Merge: local changes take precedence for files modified after last pull + let lastPull: Date | undefined; + if (existsSync(metaPath)) { + const meta = JSON.parse(readFileSync(metaPath, 'utf-8')); + lastPull = meta.lastPull ? 
new Date(meta.lastPull) : undefined; + } - spinner.text = 'Comparing files...'; + const filesToUpload: { path: string; content: Buffer }[] = []; + const filesUpdatedLocally: string[] = []; + const filesKeptLocal: string[] = []; + const filesNewLocal: string[] = []; - // Write remote files, but preserve local modifications - for (const [path, remoteContent] of remoteFiles) { - const filePath = join(targetDir, path); - const fileDir = dirname(filePath); - if (!existsSync(fileDir)) { - mkdirSync(fileDir, { recursive: true }); - } + spinner.text = 'Comparing files...'; - const localFile = localFiles.get(path); - if (localFile && lastPull && localFile.mtime > lastPull) { - // Local file was modified after last pull - keep local, queue for upload if different - if (!localFile.content.equals(remoteContent)) { - filesToUpload.push({ path, content: localFile.content }); - filesKeptLocal.push(path); - } - // Don't overwrite local file - } else { - // Write remote version - writeFileSync(filePath, remoteContent); - filesUpdatedLocally.push(path); - } + // Write remote files, but preserve local modifications + for (const [path, remoteContent] of remoteFiles) { + const filePath = join(targetDir, path); + const fileDir = dirname(filePath); + if (!existsSync(fileDir)) { + mkdirSync(fileDir, { recursive: true }); } - // Check for new local files (not in remote) - for (const [path, localFile] of localFiles) { - if (path === 'output.pdf' || path.endsWith('/output.pdf')) { - continue; - } - if (!remoteFiles.has(path)) { + const localFile = localFiles.get(path); + if (localFile && lastPull && localFile.mtime > lastPull) { + // Local file was modified after last pull - keep local, queue for upload if different + if (!localFile.content.equals(remoteContent)) { filesToUpload.push({ path, content: localFile.content }); - filesNewLocal.push(path); + filesKeptLocal.push(path); } + // Don't overwrite local file + } else { + // Write remote version + writeFileSync(filePath, remoteContent); + 
filesUpdatedLocally.push(path); } + } - // Upload local changes - if (filesToUpload.length > 0) { - spinner.text = `Uploading ${filesToUpload.length} local change(s)...`; - for (const file of filesToUpload) { - await client.uploadFile(projectId, null, file.path, file.content); - } + // Check for new local files (not in remote) + for (const [path, localFile] of localFiles) { + if (path === 'output.pdf' || path.endsWith('/output.pdf')) { + continue; } + if (!remoteFiles.has(path)) { + filesToUpload.push({ path, content: localFile.content }); + filesNewLocal.push(path); + } + } - // Update metadata - writeFileSync(metaPath, JSON.stringify({ - projectId, - projectName, - lastPull: new Date().toISOString(), - lastSync: new Date().toISOString() - }, null, 2)); - - spinner.succeed(`Synced "${projectName}"`); - - // Summary - console.log(chalk.dim(` ↓ ${filesUpdatedLocally.length} pulled from remote`)); - console.log(chalk.dim(` ↑ ${filesToUpload.length} pushed to remote`)); - - if (options.verbose) { - if (filesKeptLocal.length > 0) { - console.log(chalk.yellow('\n Local changes pushed (local was newer):')); - for (const f of filesKeptLocal) { - console.log(chalk.dim(` ${f}`)); - } + // Upload local changes + if (filesToUpload.length > 0) { + spinner.text = `Uploading ${filesToUpload.length} local change(s)...`; + for (const file of filesToUpload) { + await client.uploadFile(projectId, null, file.path, file.content); + } + } + + // Update metadata + writeFileSync(metaPath, JSON.stringify({ + projectId, + projectName, + lastPull: new Date().toISOString(), + lastSync: new Date().toISOString() + }, null, 2)); + + spinner.succeed(`Synced "${projectName}"`); + + // Summary + console.log(chalk.dim(` ↓ ${filesUpdatedLocally.length} pulled from remote`)); + console.log(chalk.dim(` ↑ ${filesToUpload.length} pushed to remote`)); + + if (options.verbose) { + if (filesKeptLocal.length > 0) { + console.log(chalk.yellow('\n Local changes pushed (local was newer):')); + for (const f of 
filesKeptLocal) { + console.log(chalk.dim(` ${f}`)); } - if (filesNewLocal.length > 0) { - console.log(chalk.green('\n New local files pushed:')); - for (const f of filesNewLocal) { - console.log(chalk.dim(` ${f}`)); - } + } + if (filesNewLocal.length > 0) { + console.log(chalk.green('\n New local files pushed:')); + for (const f of filesNewLocal) { + console.log(chalk.dim(` ${f}`)); } } - - setLastProject(projectId); - } catch (error: any) { - spinner.fail(`Failed: ${error.message}`); - process.exit(1); } - }); + + setLastProject(projectId); + } catch (error: any) { + spinner.fail(`Failed: ${error.message}`); + process.exit(1); + } +}); // ───────────────────────────────────────────────────────────────────────────── // HELP // ───────────────────────────────────────────────────────────────────────────── const configCmd = program - .command('config') - .description('Manage olcli configuration'); +.command('config') +.description('Manage olcli configuration'); configCmd - .command('set-url ') - .description('Set the Overleaf instance base URL') - .action((url: string) => { - setBaseUrl(url); - console.log(chalk.green(`Base URL set to: ${url}`)); - }); +.command('set-url ') +.description('Set the Overleaf instance base URL') +.action((url: string) => { + setBaseUrl(url); + console.log(chalk.green(`Base URL set to: ${url}`)); +}); configCmd - .command('get-url') - .description('Get the current Overleaf instance base URL') - .action(() => { - console.log(getBaseUrl()); - }); +.command('get-url') +.description('Get the current Overleaf instance base URL') +.action(() => { + console.log(getBaseUrl()); +}); configCmd - .command('set-cookie-name ') - .description('Set the session cookie name (e.g. overleaf.sid for older instances)') - .action((name: string) => { - setSessionCookieName(name); - console.log(chalk.green(`Session cookie name set to: ${name}`)); - }); +.command('set-cookie-name ') +.description('Set the session cookie name (e.g. 
overleaf.sid for older instances)') +.action((name: string) => { + setSessionCookieName(name); + console.log(chalk.green(`Session cookie name set to: ${name}`)); +}); configCmd - .command('get-cookie-name') - .description('Get the current session cookie name') - .action(() => { - console.log(getSessionCookieName()); - }); +.command('get-cookie-name') +.description('Get the current session cookie name') +.action(() => { + console.log(getSessionCookieName()); +}); program - .command('check') - .description('Show credential sources and config path') - .action(() => { - console.log(chalk.bold('Configuration:')); - console.log(` Config file: ${getConfigPath()}`); - console.log(); - - console.log(chalk.bold('Credential sources (in order):')); - console.log(' 1. OVERLEAF_SESSION environment variable'); - console.log(' 2. .olauth file in current directory'); - console.log(' 3. Global config file'); - console.log(); - - const cookie = getSessionCookie(); - if (cookie) { - console.log(chalk.green('✓ Session cookie found')); - console.log(chalk.dim(` Value: ${cookie.substring(0, 20)}...`)); - } else { - console.log(chalk.yellow('✗ No session cookie found')); - } - }); +.command('check') +.description('Show credential sources and config path') +.action(() => { + console.log(chalk.bold('Configuration:')); + console.log(` Config file: ${getConfigPath()}`); + console.log(); + + console.log(chalk.bold('Credential sources (in order):')); + console.log(' 1. OVERLEAF_SESSION environment variable'); + console.log(' 2. .olauth file in current directory'); + console.log(' 3. 
Global config file'); + console.log(); + + const cookie = getSessionCookie(); + if (cookie) { + console.log(chalk.green('✓ Session cookie found')); + console.log(chalk.dim(` Value: ${cookie.substring(0, 20)}...`)); + } else { + console.log(chalk.yellow('✗ No session cookie found')); + } +}); program.parse(process.argv); diff --git a/src/client.ts b/src/client.ts index 96406c3..9104fb5 100644 --- a/src/client.ts +++ b/src/client.ts @@ -11,6 +11,19 @@ import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; import * as https from 'node:https'; import * as http from 'node:http'; +import { + getSessionCookie, + setSessionCookie, + getLastProject, + setLastProject, + getConfigPath, + saveOlAuth, + clearConfig, + getBaseUrl, + setBaseUrl, + getSessionCookieName, + setSessionCookieName +} from './config.js'; // Read version from package.json const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -60,6 +73,23 @@ export interface Credentials { baseUrl?: string; } +/** + * Helper to get authenticated client + */ +export async function getClient(cookieOpt?: string, baseUrlOpt?: string): Promise { + const cookie = cookieOpt || getSessionCookie(); + if (!cookie) { + console.error('No session cookie found.'); + console.error('Set one with: olcli auth --cookie '); + console.error('Or set OVERLEAF_SESSION environment variable'); + console.error('Or create .olauth file in current directory'); + process.exit(1); + } + const baseUrl = baseUrlOpt || getBaseUrl(); + const cookieName = getSessionCookieName(); + return OverleafClient.fromSessionCookie(cookie, baseUrl, cookieName); +} + export class OverleafClient { private cookies: Record; private csrf: string; @@ -101,7 +131,7 @@ export class OverleafClient { static async fromSessionCookie( sessionCookie: string, baseUrl: string = DEFAULT_BASE_URL, - cookieName: string = 'overleaf_session2' + cookieName: string = 'overleaf_session2' ): Promise { const cookies: Record = { [cookieName]: sessionCookie @@ 
-110,7 +140,7 @@ export class OverleafClient { // Fetch CSRF token from project page const initialHeaders: Record = { 'Cookie': Object.entries(cookies).map(([k, v]) => `${k}=${v}`).join('; '), - 'User-Agent': USER_AGENT + 'User-Agent': USER_AGENT }; const bootstrapClient = new OverleafClient({ cookies, csrf: 'bootstrap', baseUrl }); const response = await bootstrapClient.httpRequest(`${baseUrl}/project`, { @@ -200,7 +230,7 @@ export class OverleafClient { private async httpRequest(url: string, options: { method?: string; headers?: Record; - body?: string | Buffer; + body?: string | Buffer | FormData; timeoutMs?: number; maxRedirects?: number; expect?: 'text' | 'json' | 'buffer'; @@ -210,11 +240,27 @@ export class OverleafClient { const maxRedirects = options.maxRedirects ?? 5; const expect = options.expect ?? 'text'; + // Normalize FormData bodies into a multipart Buffer + headers using Node's + // built-in Web Fetch primitives. Keeps every code path on httpRequest + // (no fetch() reintroduction) while properly serializing multipart uploads. + let bodyBuffer: string | Buffer | undefined; + let extraHeaders: Record = {}; + if (options.body instanceof FormData) { + const req = new Request('http://x/', { method: 'POST', body: options.body }); + const arrayBuf = await req.arrayBuffer(); + bodyBuffer = Buffer.from(arrayBuf); + const ct = req.headers.get('content-type'); + if (ct) extraHeaders['Content-Type'] = ct; + extraHeaders['Content-Length'] = String(bodyBuffer.length); + } else if (options.body !== undefined) { + bodyBuffer = options.body as string | Buffer; + } + const doRequest = (reqUrl: string, redirectsLeft: number): Promise<{ status: number; ok: boolean; headers: Record; body: string | Buffer | any }> => { return new Promise((resolve, reject) => { const parsedUrl = new URL(reqUrl); const transport = parsedUrl.protocol === 'https:' ? 
https : http; - const headers = this.normalizeHeaders(options.headers); + const headers = this.normalizeHeaders({ ...extraHeaders, ...options.headers }); const req = transport.request(reqUrl, { method, headers }, (res) => { const status = res.statusCode || 0; @@ -254,8 +300,8 @@ export class OverleafClient { }); } - if (options.body) { - req.write(options.body); + if (bodyBuffer !== undefined) { + req.write(bodyBuffer); } req.end(); @@ -330,16 +376,38 @@ export class OverleafClient { // Filter out archived and trashed return projectsData - .filter((p: any) => !p.archived && !p.trashed) - .map((p: any) => ({ - id: p.id || p._id, - name: p.name, - lastUpdated: p.lastUpdated, - lastUpdatedBy: p.lastUpdatedBy, - owner: p.owner, - archived: p.archived, - trashed: p.trashed - })); + .filter((p: any) => !p.archived && !p.trashed) + .map((p: any) => ({ + id: p.id || p._id, + name: p.name, + lastUpdated: p.lastUpdated, + lastUpdatedBy: p.lastUpdatedBy, + owner: p.owner, + archived: p.archived, + trashed: p.trashed + })); + } + + /** + * Apply a Label to the current overleaf state + */ + async applyOverleafLabel(projectId: string, message: string, version: number): Promise { + await new Promise(resolve => setTimeout(resolve, 100)); + + const url = `${this.baseUrl}/project/${projectId}/labels`; + + const response = await fetch(url, { + method: 'POST', + headers: this.getHeaders(true), + body: JSON.stringify({ + comment: message, + version: version + }) + }); + + if (!response.ok) { + throw new Error(`Failed to create label: ${response.status}`); + } } /** @@ -405,6 +473,16 @@ export class OverleafClient { } } + // Fallback: Overleaf no longer ships the project tree in meta tags. + // Use the Socket.IO joinProjectResponse payload (same source used for + // root folder discovery) to retrieve the full project info. 
+ if (!projectInfo) { + const socketProject = await this.getProjectFromSocket(projectId); + if (socketProject) { + projectInfo = socketProject as ProjectInfo; + } + } + if (!projectInfo) { throw new Error('Could not parse project info'); } @@ -412,6 +490,87 @@ export class OverleafClient { return projectInfo; } + /** + * Fetch the full project object via the collaboration socket. + * Returns the `project` field of the joinProjectResponse, which contains + * the rootFolder tree and other metadata that used to live in ol-project. + */ + private async getProjectFromSocket(projectId: string): Promise { + let sid: string | null = null; + try { + const handshakeUrl = `${this.baseUrl}/socket.io/1/?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; + const handshakeResponse = await this.httpRequest(handshakeUrl, { + headers: { 'Cookie': this.getCookieHeader(), 'User-Agent': USER_AGENT }, + expect: 'text', + timeoutMs: 5000 + }); + if (!handshakeResponse.ok) return null; + this.applySetCookieHeaders(handshakeResponse.headers['set-cookie'] as string[] | undefined); + const handshakeBody = (handshakeResponse.body as string).trim(); + sid = handshakeBody.split(':')[0]; + if (!sid) return null; + + const buildPollUrl = () => + `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; + + for (let attempt = 0; attempt < 6; attempt++) { + const pollResponse = await this.httpRequest(buildPollUrl(), { + headers: { 'Cookie': this.getCookieHeader(), 'User-Agent': USER_AGENT }, + expect: 'text', + timeoutMs: 5000 + }); + if (!pollResponse.ok) return null; + this.applySetCookieHeaders(pollResponse.headers['set-cookie'] as string[] | undefined); + const packets = this.decodeSocketIoPayload(pollResponse.body as string); + for (const packet of packets) { + if (packet.startsWith('5:::')) { + try { + const payload = JSON.parse(packet.slice(4)); + if (payload?.name === 'joinProjectResponse' && payload?.args?.[0]?.project) { + 
return payload.args[0].project; + } + } catch { /* ignore */ } + } + if (packet.startsWith('2::')) { + const heartbeatResponse = await this.httpRequest(buildPollUrl(), { + method: 'POST', + headers: { + 'Cookie': this.getCookieHeader(), + 'User-Agent': USER_AGENT, + 'Content-Type': 'text/plain;charset=UTF-8' + }, + body: '2::', + expect: 'text', + timeoutMs: 5000 + }); + this.applySetCookieHeaders(heartbeatResponse.headers['set-cookie'] as string[] | undefined); + } + } + } + } catch { + // fall through + } finally { + if (sid) { + try { + const disconnectUrl = `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; + const disconnectResponse = await this.httpRequest(disconnectUrl, { + method: 'POST', + headers: { + 'Cookie': this.getCookieHeader(), + 'User-Agent': USER_AGENT, + 'Content-Type': 'text/plain;charset=UTF-8' + }, + body: '0::', + expect: 'text', + timeoutMs: 5000 + }); + this.applySetCookieHeaders(disconnectResponse.headers['set-cookie'] as string[] | undefined); + } catch { /* ignore */ } + } + } + return null; + } + /** * Download a URL as a Buffer using Node.js http/https modules. 
* @@ -640,7 +799,7 @@ export class OverleafClient { if (!sid) return null; const buildPollUrl = () => - `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; + `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; let discoveredRootFolderId: string | null = null; @@ -743,7 +902,7 @@ export class OverleafClient { if (!sid) return null; const buildPollUrl = () => - `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; + `${this.baseUrl}/socket.io/1/xhr-polling/${sid}?projectId=${encodeURIComponent(projectId)}&t=${Date.now()}`; for (let attempt = 0; attempt < 3; attempt++) { const pollResponse = await this.httpRequest(buildPollUrl(), { @@ -885,13 +1044,13 @@ export class OverleafClient { */ async probeRootFolderId(projectId: string): Promise { const candidates: string[] = []; - + // Method 1: Try projectId - 1 (most common) candidates.push(this.computeRootFolderId(projectId)); - + const prefix = projectId.slice(0, 16); const suffix = parseInt(projectId.slice(16), 16); - + // Method 2: Try a wide range around the project ID // Some projects have root folder created with different offsets for (let i = 2; i <= 50; i++) { @@ -1122,7 +1281,7 @@ export class OverleafClient { const projectInfo = await this.getProjectInfo(projectId); const normalizedTarget = targetPath.replace(/^\//, ''); - function searchFolder(folder: FolderEntry, currentPath: string): { id: string; type: 'doc' | 'file' | 'folder'; name: string } | null { + function searchFolder(folder: FolderEntry, currentPath: string): { id: string; type: 'doc' | 'file' | 'folder'; name: string } | null { // Check docs for (const doc of folder.docs || []) { const docPath = currentPath ? 
`${currentPath}/${doc.name}` : doc.name; @@ -1235,39 +1394,39 @@ export class OverleafClient { async downloadByPath(projectId: string, path: string): Promise { const normalizedPath = path.replace(/^\//, ''); - // First check if file exists - const entities = await this.getEntities(projectId); - const entityExists = entities.find(e => - e.path.replace(/^\//, '') === normalizedPath || - e.path === `/${normalizedPath}` - ); - - if (!entityExists) { - throw new Error(`File not found: ${path}`); - } - - // Try to find entity with ID for direct download - try { - const entity = await this.findEntityByPath(projectId, path); - if (entity && entity.type !== 'folder') { - return await this.downloadFile(projectId, entity.id, entity.type); - } - } catch (e) { - // Fall through to zip method - } - - // Fallback: download zip and extract the file - const zipBuffer = await this.downloadProject(projectId); - const AdmZip = (await import('adm-zip')).default; - const zip = new AdmZip(zipBuffer); - - for (const entry of zip.getEntries()) { - if (entry.entryName === normalizedPath || entry.entryName === path) { - return entry.getData(); - } - } - - throw new Error(`File not found in archive: ${path}`); + // First check if file exists + const entities = await this.getEntities(projectId); + const entityExists = entities.find(e => + e.path.replace(/^\//, '') === normalizedPath || + e.path === `/${normalizedPath}` + ); + + if (!entityExists) { + throw new Error(`File not found: ${path}`); + } + + // Try to find entity with ID for direct download + try { + const entity = await this.findEntityByPath(projectId, path); + if (entity && entity.type !== 'folder') { + return await this.downloadFile(projectId, entity.id, entity.type); + } + } catch (e) { + // Fall through to zip method + } + + // Fallback: download zip and extract the file + const zipBuffer = await this.downloadProject(projectId); + const AdmZip = (await import('adm-zip')).default; + const zip = new AdmZip(zipBuffer); + + for 
(const entry of zip.getEntries()) { + if (entry.entryName === normalizedPath || entry.entryName === path) { + return entry.getData(); + } + } + + throw new Error(`File not found in archive: ${path}`); } /** diff --git a/src/git-helper.ts b/src/git-helper.ts new file mode 100644 index 0000000..8b29f13 --- /dev/null +++ b/src/git-helper.ts @@ -0,0 +1,381 @@ +#!/usr/bin/env node +import * as readline from 'node:readline'; +import { mkdtempSync, rmSync, readFileSync, writeFileSync, readdirSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join, relative } from 'node:path'; +import AdmZip from 'adm-zip'; +import { execSync } from 'node:child_process'; +import { OverleafClient } from './client.js'; + +const { getClient } = await import('./client.js'); + + +async function main() { + + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false + }); + + let pendingImportRef = ''; + let pendingPushRef = ''; + + const parser = new GitProtocol(process.argv[3], process.argv[2]); + + for await (const line of rl) { + console.error(`[DEBUG] Git asked: ${line}`); + let argv = line.split(' '); + + switch (argv[0]){ + case "capabilities" : + console.log('import'); + //console.error(process.argv); + console.log('refspec refs/heads/*:refs/heads/*'); + //console.log(`refspec refs/heads/*:refs/remotes/${process.argv[2]}/*`); + console.log('option'); + console.log('list'); + console.log('push'); + console.log(''); + break; + case "option": + parser.runOption(argv); + break; + case "list": + parser.runList(argv); + break; + case "push": + pendingPushRef = argv[1].split(':')[1]; + break; + case "import": + pendingImportRef = argv[1]; + break; + + case "": + if (pendingImportRef !== '') { + await parser.runImport(pendingImportRef); + pendingImportRef = ''; + } else if (pendingPushRef !== '') { + await parser.runPush(pendingPushRef); + pendingPushRef = ''; + } else { + process.exit(0); + } + break; + } + } +} + + +class 
GitProtocol { + private remote: string; + private trackingRef: string;//const trackingRef = `refs/remotes/${remoteName}/${branchName}`; + private baseUrl: string; + private projectId: string; + private client?: OverleafClient; + + constructor(url: string, remote: string){ + this.remote = remote; + const urlT = url.split('/'); + this.projectId = urlT[urlT.length -1]; + this.baseUrl = urlT[0]+"//"+urlT[2]; + this.trackingRef = `refs/remotes/${remote}/main`; + + } + + /* + * Method handling the option request from git-remote-helper + */ + public runOption(argv: string[]): void {//TODO: Actually handle options + console.log("unsupported"); + } + /* + * Method handling the list request from git-remote-helper + */ + public runList(argv: string[]): void { + const isPushing = argv.includes('for-push'); + + if (isPushing) { + try { + const hash = this.getLocalCommitHash(this.trackingRef); + console.log(`${hash} refs/heads/main`); + } catch { + console.log(`? refs/heads/main`); + } + } else { + console.log(`? 
refs/heads/main`); + } + + console.log(`@refs/heads/main HEAD`); + console.log(''); + } + + /* + * Method handling the push request from git-remote-helper + */ + public async runPush(refToUpdate: string){ + + let commitsStr = ''; + try { + commitsStr = execSync(`git rev-list --reverse ${this.trackingRef}..${refToUpdate}`, { stdio: ['ignore', 'pipe', 'ignore'], encoding: 'utf8' }).trim(); + } catch (e) { + commitsStr = execSync(`git rev-list --reverse ${refToUpdate}`, { encoding: 'utf8' }).trim(); + } + + if (!commitsStr) { + console.log(`ok ${refToUpdate}`); + console.log(''); + return; + } + const commits = commitsStr.split('\n'); + + let tempDir = ''; + try { + if(!this.client) this.client = await getClient(); + + let project = await this.client.getProjectById(this.projectId); + if (!project) { + console.log(`error ${refToUpdate} Could not find project : ${this.projectId}`); + console.log(''); + return; + } + + const overleafTime = Math.floor(new Date(project.lastUpdated).getTime() / 1000); + const localTime = this.getLocalCommitTime(refToUpdate); + + if (overleafTime > localTime ){ + console.log(`error ${refToUpdate} Remote has newer changes. Please pull first.`); + console.log(''); + return; + }else{ + + const remoteFiles = new Map(); + const projectInfo = await this.client.getProjectInfo(this.projectId); + + if (projectInfo && projectInfo.rootFolder && projectInfo.rootFolder[0]) { + function buildFileMap(folder: any, currentPath: string = '') { + for (const doc of folder.docs || []) { + remoteFiles.set(currentPath ? `${currentPath}/${doc.name}` : doc.name, { id: doc._id, type: 'doc' }); + } + for (const file of folder.fileRefs || []) { + remoteFiles.set(currentPath ? `${currentPath}/${file.name}` : file.name, { id: file._id, type: 'file' }); + } + for (const sub of folder.folders || []) { + const subPath = currentPath ?
`${currentPath}/${sub.name}` : sub.name; + remoteFiles.set(subPath, { id: sub._id, type: 'folder' }); + buildFileMap(sub, subPath); + } + } + buildFileMap(projectInfo.rootFolder[0]); + } + + let folderTree = await this.client.getFolderTreeFromSocket(this.projectId); + if (!folderTree) folderTree = {}; + + for (const hash of commits) { + const commitMsg = execSync(`git show -s --format=%s ${hash}`, { encoding: 'utf8' }).trim(); + + const uploadStr = execSync(`git diff-tree --no-commit-id --name-only --diff-filter=ACMR -r ${hash}`, { encoding: 'utf8' }).trim(); + const filesToUpload = uploadStr ? uploadStr.split('\n') : []; + + const deleteStr = execSync(`git diff-tree --no-commit-id --name-only --diff-filter=D -r ${hash}`, { encoding: 'utf8' }).trim(); + const filesToDelete = deleteStr ? deleteStr.split('\n') : []; + + for (const file of filesToUpload) { + if ( file !== ".gitignore") { + try { + const content = execSync(`git show ${hash}:"${file}"`, { encoding: 'buffer' }); + await this.client.uploadFile(this.projectId!, null, file, content, folderTree); + } catch (error: any) { + console.log(`error ${refToUpdate} Failed to upload ${file}: ${error.message}`); + } + } + } + + for (const file of filesToDelete) { + const entity = remoteFiles.get(file); + if (!entity) { + console.log(`error ${refToUpdate} Failed to delete ${file}: Does not exist remotely`); + }else{ + try { + await this.client.deleteEntity(this.projectId!, entity.id, entity.type); + remoteFiles.delete(file); + } catch (error: any) { + console.log(`error ${refToUpdate} Failed to delete ${file}: ${error.message}`); + } + } + } + if(filesToDelete.length > 0) { + const folderEntries = Array.from(remoteFiles.entries()) + .filter(([path, entity]) => entity.type === 'folder'); + + folderEntries.sort(([pathA], [pathB]) => pathB.split('/').length - pathA.split('/').length); + + for (const [folderPath, entity] of folderEntries) { + const folderPrefix = folderPath + '/'; + + // Check if ANY key left in the map starts with this folder's path
+ if (! Array.from(remoteFiles.keys()).some( + key => key.startsWith(folderPrefix) + )) { + + try { + await this.client.deleteEntity(this.projectId, entity.id, 'folder'); + remoteFiles.delete(folderPath); + } catch (e) { + console.log(`error ${refToUpdate} Failed to delete folder ${folderPath}`); + } + } + } + } + + /* + try { + + const project = await this.client.getProjectInfo(this.projectId); + + //console.error(project.version); + await this.client.applyOverleafLabel(this.projectId, commitMsg, project.version || 0); + + } catch (err: any) { + console.error(` -> Warning: Failed to apply label '${commitMsg}'`); + } + */ + } + + console.log(`ok ${refToUpdate}`); + console.log(''); + } + + } catch (error: any) { + console.log(`error ${refToUpdate} Push failed: ${error.message}`); + console.log(''); + } + } + /* + * Method handling the import request from git-remote-helper + */ + public async runImport(refToUpdate: string){ + let tempDir = ''; + try { + if(!this.client) this.client = await getClient(); + + //this.branch = refToUpdate.split('/').pop() || 'main'; + //const trackingRef = `refs/remotes/${process.argv[2]}/${branchName}`; + + let project = await this.client.getProjectById(this.projectId); + if (!project) { + console.error(`\n[olcli] Error: Could not find project '${this.projectId}'`); + process.exit(1); + } + const overleafTime = Math.floor(new Date(project.lastUpdated).getTime() / 1000); + const localTime = this.getLocalCommitTime(this.trackingRef); + const hasLocalHistory = localTime > 0; + + console.error(overleafTime, localTime); + + //Checking if pulling is necessary + if (overleafTime === localTime) { + + const localHash = this.getLocalCommitHash(this.trackingRef); + + process.stdout.write(`feature done\n`); + process.stdout.write(`reset ${refToUpdate}\n`); + process.stdout.write(`from ${localHash}\n`); + process.stdout.write(`done\n`, () => { + console.log(''); + }); + + }else{ + //Downloading the zip file + const zipBuffer = await 
this.client.downloadProject(this.projectId); + + tempDir = mkdtempSync(join(tmpdir(), 'overleaf-sync-')); + const zipPath = join(tempDir, 'project.zip'); + const extractDir = join(tempDir, 'extracted'); + + writeFileSync(zipPath, zipBuffer); + const zip = new AdmZip(zipPath); + zip.extractAllTo(extractDir, true); + + function getFilesToImport(dir: string, fileList: string[] = []) { + const items = readdirSync(dir, { withFileTypes: true }); + for (const item of items) { + const fullPath = join(dir, item.name); + if (item.isDirectory()) { + getFilesToImport(fullPath, fileList); + } else { + fileList.push(fullPath); + } + } + return fileList; + } + + const files = getFilesToImport(extractDir); + const timestamp = overleafTime; + const commitMsg = "Sync from Overleaf\n"; + + let streamData = ''; + //streamData += `feature done\n`; + streamData += `commit ${refToUpdate}\n`; + streamData += `mark :1\n`; + streamData += `author Overleaf Sync ${timestamp} +0000\n`; + streamData += `committer Overleaf Sync ${timestamp} +0000\n`; + streamData += `data ${Buffer.byteLength(commitMsg, 'utf8')}\n`; + streamData += commitMsg; + + const parentHash = this.getLocalCommitHash(this.trackingRef); + if (parentHash) { + console.error(parentHash); + streamData += `from ${parentHash}\n`; + } + + + process.stdout.write(streamData); + + for (const filePath of files) { + let repoPath = relative(extractDir, filePath).replace(/\\/g, '/'); + + repoPath = repoPath.replace(/^\/+/, '').replace(/^\.\//, ''); + + const formattedPath = repoPath.includes(' ') ? 
`"${repoPath}"` : repoPath; + const content = readFileSync(filePath); + + process.stdout.write(`M 100644 inline ${formattedPath}\n`); + process.stdout.write(`data ${content.length}\n`); + process.stdout.write(content); + process.stdout.write(`\n`); + } + + process.stdout.write(`done\n`, () => { + console.log(''); + }); + } + + } catch (error: any) { + console.error(`\n[olcli] Error fetching from Overleaf: ${error.message}`); + process.exit(1); + } finally { + if (tempDir) { + rmSync(tempDir, { recursive: true, force: true }); + } + } + } + + private getLocalCommitHash(ref: string): string { + try { + return execSync(`git rev-parse ${ref}`, { stdio: ['ignore', 'pipe', 'ignore'], encoding: 'utf8' }).trim(); + } catch { + return ''; + } + } + + private getLocalCommitTime(ref: string): number { + try { + return parseInt(execSync(`git log -1 --format=%ct ${ref}`, { stdio: ['ignore', 'pipe', 'ignore'], encoding: 'utf8' }).trim(), 10); + } catch { + return 0; + } + } + +} + +main(); diff --git a/test/e2e.sh b/test/e2e.sh index d7101ba..b3f0560 100755 --- a/test/e2e.sh +++ b/test/e2e.sh @@ -18,6 +18,18 @@ TESTS_PASSED=0 TESTS_FAILED=0 CLEANUP_FILES=() CLEANUP_REMOTE_FILES=() +EXE="$(pwd)/dist/cli.js" + +if test -f "$EXE"; then + if ! [[ -x "$EXE" ]] + then + chmod +x "$EXE" + fi +else + echo "Binary file does not exist, compile first." + exit 1 +fi + # Test project name (override with OLCLI_E2E_PROJECT_NAME) PROJECT_NAME="${OLCLI_E2E_PROJECT_NAME:-olcli test}" @@ -58,16 +70,16 @@ run_test() { local name="$1" local cmd="$2" local expect_success="${3:-true}" - + TESTS_RUN=$((TESTS_RUN + 1)) - + echo -n " Testing: $name ... " - + local output local exit_code - + output=$(eval "$cmd" 2>&1) && exit_code=0 || exit_code=$?
- + if [ "$expect_success" = "true" ]; then if [ $exit_code -eq 0 ]; then echo -e "${GREEN}✓${NC}" @@ -103,16 +115,16 @@ run_test_with_output() { local name="$1" local cmd="$2" local expected_pattern="$3" - + TESTS_RUN=$((TESTS_RUN + 1)) - + echo -n " Testing: $name ... " - + local output local exit_code - + output=$(eval "$cmd" 2>&1) && exit_code=0 || exit_code=$? - + if [ $exit_code -eq 0 ] && echo "$output" | grep -qE "$expected_pattern"; then echo -e "${GREEN}✓${NC}" TESTS_PASSED=$((TESTS_PASSED + 1)) @@ -132,18 +144,18 @@ run_test_with_output() { # Cleanup function cleanup() { log_section "Cleanup" - + # Remove local temp files if [ -d "$TEST_DIR" ]; then log_info "Removing temp directory: $TEST_DIR" rm -rf "$TEST_DIR" fi - + # Remove remote test files (best effort) for file in "${CLEANUP_REMOTE_FILES[@]}"; do log_info "Note: Test file '$file' may remain on Overleaf (delete manually if needed)" done - + # Summary echo "" log_section "Test Results" @@ -152,7 +164,7 @@ cleanup() { echo -e " ${GREEN}Passed:${NC} $TESTS_PASSED" echo -e " ${RED}Failed:${NC} $TESTS_FAILED" echo "" - + if [ $TESTS_FAILED -eq 0 ]; then log_success "All tests passed! 🎉" exit 0 @@ -178,12 +190,12 @@ log_info "Test directory: $TEST_DIR" log_info "Project: $PROJECT_NAME" # Verify olcli is available -if ! command -v olcli &> /dev/null; then +if ! command -v $EXE &> /dev/null; then log_fail "olcli command not found. Run 'npm link' first." 
exit 1 fi -log_info "olcli version: $(olcli --version)" +log_info "olcli version: $($EXE --version)" ####################################### # Test: Authentication @@ -192,11 +204,11 @@ log_info "olcli version: $(olcli --version)" log_section "Authentication Tests" run_test_with_output "whoami returns user info" \ - "olcli whoami" \ + "$EXE whoami" \ "(Logged in as|Email:|Authenticated)" run_test "check shows config info" \ - "olcli check" + "$EXE check" ####################################### # Test: Project Listing @@ -205,17 +217,17 @@ run_test "check shows config info" \ log_section "Project Listing Tests" run_test "list shows target project" \ - "olcli list | grep -F \"$PROJECT_NAME\"" + "$EXE list | grep -F \"$PROJECT_NAME\"" run_test_with_output "list --json returns valid JSON" \ - "olcli list --json | jq -e 'type == \"array\"'" \ + "$EXE list --json | jq -e 'type == \"array\"'" \ "true" # Get project ID for later tests log_info "Waiting 5s before API calls to avoid rate limiting..." sleep 5 -PROJECT_ID=$(olcli list --json | jq -r --arg project_name "$PROJECT_NAME" '.[] | select(.name == $project_name) | .id') +PROJECT_ID=$($EXE list --json | jq -r --arg project_name "$PROJECT_NAME" '.[] | select(.name == $project_name) | .id') if [ -z "$PROJECT_ID" ]; then log_fail "Could not find '$PROJECT_NAME' project. Please create it on Overleaf first." 
exit 1 @@ -231,15 +243,15 @@ log_info "Using project ID directly to minimize API calls" log_section "Project Info Tests" run_test_with_output "info by name" \ - "olcli info '$PROJECT_NAME'" \ + "$EXE info '$PROJECT_NAME'" \ "(Project:|Files:)" run_test_with_output "info by ID" \ - "olcli info '$PROJECT_ID'" \ + "$EXE info '$PROJECT_ID'" \ "(Project:|Files:)" run_test_with_output "info --json returns valid JSON" \ - "olcli info '$PROJECT_ID' --json | jq -e '.project.id'" \ + "$EXE info '$PROJECT_ID' --json | jq -e '.project.id'" \ "$PROJECT_ID" ####################################### @@ -254,7 +266,7 @@ echo "$TEST_CONTENT" > "$TEST_FILE" CLEANUP_REMOTE_FILES+=("${TEST_ID}.txt") run_test "upload file to project" \ - "olcli upload '$TEST_FILE' '$PROJECT_ID'" + "$EXE upload '$TEST_FILE' '$PROJECT_ID'" # Create file in subfolder test TEST_FILE2="$TEST_DIR/${TEST_ID}_2.txt" @@ -262,7 +274,7 @@ echo "Second test file - $TEST_CONTENT" > "$TEST_FILE2" CLEANUP_REMOTE_FILES+=("${TEST_ID}_2.txt") run_test "upload second file" \ - "olcli upload '$TEST_FILE2' '$PROJECT_ID'" + "$EXE upload '$TEST_FILE2' '$PROJECT_ID'" ####################################### # Test: File Download (single file) @@ -273,7 +285,7 @@ log_section "File Download Tests" DOWNLOAD_FILE="$TEST_DIR/downloaded_${TEST_ID}.txt" run_test "download single file" \ - "olcli download '${TEST_ID}.txt' '$PROJECT_ID' -o '$DOWNLOAD_FILE'" + "$EXE download '${TEST_ID}.txt' '$PROJECT_ID' -o '$DOWNLOAD_FILE'" # Verify content matches TESTS_RUN=$((TESTS_RUN + 1)) @@ -299,7 +311,7 @@ sleep 1 # Rate limit # Download second uploaded file (project-agnostic check) DOWNLOAD_FILE2="$TEST_DIR/downloaded_${TEST_ID}_2.txt" run_test "download second uploaded file" \ - "olcli download '${TEST_ID}_2.txt' '$PROJECT_ID' -o '$DOWNLOAD_FILE2'" + "$EXE download '${TEST_ID}_2.txt' '$PROJECT_ID' -o '$DOWNLOAD_FILE2'" run_test_with_output "second downloaded content matches marker" \ "grep -F \"Second test file - $TEST_CONTENT\" 
'$DOWNLOAD_FILE2'" \ @@ -314,7 +326,7 @@ log_section "Zip Archive Tests" ZIP_FILE="$TEST_DIR/project.zip" run_test "download project as zip" \ - "olcli zip '$PROJECT_ID' -o '$ZIP_FILE'" + "$EXE zip '$PROJECT_ID' -o '$ZIP_FILE'" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: zip file is valid ... " @@ -346,7 +358,7 @@ fi log_section "Compile Tests" run_test_with_output "compile project" \ - "olcli compile '$PROJECT_ID'" \ + "$EXE compile '$PROJECT_ID'" \ "(success|failure|Compiled)" ####################################### @@ -360,7 +372,7 @@ PDF_FILE="$TEST_DIR/output.pdf" # Note: This may fail if compilation fails TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: download PDF ... " -if olcli pdf "$PROJECT_ID" -o "$PDF_FILE" 2>&1; then +if $EXE pdf "$PROJECT_ID" -o "$PDF_FILE" 2>&1; then if [ -f "$PDF_FILE" ] && [ -s "$PDF_FILE" ]; then # Check PDF magic bytes if head -c 4 "$PDF_FILE" | grep -q "%PDF"; then @@ -390,13 +402,13 @@ sleep 1 # Rate limit log_section "Output Files Tests" run_test_with_output "output --list shows files" \ - "olcli output --list --project '$PROJECT_ID'" \ + "$EXE output --list --project '$PROJECT_ID'" \ "(log|aux|pdf)" # Download log file LOG_FILE="$TEST_DIR/output.log" run_test "download log output" \ - "olcli output log -o '$LOG_FILE' --project '$PROJECT_ID'" + "$EXE output log -o '$LOG_FILE' --project '$PROJECT_ID'" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: log file has content ... " @@ -414,7 +426,7 @@ sleep 1 # Rate limit BBL_FILE="$TEST_DIR/output.bbl" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: download bbl output (optional) ... 
" -if olcli output bbl -o "$BBL_FILE" --project "$PROJECT_ID" > /dev/null 2>&1; then +if $EXE output bbl -o "$BBL_FILE" --project "$PROJECT_ID" > /dev/null 2>&1; then if [ -f "$BBL_FILE" ] && [ -s "$BBL_FILE" ]; then echo -e "${GREEN}✓${NC}" TESTS_PASSED=$((TESTS_PASSED + 1)) @@ -438,7 +450,7 @@ PULL_DIR="$TEST_DIR/pulled_project" mkdir -p "$PULL_DIR" run_test "pull project to directory" \ - "olcli pull '$PROJECT_ID' '$PULL_DIR' --force" + "$EXE pull '$PROJECT_ID' '$PULL_DIR' --force" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: .olcli.json created ... " @@ -489,16 +501,16 @@ sleep 1 touch "$PUSH_TEST_FILE" run_test "push --dry-run shows changes" \ - "cd '$PULL_DIR' && olcli push --dry-run" + "cd '$PULL_DIR' && $EXE push --dry-run" run_test "push uploads changes" \ - "cd '$PULL_DIR' && olcli push --all" + "cd '$PULL_DIR' && $EXE push --all" # Verify by downloading VERIFY_FILE="$TEST_DIR/verify_push.txt" sleep 2 # Give Overleaf a moment run_test "download pushed file" \ - "olcli download '${TEST_ID}_push.txt' '$PROJECT_ID' -o '$VERIFY_FILE'" + "$EXE download '${TEST_ID}_push.txt' '$PROJECT_ID' -o '$VERIFY_FILE'" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: pushed content matches ... " @@ -534,13 +546,13 @@ if [ -f "$PULL_DIR/.olcli.json" ]; then fi run_test "push recovers from stale rootFolderId" \ - "cd '$PULL_DIR' && olcli push" + "cd '$PULL_DIR' && $EXE push" # Verify recovery upload by downloading the new file VERIFY_RECOVER_FILE="$TEST_DIR/verify_push_recover.txt" sleep 2 # Give Overleaf a moment run_test "download recovered push file" \ - "olcli download '${TEST_ID}_push_recover.txt' '$PROJECT_ID' -o '$VERIFY_RECOVER_FILE'" + "$EXE download '${TEST_ID}_push_recover.txt' '$PROJECT_ID' -o '$VERIFY_RECOVER_FILE'" TESTS_RUN=$((TESTS_RUN + 1)) echo -n " Testing: recovered push content matches ... 
" @@ -573,7 +585,7 @@ mkdir -p "$SYNC_DIR" # Initial pull run_test "sync (initial pull)" \ - "olcli pull '$PROJECT_ID' '$SYNC_DIR' --force" + "$EXE pull '$PROJECT_ID' '$SYNC_DIR' --force" # Create local file SYNC_TEST_FILE="$SYNC_DIR/${TEST_ID}_sync.txt" @@ -582,13 +594,13 @@ echo "$SYNC_CONTENT" > "$SYNC_TEST_FILE" CLEANUP_REMOTE_FILES+=("${TEST_ID}_sync.txt") run_test "sync bidirectional" \ - "cd '$SYNC_DIR' && olcli sync" + "cd '$SYNC_DIR' && $EXE sync" # Verify upload SYNC_VERIFY="$TEST_DIR/verify_sync.txt" sleep 2 run_test "verify synced file exists" \ - "olcli download '${TEST_ID}_sync.txt' '$PROJECT_ID' -o '$SYNC_VERIFY'" + "$EXE download '${TEST_ID}_sync.txt' '$PROJECT_ID' -o '$SYNC_VERIFY'" # NOTE: delete and rename commands are disabled in olcli (require Socket.IO) # Delete test files manually via Overleaf web UI @@ -600,11 +612,11 @@ run_test "verify synced file exists" \ log_section "Error Handling Tests" run_test "download nonexistent file fails gracefully" \ - "olcli download 'nonexistent_file_xyz.tex' '$PROJECT_ID'" \ + "$EXE download 'nonexistent_file_xyz.tex' '$PROJECT_ID'" \ false run_test "info for nonexistent project fails gracefully" \ - "olcli info 'project_that_does_not_exist_xyz'" \ + "$EXE info 'project_that_does_not_exist_xyz'" \ false ####################################### @@ -615,7 +627,7 @@ log_section "Edge Case Tests" # Project by ID run_test "commands work with project ID" \ - "olcli info '$PROJECT_ID'" + "$EXE info '$PROJECT_ID'" # Special characters in filename (safe ones only) SPECIAL_FILE="$TEST_DIR/test-file_123.txt" @@ -623,10 +635,10 @@ echo "special filename test" > "$SPECIAL_FILE" CLEANUP_REMOTE_FILES+=("test-file_123.txt") run_test "upload file with dashes and underscores" \ - "olcli upload '$SPECIAL_FILE' '$PROJECT_ID'" + "$EXE upload '$SPECIAL_FILE' '$PROJECT_ID'" run_test "download file with dashes and underscores" \ - "olcli download 'test-file_123.txt' '$PROJECT_ID' -o '$TEST_DIR/dl_special.txt'" + "$EXE download 
'test-file_123.txt' '$PROJECT_ID' -o '$TEST_DIR/dl_special.txt'" ####################################### # Cleanup Note