code-finder

Version: 0.2.2

CLI to detect codespaces and update IDEs' recently-opened histories.

import process from 'node:process';
import * as p from '@clack/prompts';
import c from 'ansis';
import tildify from 'tildify';
import { resolve, join, basename } from 'pathe';
import { existsSync } from 'node:fs';
import { platform, homedir } from 'node:os';
import { execFile } from 'node:child_process';
import { fileURLToPath, pathToFileURL } from 'node:url';
import { promisify } from 'node:util';
import { isPackageExists } from 'local-pkg';
import { stat, access } from 'node:fs/promises';
import { dirname, join as join$1 } from 'node:path';

const pkgJson = { name: "code-finder", version: "0.2.2" };

const NAME = pkgJson.name;
const VERSION = pkgJson.version;
const MODE_CHOICES = ["update", "detect", "combine"];
const DEFAULT_OPTIONS = {
  mode: "update",
  ignorePaths: [],
  path: false,
  tildify: false,
  gitBranch: false,
  source: false,
  overwrite: true,
  json: false,
  yes: false
};
const JSON_MARKER = "<!-- code-finder -->";
const VERSION_CONTROL_DIRECTORIES = [".git", ".github", ".hg", ".svn"];
const IDE_DIRECTORIES = [".vscode", ".cursor", ".idea"];
const INSTALL_DIRECTORIES = ["node_modules"];
const WORKSPACE_FILES = [".gitignore", "README.md", "LICENSE", "LICENSE.md"];
const CONFIG_FILES = ["package.json", "pom.xml", "go.mod"];
const LOCK_FILES = [
  "npm-shrinkwrap.json",
  "package-lock.json",
  "pnpm-lock.yaml",
  "pnpm-workspace.yaml",
  "yarn.lock",
  "deno.lock",
  "bun.lock",
  "bun.lockb"
];
const IGNORE_DIRECTORIES = ["**/node_modules/**"];
const CODESPACE_DIRECTORIES = [
  ...VERSION_CONTROL_DIRECTORIES,
  ...IDE_DIRECTORIES,
  ...INSTALL_DIRECTORIES
];
const CODESPACE_FILES = [
  ...WORKSPACE_FILES,
  ...CONFIG_FILES,
  ...LOCK_FILES
];
const CODE_NAME_CHOICES = [
  "Code",
  "Code - Insiders",
  "VSCodium",
  "VSCodium - Insiders",
  "Cursor",
  "Windsurf"
];
const EDITOR_NAME_MAP = {
  "Code": "Visual Studio Code",
  "Code - Insiders": "Visual Studio Code - Insiders",
  "VSCodium": "VSCodium",
  "VSCodium - Insiders": "VSCodium - Insiders",
  "Cursor": "Cursor",
  "Windsurf": "Windsurf"
};

async function resolveConfig(options) {
  options = { ...DEFAULT_OPTIONS, ...options };
  const { ide = CODE_NAME_CHOICES, ignorePaths = [] } = options;
  if (options.mode === "update" || options.mode === "detect") {
    if (!options.cwd && !options.yes) {
      const result = await p.confirm({
        message: `Use ${c.yellow(tildify(process.cwd()))} as the working directory?`,
        initialValue: true
      });
      if (p.isCancel(result)) {
        p.outro(c.red`aborting`);
        process.exit(1);
      }
      options.cwd = process.cwd();
    }
  }
  if (!ide.length) {
    p.outro(c.red`--ide is required`);
    process.exit(1);
  }
  return {
    ...options,
    cwd: options.cwd ? resolve(options.cwd) : void 0,
    ide,
    ignorePaths: Array.isArray(ignorePaths) ? ignorePaths : [ignorePaths]
  };
}

const execFileAsync = promisify(execFile);
function capitalize(str) {
  return str.charAt(0).toUpperCase() + str.slice(1);
}
function normalizePath(path) {
  if (path.startsWith("file://")) {
    try {
      return fileURLToPath(path);
    } catch {
      return path;
    }
  }
  return path;
}
async function hasSqlite3() {
  try {
    await execFileAsync("sqlite3", ["--version"]);
    return true;
  } catch {
    return false;
  }
}
async function ensureSqlite3() {
  if (!await hasSqlite3()) {
    if (!isPackageExists("better-sqlite3")) {
      const spinner = p.spinner();
      spinner.start("Installing better-sqlite3");
      const { installPackage } = await import('@antfu/install-pkg');
      await installPackage("better-sqlite3", { silent: true });
      spinner.stop(c.green`Installed better-sqlite3`);
    }
  }
}
function extractJSON(stdout) {
  try {
    const lines = stdout.split("\n");
    const start = lines.findIndex((line) => line.includes(JSON_MARKER));
    const end = lines.slice(start + 1).findIndex((line) => line.includes(JSON_MARKER)) + start + 1;
    return JSON.parse(lines.slice(start + 1, end).join("\n"));
  } catch {
  }
}

const READ_SQL = "SELECT value FROM ItemTable WHERE key = 'history.recentlyOpenedPathsList'";
const WRITE_SQL = "INSERT OR REPLACE INTO ItemTable (key, value) VALUES ('history.recentlyOpenedPathsList', ?)";
function detectVSCodeDatabasePaths(codeName, path) {
  switch (platform()) {
    case "win32":
      return [
        `${process.env.APPDATA}/${codeName}/User/${path}`,
        `${process.env.USERPROFILE}/AppData/Roaming/${codeName}/User/${path}`
      ];
    case "darwin":
      return [
        `${process.env.HOME}/Library/Application Support/${codeName}/User/${path}`,
        `${homedir()}/Library/Application Support/${codeName}/User/${path}`
      ];
    default:
      return [
        `${process.env.HOME}/.config/${codeName}/User/${path}`,
        `${process.env.XDG_CONFIG_HOME || `${homedir()}/.config`}/${codeName}/User/${path}`,
        `${homedir()}/.config/${codeName}/User/${path}`
      ];
  }
}
async function detectVSCodeDatabase(codeName) {
  const paths = detectVSCodeDatabasePaths(codeName, join("globalStorage", "state.vscdb"));
  for (const path of paths) {
    try {
      if (existsSync(path)) {
        return path;
      }
    } catch {
      continue;
    }
  }
  return void 0;
}
async function readVSCodeDatabase(codeName) {
  const dbPath = await detectVSCodeDatabase(codeName);
  if (!dbPath) return;
  const editor = EDITOR_NAME_MAP[codeName];
  const spinner = p.spinner();
  spinner.start(`Reading ${editor}`);
  let db = null;
  try {
    if (await hasSqlite3()) {
      const { stdout } = await execFileAsync("sqlite3", [dbPath, READ_SQL]);
      const result = stdout.trim();
      if (!result) return;
      return JSON.parse(result);
    } else {
      const Sqlite3 = (await import('better-sqlite3')).default;
      db = new Sqlite3(dbPath, { readonly: true });
      const result = db.prepare(READ_SQL).get();
      if (!result) return;
      return JSON.parse(result.value);
    }
  } catch {
    spinner.stop(c.red`Failed to read ${editor}`);
  } finally {
    db?.close();
    spinner.stop(`Read ${editor}`);
  }
}
async function writeVSCodeDatabase(codeName, data) {
  const dbPath = await detectVSCodeDatabase(codeName);
  if (!dbPath) return;
  const editor = EDITOR_NAME_MAP[codeName];
  const spinner = p.spinner();
  spinner.start(`Updating ${editor}`);
  let db = null;
  try {
    if (await hasSqlite3()) {
      const histories = JSON.stringify(data);
      const sql = `${WRITE_SQL.replace("?", `'${histories.replace(/'/g, "''")}'`)}`;
      await execFileAsync("sqlite3", [dbPath, sql]);
    } else {
      const DB = (await import('better-sqlite3')).default;
      db = new DB(dbPath);
      const stmt = db.prepare(WRITE_SQL);
      stmt.run(JSON.stringify(data));
    }
  } catch {
    spinner.stop(c.red`Failed to update ${editor}`);
  } finally {
    db?.close();
    spinner.stop(`Updated ${editor}`);
  }
}
async function updateVSCodeHistories(codeName, entries, overwrite = true) {
  const data = { entries };
  if (overwrite) {
    await writeVSCodeDatabase(codeName, data);
    return;
  }
  const mergedEntries = await mergeVSCodeHistories(codeName, entries);
  await writeVSCodeDatabase(codeName, { entries: mergedEntries });
}
async function mergeVSCodeHistories(codeName, entries) {
  const data = { entries };
  const histories = await readVSCodeDatabase(codeName);
  if (!histories) {
    await writeVSCodeDatabase(codeName, data);
    return entries;
  }
  return uniqVSCodeHistories([histories?.entries ?? [], data.entries ?? []]);
}
function uniqVSCodeHistories(data) {
  const uri = /* @__PURE__ */ new Set();
  return data.flat().filter((entry) => {
    if (entry.folderUri) {
      if (!existsSync(normalizePath(entry.folderUri))) return false;
      if (!uri.has(entry.folderUri)) {
        uri.add(entry.folderUri);
        return true;
      } else {
        return false;
      }
    }
    if (entry.fileUri) {
      if (!existsSync(normalizePath(entry.fileUri))) return false;
      if (!uri.has(entry.fileUri)) {
        uri.add(entry.fileUri);
        return true;
      } else {
        return false;
      }
    }
    return true;
  });
}
const vscode = {
  read: readVSCodeDatabase,
  write: writeVSCodeDatabase,
  uniq: uniqVSCodeHistories,
  merge: mergeVSCodeHistories,
  update: updateVSCodeHistories
};

async function readDirs(path, ignorePaths) {
  const { glob } = await import('tinyglobby');
  return await glob("*/", {
    cwd: path,
    dot: true,
    onlyDirectories: true,
    absolute: true,
    ignore: [
      ...IGNORE_DIRECTORIES,
      ...ignorePaths
    ]
  });
}
async function isCodespace(path, ignorePaths) {
  const dirs = await readDirs(path, ignorePaths);
  const cleanDirs = dirs.map((dir) => basename(dir));
  const hasDir = CODESPACE_DIRECTORIES.find((dir) => cleanDirs.includes(dir));
  if (hasDir) return true;
  const hasFile = CODESPACE_FILES.find((file) => existsSync(join(path, file)));
  if (hasFile) return true;
  return dirs;
}
async function detectCodespaces(path, ignorePaths = []) {
  const entries = [];
  const res = await isCodespace(path, ignorePaths);
  if (typeof res === "boolean") {
    const fileURL = pathToFileURL(path);
    entries.push({ folderUri: fileURL.href.replace(/\/$/, "") });
    return entries;
  }
  for (const dir of res) {
    const space = await detectCodespaces(dir, ignorePaths);
    entries.push(...space);
  }
  return entries;
}

function outputHistories(data, json = false) {
  if (!data.length) return;
  if (json) {
    console.log(JSON_MARKER);
    console.log(JSON.stringify(data, null, 2));
    console.log(JSON_MARKER);
  } else {
    const lines = data.map((i) => {
      const path = i.path || i.folderUri || i.fileUri;
      return `- ${path}${i.branch ? c.reset` (${c.green(i.branch)})` : ""}`;
    });
    p.note(lines.join("\n"));
  }
}
async function getGitBranch(uri) {
  try {
    if (uri.startsWith("file://")) {
      uri = fileURLToPath(uri);
    }
    const stats = await stat(uri);
    if (!stats.isDirectory()) {
      uri = dirname(uri);
    }
    const gitDir = join$1(uri, ".git");
    const isGitRepo = await access(gitDir).then(() => true).catch(() => false);
    if (!isGitRepo) {
      return;
    }
    const { stdout } = await execFileAsync("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
      cwd: uri,
      encoding: "utf-8"
    });
    const branch = stdout.trim();
    return branch || void 0;
  } catch {
  }
}

async function executeCommand(options) {
  await ensureSqlite3();
  const config = await resolveConfig(options);
  const entriesRecords = /* @__PURE__ */ new Map();
  const traverse = async (data) => {
    if (!config.path && !config.tildify && !config.gitBranch && !config.source) return;
    const spinner = p.spinner();
    spinner.start("Traversing data");
    for (const entry of data) {
      const uri = entry.folderUri || entry.fileUri;
      if (config.path || config.tildify) entry.path = config.tildify ? tildify(normalizePath(uri)) : normalizePath(uri);
      if (config.gitBranch && entry.folderUri) {
        const branch = await getGitBranch(entry.folderUri);
        if (branch) entry.branch = branch;
      }
      if (config.source) entry.source = entriesRecords.get(uri) || [];
    }
    spinner.stop("Traversing data completed");
  };
  const recordEntries = (entries, source) => {
    entries.forEach((entry) => {
      const uri = entry.folderUri || entry.fileUri;
      if (entriesRecords.has(uri)) entriesRecords.get(uri)?.push(source);
      else entriesRecords.set(uri, [source]);
    });
  };
  const codespaces = config.cwd ? await detectCodespaces(config.cwd, config.ignorePaths) : [];
  recordEntries(codespaces, "Codespace");
  const codespacesInterceptor = () => {
    if (!codespaces.length) {
      p.outro(c.yellow`No codespaces found`);
      process.exit(0);
    }
  };
  switch (config.mode) {
    case "update": {
      codespacesInterceptor();
      for (const ide of config.ide) {
        if (CODE_NAME_CHOICES.includes(ide)) await vscode.update(ide, codespaces, config.overwrite);
      }
      return { config, data: codespaces };
    }
    case "detect": {
      codespacesInterceptor();
      await traverse(codespaces);
      outputHistories(codespaces, config.json);
      return { config, data: codespaces };
    }
    case "combine": {
      const entries = [codespaces];
      for (const ide of config.ide) {
        if (CODE_NAME_CHOICES.includes(ide)) {
          const data2 = await vscode.read(ide);
          if (data2) {
            entries.push(data2.entries);
            recordEntries(data2.entries, ide);
          }
        }
      }
      const data = vscode.uniq(entries);
      await traverse(data);
      outputHistories(data, config.json);
      return { config, data };
    }
  }
}

export { CONFIG_FILES as C, DEFAULT_OPTIONS as D, EDITOR_NAME_MAP as E, IDE_DIRECTORIES as I, JSON_MARKER as J, LOCK_FILES as L, MODE_CHOICES as M, NAME as N, VERSION as V, WORKSPACE_FILES as W, VERSION_CONTROL_DIRECTORIES as a, INSTALL_DIRECTORIES as b, IGNORE_DIRECTORIES as c, CODESPACE_DIRECTORIES as d, executeCommand as e, CODESPACE_FILES as f, CODE_NAME_CHOICES as g, uniqVSCodeHistories as h, isCodespace as i, detectCodespaces as j, execFileAsync as k, capitalize as l, mergeVSCodeHistories as m, normalizePath as n, hasSqlite3 as o, ensureSqlite3 as p, extractJSON as q, updateVSCodeHistories as u, vscode as v };
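
// --- Usage sketch (not part of the published bundle) ------------------------
// A minimal example of driving the `executeCommand` export (re-exported above
// under the alias `e`) from your own ESM script. The option names mirror
// DEFAULT_OPTIONS and MODE_CHOICES defined above; the import specifier is a
// placeholder, since the real chunk path depends on how the package exposes
// this file. It is kept commented out so it never executes if appended here.
//
//   import { e as executeCommand } from 'code-finder'; // hypothetical specifier
//
//   const { config, data } = await executeCommand({
//     mode: 'detect',              // one of MODE_CHOICES: 'update' | 'detect' | 'combine'
//     cwd: process.cwd(),          // root directory to scan for codespaces
//     ignorePaths: ['**/dist/**'], // extra globs merged with IGNORE_DIRECTORIES
//     gitBranch: true,             // annotate folder entries with their current branch
//     json: true,                  // print results wrapped in JSON_MARKER lines
//     yes: true                    // skip the interactive working-directory confirm
//   });
//
//   // `data` is a list of { folderUri } entries (plus `branch` when gitBranch is
//   // set): the same shape VS Code keeps under the
//   // 'history.recentlyOpenedPathsList' key in its state.vscdb database.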