// @stackmemoryai/stackmemory
// Lossless, project-scoped memory for AI coding tools. Durable context across
// sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch
// monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode
// wrappers, Linear sync, and more. (299 lines (298 loc), 11.3 kB, JavaScript)
// CJS-compat shim (bundler-injected): reconstruct __filename/__dirname, which
// do not exist natively in ES modules.
// NOTE(review): neither __filename nor __dirname appears to be used anywhere
// in this file — likely dead code left by the bundler; confirm before removing.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { Command } from "commander";
import chalk from "chalk";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
import {
WikiCompiler
} from "../../core/wiki/wiki-compiler.js";
import {
generateChronologicalDigest
} from "../../core/digest/chronological-digest.js";
/**
 * Resolve the wiki output directory from `.stackmemory/config.yaml`.
 *
 * Scrapes the indented `obsidian:` block with regexes (deliberately avoiding
 * a YAML dependency) for `vaultPath` and optional `subdir` keys.
 *
 * FIX: the original `vaultPath` pattern only matched when `vaultPath` was the
 * very first key under `obsidian:`; it now tolerates intervening keys (same
 * skip-group the `subdir` pattern already used), so key order no longer
 * matters. Also guards against an empty `vaultPath` value, which previously
 * produced a bogus relative path.
 *
 * @returns {string|null} absolute wiki directory, or null when not configured
 */
function resolveWikiDir() {
  const configPath = join(process.cwd(), ".stackmemory", "config.yaml");
  if (!existsSync(configPath)) return null;
  const content = readFileSync(configPath, "utf-8");
  // Skip over any other `key: value` lines inside the obsidian block before
  // the key we want; lazy quantifier so we stop at the first occurrence.
  const vaultMatch = content.match(
    /obsidian:\s*\n(?:\s+\w+:.*\n)*?\s+vaultPath:\s*["']?([^\n"']+)/
  );
  if (!vaultMatch) return null;
  const vaultPath = (vaultMatch[1] ?? "").trim();
  if (!vaultPath) return null; // empty value: treat as unconfigured
  const subdirMatch = content.match(
    /obsidian:\s*\n(?:\s+\w+:.*\n)*?\s+subdir:\s*["']?([^\n"']+)/
  );
  const subdir = subdirMatch?.[1]?.trim() || "stackmemory";
  return join(vaultPath, subdir, "wiki");
}
/**
 * Construct a WikiCompiler for the explicit override directory, or the one
 * resolved from project config. Prints guidance and terminates the process
 * when no wiki directory can be determined.
 *
 * @param {string|undefined} wikiDirOverride - value of --wiki-dir, if given
 * @returns {WikiCompiler} compiler bound to the resolved directory
 */
function getCompiler(wikiDirOverride) {
  const wikiDir = wikiDirOverride || resolveWikiDir();
  if (wikiDir) {
    return new WikiCompiler({ wikiDir });
  }
  console.error(
    chalk.red(
      "Wiki not configured. Set obsidian.vaultPath in .stackmemory/config.yaml"
    )
  );
  console.error(chalk.gray(" Or pass --wiki-dir <path>"));
  process.exit(1);
}
/**
 * Open the project's context database, lazily importing better-sqlite3 so
 * the dependency is only loaded when a DB-backed subcommand actually runs.
 * Exits with guidance when the project has not been initialized.
 *
 * @returns {Promise<object>} open better-sqlite3 Database handle
 */
async function openDb() {
  const dbPath = join(process.cwd(), ".stackmemory", "context.db");
  if (existsSync(dbPath)) {
    const { default: Database } = await import("better-sqlite3");
    return new Database(dbPath);
  }
  console.error(
    chalk.red('StackMemory not initialized. Run "stackmemory init" first.')
  );
  process.exit(1);
}
/**
 * Fetch wiki-relevant rows — closed-frame digests, entity states, and
 * anchors — recorded at or after a floor timestamp, newest first.
 *
 * @param {object} db - open better-sqlite3 Database handle
 * @param {{sinceEpoch?: number, limit?: number}} opts - query bounds;
 *   defaults: since the epoch, capped at 10000 rows per table
 * @returns {{digests: object[], entities: object[], anchors: object[]}}
 */
function queryContext(db, opts) {
  const floorEpoch = opts.sinceEpoch ?? 0;
  const rowCap = opts.limit ?? 10000;
  // Each statement binds the same (timestamp, limit) pair, in that order.
  const runQuery = (sql) => db.prepare(sql).all(floorEpoch, rowCap);
  const digests = runQuery(
    `SELECT frame_id, name as frame_name, type as frame_type,
            digest_text, created_at, closed_at
     FROM frames
     WHERE state = 'closed' AND digest_text IS NOT NULL
       AND created_at >= ?
     ORDER BY created_at DESC
     LIMIT ?`
  );
  const entities = runQuery(
    `SELECT entity_name, relation, value, context,
            source_frame_id, valid_from, superseded_at
     FROM entity_states
     WHERE valid_from >= ?
     ORDER BY valid_from DESC
     LIMIT ?`
  );
  const anchors = runQuery(
    `SELECT a.anchor_id, a.frame_id, f.name as frame_name,
            a.type, a.text, a.priority, a.created_at
     FROM anchors a
     JOIN frames f ON f.frame_id = a.frame_id
     WHERE a.created_at >= ?
     ORDER BY a.created_at DESC
     LIMIT ?`
  );
  return { digests, entities, anchors };
}
/**
 * Build the `wiki` CLI command tree: create, update, ingest, lint, search,
 * and status subcommands for the persistent LLM knowledge base.
 *
 * Fixes applied:
 *  - the delegated `create` parseAsync in `update` is now awaited (the
 *    promise was previously dropped, so its errors vanished silently);
 *  - `parseInt` calls pass an explicit radix of 10;
 *  - an unparseable `--since` date now errors out instead of silently
 *    querying with NaN (which matched nothing).
 *
 * @returns {Command} configured commander command, ready to attach
 */
function createWikiCommand() {
  const cmd = new Command("wiki").description(
    "LLM knowledge base \u2014 compile context into a persistent wiki"
  );
  cmd
    .command("create")
    .description("Generate wiki from all existing context")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option(
      "--period <period>",
      "Include session digest (today|yesterday|week)",
      "week"
    )
    .option("--json", "Output as JSON")
    .action(async (options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      const db = await openDb();
      const ctx = queryContext(db, {});
      let sessionDigest;
      try {
        // Most recent frame's project id; fall back when the table is empty.
        const projectId =
          db
            .prepare(
              `SELECT project_id FROM frames ORDER BY created_at DESC LIMIT 1`
            )
            .get()?.project_id || "default";
        const content = generateChronologicalDigest(
          db,
          options.period,
          projectId
        );
        sessionDigest = {
          period: options.period,
          content,
          generatedAt: Date.now()
        };
      } catch {
        // Session digest is best-effort; the wiki is still built without it.
      }
      db.close();
      const result = await compiler.create({
        ...ctx,
        sessionDigest
      });
      if (options.json) {
        console.log(JSON.stringify(result, null, 2));
        return;
      }
      console.log(chalk.green("\nWiki created."));
      console.log(chalk.gray(` Digests compiled: ${ctx.digests.length}`));
      console.log(chalk.gray(` Entity states: ${ctx.entities.length}`));
      console.log(chalk.gray(` Anchors processed: ${ctx.anchors.length}`));
      console.log(chalk.gray(` Articles created: ${result.created.length}`));
      console.log(chalk.gray(` Total articles: ${result.totalArticles}`));
    });
  cmd
    .command("update")
    .description("Incrementally update wiki with new context")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option(
      "--since <date>",
      "Update from this date (ISO format, default: last compile)"
    )
    .option("--json", "Output as JSON")
    .action(async (options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      const db = await openDb();
      let sinceEpoch;
      if (options.since) {
        const parsedMs = new Date(options.since).getTime();
        if (Number.isNaN(parsedMs)) {
          // FIX: previously a bad date yielded NaN and silently matched no rows.
          console.error(chalk.red(`Invalid --since date: ${options.since}`));
          db.close();
          process.exit(1);
        }
        sinceEpoch = Math.floor(parsedMs / 1e3);
      } else {
        const lastCompile = compiler.getLastCompileTime();
        sinceEpoch = lastCompile ?? 0;
      }
      if (sinceEpoch === 0) {
        console.log(
          chalk.yellow(
            "No previous compile found. Running full create instead."
          )
        );
        db.close();
        // FIX: await the delegated subcommand — the original dropped this
        // promise, so failures in the full create were invisible.
        // NOTE(review): the args include the literal "create" with
        // { from: "user" }; commander treats these as the subcommand's own
        // user args — confirm the extra token is intentional.
        await cmd.commands
          .find((c) => c.name() === "create")
          ?.parseAsync(["create", "--wiki-dir", compiler["config"].wikiDir], {
            from: "user"
          });
        return;
      }
      const ctx = queryContext(db, { sinceEpoch });
      db.close();
      if (
        ctx.digests.length === 0 &&
        ctx.entities.length === 0 &&
        ctx.anchors.length === 0
      ) {
        console.log(chalk.yellow("No new context since last compile."));
        return;
      }
      const result = await compiler.update(ctx);
      if (options.json) {
        console.log(JSON.stringify(result, null, 2));
        return;
      }
      console.log(chalk.green("\nWiki updated."));
      console.log(chalk.gray(` New digests: ${ctx.digests.length}`));
      console.log(chalk.gray(` New entity states: ${ctx.entities.length}`));
      console.log(chalk.gray(` New anchors: ${ctx.anchors.length}`));
      console.log(chalk.gray(` Articles created: ${result.created.length}`));
      console.log(chalk.gray(` Articles updated: ${result.updated.length}`));
      console.log(chalk.gray(` Total articles: ${result.totalArticles}`));
    });
  cmd
    .command("ingest <source>")
    .description("Ingest a URL or local path into the wiki")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option("-n, --max-pages <n>", "Max pages to crawl for URLs", "20")
    .option("--json", "Output as JSON")
    .action(async (source, options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      const isUrl =
        source.startsWith("http://") || source.startsWith("https://");
      let result;
      if (isUrl) {
        console.log(
          chalk.cyan(`Crawling ${source} (max ${options.maxPages} pages)...`)
        );
        result = await compiler.ingestUrl(source, {
          // FIX: explicit radix — user-supplied strings should never be
          // interpreted as octal/hex.
          maxPages: parseInt(options.maxPages, 10)
        });
      } else {
        console.log(chalk.cyan(`Ingesting ${source}...`));
        result = await compiler.ingestPath(source);
      }
      if (options.json) {
        console.log(JSON.stringify(result, null, 2));
        return;
      }
      console.log(chalk.green("\nIngested."));
      console.log(chalk.gray(` Articles created: ${result.created.length}`));
      console.log(chalk.gray(` Total articles: ${result.totalArticles}`));
      if (result.created.length > 0) {
        console.log(chalk.gray("\n Created:"));
        result.created
          .slice(0, 10)
          .forEach((p) => console.log(chalk.gray(` - ${p}`)));
        if (result.created.length > 10) {
          console.log(
            chalk.gray(` ...and ${result.created.length - 10} more`)
          );
        }
      }
    });
  cmd
    .command("lint")
    .description("Health check the wiki for issues")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option("--json", "Output as JSON")
    .action(async (options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      const result = await compiler.lint();
      if (options.json) {
        console.log(JSON.stringify(result, null, 2));
        return;
      }
      console.log(chalk.cyan("\nWiki Lint Report"));
      console.log(chalk.gray(` Total articles: ${result.totalArticles}`));
      if (result.orphans.length > 0) {
        console.log(
          chalk.yellow(
            `\n Orphan pages (no inbound links): ${result.orphans.length}`
          )
        );
        result.orphans
          .slice(0, 10)
          .forEach((o) => console.log(chalk.gray(` - ${o}`)));
        if (result.orphans.length > 10) {
          console.log(
            chalk.gray(` ...and ${result.orphans.length - 10} more`)
          );
        }
      }
      if (result.brokenLinks.length > 0) {
        console.log(
          chalk.red(`\n Broken links: ${result.brokenLinks.length}`)
        );
        result.brokenLinks
          .slice(0, 10)
          .forEach((l) =>
            console.log(chalk.gray(` - ${l.source} -> ${l.target}`))
          );
      }
      if (result.stale.length > 0) {
        console.log(
          chalk.yellow(`\n Stale articles (>30 days): ${result.stale.length}`)
        );
        result.stale
          .slice(0, 10)
          .forEach((s) => console.log(chalk.gray(` - ${s}`)));
      }
      if (
        result.orphans.length === 0 &&
        result.brokenLinks.length === 0 &&
        result.stale.length === 0
      ) {
        console.log(chalk.green("\n No issues found."));
      }
    });
  cmd
    .command("search <query>")
    .description("Search wiki articles by keyword")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option("-n, --limit <n>", "Max results", "20")
    .option("--json", "Output as JSON")
    .action(async (query, options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      // FIX: explicit radix for the user-supplied limit.
      const results = compiler
        .search(query)
        .slice(0, parseInt(options.limit, 10));
      if (options.json) {
        console.log(JSON.stringify(results, null, 2));
        return;
      }
      if (results.length === 0) {
        console.log(chalk.yellow(`No results for "${query}".`));
        return;
      }
      console.log(
        chalk.cyan(`\n Search: "${query}" \u2014 ${results.length} results\n`)
      );
      for (const r of results) {
        console.log(` ${chalk.white(r.title)}`);
        console.log(chalk.gray(` ${r.path} (${r.matches} matches)`));
      }
    });
  cmd
    .command("status")
    .description("Show wiki statistics")
    .option("--wiki-dir <path>", "Override wiki directory")
    .option("--json", "Output as JSON")
    .action(async (options) => {
      const compiler = getCompiler(options.wikiDir);
      await compiler.initialize();
      const status = compiler.getStatus();
      if (options.json) {
        console.log(JSON.stringify(status, null, 2));
        return;
      }
      console.log(chalk.cyan("\nWiki Status"));
      console.log(chalk.gray(` Total articles: ${status.totalArticles}`));
      for (const [cat, count] of Object.entries(status.byCategory)) {
        console.log(chalk.gray(` ${cat}: ${count}`));
      }
      if (status.lastCompile) {
        console.log(chalk.gray(` Last compile: ${status.lastCompile}`));
      } else {
        console.log(chalk.gray(` Last compile: never`));
      }
    });
  return cmd;
}
export {
createWikiCommand
};