@stackmemoryai/stackmemory
Version:
Project-scoped memory for AI coding tools. Durable context across sessions with MCP integration, frames, smart retrieval, Claude Code skills, and automatic hooks.
752 lines (751 loc) • 28 kB
JavaScript
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { Command } from "commander";
import { join } from "path";
import { existsSync } from "fs";
import { logger } from "../../core/monitoring/logger.js";
/**
 * Build the `rehydrate` subcommand with all of its flags wired to
 * handleContextRehydrate.
 * @returns {Command} Configured commander subcommand.
 */
function createContextRehydrateCommand() {
  const rehydrate = new Command("rehydrate");
  rehydrate.description("Enhanced context rehydration after Claude compaction");
  // Flag table keeps the option list readable; order matches help output.
  const flags = [
    ["-c, --checkpoint <id>", "Use specific checkpoint ID"],
    ["--create", "Create new rehydration checkpoint"],
    ["-l, --list", "List available checkpoints"],
    ["-v, --verbose", "Verbose output"],
    ["--verify", "Verify checkpoint contents and integrity"],
    ["--with-traces", "Include stack trace context in output"],
    ["--traces", "Show recent stack traces from database"],
    ["--trace-stats", "Show stack trace statistics and patterns"]
  ];
  for (const [flag, help] of flags) {
    rehydrate.option(flag, help);
  }
  rehydrate.action((options) => handleContextRehydrate(options));
  return rehydrate;
}
/**
 * Entry point for the `rehydrate` subcommand. Dispatches to the mode selected
 * by the flags (list/create/verify/traces/stats) or performs a rehydration.
 * Exits the process with code 1 on unexpected errors.
 * @param {object} options - Parsed commander options.
 */
async function handleContextRehydrate(options) {
  const dbPath = join(process.cwd(), ".stackmemory", "context.db");
  if (!existsSync(dbPath)) {
    console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
    return;
  }
  try {
    console.log("\u{1F504} Enhanced Context Rehydration System");
    console.log("\u{1F4DA} This system preserves rich context across Claude compactions\n");
    // Mode flags are mutually exclusive; first match wins.
    if (options.list) return void await listCheckpoints();
    if (options.create) {
      console.log("\u{1F504} Creating rehydration checkpoint...");
      return void await createRehydrationCheckpoint(options.withTraces);
    }
    if (options.verify) return void await verifyCheckpoints(options.checkpoint);
    if (options.traces) return void await showStackTraces();
    if (options.traceStats) return void await showStackTraceStats();
    // Default mode: rehydrate from the given (or latest) checkpoint.
    console.log("\u{1F4BE} Starting context rehydration...");
    if (options.verbose) {
      console.log("\u{1F4CB} Analyzing current session state...");
    }
    const rehydrated = await performRehydration(options.checkpoint);
    if (!rehydrated) {
      console.log("\u26A0\uFE0F Context rehydration failed");
      console.log("\u{1F4A1} Try creating a checkpoint first with --create");
      return;
    }
    console.log("\u2705 Context successfully rehydrated");
    console.log("\u{1F4CA} Rich context has been injected into current session");
    if (options.verbose) {
      console.log("\n\u{1F4C1} Context includes:");
      for (const item of [
        "File snapshots with content previews",
        "Project structure mapping",
        "Previous decisions and reasoning",
        "Active workflow detection",
        "User preferences and pain points"
      ]) {
        console.log(` \u2022 ${item}`);
      }
    }
  } catch (err) {
    logger.error("Context rehydration error:", err);
    console.error("\u274C Failed to rehydrate context:", err instanceof Error ? err.message : err);
    process.exit(1);
  }
}
/**
 * Create and persist a rehydration checkpoint under .stackmemory/rehydration.
 * Fixes: the original wrote the checkpoint file twice (first write happened
 * before the verification fields were filled in) and captured stack traces a
 * second time inside detectErrorPatterns(). Verification is now computed
 * up front and the file is written exactly once.
 * @param {boolean} [withTraces=false] - Also capture stack traces and error patterns.
 * @throws Re-throws any filesystem error after logging it.
 */
async function createRehydrationCheckpoint(withTraces = false) {
  const fs = await import("fs/promises");
  const checkpointDir = join(process.cwd(), ".stackmemory", "rehydration");
  try {
    await fs.mkdir(checkpointDir, { recursive: true });
    const checkpointId = `checkpoint_${Date.now()}`;
    const recentFiles = await getRecentFiles();
    const stackTraces = withTraces ? await captureStackTraces() : [];
    const checkpoint = {
      id: checkpointId,
      timestamp: Date.now(),
      created_at: new Date().toISOString(),
      working_directory: process.cwd(),
      recent_files: recentFiles,
      project_context: await analyzeProjectContext(),
      session_info: {
        pid: process.pid,
        env: {
          NODE_ENV: process.env.NODE_ENV,
          PWD: process.env.PWD
        }
      },
      stack_traces: stackTraces,
      // Reuse the traces captured above instead of re-capturing everything.
      error_patterns: withTraces ? await detectErrorPatterns(stackTraces) : [],
      verification: {
        files_captured: recentFiles.length,
        total_size: recentFiles.reduce((sum, file) => sum + file.size, 0),
        integrity_hash: ""
      }
    };
    // The hash only covers id/timestamp/counts/project_context, none of which
    // change after this point, so computing it before the write is safe.
    checkpoint.verification.integrity_hash = await calculateCheckpointHash(checkpoint);
    const checkpointPath = join(checkpointDir, `${checkpointId}.json`);
    await fs.writeFile(checkpointPath, JSON.stringify(checkpoint, null, 2));
    console.log(`\u2705 Created checkpoint: ${checkpointId}`);
    console.log(`\u{1F4C1} Saved to: ${checkpointPath}`);
    console.log(`\u{1F4CA} Captured ${checkpoint.recent_files.length} recent files`);
    if (withTraces) {
      console.log(`\u{1F41B} Captured ${checkpoint.stack_traces.length} stack traces`);
      console.log(`\u{1F50D} Detected ${checkpoint.error_patterns.length} error patterns`);
    }
  } catch (error) {
    console.error("\u274C Failed to create checkpoint:", error);
    throw error;
  }
}
/**
 * Load a rehydration checkpoint and print its context summary.
 * Fix: checkpoints without a `project_context` field used to crash the
 * summary (TypeError) and report a spurious failure; now guarded with
 * optional chaining.
 * @param {string} [checkpointId] - Specific checkpoint id; when omitted, the
 *   lexicographically newest checkpoint file is used.
 * @returns {Promise<boolean>} true when a checkpoint was loaded and summarized.
 */
async function performRehydration(checkpointId) {
  const fs = await import("fs/promises");
  const checkpointDir = join(process.cwd(), ".stackmemory", "rehydration");
  try {
    let checkpoint;
    if (checkpointId) {
      const content = await fs.readFile(join(checkpointDir, `${checkpointId}.json`), "utf8");
      checkpoint = JSON.parse(content);
    } else {
      const files = await fs.readdir(checkpointDir);
      const checkpointFiles = files.filter((f) => f.endsWith(".json"));
      if (checkpointFiles.length === 0) {
        console.log("\u{1F4ED} No checkpoints found");
        return false;
      }
      // checkpoint_<ms-timestamp>.json names sort newest-first this way.
      checkpointFiles.sort((a, b) => b.localeCompare(a));
      const content = await fs.readFile(join(checkpointDir, checkpointFiles[0]), "utf8");
      checkpoint = JSON.parse(content);
    }
    console.log(`\u{1F504} Rehydrating from checkpoint: ${checkpoint.id}`);
    console.log(`\u{1F4C5} Created: ${checkpoint.created_at}`);
    console.log(`\u{1F4C1} Working directory: ${checkpoint.working_directory}`);
    console.log(`\u{1F4CB} Recent files: ${checkpoint.recent_files.length}`);
    console.log("\n\u{1F4CA} Context Summary:");
    checkpoint.recent_files.slice(0, 5).forEach((file, i) => {
      console.log(` ${i + 1}. ${file.path} (${file.size} bytes, modified ${new Date(file.mtime).toLocaleString()})`);
    });
    // Older/minimal checkpoints may lack project_context entirely.
    if (checkpoint.project_context?.key_files?.length > 0) {
      console.log(`\n\u{1F511} Key project files: ${checkpoint.project_context.key_files.join(", ")}`);
    }
    return true;
  } catch (error) {
    console.error("\u274C Failed to rehydrate:", error);
    return false;
  }
}
/**
 * Collect the 20 most recently modified non-hidden files in the cwd.
 * Fix: the original sliced the first 20 raw directory entries BEFORE
 * stat'ing and sorting, so "recent" was really "first 20 alphabetically";
 * we now stat everything, sort by mtime, then take the top 20.
 * @returns {Promise<Array<{path: string, size: number, mtime: number}>>}
 *   Newest-first file records; [] when the directory cannot be read.
 */
async function getRecentFiles() {
  const fs = await import("fs/promises");
  try {
    const entries = await fs.readdir(".", { withFileTypes: true });
    const recentFiles = [];
    for (const entry of entries) {
      if (!entry.isFile() || entry.name.startsWith(".")) continue;
      try {
        const stats = await fs.stat(entry.name);
        recentFiles.push({
          path: entry.name,
          size: stats.size,
          mtime: stats.mtimeMs
        });
      } catch {
        // File vanished between readdir and stat — skip it.
      }
    }
    return recentFiles.sort((a, b) => b.mtime - a.mtime).slice(0, 20);
  } catch (error) {
    console.warn("Could not analyze recent files:", error);
    return [];
  }
}
/**
 * Inspect the cwd for well-known project files and infer the project type
 * and (for Node projects) the framework from package.json dependencies.
 * Never throws; unknowns stay "unknown".
 * @returns {Promise<{key_files: string[], project_type: string, framework: string}>}
 */
async function analyzeProjectContext() {
  const fs = await import("fs/promises");
  const context = {
    key_files: [],
    project_type: "unknown",
    framework: "unknown"
  };
  try {
    for (const candidate of ["package.json", "tsconfig.json", "README.md", "docker-compose.yml"]) {
      try {
        await fs.access(candidate);
        context.key_files.push(candidate);
      } catch {
        // Not present — skip.
      }
    }
    if (context.key_files.includes("package.json")) {
      context.project_type = "node";
      try {
        const packageJson = JSON.parse(await fs.readFile("package.json", "utf8"));
        const deps = packageJson.dependencies;
        if (deps?.react) {
          context.framework = "react";
        } else if (deps?.vue) {
          context.framework = "vue";
        } else if (deps?.next) {
          context.framework = "next";
        }
      } catch {
        // Unparseable package.json — leave framework unknown.
      }
    }
  } catch (error) {
    console.warn("Could not analyze project context:", error);
  }
  return context;
}
/**
 * Print a summary of every checkpoint JSON under ./.stackmemory/rehydration.
 * A missing directory (or any read failure inside the listing) is reported
 * as "no checkpoints directory"; unreadable individual checkpoints fall back
 * to a stat-only summary.
 */
async function listCheckpoints() {
  try {
    const fs = await import("fs/promises");
    const checkpointDir = "./.stackmemory/rehydration";
    try {
      const entries = await fs.readdir(checkpointDir);
      const jsonFiles = entries.filter((name) => name.endsWith(".json"));
      if (jsonFiles.length === 0) {
        console.log("\u{1F4ED} No rehydration checkpoints found");
        console.log("\u{1F4A1} Create one with: stackmemory context rehydrate --create");
        return;
      }
      console.log(`\u{1F4CB} Found ${jsonFiles.length} rehydration checkpoint(s):\n`);
      for (const name of jsonFiles) {
        const id = name.replace(".json", "");
        const filePath = `${checkpointDir}/${name}`;
        const stats = await fs.stat(filePath);
        try {
          const checkpoint = JSON.parse(await fs.readFile(filePath, "utf8"));
          console.log(`\u{1F516} ${id}`);
          console.log(` Created: ${stats.birthtime.toISOString()}`);
          console.log(` Size: ${(stats.size / 1024).toFixed(1)} KB`);
          console.log(` Files: ${checkpoint.verification?.files_captured || checkpoint.recent_files?.length || 0}`);
          if (checkpoint.stack_traces?.length > 0) {
            console.log(` Stack traces: ${checkpoint.stack_traces.length}`);
          }
          if (checkpoint.error_patterns?.length > 0) {
            console.log(` Error patterns: ${checkpoint.error_patterns.length}`);
          }
          console.log("");
        } catch {
          // Corrupt/unreadable JSON: show what the filesystem alone can tell us.
          console.log(`\u{1F516} ${id}`);
          console.log(` Created: ${stats.birthtime.toISOString()}`);
          console.log(` Size: ${(stats.size / 1024).toFixed(1)} KB\n`);
        }
      }
      console.log("\u{1F4A1} Use: stackmemory context rehydrate -c <checkpoint-id>");
      console.log("\u{1F4A1} Verify: stackmemory context rehydrate --verify -c <checkpoint-id>");
    } catch {
      console.log("\u{1F4ED} No rehydration checkpoints directory found");
      console.log("\u{1F4A1} Create first checkpoint with: stackmemory context rehydrate --create");
    }
  } catch (error) {
    console.error("\u274C Failed to list checkpoints:", error);
  }
}
/**
 * Verify one checkpoint by id, or every checkpoint JSON in the rehydration
 * directory when no id is given. Errors are reported, never thrown.
 * @param {string} [checkpointId] - Optional specific checkpoint id.
 */
async function verifyCheckpoints(checkpointId) {
  try {
    const checkpointDir = "./.stackmemory/rehydration";
    const fs = await import("fs/promises");
    if (checkpointId) {
      await verifyCheckpoint(`${checkpointDir}/${checkpointId}.json`);
      return;
    }
    const entries = await fs.readdir(checkpointDir);
    const jsonFiles = entries.filter((name) => name.endsWith(".json"));
    console.log(`\u{1F50D} Verifying ${jsonFiles.length} checkpoint(s)...\n`);
    for (const name of jsonFiles) {
      await verifyCheckpoint(`${checkpointDir}/${name}`);
      console.log("");
    }
  } catch (error) {
    console.error("\u274C Failed to verify checkpoints:", error);
  }
}
/**
 * Verify a single checkpoint file: required fields, capture counts, the
 * integrity hash (recomputed and compared), and trace/pattern summaries.
 * Read/parse failures are reported to the console, never thrown.
 * @param {string} checkpointPath - Path to the checkpoint JSON file.
 */
async function verifyCheckpoint(checkpointPath) {
  const fs = await import("fs/promises");
  try {
    const checkpoint = JSON.parse(await fs.readFile(checkpointPath, "utf8"));
    const fileName = checkpointPath.split("/").pop()?.replace(".json", "") || "unknown";
    console.log(`\u{1F50D} Verifying checkpoint: ${fileName}`);
    const missing = ["id", "timestamp", "working_directory", "recent_files"].filter((field) => !checkpoint[field]);
    if (missing.length > 0) {
      console.log(`\u274C Missing required fields: ${missing.join(", ")}`);
      return;
    }
    console.log(`\u{1F4C1} Files captured: ${checkpoint.recent_files?.length || 0}`);
    if (checkpoint.verification) {
      console.log(`\u{1F4CA} Total size: ${(checkpoint.verification.total_size / 1024).toFixed(1)} KB`);
      console.log(`\u{1F512} Integrity hash: ${checkpoint.verification.integrity_hash.slice(0, 12)}...`);
      const expected = await calculateCheckpointHash(checkpoint);
      if (expected === checkpoint.verification.integrity_hash) {
        console.log(`\u2705 Integrity check: PASSED`);
      } else {
        console.log(`\u274C Integrity check: FAILED (data may be corrupted)`);
      }
    }
    if (checkpoint.stack_traces) {
      console.log(`\u{1F41B} Stack traces: ${checkpoint.stack_traces.length}`);
      const pending = checkpoint.stack_traces.filter((t) => t.resolution_status === "pending");
      const resolved = checkpoint.stack_traces.filter((t) => t.resolution_status === "resolved");
      if (pending.length > 0) {
        console.log(` \u23F3 Pending resolution: ${pending.length}`);
      }
      if (resolved.length > 0) {
        console.log(` \u2705 Resolved: ${resolved.length}`);
      }
    }
    if (checkpoint.error_patterns?.length > 0) {
      console.log(`\u{1F50D} Error patterns detected: ${checkpoint.error_patterns.join(", ")}`);
    }
    console.log(`\u2705 Checkpoint verification complete`);
  } catch (error) {
    console.log(`\u274C Failed to verify checkpoint: ${error}`);
  }
}
/**
 * Best-effort sweep of likely log files and recent tooling output for stack
 * traces. Missing sources are silently skipped by the helpers; any unexpected
 * failure yields an empty array.
 * Fix: removed the unused `execSync` import (never referenced in this body).
 * @returns {Promise<Array<object>>} Collected trace records.
 */
async function captureStackTraces() {
  try {
    const traces = [];
    const fs = await import("fs/promises");
    // Candidate log files grouped by origin; nonexistent ones are skipped.
    const stackMemoryLogs = [
      ".stackmemory/error.log",
      ".stackmemory/compaction.log",
      ".stackmemory/trace.log",
      ".stackmemory/debug.log"
    ];
    const nodeLogs = [
      "npm-debug.log",
      "error.log",
      "debug.log",
      "yarn-error.log",
      "pnpm-debug.log"
    ];
    const buildLogs = [
      "build-errors.log",
      "webpack-errors.log",
      "vite-errors.log",
      "jest-errors.log",
      "test-results.log"
    ];
    const frameworkLogs = [
      ".next/trace",
      "logs/error.log",
      "tmp/cache/error.log"
    ];
    for (const logFile of [...stackMemoryLogs, ...nodeLogs, ...buildLogs, ...frameworkLogs]) {
      await extractTracesFromLogFile(logFile, traces, fs);
    }
    await extractFromTerminalHistory(traces);
    await extractFromClaudeSession(traces, fs);
    await extractFromBuildCommands(traces);
    await extractFromGitLogs(traces);
    await extractFromBrowserLogs(traces, fs);
    return traces;
  } catch {
    return [];
  }
}
/**
 * Aggregate stack traces into recurring error-type patterns.
 * Generalized: accepts an optional pre-captured trace list so callers that
 * already hold traces avoid a second full capture; with no argument the
 * original behavior (fresh capture) is preserved.
 * @param {Array<{error_message: string}>} [traces] - Traces to analyze.
 * @returns {Promise<string[]>} Error types that occur more than once.
 */
async function detectErrorPatterns(traces) {
  const allTraces = traces ?? await captureStackTraces();
  const counts = new Map();
  for (const trace of allTraces) {
    // "TypeError: x is undefined" -> "TypeError"
    const errorType = trace.error_message.split(":")[0].trim();
    counts.set(errorType, (counts.get(errorType) || 0) + 1);
  }
  return [...counts.entries()].filter(([, count]) => count > 1).map(([pattern]) => pattern);
}
/**
 * Parse one log file for error blocks ("SomethingError: ..." followed by
 * "    at ..." frames) and append one trace record per error to `traces`.
 * Fix: the original included the stack-frame pattern (/\s+at\s+/) in the
 * "new error" trigger list, so every frame line re-entered the push branch
 * and emitted a duplicate trace per frame, and the accumulator was never
 * reset after a push. Each error now produces exactly one trace.
 * Missing/unreadable files are silently skipped (best-effort scan).
 * @param {string} logFile - Path to the log file.
 * @param {Array<object>} traces - Output array; mutated in place.
 * @param {object} fs - fs/promises module (injected for testability).
 */
async function extractTracesFromLogFile(logFile, traces, fs) {
  try {
    const logContent = await fs.readFile(logFile, "utf8");
    const lines = logContent.split("\n");
    let currentError = null;
    let stackFrames = [];
    // Push the accumulated error (if any) and reset the accumulator.
    const flush = (context) => {
      if (currentError && stackFrames.length > 0) {
        traces.push({
          ...currentError,
          stack_frames: [...stackFrames],
          file_path: logFile,
          timestamp: Date.now(),
          context,
          resolution_status: "pending"
        });
      }
      currentError = null;
      stackFrames = [];
    };
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      // "Error:" matches TypeError:, ReferenceError:, SyntaxError:, etc. —
      // the same set the original actually promoted to error starts.
      if (line.includes("Error:")) {
        flush(`Extracted from ${logFile} around line ${i}`);
        currentError = { error_message: line.trim() };
        stackFrames = [line.trim()];
      } else if (currentError && line.includes("at ")) {
        stackFrames.push(line.trim());
      }
    }
    flush(`Extracted from ${logFile}`);
  } catch {
    // Log file missing or unreadable — nothing to extract.
  }
}
/**
 * Re-run common npm commands and harvest error output into trace records.
 * Each command is best-effort: failures and timeouts are swallowed so a
 * broken toolchain never breaks the capture sweep.
 * @param {Array<object>} traces - Output array; mutated in place.
 */
async function extractFromTerminalHistory(traces) {
  try {
    const { execSync } = await import("child_process");
    const probes = [
      "npm run build 2>&1 | tail -50",
      "npm test 2>&1 | tail -50",
      "npm start 2>&1 | tail -50"
    ];
    for (const cmd of probes) {
      try {
        const output = execSync(cmd, { encoding: "utf8", timeout: 5e3 });
        if (!output.includes("Error:") && !output.includes("failed")) continue;
        const errorLines = output.split("\n").filter(
          (line) => line.includes("Error:") || line.includes("at ") || line.includes("failed")
        );
        if (errorLines.length === 0) continue;
        traces.push({
          error_message: errorLines[0],
          stack_frames: errorLines,
          file_path: "terminal_output",
          timestamp: Date.now(),
          context: `Recent command: ${cmd}`,
          resolution_status: "pending"
        });
      } catch {
        // Command failed or timed out — skip this probe.
      }
    }
  } catch {
  }
}
/**
 * Scan known Claude log directories for recent .log files and extract traces.
 * Fix: Node's fs APIs do NOT expand "~", so the original home-relative paths
 * ("~/.claude/logs", ...) could never be read; the home directory is now
 * resolved explicitly via os.homedir(). Absent directories are skipped.
 * @param {Array<object>} traces - Output array; mutated in place.
 * @param {object} fs - fs/promises module (injected for testability).
 */
async function extractFromClaudeSession(traces, fs) {
  try {
    const { homedir } = await import("os");
    const home = homedir();
    const claudePaths = [
      `${home}/.claude/logs`,
      `${home}/.local/share/claude/logs`,
      "/tmp/claude-logs",
      ".claude-logs"
    ];
    for (const logPath of claudePaths) {
      try {
        const files = await fs.readdir(logPath);
        // Only the five most recent-looking log files per directory.
        const recentLogs = files.filter((f) => f.endsWith(".log")).slice(-5);
        for (const logFile of recentLogs) {
          await extractTracesFromLogFile(`${logPath}/${logFile}`, traces, fs);
        }
      } catch {
        // Directory absent on this machine — expected, skip.
      }
    }
  } catch {
  }
}
/**
 * Run lint/typecheck commands; when a command fails, mine its captured
 * stdout/stderr for error lines and record them as a trace. Commands that
 * succeed or produce no output contribute nothing.
 * @param {Array<object>} traces - Output array; mutated in place.
 */
async function extractFromBuildCommands(traces) {
  try {
    const { execSync } = await import("child_process");
    const buildCommands = [
      "npm run lint --silent",
      "npm run typecheck --silent",
      "npx tsc --noEmit --skipLibCheck"
    ];
    for (const cmd of buildCommands) {
      try {
        execSync(cmd, { encoding: "utf8", timeout: 1e4 });
      } catch (error) {
        // execSync attaches captured output to the thrown error.
        if (!error.stdout && !error.stderr) continue;
        const combined = error.stdout + error.stderr;
        const errorLines = combined.split("\n").filter(
          (line) => line.includes("Error:") || line.includes("at ") || line.includes("error TS")
        );
        if (errorLines.length === 0) continue;
        traces.push({
          error_message: errorLines[0] || `Build command failed: ${cmd}`,
          stack_frames: errorLines,
          file_path: "build_output",
          timestamp: Date.now(),
          context: `Build command: ${cmd}`,
          resolution_status: "pending"
        });
      }
    }
  } catch {
  }
}
/**
 * Check recent git history for fix/error/bug commits and, when found, record
 * them as one already-"resolved" trace (the commits suggest the issues were
 * fixed). Any git failure is swallowed.
 * @param {Array<object>} traces - Output array; mutated in place.
 */
async function extractFromGitLogs(traces) {
  try {
    const { execSync } = await import("child_process");
    const gitOutput = execSync(
      'git log --oneline -10 --grep="fix\\|error\\|bug" 2>/dev/null || echo "No git history"',
      { encoding: "utf8", timeout: 5e3 }
    );
    const mentionsFixes = gitOutput.includes("fix") || gitOutput.includes("error");
    if (!mentionsFixes) return;
    traces.push({
      error_message: "Recent git commits indicate error fixes",
      stack_frames: gitOutput.split("\n").filter((line) => line.trim()),
      file_path: "git_history",
      timestamp: Date.now(),
      context: "Git commit history analysis",
      resolution_status: "resolved"
      // Commits mentioning fixes imply these issues were already addressed.
    });
  } catch {
  }
}
/**
 * Scan well-known browser/dev-server log files for console-error style lines
 * and record each matching line as its own trace. Missing files are skipped.
 * @param {Array<object>} traces - Output array; mutated in place.
 * @param {object} fs - fs/promises module (injected for testability).
 */
async function extractFromBrowserLogs(traces, fs) {
  try {
    const browserLogPaths = [
      "console.log",
      "browser-errors.log",
      "dev-server.log",
      ".vscode/dev-console.log",
      "tmp/browser-console.log"
    ];
    const browserPatterns = [
      /console\.error/i,
      /Uncaught \w+Error/i,
      /Promise rejection/i,
      /React\s+Warning/i,
      /Failed to load/i
    ];
    for (const logPath of browserLogPaths) {
      let logContent;
      try {
        logContent = await fs.readFile(logPath, "utf8");
      } catch {
        continue;
      }
      for (const line of logContent.split("\n")) {
        if (!browserPatterns.some((pattern) => pattern.test(line))) continue;
        traces.push({
          error_message: line.trim(),
          stack_frames: [line.trim()],
          file_path: logPath,
          timestamp: Date.now(),
          context: "Browser console error",
          resolution_status: "pending"
        });
      }
    }
  } catch {
  }
}
/**
 * Print the 20 most recent rows from the stack_traces table in the project's
 * SQLite context database, with severity icons and abbreviated stack frames.
 * Handles a missing database, missing table, and empty table gracefully.
 */
async function showStackTraces() {
  try {
    const dbPath = join(process.cwd(), ".stackmemory", "context.db");
    if (!existsSync(dbPath)) {
      console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
      return;
    }
    const Database = (await import("better-sqlite3")).default;
    const db = new Database(dbPath);
    try {
      const tableExists = db.prepare(`
SELECT name FROM sqlite_master
WHERE type='table' AND name='stack_traces'
`).get();
      if (!tableExists) {
        console.log("\u{1F4ED} No stack traces found in database");
        console.log("\u{1F4A1} Stack traces are stored when using enhanced rehydration features");
        return;
      }
      const rows = db.prepare(`
SELECT * FROM stack_traces
ORDER BY created_at DESC
LIMIT 20
`).all();
      if (rows.length === 0) {
        console.log("\u{1F4ED} No stack traces found in database");
        return;
      }
      console.log(`\u{1F41B} Recent Stack Traces (${rows.length} found)\n`);
      // Any severity other than high/low renders as the "medium" orange dot.
      const severityIcons = { high: "\u{1F534}", low: "\u{1F7E1}" };
      for (const row of rows) {
        const severity = row.error_severity || "medium";
        const severityIcon = severityIcons[severity] ?? "\u{1F7E0}";
        console.log(`${severityIcon} ${row.error_type || "Error"} - ${severity.toUpperCase()}`);
        console.log(` Message: ${row.error_message}`);
        console.log(` File: ${row.file_path || "unknown"}${row.line_number ? `:${row.line_number}` : ""}`);
        console.log(` Function: ${row.function_name || "unknown"}`);
        console.log(` Status: ${row.resolution_status}`);
        console.log(` Created: ${new Date(row.created_at * 1e3).toLocaleString()}`);
        console.log(` Context: ${row.context || "No context"}`);
        const frames = JSON.parse(row.stack_frames || "[]");
        if (frames.length > 0) {
          console.log(` Stack (first 3 lines):`);
          for (const frame of frames.slice(0, 3)) {
            console.log(` ${frame.trim()}`);
          }
        }
        console.log("");
      }
      console.log("\u{1F4A1} Use --trace-stats for statistics and patterns");
    } finally {
      db.close();
    }
  } catch (error) {
    console.error("\u274C Failed to show stack traces:", error);
  }
}
/**
 * Print aggregate statistics over the stack_traces table: totals, breakdowns
 * by resolution status / error type / severity, last-24h activity, and the
 * files that produce the most errors. No-ops gracefully when the database or
 * table is absent.
 */
async function showStackTraceStats() {
  try {
    const dbPath = join(process.cwd(), ".stackmemory", "context.db");
    if (!existsSync(dbPath)) {
      console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
      return;
    }
    const Database = (await import("better-sqlite3")).default;
    const db = new Database(dbPath);
    try {
      const tableExists = db.prepare(`
SELECT name FROM sqlite_master
WHERE type='table' AND name='stack_traces'
`).get();
      if (!tableExists) {
        console.log("\u{1F4ED} No stack trace data available");
        return;
      }
      console.log("\u{1F4CA} Stack Trace Statistics\n");
      const totalTraces = db.prepare("SELECT COUNT(*) as count FROM stack_traces").get().count;
      console.log(`Total traces: ${totalTraces}`);
      // Percentage of the overall total, formatted to one decimal.
      const pct = (count) => (count / totalTraces * 100).toFixed(1);
      const statusRows = db.prepare(`
SELECT resolution_status, COUNT(*) as count
FROM stack_traces
GROUP BY resolution_status
ORDER BY count DESC
`).all();
      console.log("\n\u{1F4C8} By Resolution Status:");
      for (const row of statusRows) {
        console.log(` ${row.resolution_status}: ${row.count} (${pct(row.count)}%)`);
      }
      const typeRows = db.prepare(`
SELECT error_type, COUNT(*) as count
FROM stack_traces
GROUP BY error_type
ORDER BY count DESC
LIMIT 10
`).all();
      console.log("\n\u{1F50D} Top Error Types:");
      for (const row of typeRows) {
        console.log(` ${row.error_type}: ${row.count} (${pct(row.count)}%)`);
      }
      const severityRows = db.prepare(`
SELECT error_severity, COUNT(*) as count
FROM stack_traces
GROUP BY error_severity
ORDER BY
CASE error_severity
WHEN 'high' THEN 1
WHEN 'medium' THEN 2
WHEN 'low' THEN 3
END
`).all();
      console.log("\n\u26A0\uFE0F By Severity:");
      const severityIcons = { high: "\u{1F534}", low: "\u{1F7E1}" };
      for (const row of severityRows) {
        const icon = severityIcons[row.error_severity] ?? "\u{1F7E0}";
        console.log(` ${icon} ${row.error_severity}: ${row.count} (${pct(row.count)}%)`);
      }
      const recentTraces = db.prepare(`
SELECT COUNT(*) as count
FROM stack_traces
WHERE created_at > (unixepoch() - 86400)
`).get().count;
      console.log(`\n\u{1F4C5} Recent Activity (24 hours): ${recentTraces} traces`);
      const fileRows = db.prepare(`
SELECT file_path, COUNT(*) as count
FROM stack_traces
WHERE file_path IS NOT NULL
GROUP BY file_path
ORDER BY count DESC
LIMIT 5
`).all();
      if (fileRows.length > 0) {
        console.log("\n\u{1F5C2}\uFE0F Most Problematic Files:");
        for (const row of fileRows) {
          console.log(` ${row.file_path}: ${row.count} errors`);
        }
      }
    } finally {
      db.close();
    }
  } catch (error) {
    console.error("\u274C Failed to show stack trace statistics:", error);
  }
}
/**
 * Compute a sha256 fingerprint over the checkpoint's stable fields
 * (id, timestamp, counts, project context). Key order here is part of the
 * contract: it must match the order used when the hash was stored, since the
 * hash is taken over the JSON serialization.
 * @param {object} checkpoint - Checkpoint record.
 * @returns {Promise<string>} Hex-encoded sha256 digest.
 */
async function calculateCheckpointHash(checkpoint) {
  const { createHash } = await import("crypto");
  const fingerprint = {
    id: checkpoint.id,
    timestamp: checkpoint.timestamp,
    files_count: checkpoint.recent_files?.length || 0,
    project_context: checkpoint.project_context,
    stack_traces_count: checkpoint.stack_traces?.length || 0
  };
  return createHash("sha256").update(JSON.stringify(fingerprint)).digest("hex");
}
export {
createContextRehydrateCommand
};
//# sourceMappingURL=context-rehydrate.js.map