@stackmemoryai/stackmemory
Version:
Project-scoped memory for AI coding tools. Durable context across sessions with MCP integration, frames, smart retrieval, Claude Code skills, and automatic hooks.
995 lines (993 loc) • 31.1 kB
JavaScript
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import * as fs from "fs/promises";
import * as path from "path";
import { logger } from "../monitoring/logger.js";
/**
 * CompactionHandler estimates conversation token usage and, when usage
 * approaches the model's compaction threshold, preserves critical context
 * (tool calls, file operations, decisions, error resolutions) as a frame
 * anchor so it can be restored after the conversation is compacted.
 *
 * Depends on a frame manager exposing getCurrentFrameId(), getFrameEvents(),
 * addAnchor() and createFrame().
 */
class CompactionHandler {
  frameManager;
  metrics;
  // Running token total since the last reset (mirrors metrics.estimatedTokens).
  tokenAccumulator = 0;
  // Anchors written before an anticipated compaction, keyed by anchor_id.
  preservedAnchors = /* @__PURE__ */ new Map();
  // One-shot guard: without it, preserveCriticalContext() fired on EVERY
  // message past the warning threshold, adding one duplicate anchor per
  // message. Re-armed by resetTokenCount().
  preservationTriggered = false;
  /**
   * @param frameManager - project frame store used to read events and
   *   write preservation/restoration anchors.
   */
  constructor(frameManager) {
    this.frameManager = frameManager;
    this.metrics = {
      estimatedTokens: 0,
      warningThreshold: 15e4,
      // 150K tokens
      criticalThreshold: 17e4,
      // 170K tokens
      anchorsPreserved: 0
    };
  }
  /**
   * Track token usage from a message.
   * Uses the rough chars/4 heuristic for token estimation. The first time
   * the warning threshold is crossed, critical context is preserved
   * (fire-and-forget; the async method handles its own errors).
   */
  trackTokens(content) {
    const estimatedTokens = Math.ceil(content.length / 4);
    this.tokenAccumulator += estimatedTokens;
    this.metrics.estimatedTokens += estimatedTokens;
    if (!this.preservationTriggered && this.isApproachingCompaction()) {
      this.preservationTriggered = true;
      // Deliberately not awaited: callers of trackTokens are synchronous.
      void this.preserveCriticalContext();
    }
  }
  /**
   * Check if approaching compaction threshold (>= 150K estimated tokens).
   */
  isApproachingCompaction() {
    return this.metrics.estimatedTokens >= this.metrics.warningThreshold;
  }
  /**
   * Check if past critical threshold (>= 170K estimated tokens).
   */
  isPastCriticalThreshold() {
    return this.metrics.estimatedTokens >= this.metrics.criticalThreshold;
  }
  /**
   * Heuristically detect whether a compaction likely occurred, by scanning
   * the given content for phrases that typically appear in compacted or
   * summarized conversations. Case-insensitive substring match.
   */
  detectCompactionEvent(content) {
    const compactionIndicators = [
      "earlier in this conversation",
      "previously discussed",
      "as mentioned before",
      "summarized for brevity",
      "[conversation compressed]",
      "[context truncated]"
    ];
    const lowerContent = content.toLowerCase();
    return compactionIndicators.some(
      (indicator) => lowerContent.includes(indicator)
    );
  }
  /**
   * Preserve critical context before compaction.
   * Snapshots the current frame's tool calls, file operations, decisions and
   * error patterns into a single high-priority anchor. Errors are logged,
   * never thrown (best-effort preservation).
   */
  async preserveCriticalContext() {
    try {
      const currentFrameId = this.frameManager.getCurrentFrameId();
      if (!currentFrameId) {
        logger.warn("No active frame to preserve context from");
        return;
      }
      const events = this.frameManager.getFrameEvents(currentFrameId);
      const toolCalls = this.extractToolCalls(events);
      const fileOps = this.extractFileOperations(events);
      const decisions = this.extractDecisions(events);
      const errorPatterns = this.extractErrorPatterns(events);
      const anchor = {
        anchor_id: `compact_${Date.now()}`,
        type: "COMPACTION_PRESERVE",
        priority: 10,
        content: {
          tool_calls: toolCalls,
          file_operations: fileOps,
          decisions,
          error_resolutions: errorPatterns
        },
        created_at: Date.now(),
        token_estimate: this.metrics.estimatedTokens
      };
      this.frameManager.addAnchor(
        "CONSTRAINT",
        // Using CONSTRAINT type for now
        JSON.stringify(anchor),
        10,
        {
          compaction_preserve: true,
          token_count: this.metrics.estimatedTokens
        },
        currentFrameId
      );
      this.preservedAnchors.set(anchor.anchor_id, anchor);
      this.metrics.anchorsPreserved++;
      logger.info(
        `Preserved critical context at ${this.metrics.estimatedTokens} tokens`
      );
    } catch (error) {
      logger.error(
        "Failed to preserve critical context:",
        error instanceof Error ? error : void 0
      );
    }
  }
  /**
   * Extract tool calls from events, pairing each "tool_call" event with the
   * first subsequent "tool_result" event for the same tool name.
   * NOTE(review): this pairing can mis-match when the same tool is called
   * several times before any result arrives — verify event ordering upstream.
   */
  extractToolCalls(events) {
    const toolCalls = [];
    const toolEvents = events.filter((e) => e.event_type === "tool_call");
    for (const event of toolEvents) {
      const resultEvent = this.findResultEvent(events, event);
      toolCalls.push({
        tool: event.payload.tool_name || "unknown",
        timestamp: event.ts,
        key_inputs: this.extractKeyInputs(event.payload),
        key_outputs: resultEvent ? this.extractKeyOutputs(resultEvent.payload) : {},
        files_affected: this.extractAffectedFiles(
          event.payload,
          resultEvent?.payload
        ),
        success: resultEvent ? !resultEvent.payload.error : false,
        error: resultEvent?.payload.error
      });
    }
    return toolCalls;
  }
  /**
   * Find the first tool_result event that follows the given tool_call event
   * (by sequence number) and carries the same tool name.
   */
  findResultEvent(events, callEvent) {
    return events.find(
      (e) => e.event_type === "tool_result" && e.seq > callEvent.seq && e.payload.tool_name === callEvent.payload.tool_name
    );
  }
  /**
   * Extract key inputs from a tool-call payload: only a fixed allowlist of
   * argument names is preserved to keep the anchor small.
   */
  extractKeyInputs(payload) {
    const keys = [
      "file_path",
      "command",
      "query",
      "path",
      "pattern",
      "content"
    ];
    const result = {};
    for (const key of keys) {
      if (payload.arguments?.[key]) {
        result[key] = payload.arguments[key];
      }
    }
    return result;
  }
  /**
   * Extract key outputs from a tool-result payload.
   */
  extractKeyOutputs(payload) {
    return {
      success: !payload.error,
      error: payload.error,
      result_type: payload.result_type,
      files_created: payload.files_created,
      files_modified: payload.files_modified
    };
  }
  /**
   * Collect the set of file paths touched by a tool call/result pair
   * (call arguments plus any files the result reports created/modified).
   */
  extractAffectedFiles(callPayload, resultPayload) {
    const files = /* @__PURE__ */ new Set();
    if (callPayload?.arguments?.file_path) {
      files.add(callPayload.arguments.file_path);
    }
    if (callPayload?.arguments?.path) {
      files.add(callPayload.arguments.path);
    }
    if (resultPayload?.files_created) {
      resultPayload.files_created.forEach((f) => files.add(f));
    }
    if (resultPayload?.files_modified) {
      resultPayload.files_modified.forEach((f) => files.add(f));
    }
    return Array.from(files);
  }
  /**
   * Extract file operations from events for the known file-manipulation
   * tools. Success/error now come from the matching tool_result event
   * (previously hard-coded to success despite a comment promising otherwise);
   * a call with no result yet is assumed successful.
   */
  extractFileOperations(events) {
    const fileOps = [];
    const fileTools = ["Read", "Write", "Edit", "MultiEdit", "Delete"];
    const toolEvents = events.filter(
      (e) => e.event_type === "tool_call" && fileTools.includes(e.payload.tool_name)
    );
    for (const event of toolEvents) {
      const operation = this.mapToolToOperation(event.payload.tool_name);
      const targetPath = event.payload.arguments?.file_path || event.payload.arguments?.path || "unknown";
      const resultEvent = this.findResultEvent(events, event);
      fileOps.push({
        type: operation,
        path: targetPath,
        timestamp: event.ts,
        success: resultEvent ? !resultEvent.payload.error : true,
        error: resultEvent?.payload.error
      });
    }
    return fileOps;
  }
  /**
   * Map a tool name to a file operation type; unknown tools default to "read".
   */
  mapToolToOperation(toolName) {
    const mapping = {
      Read: "read",
      Write: "write",
      Edit: "edit",
      MultiEdit: "edit",
      Delete: "delete"
    };
    return mapping[toolName] || "read";
  }
  /**
   * Extract decision texts from "decision" events.
   */
  extractDecisions(events) {
    const decisions = [];
    const decisionEvents = events.filter((e) => e.event_type === "decision");
    for (const event of decisionEvents) {
      if (event.payload.text) {
        decisions.push(event.payload.text);
      }
    }
    return decisions;
  }
  /**
   * Extract error patterns and their attempted resolutions: for each failed
   * tool result, record the next (up to 3) tool calls as the resolution
   * attempt.
   */
  extractErrorPatterns(events) {
    const patterns = [];
    const errorEvents = events.filter(
      (e) => e.event_type === "tool_result" && e.payload.error
    );
    for (const errorEvent of errorEvents) {
      const subsequentTools = events.filter((e) => e.event_type === "tool_call" && e.seq > errorEvent.seq).slice(0, 3);
      if (subsequentTools.length > 0) {
        patterns.push({
          error: errorEvent.payload.error,
          resolution: `Attempted resolution with ${subsequentTools.map((t) => t.payload.tool_name).join(", ")}`,
          tool_sequence: subsequentTools.map((t) => t.payload.tool_name),
          timestamp: errorEvent.ts
        });
      }
    }
    return patterns;
  }
  /**
   * Restore context after compaction is detected: creates a "review" frame
   * and re-injects the most recently preserved anchor's tool sequence,
   * touched files and decisions as anchors. No-op when nothing was preserved.
   */
  async restoreContext() {
    if (this.preservedAnchors.size === 0) {
      logger.warn("No preserved anchors to restore from");
      return;
    }
    const anchors = Array.from(this.preservedAnchors.values());
    anchors.sort((a, b) => b.created_at - a.created_at);
    const latestAnchor = anchors[0];
    const restorationFrame = this.frameManager.createFrame({
      type: "review",
      name: "Context Restoration After Compaction",
      inputs: { reason: "autocompaction_detected" }
    });
    this.frameManager.addAnchor(
      "FACT",
      `Context restored from token position ${latestAnchor.token_estimate}`,
      10,
      { restoration: true },
      restorationFrame
    );
    const toolSequence = latestAnchor.content.tool_calls.map((t) => t.tool).join(" \u2192 ");
    this.frameManager.addAnchor(
      "FACT",
      `Tool sequence: ${toolSequence}`,
      9,
      {},
      restorationFrame
    );
    const files = /* @__PURE__ */ new Set();
    latestAnchor.content.file_operations.forEach((op) => files.add(op.path));
    if (files.size > 0) {
      this.frameManager.addAnchor(
        "FACT",
        `Files touched: ${Array.from(files).join(", ")}`,
        8,
        {},
        restorationFrame
      );
    }
    for (const decision of latestAnchor.content.decisions) {
      this.frameManager.addAnchor(
        "DECISION",
        decision,
        7,
        {},
        restorationFrame
      );
    }
    logger.info("Context restored after compaction detection");
  }
  /**
   * Get a shallow copy of the current metrics.
   */
  getMetrics() {
    return { ...this.metrics };
  }
  /**
   * Reset token counters (e.g. at session start) and re-arm the one-shot
   * preservation trigger.
   */
  resetTokenCount() {
    this.metrics.estimatedTokens = 0;
    this.tokenAccumulator = 0;
    this.metrics.lastCompactionAt = void 0;
    this.preservationTriggered = false;
  }
}
/**
 * EnhancedRehydrationManager persists rich session context (file snapshots,
 * conversation reasoning/decisions, stack traces, inferred project structure)
 * so it can be re-injected as frame anchors after an auto-compaction event.
 *
 * Depends on a frame manager exposing `db` (a better-sqlite3-style handle
 * with exec/prepare -- TODO confirm driver), `projectId`, getCurrentFrameId(),
 * getSessionId(), getActiveFramePath(), getFrame() and addAnchor().
 */
class EnhancedRehydrationManager {
frameManager;
compactionHandler;
snapshotThreshold = 10;
// Take snapshot every N significant events
// NOTE(review): snapshotThreshold/eventCount are never read or incremented
// in the visible code -- confirm whether they are dead state.
eventCount = 0;
// In-memory checkpoint cache keyed by checkpoint id; mirrored to
// .stackmemory/rehydration/<id>.json by persistRehydrationContext().
rehydrationStorage = /* @__PURE__ */ new Map();
constructor(frameManager, compactionHandler) {
this.frameManager = frameManager;
this.compactionHandler = compactionHandler;
this.setupCompactDetection();
this.initializeStackTraceStorage();
}
/**
 * Initialize dedicated stack trace storage in database.
 * Idempotent (CREATE TABLE/INDEX IF NOT EXISTS); failures are logged and
 * swallowed so construction never throws.
 */
initializeStackTraceStorage() {
try {
const db = this.frameManager.db;
db.exec(`
CREATE TABLE IF NOT EXISTS stack_traces (
trace_id TEXT PRIMARY KEY,
frame_id TEXT,
project_id TEXT NOT NULL,
error_message TEXT NOT NULL,
stack_frames TEXT NOT NULL,
file_path TEXT,
line_number INTEGER,
function_name TEXT,
context TEXT,
resolution_attempted TEXT,
resolution_status TEXT NOT NULL DEFAULT 'pending',
error_type TEXT,
error_severity TEXT DEFAULT 'medium',
created_at INTEGER DEFAULT (unixepoch()),
updated_at INTEGER DEFAULT (unixepoch()),
FOREIGN KEY(frame_id) REFERENCES frames(frame_id)
);
CREATE INDEX IF NOT EXISTS idx_stack_traces_frame ON stack_traces(frame_id);
CREATE INDEX IF NOT EXISTS idx_stack_traces_status ON stack_traces(resolution_status);
CREATE INDEX IF NOT EXISTS idx_stack_traces_type ON stack_traces(error_type);
CREATE INDEX IF NOT EXISTS idx_stack_traces_severity ON stack_traces(error_severity);
CREATE INDEX IF NOT EXISTS idx_stack_traces_created ON stack_traces(created_at);
`);
logger.info("Stack trace storage initialized");
} catch (error) {
logger.error("Failed to initialize stack trace storage:", error);
}
}
/**
 * Set up automatic compact detection and recovery (polls every 30s).
 * NOTE(review): the interval handle is never stored, cleared, or unref()'d,
 * so it keeps the process alive and cannot be torn down — verify lifecycle.
 */
setupCompactDetection() {
setInterval(() => this.checkForCompactionEvent(), 3e4);
}
/**
 * Enhanced file content snapshot with context.
 * Reads the file as UTF-8 and records size, mtime, a weak content hash
 * (see simpleHash) and the caller-supplied context tags.
 * Returns null (after a warn log) if the file cannot be read.
 */
async captureFileSnapshot(filePath, contextTags = []) {
try {
const stats = await fs.stat(filePath);
const content = await fs.readFile(filePath, "utf8");
const hash = this.simpleHash(content);
return {
path: filePath,
content,
size: stats.size,
lastModified: stats.mtimeMs,
hash,
contextTags
};
} catch (error) {
logger.warn(`Failed to capture snapshot for ${filePath}:`, error);
return null;
}
}
/**
 * Capture conversation reasoning and decisions including stack traces.
 * Pure assembly: bundles the arguments with a timestamp, no side effects.
 */
captureConversationContext(reasoning, decisions, nextSteps = [], userPrefs = {}, painPoints = [], stackTraces = [], errorPatterns = []) {
return {
timestamp: Date.now(),
reasoning,
decisions_made: decisions,
next_steps: nextSteps,
user_preferences: userPrefs,
pain_points: painPoints,
stack_traces: stackTraces,
error_patterns: errorPatterns
};
}
/**
 * Capture stack trace from error with context and store in database.
 * Accepts an Error or a plain message string; when an Error is given, the
 * top "at fn (file:line:col)" frame is parsed for location info.
 * NOTE(review): the regex only matches V8-style frames WITH parentheses;
 * anonymous frames like "at file:line:col" are skipped — confirm acceptable.
 */
captureStackTrace(error, context, filePath, resolutionAttempts = [], frameId) {
const errorMessage = typeof error === "string" ? error : error.message;
const stackFrames = typeof error === "string" ? [] : error.stack?.split("\n") || [];
let extractedFilePath = filePath;
let lineNumber;
let functionName;
if (stackFrames.length > 0) {
const firstFrame = stackFrames.find((frame) => frame.includes("at "));
if (firstFrame) {
const match = firstFrame.match(/at (.+?) \((.+):(\d+):(\d+)\)/);
if (match) {
functionName = match[1];
// Explicit filePath argument wins over the parsed one.
extractedFilePath = extractedFilePath || match[2];
lineNumber = parseInt(match[3]);
}
}
}
const stackTrace = {
error_message: errorMessage,
stack_frames: stackFrames,
file_path: extractedFilePath,
line_number: lineNumber,
function_name: functionName,
timestamp: Date.now(),
context,
resolution_attempted: resolutionAttempts,
resolution_status: "pending"
};
this.storeStackTrace(stackTrace, frameId);
return stackTrace;
}
/**
 * Store stack trace in database.
 * Returns the generated trace id, or "" if the insert failed (errors are
 * logged, not rethrown).
 * NOTE(review): file_path/line_number/function_name may be undefined here;
 * better-sqlite3 rejects `undefined` bind values (use null) — verify driver.
 */
storeStackTrace(stackTrace, frameId) {
try {
const db = this.frameManager.db;
const traceId = this.generateTraceId();
const currentFrameId = frameId || this.frameManager.getCurrentFrameId();
const errorType = this.extractErrorType(stackTrace.error_message);
const severity = this.determineErrorSeverity(stackTrace);
const stmt = db.prepare(`
INSERT INTO stack_traces (
trace_id, frame_id, project_id, error_message, stack_frames,
file_path, line_number, function_name, context, resolution_attempted,
resolution_status, error_type, error_severity
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
stmt.run(
traceId,
currentFrameId,
this.frameManager.projectId,
stackTrace.error_message,
JSON.stringify(stackTrace.stack_frames),
stackTrace.file_path,
stackTrace.line_number,
stackTrace.function_name,
stackTrace.context,
JSON.stringify(stackTrace.resolution_attempted),
stackTrace.resolution_status,
errorType,
severity
);
logger.info(`Stored stack trace ${traceId} for frame ${currentFrameId}`);
return traceId;
} catch (error) {
logger.error("Failed to store stack trace:", error);
return "";
}
}
/**
 * Retrieve stack traces from database, newest first.
 * With a frameId, filters to that frame; otherwise filters to this project.
 * Returns [] on any error.
 */
getStackTraces(frameId, limit = 50) {
try {
const db = this.frameManager.db;
const traces = [];
let query;
let params;
if (frameId) {
query = `
SELECT * FROM stack_traces
WHERE frame_id = ?
ORDER BY created_at DESC
LIMIT ?
`;
params = [frameId, limit];
} else {
query = `
SELECT * FROM stack_traces
WHERE project_id = ?
ORDER BY created_at DESC
LIMIT ?
`;
params = [this.frameManager.projectId, limit];
}
const rows = db.prepare(query).all(...params);
for (const row of rows) {
traces.push({
error_message: row.error_message,
stack_frames: JSON.parse(row.stack_frames || "[]"),
file_path: row.file_path,
line_number: row.line_number,
function_name: row.function_name,
timestamp: row.created_at * 1e3,
// Convert from unix to JS timestamp
context: row.context,
resolution_attempted: JSON.parse(row.resolution_attempted || "[]"),
resolution_status: row.resolution_status
});
}
return traces;
} catch (error) {
logger.error("Failed to retrieve stack traces:", error);
return [];
}
}
/**
 * Update stack trace resolution status.
 * Returns true if a row was updated, false on no match or error.
 * NOTE(review): when resolutionAttempts is omitted this binds `void 0`
 * (undefined) — better-sqlite3 throws on undefined binds, which would make
 * every status-only update fail via the catch; should likely bind null.
 */
updateStackTraceStatus(traceId, status, resolutionAttempts) {
try {
const db = this.frameManager.db;
const stmt = db.prepare(`
UPDATE stack_traces
SET resolution_status = ?, resolution_attempted = ?, updated_at = unixepoch()
WHERE trace_id = ?
`);
const result = stmt.run(
status,
resolutionAttempts ? JSON.stringify(resolutionAttempts) : void 0,
traceId
);
return result.changes > 0;
} catch (error) {
logger.error("Failed to update stack trace status:", error);
return false;
}
}
/**
 * Helper methods for stack trace processing.
 */
// Non-cryptographic id: timestamp + random base36 suffix.
// NOTE(review): String.prototype.substr is deprecated (slice preferred),
// and Math.random() ids can collide — fine for trace ids, not for security.
generateTraceId() {
return `trace_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
}
// Pulls the leading "<Something>Error:" token, e.g. "TypeError"; "Unknown"
// when the message does not start with that shape.
extractErrorType(errorMessage) {
const typeMatch = errorMessage.match(/^(\w+Error?):/);
return typeMatch ? typeMatch[1] : "Unknown";
}
// Keyword-based severity bucketing of the error message (high/low/medium).
determineErrorSeverity(stackTrace) {
const message = stackTrace.error_message.toLowerCase();
if (message.includes("critical") || message.includes("fatal") || message.includes("cannot read properties")) {
return "high";
} else if (message.includes("warning") || message.includes("deprecated")) {
return "low";
} else {
return "medium";
}
}
/**
 * Auto-detect project structure and relationships.
 * NOTE(review): getDirectoryFiles() below is a stub returning [], so every
 * mapping field stays empty until that helper is implemented — confirm.
 */
async analyzeProjectMapping(workingDir) {
const mapping = {
file_relationships: {},
workflow_sequences: [],
key_directories: [],
entry_points: [],
configuration_files: []
};
try {
const configPatterns = [
"package.json",
"tsconfig.json",
".env",
"docker-compose.yml",
"*.config.js",
"*.config.ts",
"Dockerfile",
"README.md"
];
const files = await this.getDirectoryFiles(workingDir);
for (const file of files) {
const ext = path.extname(file);
const basename = path.basename(file);
// Glob-ish match: "*" patterns become substring checks on the basename.
if (configPatterns.some(
(pattern) => pattern.includes("*") ? basename.includes(pattern.replace("*", "")) : basename === pattern
)) {
mapping.configuration_files.push(file);
}
if (basename === "index.js" || basename === "index.ts" || basename === "main.js") {
mapping.entry_points.push(file);
}
// Files sharing a name prefix (foo.ts / foo.test.ts) are "related".
const filePrefix = basename.split(".")[0];
const relatedFiles = files.filter(
(f) => f !== file && path.basename(f).startsWith(filePrefix)
);
if (relatedFiles.length > 0) {
mapping.file_relationships[file] = relatedFiles;
}
}
// Dedupe parent directories, then keep only conventionally "key" ones.
const dirs = files.map((f) => path.dirname(f)).filter((v, i, a) => a.indexOf(v) === i);
mapping.key_directories = dirs.filter(
(dir) => ["src", "lib", "components", "pages", "api", "utils", "types"].some(
(key) => dir.includes(key)
)
);
} catch (error) {
logger.warn("Failed to analyze project mapping:", error);
}
return mapping;
}
/**
 * Create comprehensive rehydration context before compaction.
 * Snapshots up to 20 recently-modified files, analyzes project mapping and
 * conversation context, caches the checkpoint in memory and persists it to
 * disk. Returns the checkpoint id; rethrows on failure (after logging).
 */
async createRehydrationCheckpoint() {
const sessionId = this.frameManager.getSessionId() || "unknown";
const checkpointId = `${sessionId}_${Date.now()}`;
try {
const workingDir = process.cwd();
const fileSnapshots = [];
const recentFiles = await this.getRecentlyModifiedFiles(workingDir);
for (const file of recentFiles.slice(0, 20)) {
const snapshot = await this.captureFileSnapshot(
file,
this.inferContextTags(file)
);
if (snapshot) {
fileSnapshots.push(snapshot);
}
}
const projectMapping = await this.analyzeProjectMapping(workingDir);
const conversationContext = this.extractConversationContext();
const rehydrationContext = {
session_id: sessionId,
compact_detected_at: Date.now(),
pre_compact_state: {
file_snapshots: fileSnapshots,
conversation_context: conversationContext,
project_mapping: projectMapping,
active_workflows: this.detectActiveWorkflows(fileSnapshots),
current_focus: this.inferCurrentFocus(
fileSnapshots,
conversationContext
)
},
recovery_anchors: this.createRecoveryAnchors(
fileSnapshots,
conversationContext
)
};
this.rehydrationStorage.set(checkpointId, rehydrationContext);
await this.persistRehydrationContext(checkpointId, rehydrationContext);
logger.info(
`Created rehydration checkpoint ${checkpointId} with ${fileSnapshots.length} file snapshots`
);
return checkpointId;
} catch (error) {
logger.error("Failed to create rehydration checkpoint:", error);
throw error;
}
}
/**
 * Inject rich context after compaction detection.
 * Resolution order: explicit checkpoint id (memory cache, then disk),
 * otherwise the most recent context. Returns true on successful injection,
 * false when no context is available or injection failed.
 */
async rehydrateContext(checkpointId) {
try {
let context;
if (checkpointId) {
context = this.rehydrationStorage.get(checkpointId);
if (!context) {
context = await this.loadPersistedContext(checkpointId);
}
} else {
context = await this.findMostRecentContext();
}
if (!context) {
logger.warn("No rehydration context available");
return false;
}
await this.injectRichContext(context);
return true;
} catch (error) {
logger.error("Failed to rehydrate context:", error);
return false;
}
}
/**
 * Inject rich context into current session as anchors on the active frame:
 * up to 5 file snapshots, prior decisions/next steps, up to 3 stack traces,
 * recurring error patterns, entry points and the previous focus.
 * No-op (with a warning) when there is no active frame.
 */
async injectRichContext(context) {
const frameId = this.frameManager.getCurrentFrameId();
if (!frameId) {
logger.warn("No active frame for context injection");
return;
}
for (const snapshot of context.pre_compact_state.file_snapshots.slice(
0,
5
)) {
this.frameManager.addAnchor(
"FACT",
`File: ${snapshot.path} (${snapshot.contextTags.join(", ")})
Last modified: ${new Date(snapshot.lastModified).toISOString()}
Size: ${snapshot.size} bytes
Content preview: ${this.getContentPreview(snapshot.content)}`,
9,
{
rehydration: true,
file_path: snapshot.path,
context_tags: snapshot.contextTags
},
frameId
);
}
const conv = context.pre_compact_state.conversation_context;
if (conv.decisions_made.length > 0) {
this.frameManager.addAnchor(
"DECISION",
`Previous decisions: ${conv.decisions_made.join("; ")}`,
8,
{ rehydration: true },
frameId
);
}
if (conv.next_steps.length > 0) {
this.frameManager.addAnchor(
"FACT",
`Next steps identified: ${conv.next_steps.join("; ")}`,
7,
{ rehydration: true },
frameId
);
}
if (conv.stack_traces.length > 0) {
for (const trace of conv.stack_traces.slice(0, 3)) {
this.frameManager.addAnchor(
"ERROR",
`Error context: ${trace.error_message}
Context: ${trace.context}
File: ${trace.file_path || "unknown"}${trace.line_number ? `:${trace.line_number}` : ""}
Function: ${trace.function_name || "unknown"}
Status: ${trace.resolution_status}
Stack preview: ${trace.stack_frames.slice(0, 3).join("\n")}`,
9,
{
rehydration: true,
error_type: trace.error_message.split(":")[0],
resolution_status: trace.resolution_status,
file_path: trace.file_path
},
frameId
);
}
}
if (conv.error_patterns.length > 0) {
this.frameManager.addAnchor(
"PATTERN",
`Recurring error patterns detected: ${conv.error_patterns.join(", ")}`,
7,
{ rehydration: true },
frameId
);
}
const mapping = context.pre_compact_state.project_mapping;
if (mapping.entry_points.length > 0) {
this.frameManager.addAnchor(
"FACT",
`Project entry points: ${mapping.entry_points.join(", ")}`,
6,
{ rehydration: true },
frameId
);
}
if (context.pre_compact_state.current_focus) {
this.frameManager.addAnchor(
"CONSTRAINT",
`Previous focus: ${context.pre_compact_state.current_focus}`,
8,
{ rehydration: true },
frameId
);
}
logger.info("Rich context injected successfully");
}
// Helper methods
// NOTE(review): placeholder — always returns []; directory scanning is
// not implemented, which also neuters analyzeProjectMapping above.
async getDirectoryFiles(dir) {
return [];
}
// NOTE(review): placeholder — always returns []; checkpoints therefore
// contain no file snapshots until this is implemented.
async getRecentlyModifiedFiles(dir) {
return [];
}
// Infer coarse context tags from substrings of the (lowercased) file path.
// The hubspot/pipedream tags look project-specific — TODO confirm intent.
inferContextTags(filePath) {
const tags = [];
const content = filePath.toLowerCase();
if (content.includes("pipeline") || content.includes("migrate"))
tags.push("migration");
if (content.includes("hubspot")) tags.push("hubspot");
if (content.includes("pipedream")) tags.push("pipedream");
if (content.includes("test")) tags.push("test");
if (content.includes("config")) tags.push("configuration");
return tags;
}
// Build a conversation-context record from recent stack traces; the
// reasoning/decision fields are left empty (no transcript access here).
extractConversationContext() {
const recentErrors = this.extractRecentStackTraces();
const errorPatterns = this.detectErrorPatterns(recentErrors);
return {
timestamp: Date.now(),
reasoning: [],
decisions_made: [],
next_steps: [],
user_preferences: {},
pain_points: [],
stack_traces: recentErrors,
error_patterns: errorPatterns
};
}
/**
 * Extract recent stack traces from database and frame events.
 * Merges both sources, dedupes by (error_message, file_path), and returns
 * the 5 newest. Returns [] on error.
 */
extractRecentStackTraces() {
try {
const dbTraces = this.getStackTraces(void 0, 10);
const eventTraces = this.extractStackTracesFromFrameEvents();
const allTraces = [...dbTraces, ...eventTraces];
const uniqueTraces = allTraces.filter(
(trace, index, array) => array.findIndex(
(t) => t.error_message === trace.error_message && t.file_path === trace.file_path
) === index
);
return uniqueTraces.sort((a, b) => b.timestamp - a.timestamp).slice(0, 5);
} catch (error) {
logger.warn("Failed to extract stack traces:", error);
return [];
}
}
/**
 * Extract stack traces from frame events (fallback method).
 * Scans error/exception events in the 3 most recent frames of the active
 * frame path. NOTE(review): reads event.type here, while CompactionHandler
 * filters on event.event_type — verify which field frame events carry.
 */
extractStackTracesFromFrameEvents() {
const traces = [];
try {
const frames = this.frameManager.getActiveFramePath();
for (const frame of frames.slice(-3)) {
const frameData = this.frameManager.getFrame(frame.frame_id);
if (frameData?.events) {
for (const event of frameData.events) {
if (event.type === "error" || event.type === "exception") {
const trace = this.parseStackTraceFromEvent(event);
if (trace) {
traces.push(trace);
}
}
}
}
}
} catch (error) {
logger.warn("Failed to extract frame event traces:", error);
}
return traces;
}
/**
 * Parse stack trace from frame event.
 * Tolerates both string-encoded and object payloads and several field
 * aliases; returns null when the payload cannot be parsed.
 */
parseStackTraceFromEvent(event) {
try {
const data = typeof event.data === "string" ? JSON.parse(event.data) : event.data;
return {
error_message: data.error || data.message || "Unknown error",
stack_frames: data.stack ? data.stack.split("\n") : [],
file_path: data.file || data.fileName,
line_number: data.line || data.lineNumber,
function_name: data.function || data.functionName,
timestamp: event.timestamp || Date.now(),
context: data.context || "Error occurred during frame processing",
resolution_attempted: data.resolutionAttempts || [],
resolution_status: data.resolved ? "resolved" : "pending"
};
} catch (error) {
return null;
}
}
/**
 * Detect recurring error patterns: error types (text before the first ":")
 * that appear more than once across the given traces.
 */
detectErrorPatterns(traces) {
const patterns = /* @__PURE__ */ new Map();
for (const trace of traces) {
const errorType = trace.error_message.split(":")[0].trim();
patterns.set(errorType, (patterns.get(errorType) || 0) + 1);
}
return Array.from(patterns.entries()).filter(([, count]) => count > 1).map(([pattern]) => pattern);
}
// Infer active workflow names (deduped) from snapshot tags and paths.
detectActiveWorkflows(snapshots) {
const workflows = [];
for (const snapshot of snapshots) {
if (snapshot.contextTags.includes("migration")) {
workflows.push("data_migration");
}
if (snapshot.path.includes("test")) {
workflows.push("testing");
}
}
return [...new Set(workflows)];
}
// First-match heuristic for the session's focus; defaults to "Development".
// The `context` parameter is currently unused — TODO confirm.
inferCurrentFocus(snapshots, context) {
if (snapshots.some((s) => s.contextTags.includes("migration"))) {
return "Data migration and transformation";
}
if (snapshots.some((s) => s.path.includes("test"))) {
return "Testing and validation";
}
return "Development";
}
// Human-readable anchor strings for the first 3 snapshots.
// The `context` parameter is currently unused — TODO confirm.
createRecoveryAnchors(snapshots, context) {
const anchors = [];
for (const snapshot of snapshots.slice(0, 3)) {
anchors.push(
`File context: ${snapshot.path} with ${snapshot.contextTags.join(", ")}`
);
}
return anchors;
}
// Write the checkpoint as pretty-printed JSON under
// <cwd>/.stackmemory/rehydration/<id>.json, creating directories as needed.
async persistRehydrationContext(id, context) {
const contextDir = path.join(process.cwd(), ".stackmemory", "rehydration");
await fs.mkdir(contextDir, { recursive: true });
await fs.writeFile(
path.join(contextDir, `${id}.json`),
JSON.stringify(context, null, 2)
);
}
// Load a persisted checkpoint by id; undefined if missing or unparseable.
async loadPersistedContext(id) {
try {
const contextPath = path.join(
process.cwd(),
".stackmemory",
"rehydration",
`${id}.json`
);
const content = await fs.readFile(contextPath, "utf8");
return JSON.parse(content);
} catch {
return void 0;
}
}
// NOTE(review): placeholder — always undefined, so rehydrateContext()
// without an explicit checkpoint id can never succeed.
async findMostRecentContext() {
return void 0;
}
// Periodic poll driven by setupCompactDetection().
// NOTE(review): detectCompactionEvent("") is called with an EMPTY string,
// so it can never match an indicator and this check never fires; the
// rehydrateContext() promise is also left floating — verify intent.
checkForCompactionEvent() {
if (this.compactionHandler.detectCompactionEvent("")) {
this.rehydrateContext();
}
}
// 32-bit rolling hash (hash*31 + char, via shift) rendered as hex.
// Weak/non-cryptographic: suitable only for change detection.
simpleHash(content) {
let hash = 0;
for (let i = 0; i < content.length; i++) {
const char = content.charCodeAt(i);
hash = (hash << 5) - hash + char;
hash = hash & hash;
}
return hash.toString(16);
}
// Truncate content to maxLength characters with a "..." suffix.
getContentPreview(content, maxLength = 200) {
return content.length > maxLength ? content.substring(0, maxLength) + "..." : content;
}
}
// Public API: token/compaction tracking (CompactionHandler) and
// post-compaction context rehydration (EnhancedRehydrationManager).
export {
CompactionHandler,
EnhancedRehydrationManager
};
//# sourceMappingURL=enhanced-rehydration.js.map