/*
 * @stackmemoryai/stackmemory
 * Version: (unspecified)
 * Lossless, project-scoped memory for AI coding tools. Durable context across
 * sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch
 * monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode
 * wrappers, Linear sync, and more. (Description truncated at source.)
 * 594 lines (593 loc) • 17.2 kB — JavaScript
 */
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import { logger } from "../monitoring/logger.js";
import { ValidationError, ErrorCode } from "../errors/index.js";
import * as fs from "fs";
import * as path from "path";
class RecursiveContextManager {
dualStackManager;
contextRetriever;
// Context cache for sharing between agents
sharedContextCache = /* @__PURE__ */ new Map();
// Agent-specific configurations
agentConfigs;
constructor(dualStackManager, contextRetriever) {
this.dualStackManager = dualStackManager;
this.contextRetriever = contextRetriever;
this.agentConfigs = this.initializeAgentConfigs();
}
/**
* Initialize agent-specific context configurations
*/
initializeAgentConfigs() {
const configs = /* @__PURE__ */ new Map();
configs.set("planning", {
agent: "planning",
maxTokens: 2e4,
priorityWeights: {
recent: 0.3,
relevant: 0.4,
dependency: 0.2,
error: 0.05,
test: 0.05
},
includeTypes: ["frame", "documentation", "config"],
excludeTypes: []
});
configs.set("code", {
agent: "code",
maxTokens: 3e4,
priorityWeights: {
recent: 0.2,
relevant: 0.5,
dependency: 0.2,
error: 0.05,
test: 0.05
},
includeTypes: ["code", "frame", "test"],
excludeTypes: ["documentation"]
});
configs.set("testing", {
agent: "testing",
maxTokens: 25e3,
priorityWeights: {
recent: 0.1,
relevant: 0.3,
dependency: 0.1,
error: 0.1,
test: 0.4
},
includeTypes: ["code", "test", "frame"],
excludeTypes: ["documentation", "config"]
});
configs.set("linting", {
agent: "linting",
maxTokens: 15e3,
priorityWeights: {
recent: 0.2,
relevant: 0.4,
dependency: 0.1,
error: 0.2,
test: 0.1
},
includeTypes: ["code", "config"],
excludeTypes: ["documentation", "test"]
});
configs.set("review", {
agent: "review",
maxTokens: 25e3,
priorityWeights: {
recent: 0.3,
relevant: 0.3,
dependency: 0.1,
error: 0.2,
test: 0.1
},
includeTypes: ["code", "test", "frame", "documentation"],
excludeTypes: []
});
configs.set("context", {
agent: "context",
maxTokens: 1e4,
priorityWeights: {
recent: 0.1,
relevant: 0.6,
dependency: 0.2,
error: 0.05,
test: 0.05
},
includeTypes: ["frame", "documentation"],
excludeTypes: []
});
configs.set("improve", {
agent: "improve",
maxTokens: 3e4,
priorityWeights: {
recent: 0.3,
relevant: 0.4,
dependency: 0.1,
error: 0.15,
test: 0.05
},
includeTypes: ["code", "test", "frame"],
excludeTypes: ["documentation"]
});
configs.set("publish", {
agent: "publish",
maxTokens: 15e3,
priorityWeights: {
recent: 0.4,
relevant: 0.2,
dependency: 0.1,
error: 0.2,
test: 0.1
},
includeTypes: ["config", "frame"],
excludeTypes: ["code", "test"]
});
return configs;
}
/**
* Prepare context for a specific agent type
*/
async prepareAgentContext(agentType, baseContext, _maxTokens) {
const config = this.agentConfigs.get(agentType);
if (!config) {
throw new ValidationError(
`Unknown agent type: ${agentType}`,
ErrorCode.VALIDATION_FAILED,
{ agentType }
);
}
logger.debug(`Preparing context for ${agentType} agent`, { maxTokens });
const chunks = await this.collectRelevantChunks(
baseContext,
config,
maxTokens
);
const sortedChunks = this.prioritizeChunks(chunks, config.priorityWeights);
const selectedChunks = this.fitChunksToTokenBudget(sortedChunks, maxTokens);
const agentContext = {
...baseContext,
chunks: selectedChunks.map((c) => ({
type: c.type,
content: c.content,
metadata: c.metadata
}))
};
this.sharedContextCache.set(`${agentType}-${Date.now()}`, selectedChunks);
logger.debug(`Prepared context for ${agentType}`, {
chunksSelected: selectedChunks.length,
totalSize: selectedChunks.reduce((sum, c) => sum + c.metadata.size, 0)
});
return agentContext;
}
/**
* Chunk large codebase for processing
*/
async chunkCodebase(rootPath, strategy) {
const chunks = [];
logger.info("Chunking codebase", { rootPath, strategy: strategy.type });
switch (strategy.type) {
case "file":
chunks.push(...await this.chunkByFile(rootPath, strategy));
break;
case "semantic":
chunks.push(...await this.chunkBySemantic(rootPath, strategy));
break;
case "size":
chunks.push(...await this.chunkBySize(rootPath, strategy));
break;
default:
throw new ValidationError(
`Unknown chunking strategy: ${strategy.type}`,
ErrorCode.VALIDATION_FAILED,
{ strategyType: strategy.type }
);
}
logger.info("Codebase chunked", {
totalChunks: chunks.length,
totalSize: chunks.reduce((sum, c) => sum + c.metadata.size, 0)
});
return chunks;
}
/**
* Chunk by file boundaries
*/
async chunkByFile(rootPath, strategy) {
const chunks = [];
const files = await this.walkDirectory(rootPath);
for (const file of files) {
const content = await fs.promises.readFile(file, "utf-8");
if (content.length > strategy.maxChunkSize) {
const fileChunks = this.splitLargeFile(file, content, strategy);
chunks.push(...fileChunks);
} else {
chunks.push({
id: `file-${path.basename(file)}`,
type: "code",
content,
metadata: {
filePath: file,
language: this.detectLanguage(file),
size: content.length,
score: 0.5
},
boundaries: {
start: 0,
end: content.length
}
});
}
}
return chunks;
}
/**
* Chunk by semantic boundaries (classes, functions)
*/
async chunkBySemantic(rootPath, strategy) {
const chunks = [];
const files = await this.walkDirectory(rootPath);
for (const file of files) {
const content = await fs.promises.readFile(file, "utf-8");
const language = this.detectLanguage(file);
const semanticUnits = this.extractSemanticUnits(content, language);
for (const unit of semanticUnits) {
if (unit.content.length <= strategy.maxChunkSize) {
chunks.push({
id: `semantic-${file}-${unit.name}`,
type: "code",
content: unit.content,
metadata: {
filePath: file,
language,
size: unit.content.length,
score: unit.importance
},
boundaries: {
start: unit.start,
end: unit.end
}
});
}
}
}
return chunks;
}
/**
* Chunk by fixed size with overlap
*/
async chunkBySize(rootPath, strategy) {
const chunks = [];
const files = await this.walkDirectory(rootPath);
for (const file of files) {
const content = await fs.promises.readFile(file, "utf-8");
const lines = content.split("\n");
let currentChunk = "";
let startLine = 0;
for (let i = 0; i < lines.length; i++) {
currentChunk += lines[i] + "\n";
if (currentChunk.length >= strategy.maxChunkSize) {
chunks.push({
id: `size-${file}-${startLine}`,
type: "code",
content: currentChunk,
metadata: {
filePath: file,
language: this.detectLanguage(file),
size: currentChunk.length,
score: 0.5
},
boundaries: {
start: startLine,
end: i,
overlap: strategy.overlapSize
}
});
const overlapLines = Math.floor(strategy.overlapSize / 50);
startLine = Math.max(0, i - overlapLines);
currentChunk = lines.slice(startLine, i + 1).join("\n");
}
}
if (currentChunk.trim()) {
chunks.push({
id: `size-${file}-${startLine}`,
type: "code",
content: currentChunk,
metadata: {
filePath: file,
language: this.detectLanguage(file),
size: currentChunk.length,
score: 0.5
},
boundaries: {
start: startLine,
end: lines.length - 1
}
});
}
}
return chunks;
}
/**
* Collect relevant chunks for agent context
*/
async collectRelevantChunks(baseContext, config, _maxTokens) {
const chunks = [];
if (config.includeTypes.includes("frame")) {
const recentFrames = await this.getRecentFrameChunks(10);
chunks.push(...recentFrames);
}
if (config.includeTypes.includes("code") && baseContext.files) {
const codeChunks = await this.getCodeChunks(baseContext.files);
chunks.push(...codeChunks);
}
if (config.includeTypes.includes("test") && baseContext.testFiles) {
const testChunks = await this.getTestChunks(baseContext.testFiles);
chunks.push(...testChunks);
}
if (baseContext.query) {
const searchResults = await this.contextRetriever.retrieve({
query: baseContext.query,
limit: 20
});
for (const result of searchResults) {
chunks.push({
id: `search-${result.frameId}`,
type: "frame",
content: result.content,
metadata: {
frameId: result.frameId,
size: result.content.length,
score: result.score,
timestamp: new Date(result.timestamp)
},
boundaries: {}
});
}
}
const cachedChunks = this.getRelevantCachedChunks(config.agent);
chunks.push(...cachedChunks);
return chunks;
}
/**
* Prioritize chunks based on agent weights
*/
prioritizeChunks(chunks, weights) {
return chunks.map((chunk) => {
let priority = 0;
if (chunk.metadata.timestamp) {
const age = Date.now() - chunk.metadata.timestamp.getTime();
const recentScore = Math.max(0, 1 - age / (24 * 60 * 60 * 1e3));
priority += recentScore * weights.recent;
}
priority += (chunk.metadata.score || 0.5) * weights.relevant;
if (chunk.type === "test") {
priority += weights.test;
}
if (chunk.metadata.filePath?.includes("error")) {
priority += weights.error;
}
return { ...chunk, priority };
}).sort(
(a, b) => b.priority - a.priority
);
}
/**
* Fit chunks within token budget
*/
fitChunksToTokenBudget(chunks, _maxTokens) {
const selected = [];
let totalTokens = 0;
const estimateTokens = (text) => Math.ceil(text.length / 4);
for (const chunk of chunks) {
const chunkTokens = estimateTokens(chunk.content);
if (totalTokens + chunkTokens <= maxTokens) {
selected.push(chunk);
totalTokens += chunkTokens;
} else if (selected.length === 0) {
const truncatedContent = chunk.content.slice(0, maxTokens * 4);
selected.push({
...chunk,
content: truncatedContent,
metadata: {
...chunk.metadata,
size: truncatedContent.length
}
});
break;
} else {
break;
}
}
return selected;
}
/**
* Helper methods
*/
async walkDirectory(dir) {
const files = [];
const entries = await fs.promises.readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
if (!["node_modules", ".git", "dist", "build"].includes(entry.name)) {
files.push(...await this.walkDirectory(fullPath));
}
} else if (entry.isFile()) {
if (/\.(ts|tsx|js|jsx|py|java|go|rs|cpp|c|h)$/.test(entry.name)) {
files.push(fullPath);
}
}
}
return files;
}
detectLanguage(filePath) {
const ext = path.extname(filePath);
const langMap = {
".ts": "typescript",
".tsx": "typescript",
".js": "javascript",
".jsx": "javascript",
".py": "python",
".java": "java",
".go": "go",
".rs": "rust",
".cpp": "cpp",
".c": "c",
".h": "c"
};
return langMap[ext] || "unknown";
}
splitLargeFile(filePath, content, strategy) {
const chunks = [];
const lines = content.split("\n");
const linesPerChunk = Math.ceil(strategy.maxChunkSize / 50);
for (let i = 0; i < lines.length; i += linesPerChunk) {
const chunkLines = lines.slice(i, i + linesPerChunk);
const chunkContent = chunkLines.join("\n");
chunks.push({
id: `file-${path.basename(filePath)}-part-${i}`,
type: "code",
content: chunkContent,
metadata: {
filePath,
language: this.detectLanguage(filePath),
size: chunkContent.length,
score: 0.5
},
boundaries: {
start: i,
end: Math.min(i + linesPerChunk, lines.length),
overlap: strategy.overlapSize
}
});
}
return chunks;
}
extractSemanticUnits(content, language) {
const units = [];
if (language === "typescript" || language === "javascript") {
const classRegex = /class\s+(\w+)[^{]*\{[^}]+\}/g;
let match;
while ((match = classRegex.exec(content)) !== null) {
units.push({
name: match[1],
content: match[0],
start: match.index,
end: match.index + match[0].length,
importance: 0.8
});
}
const funcRegex = /(?:function|const|let)\s+(\w+)\s*=?\s*(?:\([^)]*\)|\w+)\s*(?:=>|{)[^}]+}/g;
while ((match = funcRegex.exec(content)) !== null) {
units.push({
name: match[1],
content: match[0],
start: match.index,
end: match.index + match[0].length,
importance: 0.6
});
}
}
return units;
}
async getRecentFrameChunks(limit) {
const activeStack = this.dualStackManager.getActiveStack();
const frames = await activeStack.getAllFrames();
return frames.slice(-limit).map((frame) => ({
id: `frame-${frame.frameId}`,
type: "frame",
content: JSON.stringify(frame, null, 2),
metadata: {
frameId: frame.frameId,
size: JSON.stringify(frame).length,
score: 0.7,
timestamp: new Date(frame.timestamp)
},
boundaries: {}
}));
}
async getCodeChunks(files) {
const chunks = [];
for (const file of files) {
if (fs.existsSync(file)) {
const content = await fs.promises.readFile(file, "utf-8");
chunks.push({
id: `code-${path.basename(file)}`,
type: "code",
content,
metadata: {
filePath: file,
language: this.detectLanguage(file),
size: content.length,
score: 0.8
},
boundaries: {}
});
}
}
return chunks;
}
async getTestChunks(testFiles) {
const chunks = [];
for (const file of testFiles) {
if (fs.existsSync(file)) {
const content = await fs.promises.readFile(file, "utf-8");
chunks.push({
id: `test-${path.basename(file)}`,
type: "test",
content,
metadata: {
filePath: file,
language: this.detectLanguage(file),
size: content.length,
score: 0.7
},
boundaries: {}
});
}
}
return chunks;
}
getRelevantCachedChunks(agentType) {
const relevantChunks = [];
for (const [key, chunks] of this.sharedContextCache.entries()) {
const timestamp = parseInt(key.split("-").pop() || "0");
if (Date.now() - timestamp > 5 * 60 * 1e3) {
continue;
}
if (agentType === "review" || agentType === "improve") {
relevantChunks.push(...chunks.filter((c) => c.type === "code"));
}
}
return relevantChunks;
}
/**
* Clear context cache
*/
clearCache() {
this.sharedContextCache.clear();
logger.debug("Context cache cleared");
}
/**
* Get cache statistics
*/
getCacheStats() {
const stats = {
cacheSize: this.sharedContextCache.size,
totalChunks: 0,
totalBytes: 0
};
for (const chunks of this.sharedContextCache.values()) {
stats.totalChunks += chunks.length;
stats.totalBytes += chunks.reduce((sum, c) => sum + c.metadata.size, 0);
}
return stats;
}
}
export {
RecursiveContextManager
};