
create-ai-chat-context-experimental

Phase 2: TypeScript rewrite - AI Chat Context & Memory System with conversation extraction and AICF format support (powered by aicf-core v2.1.0).

CacheConsolidationAgent.js (255 lines, 10.8 kB)
"use strict"; /** * This file is part of create-ai-chat-context-experimental. * Licensed under the GNU Affero General Public License v3.0 or later (AGPL-or-later). * See LICENSE file for details. */ Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheConsolidationAgent = void 0; /** * Cache Consolidation Agent * Phase 6: Cache-First Architecture * * Reads all chunk files from .cache/llm/[platform]/chunk-[number].json * Consolidates, deduplicates, and writes to .aicf/ and .ai/ files * * Pipeline: * .cache/llm/augment/chunk-N.json * .cache/llm/claude/chunk-N.json * | * Consolidation Agent * | * .aicf/[conversationId].aicf * .ai/[conversationId].md */ const fs_1 = require("fs"); const path_1 = require("path"); const ConversationOrchestrator_js_1 = require("../orchestrators/ConversationOrchestrator.js"); const MemoryFileWriter_js_1 = require("../writers/MemoryFileWriter.js"); const AgentRouter_js_1 = require("./AgentRouter.js"); const MessageBuilder_js_1 = require("../utils/MessageBuilder.js"); const result_js_1 = require("../types/result.js"); /** * Consolidates cache chunks into unified memory files */ class CacheConsolidationAgent { orchestrator; memoryWriter; router; cacheDir; outputDir; processedHashes = new Set(); constructor(cwd = process.cwd()) { this.orchestrator = new ConversationOrchestrator_js_1.ConversationOrchestrator(); this.memoryWriter = new MemoryFileWriter_js_1.MemoryFileWriter(); this.router = new AgentRouter_js_1.AgentRouter(); this.cacheDir = (0, path_1.join)(cwd, '.cache', 'llm'); this.outputDir = (0, path_1.join)(cwd, '.aicf'); } /** * Consolidate all cache chunks */ async consolidate() { try { const stats = { totalChunksProcessed: 0, chunksConsolidated: 0, chunksDuplicated: 0, filesWritten: 0, timestamp: new Date().toISOString(), }; // Find all chunk files across all platforms const chunkFiles = this.findAllChunks(); stats.totalChunksProcessed = chunkFiles.length; if (chunkFiles.length === 0) { return (0, result_js_1.Ok)(stats); } // Process each chunk for (const chunkPath of chunkFiles) { const processResult = await this.processChunk(chunkPath); if (processResult.ok) { stats.chunksConsolidated++; stats.filesWritten += processResult.value; } else { stats.chunksDuplicated++; } } return (0, result_js_1.Ok)(stats); } catch (error) { return (0, result_js_1.Err)(error instanceof Error ? 
error : new Error(`Consolidation failed: ${String(error)}`)); } } /** * Find all chunk files across all platforms */ findAllChunks() { const chunks = []; if (!(0, fs_1.existsSync)(this.cacheDir)) { return chunks; } // Look in each platform directory const platforms = (0, fs_1.readdirSync)(this.cacheDir); for (const platform of platforms) { const platformPath = (0, path_1.join)(this.cacheDir, platform); const conversationsPath = (0, path_1.join)(platformPath, '.conversations'); if (!(0, fs_1.existsSync)(conversationsPath)) { continue; } const files = (0, fs_1.readdirSync)(conversationsPath); for (const file of files) { if (file.startsWith('chunk-') && file.endsWith('.json')) { chunks.push((0, path_1.join)(conversationsPath, file)); } } } return chunks; } /** * Process a single chunk file */ async processChunk(chunkPath) { try { const content = (0, fs_1.readFileSync)(chunkPath, 'utf-8'); const chunk = JSON.parse(content); // Check for duplicates const contentHash = chunk.contentHash; if (this.processedHashes.has(contentHash)) { return (0, result_js_1.Err)(new Error('Duplicate chunk')); } this.processedHashes.add(contentHash); // Extract conversation data const conversationId = chunk.conversationId || chunk.chunkId; const source = chunk.source || 'unknown'; // Extract ORIGINAL conversation timestamp from rawData (CRITICAL for historical conversations) let conversationTimestamp = chunk.timestamp || new Date().toISOString(); if (chunk.rawData) { try { const rawData = typeof chunk.rawData === 'string' ? JSON.parse(chunk.rawData) : chunk.rawData; if (rawData.timestamp) { conversationTimestamp = rawData.timestamp; } } catch { // If parsing fails, use chunk timestamp } } // Create conversation object for orchestrator const conversation = { id: conversationId, messages: this.extractMessages(chunk), timestamp: conversationTimestamp, source, }; // Analyze with orchestrator // Pass chunk.rawData (the messages array) instead of the entire chunk JSON const analysisResult = this.orchestrator.analyze(conversation, chunk.rawData); if (!analysisResult.ok) { return (0, result_js_1.Err)(new Error('Analysis failed')); } // Generate AICF format // Note: generateAICF doesn't take timestamp parameter, we'll replace it in the content below let aicf = this.memoryWriter.generateAICF(analysisResult.value, conversationId); // CRITICAL FIX: Replace the timestamp in AICF content with the original conversation timestamp // aicf-core's generateAICF() uses new Date().toISOString() which gives today's date // We need to preserve the original conversation date for historical conversations // This ensures session consolidation groups conversations by their actual date, not today aicf = aicf.replace(/^timestamp\|.*$/m, `timestamp|${conversationTimestamp}`); // Route content const contentTypes = this.router.classifyContent(chunk); for (const contentType of contentTypes) { const routed = this.router.routeContent(contentType, chunk, chunk.chunkId); if (routed) { // Content is routed, could be used for specialized file writing } } // Write AICF file to .aicf/recent/ with conversation timestamp // CRITICAL: Pass timestamp so historical conversations get correct date in filename // NOW USES aicf-core for enterprise-grade writes (thread-safe, validated, PII redaction) const writeResult = await this.memoryWriter.writeAICF(conversationId, aicf, this.outputDir.replace('/.aicf', ''), conversation.timestamp); if (!writeResult.ok) { return (0, result_js_1.Err)(new Error(`Failed to write AICF file for ${conversationId}: 
${writeResult.error.message}`)); } // Delete chunk file after successful processing try { (0, fs_1.unlinkSync)(chunkPath); } catch (deleteError) { // Log but don't fail if chunk deletion fails console.warn(`Warning: Failed to delete chunk file ${chunkPath}:`, deleteError); } return (0, result_js_1.Ok)(1); // 1 file written (AICF only) } catch (error) { return (0, result_js_1.Err)(error instanceof Error ? error : new Error(`Failed to process chunk: ${String(error)}`)); } } /** * Extract messages from chunk */ extractMessages(chunk) { const messages = []; const conversationId = (chunk['conversationId'] || chunk['chunkId'] || 'unknown'); // Handle Augment chunks if (chunk['source'] === 'augment-leveldb' && chunk['rawData']) { try { const rawData = JSON.parse(chunk['rawData']); if (Array.isArray(rawData)) { for (let i = 0; i < rawData.length; i++) { const item = rawData[i]; if (item.role && item.content) { const msg = MessageBuilder_js_1.MessageBuilder.create({ conversationId, role: item.role, content: item.content, timestamp: item.timestamp || new Date().toISOString(), prefix: 'augment', index: i, }); messages.push(msg); } } } } catch { // If rawData is not JSON, treat it as a single message const msg = MessageBuilder_js_1.MessageBuilder.create({ conversationId, role: 'assistant', content: chunk['rawData'], timestamp: new Date().toISOString(), prefix: 'augment', }); messages.push(msg); } } // Handle Claude chunks if ((chunk['source'] === 'claude-cli' || chunk['source'] === 'claude-desktop') && chunk['content']) { const msg = MessageBuilder_js_1.MessageBuilder.create({ conversationId, role: chunk['role'], content: chunk['content'], timestamp: chunk['timestamp'] || new Date().toISOString(), prefix: chunk['source'], }); messages.push(msg); } return messages; } /** * Get consolidation stats */ getStats() { return { totalChunksProcessed: 0, chunksConsolidated: 0, chunksDuplicated: 0, filesWritten: 0, timestamp: new Date().toISOString(), }; } } exports.CacheConsolidationAgent = CacheConsolidationAgent; //# sourceMappingURL=CacheConsolidationAgent.js.map
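
Usage sketch (not part of the published file): a minimal example of driving a consolidation pass from a Node.js script. The require path is hypothetical and depends on where this compiled file sits inside the published package; the shape of the returned Result and stats objects is inferred from the code above rather than from documented API.

"use strict";
// Hypothetical require path; adjust to the file's actual location inside the package.
const { CacheConsolidationAgent } = require('./CacheConsolidationAgent.js');

async function main() {
    // The agent expects chunk files under <cwd>/.cache/llm/<platform>/.conversations/chunk-*.json
    // and writes consolidated AICF output under <cwd>/.aicf/.
    const agent = new CacheConsolidationAgent(process.cwd());
    const result = await agent.consolidate();
    if (result.ok) {
        // Stats object mirrors the one built in consolidate() above.
        const stats = result.value;
        console.log(`Processed ${stats.totalChunksProcessed} chunks`);
        console.log(`Consolidated: ${stats.chunksConsolidated}, duplicate or failed: ${stats.chunksDuplicated}`);
        console.log(`Files written: ${stats.filesWritten}`);
    }
    else {
        console.error('Consolidation failed:', result.error.message);
    }
}

main();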