mnemos-coder
CLI-based coding agent with graph-based execution loop and terminal UI
JavaScript
/**
* Real-time file monitoring and synchronization
* Watches for file changes and updates the codebase index incrementally
*/
import chokidar from 'chokidar';
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as path from 'path';
import { EventEmitter } from 'events';
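/*
 * Collaborator interfaces, inferred from how they are called in this file
 * (shapes are approximate, not an authoritative API reference):
 *   db:       storeFile(file), storeChunk(chunk, embedding),
 *             getChunksByFile(filePath), removeFileChunks(filePath)
 *   parser:   detectLanguage(filePath), parseCode(content, filePath),
 *             shouldParseFile(filePath)
 *   embedder: embed(text) -> { embedding }
 */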
export class CodebaseWatcher extends EventEmitter {
watcher; // active chokidar watcher (undefined when not watching)
db; // codebase index database
parser; // code parser (chunking + language detection)
embedder; // embedding generator for code chunks
projectRoot;
options;
isProcessing = false; // guards against concurrent queue processing
changeQueue = []; // pending file change events
debounceTimer; // batches rapid changes before processing
stats = {
filesProcessed: 0,
filesSkipped: 0,
errors: 0,
totalTime: 0,
averageTime: 0
};
constructor(projectRoot, db, parser, embedder, options = {}) {
super();
this.projectRoot = projectRoot;
this.db = db;
this.parser = parser;
this.embedder = embedder;
this.options = {
ignored: [
'**/node_modules/**',
'**/.git/**',
'**/dist/**',
'**/build/**',
'**/.next/**',
'**/.cache/**',
'**/.mnemos/**',
'**/*.log',
'**/*.tmp',
'**/*.temp'
],
persistent: true,
ignoreInitial: true, // Don't process existing files on startup
debounceMs: 300, // Faster response time
batchSize: 5, // Smaller batches for faster processing
...options
};
}
/**
* Start watching for file changes
*/
async startWatching() {
if (this.watcher) {
await this.stopWatching();
}
console.log(`Starting file watcher for: ${this.projectRoot}`);
this.watcher = chokidar.watch(this.projectRoot, {
ignored: this.options.ignored,
persistent: this.options.persistent,
ignoreInitial: this.options.ignoreInitial,
followSymlinks: false,
alwaysStat: true,
atomic: true
});
this.watcher
.on('add', (filePath, stats) => this.handleFileChange('add', filePath, stats))
.on('change', (filePath, stats) => this.handleFileChange('change', filePath, stats))
.on('unlink', (filePath) => this.handleFileChange('unlink', filePath))
.on('error', (error) => this.emit('error', error))
.on('ready', () => {
console.log('File watcher is ready');
this.emit('ready');
});
}
/**
* Stop watching for file changes
*/
async stopWatching() {
if (this.watcher) {
await this.watcher.close();
this.watcher = undefined;
}
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
// Process remaining changes
if (this.changeQueue.length > 0) {
await this.processChangeQueue();
}
console.log('File watcher stopped');
}
/**
* Perform initial scan of the project
*/
async performInitialScan() {
console.log('Starting initial codebase scan...');
const startTime = Date.now();
try {
const files = await this.getAllCodeFiles();
console.log(`Found ${files.length} code files to process`);
// Process files in batches
const batchSize = this.options.batchSize;
for (let i = 0; i < files.length; i += batchSize) {
const batch = files.slice(i, i + batchSize);
await Promise.all(batch.map(filePath => this.processFile(filePath, 'add')));
// Emit progress
this.emit('progress', {
processed: Math.min(i + batchSize, files.length),
total: files.length
});
}
const totalTime = Date.now() - startTime;
console.log(`Initial scan completed in ${totalTime}ms`);
console.log(`Processed: ${this.stats.filesProcessed}, Skipped: ${this.stats.filesSkipped}, Errors: ${this.stats.errors}`);
this.emit('scanComplete', this.stats);
}
catch (error) {
console.error('Initial scan failed:', error);
this.emit('error', error);
}
}
/**
* Handle file change events
*/
handleFileChange(type, filePath, stats) {
// Filter out non-code files
if (!this.shouldProcessFile(filePath)) {
return;
}
const changeEvent = {
type,
filePath: path.relative(this.projectRoot, filePath),
timestamp: new Date()
};
this.changeQueue.push(changeEvent);
this.debounceProcessing();
}
/**
* Debounce processing to batch rapid changes
*/
debounceProcessing() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
this.debounceTimer = setTimeout(() => {
this.processChangeQueue();
}, this.options.debounceMs);
}
/**
* Process queued file changes
*/
async processChangeQueue() {
if (this.isProcessing || this.changeQueue.length === 0) {
return;
}
this.isProcessing = true;
const changes = [...this.changeQueue];
this.changeQueue = [];
try {
// Group changes by file to handle multiple changes to same file
const fileChanges = new Map();
for (const change of changes) {
fileChanges.set(change.filePath, change);
}
// Process changes
const promises = Array.from(fileChanges.values()).map(change => this.processFileChange(change));
await Promise.all(promises);
this.emit('changesProcessed', fileChanges.size);
}
catch (error) {
console.error('Error processing change queue:', error);
this.emit('error', error);
}
finally {
this.isProcessing = false;
}
}
/**
* Process a single file change
*/
async processFileChange(change) {
try {
const fullPath = path.join(this.projectRoot, change.filePath);
switch (change.type) {
case 'add':
case 'change':
await this.processFile(fullPath, change.type);
break;
case 'unlink':
await this.removeFile(change.filePath);
break;
}
this.emit('fileProcessed', change);
}
catch (error) {
this.stats.errors++;
console.error(`Error processing ${change.type} for ${change.filePath}:`, error);
this.emit('processingError', { change, error });
}
}
/**
* Process a single file (add or update)
*/
async processFile(filePath, changeType) {
const startTime = Date.now();
try {
if (!fs.existsSync(filePath)) {
return;
}
const stats = fs.statSync(filePath);
const content = fs.readFileSync(filePath, 'utf-8');
const relativePath = path.relative(this.projectRoot, filePath);
const contentHash = this.createContentHash(content);
// Simple change detection for VectraDatabase: skip reindexing
// when the stored content hash matches the current file's hash
if (changeType === 'change') {
try {
const existingChunks = this.db.getChunksByFile(relativePath);
if (existingChunks.length > 0 && existingChunks[0].content_hash === contentHash) {
this.stats.filesSkipped++;
console.log(`[Watcher] Skipped ${relativePath} (no content changes)`);
return;
}
}
catch (error) {
// If there's an error getting existing chunks, continue with processing
console.warn(`[Watcher] Error checking existing chunks for ${relativePath}:`, error);
}
}
// Parse code into chunks
const language = this.parser.detectLanguage(filePath);
const chunks = this.parser.parseCode(content, filePath);
// Remove old chunks if this is an update
if (changeType === 'change') {
try {
await this.db.removeFileChunks(relativePath);
console.log(`[Watcher] Removed old chunks for ${relativePath}`);
}
catch (error) {
console.warn(`[Watcher] Error removing old chunks for ${relativePath}:`, error);
}
}
// Store file info
try {
this.db.storeFile({
file_path: relativePath,
content_hash: contentHash,
last_modified: stats.mtime,
file_size: stats.size,
language,
chunk_count: chunks.length
});
}
catch (error) {
console.warn(`[Watcher] Error storing file info for ${relativePath}:`, error);
}
// Process chunks with proper error handling
let embeddingsGenerated = 0;
for (const chunk of chunks) {
try {
const chunkId = `${relativePath}:${chunk.startLine}-${chunk.endLine}`;
const chunkHash = this.createContentHash(chunk.content);
// Generate embedding
let embedding;
try {
const result = await this.embedder.embed(chunk.content);
embedding = result.embedding;
embeddingsGenerated++;
}
catch (error) {
console.warn(`[Watcher] Failed to generate embedding for chunk ${chunkId}:`, error);
}
// Store chunk with embedding as separate parameter
await this.db.storeChunk({
id: chunkId,
file_path: relativePath,
start_line: chunk.startLine,
end_line: chunk.endLine,
content: chunk.content,
chunk_type: chunk.chunkType,
language,
content_hash: chunkHash,
metadata: chunk.metadata
}, embedding);
}
catch (error) {
console.warn(`[Watcher] Error processing chunk for ${relativePath}:`, error);
}
}
this.stats.filesProcessed++;
const processingTime = Date.now() - startTime;
this.stats.totalTime += processingTime;
this.stats.averageTime = this.stats.totalTime / this.stats.filesProcessed;
console.log(`[Watcher] Successfully processed ${relativePath} (${chunks.length} chunks, ${embeddingsGenerated} embeddings, ${processingTime}ms)`);
}
catch (error) {
this.stats.errors++;
throw error;
}
}
/**
* Remove file from index
*/
async removeFile(relativePath) {
try {
await this.db.removeFileChunks(relativePath);
this.emit('fileRemoved', relativePath);
}
catch (error) {
console.error(`Error removing file ${relativePath}:`, error);
throw error;
}
}
/**
* Get all code files in the project
*/
async getAllCodeFiles() {
return new Promise((resolve, reject) => {
const files = [];
const tempWatcher = chokidar.watch(this.projectRoot, {
ignored: this.options.ignored,
persistent: false,
ignoreInitial: false
});
tempWatcher
.on('add', filePath => {
if (this.shouldProcessFile(filePath)) {
files.push(filePath);
}
})
.on('ready', async () => {
await tempWatcher.close();
resolve(files);
})
.on('error', reject);
});
}
/**
* Check if file should be processed
*/
shouldProcessFile(filePath) {
const ext = path.extname(filePath).toLowerCase();
const supportedExtensions = [
'.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', // JavaScript/TypeScript
'.java', // Java
'.py', // Python
'.go', // Go
'.rs', // Rust
'.cpp', '.cc', '.cxx', '.c++', '.hpp', // C++
'.c', '.h' // C
];
return supportedExtensions.includes(ext) && this.parser.shouldParseFile(filePath);
}
/**
* Get processing statistics
*/
getStats() {
return { ...this.stats };
}
/**
* Reset statistics
*/
resetStats() {
this.stats = {
filesProcessed: 0,
filesSkipped: 0,
errors: 0,
totalTime: 0,
averageTime: 0
};
}
/**
* Check if watcher is active
*/
isWatching() {
return !!this.watcher;
}
/**
* Get current queue size
*/
getQueueSize() {
return this.changeQueue.length;
}
/**
* Create content hash for deduplication
*/
createContentHash(content) {
// Uses the top-level crypto import (require() is not available in an ES module)
return crypto.createHash('sha256').update(content).digest('hex');
}
}
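/*
 * Example wiring (an illustrative sketch only; `db`, `parser`, and `embedder`
 * are assumed to be the project's database, code parser, and embedder
 * instances, constructed elsewhere):
 *
 *   const watcher = new CodebaseWatcher(process.cwd(), db, parser, embedder, {
 *     debounceMs: 500
 *   });
 *   watcher.on('error', (err) => console.error('watcher error:', err));
 *   watcher.on('changesProcessed', (count) => console.log(`${count} change(s) processed`));
 *   await watcher.performInitialScan(); // one-time full index of existing files
 *   await watcher.startWatching();      // then keep the index in sync incrementally
 *   // ...on shutdown:
 *   await watcher.stopWatching();
 */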
//# sourceMappingURL=watcher.js.map