remcode
Turn your AI assistant into a codebase expert. Intelligent code analysis, semantic search, and software engineering guidance through MCP integration.
Object.defineProperty(exports, "__esModule", { value: true });
exports.ProcessingPipeline = void 0;
const logger_1 = require("../utils/logger");
const change_detector_1 = require("./change-detector");
const file_analyzer_1 = require("./file-analyzer");
const incremental_1 = require("./incremental");
const state_manager_1 = require("./state-manager");
const logger = (0, logger_1.getLogger)('ProcessingPipeline');
/**
* Main processing pipeline that orchestrates incremental code analysis and vectorization
*/
class ProcessingPipeline {
constructor(repoPath, options) {
this.repoPath = repoPath;
this.changeDetector = new change_detector_1.ChangeDetector(repoPath);
this.fileAnalyzer = new file_analyzer_1.FileAnalyzer(repoPath);
this.incrementalProcessor = new incremental_1.IncrementalProcessor(options);
this.stateManager = new state_manager_1.StateManager(repoPath);
logger.info(`Initialized ProcessingPipeline for repository: ${repoPath}`);
}
/**
* Execute the full incremental processing pipeline
*/
async processIncremental(fromCommit) {
logger.info('Starting incremental processing pipeline');
try {
// Step 1: Detect changes since last commit
const currentCommit = this.changeDetector.getCurrentCommit();
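// Fall back to the commit recorded in the processing state when no starting commit is supplied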
const lastCommit = fromCommit || await this.getLastProcessedCommit();
logger.info(`Processing changes from ${lastCommit} to ${currentCommit}`);
const changes = await this.changeDetector.getChangedFiles(lastCommit);
logger.info(`Found ${changes.length} changed files`);
if (changes.length === 0) {
logger.info('No changes detected, skipping processing');
return this.createEmptyStats();
}
// Step 2: Analyze changed files
const analyses = await this.fileAnalyzer.analyzeChangedFiles(changes);
logger.info(`Analyzed ${analyses.length} files`);
// Step 3: Initialize incremental processor
await this.incrementalProcessor.initialize();
// Step 4: Process the changes
const stats = await this.incrementalProcessor.processChangedFiles(changes, analyses);
// Step 5: Update state with new commit
await this.incrementalProcessor.updateProcessingState(currentCommit);
logger.info(`Incremental processing completed successfully`);
return stats;
}
catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error);
logger.error(`Incremental processing failed: ${errorMsg}`);
throw error;
}
}
/**
* Process all files in the repository (full processing)
*/
async processAll() {
logger.info('Starting full repository processing');
try {
// Get all code files in the repository
const allFiles = await this.findAllCodeFiles();
logger.info(`Found ${allFiles.length} code files for full processing`);
// Convert to FileChange format for compatibility
const changes = allFiles.map(filePath => ({
path: filePath,
status: 'added',
size: 0 // Will be filled by analyzer
}));
// Analyze all files
const analyses = await this.fileAnalyzer.analyzeChangedFiles(changes);
logger.info(`Analyzed ${analyses.length} files`);
// Initialize processor
await this.incrementalProcessor.initialize();
// Process all files
const stats = await this.incrementalProcessor.processChangedFiles(changes, analyses);
// Update state
const currentCommit = this.changeDetector.getCurrentCommit();
await this.incrementalProcessor.updateProcessingState(currentCommit);
logger.info(`Full processing completed successfully`);
return stats;
}
catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error);
logger.error(`Full processing failed: ${errorMsg}`);
throw error;
}
}
/**
* Get status of the processing pipeline
*/
async getStatus() {
try {
const state = await this.stateManager.loadState();
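// No persisted state means the pipeline has never been initialized for this repository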
if (!state) {
return { isInitialized: false };
}
return {
isInitialized: true,
lastCommit: state.processing?.lastCommit,
lastProcessed: state.processing?.lastUpdated,
totalFiles: state.vectorization?.chunks || 0,
totalVectors: state.vectorization?.vectors || 0
};
}
catch (error) {
logger.error(`Error getting status: ${error instanceof Error ? error.message : String(error)}`);
return { isInitialized: false };
}
}
/**
* Check if there are pending changes to process
*/
async hasPendingChanges() {
try {
const currentCommit = this.changeDetector.getCurrentCommit();
const lastCommit = await this.getLastProcessedCommit();
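// A missing or different last processed commit means there is new work;
// otherwise ask the change detector whether anything changed between the two commits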
if (!lastCommit || lastCommit !== currentCommit) {
return true;
}
return await this.changeDetector.hasChanges(lastCommit, currentCommit);
}
catch (error) {
logger.error(`Error checking for pending changes: ${error instanceof Error ? error.message : String(error)}`);
return false;
}
}
/**
* Get the last processed commit from state
*/
async getLastProcessedCommit() {
try {
const state = await this.stateManager.loadState();
return state?.processing?.lastCommit || '';
}
catch (error) {
logger.warn(`Could not load last processed commit: ${error instanceof Error ? error.message : String(error)}`);
return '';
}
}
/**
* Create empty stats for when no processing is needed
*/
createEmptyStats() {
return {
totalFiles: 0,
addedFiles: 0,
modifiedFiles: 0,
deletedFiles: 0,
totalChunks: 0,
totalEmbeddings: 0,
errorCount: 0,
startTime: new Date(),
endTime: new Date(),
durationMs: 0
};
}
/**
* Find all code files in the repository
*/
async findAllCodeFiles() {
const codeExtensions = [
'.js', '.jsx', '.ts', '.tsx', '.vue',
'.py', '.rb', '.java', '.go', '.php',
'.rs', '.swift', '.cs', '.c', '.cpp',
'.h', '.hpp', '.kt', '.scala'
];
const allFiles = [];
try {
const { execSync } = require('child_process');
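// List tracked files via git ls-files so ignored and untracked files are skipped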
const gitFiles = execSync('git ls-files', {
cwd: this.repoPath,
encoding: 'utf8'
}).split('\n').filter(Boolean);
for (const file of gitFiles) {
const ext = require('path').extname(file).toLowerCase();
if (codeExtensions.includes(ext)) {
allFiles.push(file);
}
}
}
catch (error) {
logger.error(`Error finding code files: ${error instanceof Error ? error.message : String(error)}`);
}
return allFiles;
}
}
exports.ProcessingPipeline = ProcessingPipeline;
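A minimal usage sketch follows. The require path, the empty options object, and the exact stats fields printed are assumptions based on this file alone; the package's real entry point and the IncrementalProcessor options are not shown here.

// Usage sketch: the require path and the options object are assumptions.
const { ProcessingPipeline } = require('./pipeline');

async function main() {
    // Point the pipeline at a local git repository; the second argument is
    // passed through to the IncrementalProcessor constructor.
    const pipeline = new ProcessingPipeline('/path/to/repo', {});

    // Only run the incremental pass when the repository has moved on
    // from the last processed commit.
    if (await pipeline.hasPendingChanges()) {
        const stats = await pipeline.processIncremental();
        console.log(`Processed ${stats.totalFiles} files, ${stats.totalChunks} chunks`);
    }

    // Report what the pipeline has recorded in its state file.
    console.log(await pipeline.getStatus());
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});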