vibe-coder-mcp
Version:
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
624 lines (623 loc) • 39.1 kB
JavaScript
import { z } from 'zod';
import { registerTool } from '../../services/routing/toolRegistry.js';
import logger from '../../logger.js';
import path from 'path';
import fs from 'fs/promises';
import fsSync from 'fs';
import { jobManager, JobStatus } from '../../services/job-manager/index.js';
import { sseNotifier } from '../../services/sse-notifier/index.js';
import { formatBackgroundJobInitiationResponse } from '../../services/job-response-formatter/index.js';
import { initializeParser, readAndParseFile, clearCaches, getMemoryStats, languageConfigurations, initializeCaches, getSourceCodeFromCache, getMemoryManager } from './parser.js';
import { takeMemorySample, generateMemoryUsageReport, clearMemoryUsageSamples } from './memoryMonitor.js';
import { initializeImportResolver, disposeImportResolver } from './utils/importResolverInitializer.js';
import { getMetricsCollector, resetMetricsCollector } from './performanceMetrics.js';
import { collectSourceFiles } from './fileScanner.js';
import { extractFunctions, extractClasses, extractImports, getNodeText, generateHeuristicComment } from './astAnalyzer.js';
import { buildFileDependencyGraph, buildClassInheritanceGraph, buildFunctionCallGraph } from './graphBuilder.js';
import { generateMermaidSequenceDiagram } from './diagramGenerator.js';
import { formatCodeMapToMarkdown, optimizeMarkdownOutput } from './outputFormatter.js';
import { extractCodeMapConfig } from './configValidator.js';
import { getLanguageHandler } from './languageHandlers/registry.js';
import { createDirectoryStructure } from './directoryUtils.js';
import { processLanguageBasedBatches } from './batchProcessor.js';
import { generateMarkdownOutput } from './outputGenerator.js';
import { createIncrementalProcessor } from './incrementalProcessor.js';
import { EnhancementConfigManager } from './config/enhancementConfig.js';
import { UniversalClassOptimizer } from './optimization/universalClassOptimizer.js';
import { UniversalDiagramOptimizer } from './optimization/universalDiagramOptimizer.js';
import { AdaptiveOptimizationEngine } from './optimization/adaptiveOptimizer.js';
// Module-level source-code cache. Its size is reported by getCodeMapCacheSizes()
// and it is emptied by clearCodeMapCaches(); nothing in this file writes entries,
// so it is presumably populated by collaborating modules — TODO confirm.
const sourceCodeCache = new Map();
/**
 * Removes compiled/derived artifacts from a scan result so each logical source
 * unit appears only once.
 *
 * Rules:
 *  - `.js.map` and `.d.ts` files are always dropped (generated artifacts).
 *  - Source-language files (.ts, .py, .java, .c, .cpp, .cs, .go) are always kept.
 *  - Compiled outputs (.js, .pyc, .pyo, .class, .o, .obj, .dll, .exe) are kept
 *    only when no source file with the same base name exists in the list.
 *  - Everything else passes through unchanged.
 *
 * Fixes: the generated-artifact check now runs first — previously `.d.ts` files
 * were never filtered because path.extname('foo.d.ts') is '.ts', which hit the
 * source-extension early return before the `.d.ts` test. Also removes a
 * `compiledFiles` map that was built but never read, and replaces repeated
 * Array#includes scans with Set lookups.
 *
 * @param {string[]} files - File paths (relative or absolute) from the scanner.
 * @param {string} _projectRoot - Unused; kept for interface compatibility.
 * @returns {string[]} The filtered file list, preserving input order.
 */
function filterDuplicateFiles(files, _projectRoot) {
    const SOURCE_EXTS = new Set(['.ts', '.py', '.java', '.c', '.cpp', '.cs', '.go']);
    const COMPILED_EXTS = new Set(['.js', '.pyc', '.pyo', '.class', '.o', '.obj', '.dll', '.exe']);
    // First pass: record base names that have a source-language counterpart.
    const sourceBaseNames = new Set();
    for (const file of files) {
        if (SOURCE_EXTS.has(path.extname(file).toLowerCase())) {
            sourceBaseNames.add(file.replace(/\.[^.]+$/, ''));
        }
    }
    return files.filter(file => {
        // Generated artifacts are never useful for mapping; check these before
        // the extension tests (extname('.d.ts') is '.ts' and would keep them).
        if (file.endsWith('.js.map') || file.endsWith('.d.ts')) {
            return false;
        }
        const ext = path.extname(file).toLowerCase();
        if (SOURCE_EXTS.has(ext)) {
            return true;
        }
        if (COMPILED_EXTS.has(ext)) {
            // Keep compiled output only when no matching source file exists.
            return !sourceBaseNames.has(file.replace(/\.[^.]+$/, ''));
        }
        return true;
    });
}
/**
 * Drops files with fewer than 10 significant lines (non-blank lines that are
 * not `//` comments). Files that cannot be read are kept, so downstream stages
 * can surface the error instead of silently omitting them.
 *
 * Improvement: the original awaited each fs.readFile serially inside a loop;
 * all files are now read concurrently with Promise.all while the returned
 * list still preserves the input order.
 *
 * @param {string[]} files - Paths relative to projectRoot.
 * @param {string} projectRoot - Directory the relative paths are resolved against.
 * @returns {Promise<string[]>} Files considered significant, in input order.
 */
async function filterTrivialFiles(files, projectRoot) {
    const MIN_SIGNIFICANT_LINES = 10;
    const verdicts = await Promise.all(files.map(async (file) => {
        try {
            const content = await fs.readFile(path.join(projectRoot, file), 'utf-8');
            const significantLines = content.split('\n').filter(line => {
                const trimmed = line.trim();
                return trimmed !== '' && !trimmed.startsWith('//');
            }).length;
            return significantLines >= MIN_SIGNIFICANT_LINES;
        }
        catch {
            // Best-effort: unreadable files are kept rather than dropped.
            return true;
        }
    }));
    return files.filter((_, index) => verdicts[index]);
}
/**
 * Empties every cache owned by the code-map generator: the parser-level
 * caches (via clearCaches from ./parser.js) and this module's local
 * source-code cache.
 */
export function clearCodeMapCaches() {
    clearCaches();
    sourceCodeCache.clear();
}
/**
 * Reports the current entry counts of this module's caches, for diagnostics.
 *
 * @returns {{ sourceCodeCache: number }} Entry count keyed by cache name.
 */
export function getCodeMapCacheSizes() {
    const stats = {
        sourceCodeCache: sourceCodeCache.size,
    };
    return stats;
}
// Raw Zod shape describing the tool's input parameters. It is wrapped with
// z.object(...) at validation time (see executeCodeMapGeneration) and passed
// as-is to the tool registry in codeMapToolDefinition below.
const codeMapInputSchemaShape = {
    ignored_files_patterns: z.array(z.string()).optional().describe("Optional array of glob patterns for files/directories to ignore."),
    output_format: z.enum(['markdown', 'json']).optional().default('markdown').describe("Format for the output (json not yet implemented)."),
};
/**
 * Entry point invoked by the tool registry for `map-codebase`.
 *
 * Creates a job, then either runs the generation inline (SSE/HTTP transports)
 * or — for stdio transports — schedules it in the background and immediately
 * returns a job-initiation response so the caller can poll with get-job-result.
 *
 * @param params  - Raw tool parameters (validated later against the Zod shape).
 * @param _config - Tool configuration, forwarded to executeCodeMapGeneration.
 * @param context - Optional invocation context carrying sessionId/transportType.
 */
export const codeMapExecutor = async (params, _config, context) => {
    const sessionId = context?.sessionId || 'unknown-session';
    const transportType = context?.transportType || 'unknown';
    logger.debug({ toolName: 'map-codebase', params, sessionId, transportType }, 'Code-Map Generator invoked.');
    const jobId = jobManager.createJob('map-codebase', params);
    logger.info({ jobId, sessionId }, 'Created job for code-map-generator');
    const isStdioTransport = transportType === 'stdio' || sessionId === 'stdio-session';
    if (!isStdioTransport) {
        // Non-stdio transports stream progress via SSE, so run synchronously.
        return executeCodeMapGeneration(params, _config, context, jobId);
    }
    const initiationResponse = formatBackgroundJobInitiationResponse(jobId, 'map-codebase', 'Code map generation started. Use get-job-result to check status and retrieve the final result.', { sessionId, transportType });
    // Defer the heavy work to the next tick; failures are recorded on the job
    // so get-job-result can surface them.
    setTimeout(() => {
        executeCodeMapGeneration(params, _config, context, jobId)
            .catch(error => {
            const failureMessage = `Error: ${error instanceof Error ? error.message : String(error)}`;
            logger.error({ err: error, jobId }, 'Error in background code map generation');
            jobManager.updateJobStatus(jobId, JobStatus.FAILED, failureMessage);
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.FAILED, failureMessage);
        });
    }, 0);
    return initiationResponse;
};
/**
 * Runs the full code-map generation pipeline for a single job.
 *
 * Pipeline phases (each reported to the job manager and via SSE):
 *   1. Config extraction & validation; directory + cache initialization.
 *   2. Parser / import-resolver initialization.
 *   3. File scanning with ignore patterns, then duplicate/trivial filtering.
 *   4. Per-file parsing and symbol extraction (language-based batches).
 *   5. Import enhancement pass.
 *   6. Dependency / inheritance / call-graph construction.
 *   7. Diagram generation and markdown output assembly.
 *   8. Optional adaptive optimization of the final markdown.
 *
 * On any failure the job is marked FAILED and an isError result is both stored
 * on the job and returned. The finally block tears down all per-job resources.
 *
 * @param params  - Raw tool parameters (validated here against codeMapInputSchemaShape).
 * @param _config - Raw tool configuration; passed through extractCodeMapConfig.
 * @param context - Optional invocation context carrying sessionId.
 * @param jobId   - Job identifier created by the caller.
 * @returns A tool result object: { content: [{type:'text', text}], isError }.
 */
export async function executeCodeMapGeneration(params, _config, context, jobId) {
    const sessionId = context?.sessionId || 'unknown-session';
    const abortSignal = jobManager.getJobAbortSignal(jobId);
    // Reset memory instrumentation so samples cover only this run.
    clearMemoryUsageSamples();
    takeMemorySample('Initial');
    const enhancementManager = EnhancementConfigManager.getInstance();
    enhancementManager.enableAggressiveOptimizations();
    const classOptimizer = new UniversalClassOptimizer();
    const diagramOptimizer = new UniversalDiagramOptimizer();
    const adaptiveEngine = new AdaptiveOptimizationEngine();
    try {
        try {
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Starting code map generation...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Starting code map generation...');
            // Dynamic import avoids a hard dependency cycle with parser.js; the
            // lifecycle manager may be absent, hence the guard.
            const { processLifecycleManager } = await import('./parser.js');
            if (processLifecycleManager) {
                processLifecycleManager.registerJob(jobId);
                logger.debug(`Registered job ${jobId} with process lifecycle manager`);
            }
            // Phase 1: configuration. A bad config short-circuits with an error result.
            let config;
            try {
                config = await extractCodeMapConfig(_config);
                logger.info('Enhanced Code Map Generator initialized with maximum aggressive optimization');
            }
            catch (error) {
                logger.error({ err: error }, 'Failed to extract configuration');
                jobManager.updateJobStatus(jobId, JobStatus.FAILED, `Configuration error: ${error instanceof Error ? error.message : String(error)}`);
                sseNotifier.sendProgress(sessionId, jobId, JobStatus.FAILED, `Configuration error: ${error instanceof Error ? error.message : String(error)}`);
                const errorResult = {
                    content: [{
                            type: 'text',
                            text: `Configuration error: ${error instanceof Error ? error.message : String(error)}\n\nPlease ensure that 'allowedMappingDirectory' is configured in the tool configuration.`
                        }],
                    isError: true
                };
                jobManager.setJobResult(jobId, errorResult);
                return errorResult;
            }
            // Validate raw params against the Zod shape; throws on invalid input
            // (caught by the outer catch below).
            const validatedParams = z.object(codeMapInputSchemaShape).parse(params);
            if (validatedParams.output_format) {
                config.output = {
                    ...config.output,
                    format: validatedParams.output_format
                };
                logger.info(`Using output format: ${validatedParams.output_format}`);
            }
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Initializing directory structure...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Initializing directory structure...', 5);
            const directoryStructure = await createDirectoryStructure(config, jobId);
            logger.debug(`Created directory structure: ${JSON.stringify(directoryStructure)}`);
            // File-based caching is opt-out: anything other than an explicit
            // `enabled: false` turns it on.
            if (config.cache?.enabled !== false) {
                jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Initializing caches...');
                sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Initializing caches...', 10);
                await initializeCaches(config);
                logger.debug('Initialized file-based caches');
            }
            else {
                logger.debug('File-based caching is disabled');
            }
            // Fail fast if the mapping root is missing or unreadable.
            try {
                await fs.access(config.allowedMappingDirectory, fsSync.constants.R_OK);
                logger.debug(`Verified allowed mapping directory exists and is readable: ${config.allowedMappingDirectory}`);
            }
            catch (error) {
                logger.error(`Cannot access allowed mapping directory: ${config.allowedMappingDirectory}. Error: ${error instanceof Error ? error.message : String(error)}`);
                jobManager.updateJobStatus(jobId, JobStatus.FAILED, `Cannot access allowed mapping directory: ${config.allowedMappingDirectory}`);
                sseNotifier.sendProgress(sessionId, jobId, JobStatus.FAILED, `Cannot access allowed mapping directory: ${config.allowedMappingDirectory}`);
                const errorResult = {
                    content: [{
                            type: 'text',
                            text: `Cannot access allowed mapping directory: ${config.allowedMappingDirectory}. Error: ${error instanceof Error ? error.message : String(error)}`
                        }],
                    isError: true
                };
                jobManager.setJobResult(jobId, errorResult);
                return errorResult;
            }
            const projectRoot = config.allowedMappingDirectory;
            // Phase 2: parser and import-resolver initialization.
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Initializing parser...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Initializing parser...');
            await initializeParser();
            logger.info('Parser and memory management initialized.');
            takeMemorySample('After initialization');
            // Force-enable import resolution regardless of the user config.
            initializeImportResolver({
                ...config,
                importResolver: {
                    ...config.importResolver,
                    enabled: true,
                    expandSecurityBoundary: true,
                    enhanceImports: true
                }
            });
            logger.info('Import resolver initialized with expandSecurityBoundary enabled');
            const initialMemoryStats = getMemoryStats();
            logger.info({ initialMemoryStats }, 'Initial memory usage statistics');
            // Phase 3: build ignore patterns and scan for source files.
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Preparing file scanning...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Preparing file scanning...');
            const supportedExtensions = Object.keys(languageConfigurations);
            // Convert user glob patterns to case-insensitive regexes:
            // `**` matches across path separators, `*` matches within a segment.
            // Invalid patterns are logged and skipped, never fatal.
            const userIgnoredPatterns = validatedParams.ignored_files_patterns?.map(pattern => {
                try {
                    const regexPattern = pattern
                        .replace(/\*\*/g, '___DOUBLE_STAR___')
                        .replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
                        .replace(/___DOUBLE_STAR___/g, '.*')
                        .replace(/\*/g, '[^/]*');
                    logger.debug(`Converted pattern "${pattern}" to regex: "${regexPattern}"`);
                    return new RegExp(regexPattern, 'i');
                }
                catch (error) {
                    logger.warn(`Invalid pattern "${pattern}" - skipping. Error: ${error instanceof Error ? error.message : String(error)}`);
                    return null;
                }
            }).filter(Boolean) || [];
            // Built-in ignores: dependency/build dirs, dotfiles, tests/mocks,
            // lockfiles, minified assets, and binary/media extensions.
            const defaultIgnoredPatterns = [
                /node_modules/i, /\.git/i, /dist/i, /build/i, /out/i, /coverage/i, /vendor/i,
                /\.(log|lock|env|bak|tmp|swp|DS_Store|map)$/i, /.*\/\..*/, /^\..*/,
                /(?:^|[/\\])__(tests|mocks|snapshots)__[/\\]/i, /(?:^|[/\\])(test|tests)[/\\]/i,
                /(?:^|[/\\])(spec|e2e)[/\\]/i,
                /\.spec\./i,
                /\.e2e\./i,
                /(?:^|[/\\])__(mocks|fixtures|snapshots)__[/\\]/i,
                /\.min\.(js|css)$/i, /package-lock\.json/i, /yarn\.lock/i,
                /\.(png|jpg|jpeg|gif|svg|ico|woff|woff2|ttf|eot|mp3|mp4|webm|ogg|pdf|doc|docx|xls|xlsx|ppt|pptx|zip|tar|gz|rar|7z|exe|dll|bin|obj|o|iso|dmg|pdb|bak)$/i,
            ];
            const combinedIgnoredPatterns = [...defaultIgnoredPatterns, ...userIgnoredPatterns];
            logger.debug(`Using ${combinedIgnoredPatterns.length} ignore patterns (${userIgnoredPatterns.length} user-defined, ${defaultIgnoredPatterns.length} default)`);
            // Honor cancellation/timeouts before the expensive scan.
            if (abortSignal?.aborted) {
                throw new Error('Job was cancelled or timed out');
            }
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Scanning for source files...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Scanning for source files...');
            logger.info(`Scanning for source files in: ${projectRoot}`);
            const filePathsResult = await collectSourceFiles(projectRoot, supportedExtensions, combinedIgnoredPatterns, config);
            // collectSourceFiles apparently may return nested arrays — flatten
            // defensively. TODO confirm against its actual return type.
            let filePaths = Array.isArray(filePathsResult[0]) ? filePathsResult.flat() : filePathsResult;
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Filtering duplicate and trivial files...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Filtering duplicate and trivial files...', 25);
            const originalFileCount = filePaths.length;
            filePaths = filterDuplicateFiles(filePaths, projectRoot);
            const afterDuplicateFilter = filePaths.length;
            filePaths = await filterTrivialFiles(filePaths, projectRoot);
            const afterTrivialFilter = filePaths.length;
            logger.info(`File filtering results: ${originalFileCount} → ${afterDuplicateFilter} (after duplicate filter) → ${afterTrivialFilter} (after trivial filter)`);
            const postScanningMemoryStats = getMemoryStats();
            logger.info({ postScanningMemoryStats }, 'Memory usage after file scanning');
            // An empty scan is a successful (non-error) completion.
            if (filePaths.length === 0) {
                jobManager.updateJobStatus(jobId, JobStatus.COMPLETED, 'No files found');
                sseNotifier.sendProgress(sessionId, jobId, JobStatus.COMPLETED, 'No files found');
                const noFilesResult = {
                    content: [{
                            type: 'text',
                            text: 'No supported source files found to map in the specified path after applying ignore patterns.'
                        }],
                    isError: false
                };
                jobManager.setJobResult(jobId, noFilesResult);
                return noFilesResult;
            }
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `Found ${filePaths.length} source files. Parsing files...`);
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `Found ${filePaths.length} source files. Parsing files...`, 30);
            logger.info(`Found ${filePaths.length} source files to process.`);
            takeMemorySample('After file scanning');
            if (abortSignal?.aborted) {
                throw new Error('Job was cancelled or timed out');
            }
            // Phase 4: per-file worker. Produces a FileInfo-shaped object; parse
            // failures degrade to an empty entry with an explanatory comment
            // rather than failing the whole job.
            const processFile = async (filePath) => {
                const relativePath = path.relative(projectRoot, filePath);
                try {
                    const { tree, sourceCode } = await readAndParseFile(filePath, path.extname(filePath).toLowerCase(), config);
                    if (!tree) {
                        logger.warn(`No parser or parsing failed for ${filePath}, creating basic FileInfo.`);
                        return {
                            filePath,
                            relativePath,
                            classes: [],
                            functions: [],
                            imports: [],
                            comment: `File type ${path.extname(filePath).toLowerCase()} not fully supported for deep AST analysis.`,
                        };
                    }
                    // Language id is just the lowercase extension throughout this file.
                    const languageId = path.extname(filePath).toLowerCase();
                    const functions = extractFunctions(tree.rootNode, sourceCode, languageId);
                    const classes = extractClasses(tree.rootNode, sourceCode, languageId);
                    let imports = extractImports(tree.rootNode, sourceCode, languageId);
                    // NOTE(review): imports are enhanced here AND again in the
                    // separate enhanceImports pass below — confirm the double
                    // enhancement is intentional.
                    if (config.importResolver?.enhanceImports) {
                        try {
                            const handler = getLanguageHandler(languageId);
                            if (handler.enhanceImportInfo) {
                                imports = await handler.enhanceImportInfo(filePath, imports, {
                                    allowedDir: config.allowedMappingDirectory,
                                    outputDir: config.output?.outputDir || path.join(process.env.VIBE_CODER_OUTPUT_DIR || '.', 'code-map-generator'),
                                    maxDepth: config.importResolver.importMaxDepth || 3,
                                    tsConfig: config.importResolver.tsConfig,
                                    pythonPath: config.importResolver.pythonPath,
                                    pythonVersion: config.importResolver.pythonVersion,
                                    venvPath: config.importResolver.venvPath,
                                    clangdPath: config.importResolver.clangdPath,
                                    compileFlags: config.importResolver.compileFlags,
                                    includePaths: config.importResolver.includePaths,
                                    semgrepPatterns: config.importResolver.semgrepPatterns,
                                    semgrepTimeout: config.importResolver.semgrepTimeout,
                                    semgrepMaxMemory: config.importResolver.semgrepMaxMemory,
                                    disableSemgrepFallback: config.importResolver.disableSemgrepFallback
                                });
                                logger.debug({ filePath, importsCount: imports.length }, 'Enhanced imports with third-party resolver');
                            }
                        }
                        catch (error) {
                            logger.error({ err: error, filePath }, 'Error enhancing imports with third-party resolver');
                        }
                    }
                    // Derive a file-level comment from a leading doc/line comment,
                    // falling back to a name-based heuristic.
                    let fileLevelComment;
                    const firstChildNode = tree.rootNode.firstChild;
                    if (firstChildNode?.type === 'comment' && firstChildNode.text.startsWith('/**')) {
                        // JSDoc-style: take the first line of the block body.
                        fileLevelComment = getNodeText(firstChildNode, sourceCode).substring(3).split('*/')[0].trim().split('\n')[0];
                    }
                    else if (firstChildNode?.type === 'comment' && (firstChildNode.text.startsWith('//') || firstChildNode.text.startsWith('#'))) {
                        fileLevelComment = getNodeText(firstChildNode, sourceCode).substring(firstChildNode.text.startsWith('//') ? 2 : 1).trim();
                    }
                    return {
                        filePath,
                        relativePath,
                        classes,
                        functions,
                        imports,
                        comment: fileLevelComment || generateHeuristicComment(path.basename(relativePath), 'file'),
                    };
                }
                catch (error) {
                    logger.error({ err: error, filePath }, `Failed to process file.`);
                    return {
                        filePath,
                        relativePath,
                        classes: [],
                        functions: [],
                        imports: [],
                        comment: `Error processing file: ${error instanceof Error ? error.message : String(error)}`,
                    };
                }
            };
            // Wrap paths into {path, extension} objects for language-based batching.
            // Defensive unwrap in case a path arrives as a one-element array.
            const fileObjects = filePaths.map(filePath => {
                const pathStr = Array.isArray(filePath) ? filePath[0] : filePath;
                return {
                    path: pathStr,
                    extension: path.extname(pathStr).toLowerCase()
                };
            });
            const allFileInfos = await processLanguageBasedBatches(fileObjects, async (fileObj) => processFile(fileObj.path), config, jobId, sessionId, 'Parsing files', 30, 50);
            // Phase 5: second import-enhancement pass over the parsed FileInfos.
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Enhancing imports...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Enhancing imports...', 50);
            const enhanceImports = async (fileInfo) => {
                try {
                    if (config.importResolver?.enhanceImports) {
                        const languageId = path.extname(fileInfo.filePath).toLowerCase();
                        const handler = getLanguageHandler(languageId);
                        if (handler.enhanceImportInfo) {
                            const enhancedImports = await handler.enhanceImportInfo(fileInfo.filePath, fileInfo.imports, {
                                allowedDir: config.allowedMappingDirectory,
                                outputDir: config.output?.outputDir || path.join(process.env.VIBE_CODER_OUTPUT_DIR || '.', 'code-map-generator'),
                                maxDepth: config.importResolver.importMaxDepth || 3,
                                tsConfig: config.importResolver.tsConfig,
                                pythonPath: config.importResolver.pythonPath,
                                pythonVersion: config.importResolver.pythonVersion,
                                venvPath: config.importResolver.venvPath,
                                clangdPath: config.importResolver.clangdPath,
                                compileFlags: config.importResolver.compileFlags,
                                includePaths: config.importResolver.includePaths,
                                semgrepPatterns: config.importResolver.semgrepPatterns,
                                semgrepTimeout: config.importResolver.semgrepTimeout,
                                semgrepMaxMemory: config.importResolver.semgrepMaxMemory,
                                disableSemgrepFallback: config.importResolver.disableSemgrepFallback
                            });
                            logger.debug({ filePath: fileInfo.filePath, importsCount: enhancedImports.length }, 'Enhanced imports with third-party resolver');
                            return {
                                ...fileInfo,
                                imports: enhancedImports
                            };
                        }
                    }
                    return fileInfo;
                }
                catch (error) {
                    // Best-effort: on failure the original (unenhanced) info is kept.
                    logger.error({ err: error, filePath: fileInfo.filePath }, 'Error enhancing imports');
                    return fileInfo;
                }
            };
            const fileInfoObjects = allFileInfos.map(fileInfo => ({
                path: fileInfo.filePath,
                extension: path.extname(fileInfo.filePath).toLowerCase(),
                fileInfo
            }));
            const fileInfosWithEnhancedImports = await processLanguageBasedBatches(fileInfoObjects, async (fileObj) => enhanceImports(fileObj.fileInfo), config, jobId, sessionId, 'Enhancing imports', 50, 70);
            const postParsingMemoryStats = getMemoryStats();
            logger.info({ postParsingMemoryStats }, 'Memory usage after parsing and symbol extraction');
            takeMemorySample('After processing');
            // Stable output ordering by relative path (sorts in place).
            fileInfosWithEnhancedImports.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
            // Phase 6: graph construction.
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Building dependency graphs...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Building dependency graphs...', 60);
            const codeMapData = { projectPath: projectRoot, files: fileInfosWithEnhancedImports };
            const fileDepGraph = await buildFileDependencyGraph(fileInfosWithEnhancedImports, config, jobId);
            const classInheritanceGraph = await buildClassInheritanceGraph(fileInfosWithEnhancedImports, config, jobId);
            // Local cache of file contents for the call-graph builder; distinct
            // from the module-level sourceCodeCache. Prefer the persistent cache,
            // fall back to re-reading/parsing the file.
            const tempSourceCodeCache = new Map();
            for (const fileInfo of fileInfosWithEnhancedImports) {
                try {
                    const cachedSourceCode = await getSourceCodeFromCache(fileInfo.filePath, config.allowedMappingDirectory);
                    if (cachedSourceCode) {
                        tempSourceCodeCache.set(fileInfo.filePath, cachedSourceCode);
                    }
                    else {
                        const { sourceCode } = await readAndParseFile(fileInfo.filePath, path.extname(fileInfo.filePath).toLowerCase(), config);
                        if (sourceCode) {
                            tempSourceCodeCache.set(fileInfo.filePath, sourceCode);
                        }
                    }
                }
                catch (error) {
                    logger.warn(`Could not read source code for ${fileInfo.filePath}: ${error}`);
                }
            }
            const functionCallGraph = await buildFunctionCallGraph(fileInfosWithEnhancedImports, tempSourceCodeCache, config, jobId);
            const fileDepNodes = fileDepGraph.nodes;
            const fileDepEdges = fileDepGraph.edges;
            const classInheritanceNodes = classInheritanceGraph.nodes;
            const classInheritanceEdges = classInheritanceGraph.edges;
            const funcCallNodes = functionCallGraph.nodes;
            const funcCallEdges = functionCallGraph.edges;
            const postGraphBuildingMemoryStats = getMemoryStats();
            logger.info({ postGraphBuildingMemoryStats }, 'Memory usage after graph building');
            takeMemorySample('After graph building');
            // Phase 7: diagrams. NOTE(review): the same optimizeDependencyDiagram
            // method is used for all three graph types — confirm that is intended
            // for class-inheritance and function-call graphs.
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating diagrams...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating diagrams...', 70);
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating optimized architecture overview...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating optimized architecture overview...', 70);
            const enhancementConfig = EnhancementConfigManager.getInstance().getConfig();
            const fileDepDiagramMd = diagramOptimizer.optimizeDependencyDiagram(fileDepNodes, fileDepEdges, enhancementConfig.universalOptimization);
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating optimized class overview...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating optimized class overview...', 75);
            const classDiagramMd = diagramOptimizer.optimizeDependencyDiagram(classInheritanceNodes, classInheritanceEdges, enhancementConfig.universalOptimization);
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating optimized function overview...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating optimized function overview...', 80);
            const funcCallDiagramMd = diagramOptimizer.optimizeDependencyDiagram(funcCallNodes, funcCallEdges, enhancementConfig.universalOptimization);
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating sequence diagram...');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating sequence diagram...', 85);
            const sequenceDiagramMd = generateMermaidSequenceDiagram(funcCallNodes, funcCallEdges);
            const postDiagramGenerationMemoryStats = getMemoryStats();
            logger.info({ postDiagramGenerationMemoryStats }, 'Memory usage after diagram generation');
            jobManager.updateJobStatus(jobId, JobStatus.RUNNING, 'Generating output...');
            // NOTE(review): progress drops from 85 back to 80 here — the percent
            // sequence is not monotonic; confirm whether that is intentional.
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, 'Generating output...', 80);
            const outputPath = await generateMarkdownOutput(fileInfosWithEnhancedImports, fileDepGraph, classInheritanceGraph, functionCallGraph, config, jobId);
            const postOutputGenerationMemoryStats = getMemoryStats();
            logger.info({ postOutputGenerationMemoryStats }, 'Memory usage after output generation');
            takeMemorySample('After output generation');
            const memoryReport = generateMemoryUsageReport();
            logger.info(memoryReport);
            // Assemble the in-memory markdown returned to the caller (the full
            // report was already written to outputPath above).
            const textualCodeMapMd = formatCodeMapToMarkdown(codeMapData, projectRoot);
            // Files that errored are identified by the comment prefix set in
            // processFile's catch branch.
            const successfullyProcessedCount = fileInfosWithEnhancedImports.filter(fi => !(fi.comment && fi.comment.startsWith("Error processing file"))).length;
            const filesWithErrorsCount = fileInfosWithEnhancedImports.length - successfullyProcessedCount;
            let finalMarkdownOutput = `## Codebase Overview for ${path.basename(projectRoot)}\n\n`;
            finalMarkdownOutput += `**Summary:** Processed ${fileInfosWithEnhancedImports.length} files. Successfully analyzed: ${successfullyProcessedCount}. Files with errors/skipped: ${filesWithErrorsCount}.\n\n`;
            finalMarkdownOutput += `**Output saved to:** ${outputPath}\n\n`;
            // Each diagram section: optimized (plain text) form when verbose
            // diagrams are eliminated, otherwise a fenced mermaid block.
            if (fileDepEdges.length > 0 || fileDepNodes.length > 0) {
                if (enhancementConfig.universalOptimization.eliminateVerboseDiagrams) {
                    finalMarkdownOutput += `### File Dependencies\n\n${fileDepDiagramMd}\n\n`;
                }
                else {
                    finalMarkdownOutput += `### File Dependency Graph\n\n\`\`\`mermaid\n${fileDepDiagramMd}\n\`\`\`\n\n`;
                }
            }
            if (classInheritanceEdges.length > 0 || classInheritanceNodes.length > 0) {
                if (enhancementConfig.universalOptimization.eliminateVerboseDiagrams) {
                    finalMarkdownOutput += `### Class Inheritance\n\n${classDiagramMd}\n\n`;
                }
                else {
                    finalMarkdownOutput += `### Class Inheritance Diagram\n\n\`\`\`mermaid\n${classDiagramMd}\n\`\`\`\n\n`;
                }
            }
            if (funcCallEdges.length > 0 || funcCallNodes.length > 0) {
                if (enhancementConfig.universalOptimization.eliminateVerboseDiagrams) {
                    finalMarkdownOutput += `### Function Calls\n\n${funcCallDiagramMd}\n\n`;
                }
                else {
                    finalMarkdownOutput += `### Function Call Map (Heuristic)\n\n\`\`\`mermaid\n${funcCallDiagramMd}\n\`\`\`\n\n`;
                }
                // The sequence diagram is only included in the verbose form.
                if (!enhancementConfig.universalOptimization.eliminateVerboseDiagrams) {
                    finalMarkdownOutput += `### Method Call Sequence Diagram\n\n\`\`\`mermaid\n${sequenceDiagramMd}\n\`\`\`\n\n`;
                }
            }
            finalMarkdownOutput += `## Detailed Code Structure\n\n${textualCodeMapMd}`;
            const postOutputFormattingMemoryStats = getMemoryStats();
            logger.info({ postOutputFormattingMemoryStats }, 'Memory usage after output formatting');
            // Phase 8: optional size optimization of the returned markdown.
            let optimizedOutput = finalMarkdownOutput;
            if (enhancementConfig.enableOptimizations) {
                const optimizationResult = adaptiveEngine.optimizeBasedOnCodebase(codeMapData, enhancementConfig.universalOptimization);
                logger.info({
                    reductionAchieved: optimizationResult.reductionAchieved,
                    qualityMetrics: optimizationResult.qualityMetrics,
                    strategy: optimizationResult.strategy
                }, 'Applied adaptive optimization');
                if (enhancementConfig.universalOptimization.reduceClassDetails) {
                    let optimizedClassContent = '';
                    fileInfosWithEnhancedImports.forEach(fileInfo => {
                        fileInfo.classes.forEach(cls => {
                            optimizedClassContent += classOptimizer.optimizeClassInfo(cls, enhancementConfig.universalOptimization);
                        });
                    });
                    // Replace the entire "Detailed Code Structure" section with the
                    // condensed class summaries (the detailed text is discarded).
                    if (optimizedClassContent) {
                        const detailedStructureStart = optimizedOutput.indexOf('## Detailed Code Structure');
                        if (detailedStructureStart !== -1) {
                            optimizedOutput = optimizedOutput.substring(0, detailedStructureStart) +
                                '## Optimized Code Structure\n\n' + optimizedClassContent;
                        }
                    }
                }
                optimizedOutput = optimizeMarkdownOutput(optimizedOutput);
                logger.info('Applied maximum aggressive optimization for AI agent consumption');
            }
            else {
                optimizedOutput = optimizeMarkdownOutput(finalMarkdownOutput);
            }
            jobManager.updateJobStatus(jobId, JobStatus.COMPLETED, 'Code map generation complete');
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.COMPLETED, 'Code map generation complete', 100);
            const finalMemoryStats = getMemoryStats();
            logger.info({ finalMemoryStats }, 'Final memory usage statistics');
            logger.info({ toolName: 'map-codebase', path: projectRoot, sessionId, successfullyProcessedCount, filesWithErrorsCount }, "Code map generated.");
            const result = {
                content: [{ type: 'text', text: optimizedOutput }],
                isError: false,
            };
            jobManager.setJobResult(jobId, result);
            return result;
        }
        catch (error) {
            // Any failure in the pipeline: mark the job failed and return an
            // error result (also stored on the job for later retrieval).
            jobManager.updateJobStatus(jobId, JobStatus.FAILED, `Error: ${error instanceof Error ? error.message : String(error)}`);
            sseNotifier.sendProgress(sessionId, jobId, JobStatus.FAILED, `Error: ${error instanceof Error ? error.message : String(error)}`);
            try {
                const errorMemoryStats = getMemoryStats();
                logger.info({ errorMemoryStats }, 'Memory usage statistics at error');
            }
            catch (memoryError) {
                logger.warn(`Failed to get memory statistics: ${memoryError instanceof Error ? memoryError.message : String(memoryError)}`);
            }
            logger.error({ err: error, toolName: 'map-codebase', params, sessionId, jobId }, 'Error in Code-Map Generator');
            const errorMessage = error instanceof Error ? error.message : String(error);
            const errorResult = {
                content: [{ type: 'text', text: `Error generating code map: ${errorMessage}` }],
                isError: true,
            };
            jobManager.setJobResult(jobId, errorResult);
            return errorResult;
        }
    }
    finally {
        // Per-job teardown: runs on success and failure alike. Each step is
        // individually guarded so one cleanup failure cannot mask another.
        try {
            const { processLifecycleManager } = await import('./parser.js');
            if (processLifecycleManager) {
                await processLifecycleManager.unregisterJob(jobId);
                logger.debug(`Unregistered job ${jobId} from process lifecycle manager`);
            }
            const memoryStats = getMemoryStats();
            logger.info({ memoryStats }, 'Memory usage statistics');
            takeMemorySample('Final');
            try {
                const metricsCollector = getMetricsCollector();
                metricsCollector.generateSummary();
                logger.info('Performance metrics generated successfully');
                resetMetricsCollector();
            }
            catch (metricsError) {
                logger.warn(`Error generating performance metrics: ${metricsError instanceof Error ? metricsError.message : String(metricsError)}`);
            }
            // NOTE(review): memManager is fetched but only logged — no cleanup
            // is actually performed with it; confirm whether this is incomplete.
            const memManager = getMemoryManager();
            if (memManager) {
                logger.debug('Memory manager found, performing additional cleanup');
            }
            try {
                disposeImportResolver();
                logger.debug('Disposed import resolvers');
            }
            catch (importResolverError) {
                logger.warn(`Error disposing import resolvers: ${importResolverError instanceof Error ? importResolverError.message : String(importResolverError)}`);
            }
            await clearCaches();
            logger.debug('Closed all file-based caches');
            // Close the incremental processor if incremental mode is enabled;
            // the config is re-extracted here because the local `config` variable
            // is scoped to the try block above.
            try {
                const validatedConfig = await extractCodeMapConfig(_config);
                if (validatedConfig?.processing?.incremental) {
                    const incrementalProcessor = await createIncrementalProcessor(validatedConfig);
                    if (incrementalProcessor) {
                        await incrementalProcessor.close();
                        logger.debug('Closed incremental processor');
                    }
                }
            }
            catch (incrementalError) {
                logger.warn(`Error closing incremental processor: ${incrementalError instanceof Error ? incrementalError.message : String(incrementalError)}`);
            }
            // Only available when node runs with --expose-gc.
            if (global.gc) {
                global.gc();
                logger.debug('Forced garbage collection');
            }
        }
        catch (error) {
            logger.warn(`Error closing caches: ${error instanceof Error ? error.message : String(error)}`);
        }
    }
}
// Tool definition for the `map-codebase` MCP tool. Note that inputSchema is
// the raw Zod shape, not a z.object() — presumably the registry wraps it;
// verify against registerTool's expected contract.
const codeMapToolDefinition = {
    name: "map-codebase",
    description: "Indexes and maps a codebase structure, providing class/function maps, comments, and Mermaid diagrams for AI consumption. Captures doc-strings and inline comments for semantic context.",
    inputSchema: codeMapInputSchemaShape,
    executor: codeMapExecutor,
};
// Registration happens at module load time: importing this file is sufficient
// to make the tool available through the routing registry.
registerTool(codeMapToolDefinition);
logger.info('Code-Map Generator tool registered.');