vibe-coder-mcp
Version:
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
390 lines (389 loc) • 19.7 kB
JavaScript
import logger from '../../logger.js';
import { jobManager, JobStatus } from '../../services/job-manager/index.js';
import { sseNotifier } from '../../services/sse-notifier/index.js';
import { getMemoryStats, clearCaches, getMemoryManager } from './parser.js';
import { grammarManager } from './parser.js';
import path from 'path';
import { getMetricsCollector } from './performanceMetrics.js';
/**
 * Resolve the configured batch size.
 *
 * @param {object} config - Configuration object; reads `config.processing.batchSize`.
 * @returns {number} The configured batch size, or 100 when unset or falsy
 *   (a zero/absent value falls back to the default).
 */
export function getBatchSize(config) {
  const configured = config.processing?.batchSize;
  return configured ? configured : 100;
}
/**
 * Compute an adaptive batch size: start from the configured base, then
 * scale it down under memory pressure, adjust for codebase size, and
 * shrink further for large average file sizes. The result is clamped to
 * the [10, 500] range.
 *
 * @param {object} config - Configuration; base size comes from `getBatchSize`.
 * @param {number} fileCount - Total number of files to be processed.
 * @param {number} [averageFileSize] - Mean file size in bytes, if known.
 * @returns {number} Batch size to use for this run.
 */
export function calculateDynamicBatchSize(config, fileCount, averageFileSize) {
  const base = getBatchSize(config);
  const manager = getMemoryManager();
  const stats = getMemoryStats();

  // Without memory telemetry we cannot adapt; use the configured base.
  if (!stats) {
    logger.debug('Memory stats unavailable, using base batch size');
    return base;
  }

  const usageFraction = stats.memoryUsagePercentage;
  const freeHeapMB = (stats.heapTotal - stats.heapUsed) / (1024 * 1024);

  // The MemoryManager's own status assessment takes precedence when present.
  if (manager) {
    const status = manager.getMemoryStats().formatted?.memoryStatus;
    if (status === 'critical') {
      logger.warn('Memory status is critical according to MemoryManager, using minimum batch size');
      return 10;
    }
    if (status === 'high') {
      logger.debug('Memory status is high according to MemoryManager, reducing base batch size');
      return Math.max(20, Math.floor(base * 0.4));
    }
  }

  // Tiered scaling on the raw heap-usage fraction.
  let size = base;
  if (usageFraction > 0.7) {
    size = Math.max(10, Math.floor(base * 0.25));
    logger.debug(`High memory usage (${(usageFraction * 100).toFixed(1)}%), reducing batch size to ${size}`);
  } else if (usageFraction > 0.6) {
    size = Math.max(20, Math.floor(base * 0.5));
    logger.debug(`Moderate-high memory usage (${(usageFraction * 100).toFixed(1)}%), reducing batch size to ${size}`);
  } else if (usageFraction > 0.5) {
    size = Math.max(50, Math.floor(base * 0.75));
    logger.debug(`Moderate memory usage (${(usageFraction * 100).toFixed(1)}%), reducing batch size to ${size}`);
  } else if (usageFraction < 0.3 && freeHeapMB > 2048) {
    // Plenty of headroom: allow larger batches, capped at 500.
    size = Math.min(500, Math.floor(base * 1.5));
    logger.debug(`Low memory usage (${(usageFraction * 100).toFixed(1)}%) with ${freeHeapMB.toFixed(0)}MB available, increasing batch size to ${size}`);
  }

  // Codebase-size adjustments: small codebases fit in one-ish batch,
  // very large ones get a hard cap.
  if (fileCount < 100) {
    if (usageFraction < 0.5) {
      size = Math.min(fileCount, 100);
      logger.debug(`Small codebase (${fileCount} files), setting batch size to ${size}`);
    }
  } else if (fileCount > 10000) {
    size = Math.min(size, 200);
    logger.debug(`Large codebase (${fileCount} files), capping batch size at ${size}`);
  }

  // Files averaging over 100KB shrink the batch proportionally (50KB is
  // treated as the "normal" size; factor never exceeds 1).
  if (averageFileSize && averageFileSize > 100 * 1024) {
    const shrinkFactor = Math.min(1, (50 * 1024) / averageFileSize);
    size = Math.max(10, Math.floor(size * shrinkFactor));
    logger.debug(`Large average file size (${(averageFileSize / 1024).toFixed(1)}KB), adjusting batch size to ${size}`);
  }

  // Final clamp to the supported range.
  size = Math.max(10, Math.min(500, size));
  if (size !== base) {
    logger.info(`Dynamic batch size: ${size} (base: ${base}, memory: ${(usageFraction * 100).toFixed(1)}%, files: ${fileCount})`);
  }
  return size;
}
/**
 * Split an array into consecutive chunks of at most `batchSize` items.
 *
 * @param {Array} items - Items to split; not mutated.
 * @param {number} batchSize - Maximum chunk length; must be >= 1.
 * @returns {Array<Array>} Chunks in original order; last chunk may be shorter.
 * @throws {RangeError} If `batchSize` is not a positive finite number —
 *   previously a zero/negative step made the loop spin forever.
 */
export function splitIntoBatches(items, batchSize) {
  // Guard: a non-positive or NaN step would make `i += batchSize` below
  // never advance, producing an infinite loop.
  if (!Number.isFinite(batchSize) || batchSize < 1) {
    throw new RangeError(`batchSize must be a positive number, got ${batchSize}`);
  }
  const batches = [];
  for (let i = 0; i < items.length; i += batchSize) {
    batches.push(items.slice(i, i + batchSize));
  }
  return batches;
}
/**
 * Process `items` in dynamically-sized batches, running all items of a
 * batch concurrently, reporting progress through the job manager and SSE
 * notifier, and performing memory cleanup between batches.
 *
 * @param {Array} items - Items to process.
 * @param {(item) => Promise} processItem - Async mapper applied to each item.
 * @param {object} config - Configuration; reads `config.processing`
 *   (batch sizing and the `logMemoryUsage` flag).
 * @param {string} jobId - Job identifier for `jobManager` status updates.
 * @param {string} sessionId - Session identifier for SSE progress events.
 * @param {string} [progressLabel='Processing items'] - Prefix for progress messages.
 * @param {number} [startProgress=0] - Progress value mapped to the first batch.
 * @param {number} [endProgress=100] - Progress value reported on completion.
 * @returns {Promise<Array>} Flattened results in the same order as `items`.
 */
export async function processBatches(items, processItem, config, jobId, sessionId, progressLabel = 'Processing items', startProgress = 0, endProgress = 100) {
  if (items.length === 0) {
    return [];
  }
  // Batch size adapts to current memory pressure and item count.
  const batchSize = calculateDynamicBatchSize(config, items.length);
  const batches = splitIntoBatches(items, batchSize);
  const totalBatches = batches.length;
  const results = [];
  logger.info(`Processing ${items.length} items in ${totalBatches} batches (batch size: ${batchSize})`);
  const metricsCollector = getMetricsCollector();
  for (let i = 0; i < totalBatches; i++) {
    const batch = batches[i];
    const currentBatch = i + 1;
    // Map the batch index linearly onto [startProgress, endProgress).
    const progressRange = endProgress - startProgress;
    const progress = Math.round(startProgress + (progressRange * (i / totalBatches)));
    jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${progress}% complete)`);
    sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${progress}% complete)`, progress);
    logger.info(`Processing batch ${currentBatch} of ${totalBatches} (Size: ${batch.length})`);
    metricsCollector.startBatch();
    // All items within one batch run concurrently; batches run sequentially.
    const batchPromises = batch.map(processItem);
    const batchResults = await Promise.all(batchPromises);
    metricsCollector.completeBatch(batch.length);
    results.push(...batchResults);
    // Cheap cleanup after every batch; escalate when heap usage is high.
    await performLightweightCleanup(config);
    const memStats = getMemoryStats();
    if (memStats && memStats.memoryUsagePercentage > 0.7) {
      logger.info(`Memory usage at ${memStats?.memoryUsagePercentage?.toFixed(2)}%, running aggressive cleanup after batch ${currentBatch}/${totalBatches}`);
      await performAggressiveCleanup(config);
    }
    else if (i % 5 === 0 && i > 0) {
      // Routine cache clearing every 5th batch even without memory pressure.
      logger.debug(`Performing routine cleanup after batch ${currentBatch}/${totalBatches}`);
      await clearAllCaches();
    }
    if (config.processing?.logMemoryUsage) {
      const memoryUsage = process.memoryUsage();
      logger.debug({
        rss: `${Math.round(memoryUsage.rss / 1024 / 1024)} MB`,
        heapTotal: `${Math.round(memoryUsage.heapTotal / 1024 / 1024)} MB`,
        heapUsed: `${Math.round(memoryUsage.heapUsed / 1024 / 1024)} MB`,
        external: `${Math.round(memoryUsage.external / 1024 / 1024)} MB`,
      }, `Memory usage after batch ${currentBatch}`);
    }
  }
  jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `${progressLabel} completed`);
  sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `${progressLabel} completed`, endProgress);
  return results;
}
/**
 * Like `processBatches`, but persists each batch's results via the
 * caller-supplied storage callbacks instead of accumulating them in
 * memory, then reloads and combines everything at the end. Useful when
 * the full result set would be too large to hold across all batches.
 *
 * @param {Array} items - Items to process.
 * @param {(item) => Promise} processItem - Async mapper applied to each item.
 * @param {(result, index: number) => Promise} saveIntermediateResult - Persists
 *   one result under its global item index.
 * @param {() => Promise<Array>} loadIntermediateResults - Reloads all persisted results.
 * @param {(results: Array) => Promise|any} combineResults - Folds loaded results
 *   into the final return value; also called with `[]` for empty input.
 * @param {object} config - Configuration; reads `config.processing`.
 * @param {string} jobId - Job identifier for status updates.
 * @param {string} sessionId - Session identifier for SSE progress events.
 * @param {string} [progressLabel='Processing items'] - Prefix for progress messages.
 * @param {number} [startProgress=0] - Progress value mapped to the first batch.
 * @param {number} [endProgress=100] - Progress value reported on completion.
 * @returns {Promise} Whatever `combineResults` produces.
 */
export async function processBatchesWithIntermediateStorage(items, processItem, saveIntermediateResult, loadIntermediateResults, combineResults, config, jobId, sessionId, progressLabel = 'Processing items', startProgress = 0, endProgress = 100) {
  if (items.length === 0) {
    return combineResults([]);
  }
  const batchSize = calculateDynamicBatchSize(config, items.length);
  const batches = splitIntoBatches(items, batchSize);
  const totalBatches = batches.length;
  logger.info(`Processing ${items.length} items in ${totalBatches} batches with intermediate storage (batch size: ${batchSize})`);
  // Reserve the last 20% of the progress range for the combine phase.
  const processingRange = (endProgress - startProgress) * 0.8;
  for (let i = 0; i < totalBatches; i++) {
    const batch = batches[i];
    const currentBatch = i + 1;
    const progress = Math.round(startProgress + (processingRange * (i / totalBatches)));
    jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${progress}% complete)`);
    sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${progress}% complete)`, progress);
    logger.info(`Processing batch ${currentBatch} of ${totalBatches} (Size: ${batch.length})`);
    const batchPromises = batch.map(processItem);
    const batchResults = await Promise.all(batchPromises);
    // Persist each result under its global item index so results need not
    // be held in memory across batches.
    await Promise.all(batchResults.map((result, index) => saveIntermediateResult(result, (i * batchSize) + index)));
    await performLightweightCleanup(config);
    const memStats = getMemoryStats();
    if (memStats && memStats.memoryUsagePercentage > 0.7) {
      logger.info(`Memory usage at ${memStats?.memoryUsagePercentage?.toFixed(2)}%, running aggressive cleanup after batch ${currentBatch}/${totalBatches}`);
      await performAggressiveCleanup(config);
    }
    else if (i % 5 === 0 && i > 0) {
      // Routine cache clearing every 5th batch even without memory pressure.
      logger.debug(`Performing routine cleanup after batch ${currentBatch}/${totalBatches}`);
      await clearAllCaches();
    }
    if (config.processing?.logMemoryUsage) {
      const memoryUsage = process.memoryUsage();
      logger.debug({
        rss: `${Math.round(memoryUsage.rss / 1024 / 1024)} MB`,
        heapTotal: `${Math.round(memoryUsage.heapTotal / 1024 / 1024)} MB`,
        heapUsed: `${Math.round(memoryUsage.heapUsed / 1024 / 1024)} MB`,
        external: `${Math.round(memoryUsage.external / 1024 / 1024)} MB`,
      }, `Memory usage after batch ${currentBatch}`);
    }
  }
  // Combine phase: reload everything that was persisted and fold it down.
  const combiningProgress = Math.round(startProgress + processingRange);
  jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `Combining results...`);
  sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `Combining results...`, combiningProgress);
  const intermediateResults = await loadIntermediateResults();
  const finalResult = await combineResults(intermediateResults);
  jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `${progressLabel} completed`);
  sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `${progressLabel} completed`, endProgress);
  return finalResult;
}
/**
 * Cheap between-batch cleanup: drop temporary caches and reset per-batch
 * state. Logs a process memory snapshot when
 * `config.processing.logMemoryUsage` is enabled.
 *
 * @param {object} [config] - Optional configuration; only the logging flag is read.
 */
async function performLightweightCleanup(config) {
  clearTemporaryCaches();
  resetBatchState();
  if (!config?.processing?.logMemoryUsage) {
    return;
  }
  const usage = process.memoryUsage();
  const toMB = (bytes) => `${Math.round(bytes / 1024 / 1024)} MB`;
  logger.debug({
    rss: toMB(usage.rss),
    heapTotal: toMB(usage.heapTotal),
    heapUsed: toMB(usage.heapUsed),
    external: toMB(usage.external),
  }, `Memory usage after lightweight cleanup`);
}
/**
 * Heavyweight cleanup for high memory pressure: flush all caches, reset
 * module state, and suggest a garbage-collection pass. Logs a memory
 * snapshot when `config.processing.logMemoryUsage` is enabled.
 *
 * @param {object} [config] - Optional configuration; only the logging flag is read.
 */
async function performAggressiveCleanup(config) {
  await clearAllCaches();
  resetAllState();
  // global.gc is only defined when Node runs with --expose-gc.
  if (global.gc) {
    global.gc();
  }
  if (!config?.processing?.logMemoryUsage) {
    return;
  }
  const usage = process.memoryUsage();
  const toMB = (bytes) => `${Math.round(bytes / 1024 / 1024)} MB`;
  logger.debug({
    rss: toMB(usage.rss),
    heapTotal: toMB(usage.heapTotal),
    heapUsed: toMB(usage.heapUsed),
    external: toMB(usage.external),
  }, `Memory usage after aggressive cleanup`);
}
// Hook invoked by performLightweightCleanup to drop short-lived caches
// between batches. Currently a no-op: no temporary caches are tracked in
// this module yet.
function clearTemporaryCaches() {
}
// Hook invoked by performLightweightCleanup to reset per-batch
// bookkeeping. Currently a no-op placeholder.
function resetBatchState() {
}
/**
 * Flush the parser caches and release grammars that are no longer in
 * use. Cleanup is best-effort: any failure is logged as a warning and
 * never propagated to the caller.
 */
async function clearAllCaches() {
  try {
    await clearCaches();
    // Optional chaining: grammarManager may not be initialized.
    await grammarManager?.unloadUnusedGrammars();
  } catch (error) {
    logger.warn({ err: error }, 'Error clearing caches during aggressive cleanup');
  }
}
// Hook invoked by performAggressiveCleanup to reset all module-level
// processing state. Currently a no-op placeholder.
function resetAllState() {
}
/**
 * Bucket file-info objects by their lowercased file extension.
 *
 * @param {Array<{path: string}>} files - Files to group; only `path` is read.
 * @returns {Map<string, Array<object>>} Extension (e.g. '.js') to its files,
 *   in first-seen extension order.
 */
export function groupFilesByExtension(files) {
  const groups = new Map();
  for (const file of files) {
    const extension = path.extname(file.path).toLowerCase();
    const bucket = groups.get(extension);
    if (bucket) {
      bucket.push(file);
    } else {
      groups.set(extension, [file]);
    }
  }
  return groups;
}
/**
 * Build batches in which each batch holds files of a single extension,
 * ordered so the most common extensions are processed first.
 *
 * @param {Array<{path: string}>} files - Files to batch.
 * @param {number} [batchSize=50] - Maximum files per batch.
 * @returns {Array<Array<object>>} Ordered list of mono-extension batches.
 */
export function createLanguageBasedBatches(files, batchSize = 50) {
  const byExtension = groupFilesByExtension(files);
  // Most common extensions first; Array.prototype.sort is stable, so ties
  // keep first-seen order.
  const orderedExtensions = [...byExtension.entries()]
    .sort((lhs, rhs) => rhs[1].length - lhs[1].length)
    .map((entry) => entry[0]);
  const batches = [];
  let pending = [];
  for (const extension of orderedExtensions) {
    for (const file of byExtension.get(extension)) {
      if (pending.length >= batchSize) {
        batches.push(pending);
        pending = [];
      }
      pending.push(file);
    }
    // Flush a partially-filled batch at each extension boundary so batches
    // never mix extensions; an exactly-full batch is carried over and
    // flushed at the start of the next extension's loop.
    if (pending.length > 0 && pending.length < batchSize) {
      batches.push(pending);
      pending = [];
    }
  }
  if (pending.length > 0) {
    batches.push(pending);
  }
  return batches;
}
/**
 * Free grammar resources when batch processing switches to a different
 * predominant language: unload unused grammars (best-effort) and suggest
 * a garbage-collection pass when available.
 */
async function performLanguageChangeCleanup() {
  try {
    if (grammarManager) {
      await grammarManager.unloadUnusedGrammars();
      logger.debug('Unloaded unused grammars during language change');
    }
  } catch (error) {
    logger.warn({ err: error }, 'Error unloading unused grammars during language change');
  }
  // global.gc is only defined when Node runs with --expose-gc.
  if (!global.gc) {
    return;
  }
  global.gc();
  logger.debug('Suggested garbage collection during language change');
}
/**
 * Process files in language-grouped batches so grammars can be reused
 * within a batch. Tracks the predominant language, runs grammar cleanup
 * on language switches, optionally preloads grammars when memory allows,
 * and performs the same between-batch cleanup as `processBatches`.
 *
 * @param {Array<{path: string}>} files - Files to process.
 * @param {(file) => Promise} processor - Async mapper applied to each file.
 * @param {object} config - Configuration; reads `config.processing`.
 * @param {string} jobId - Job identifier for status updates.
 * @param {string} sessionId - Session identifier for SSE progress events.
 * @param {string} [progressLabel='Processing files'] - Prefix for progress messages.
 * @param {number} [startProgress=0] - Progress value mapped to the first batch.
 * @param {number} [endProgress=100] - Upper bound of the progress range.
 * @returns {Promise<Array>} Results in batch order.
 */
export async function processLanguageBasedBatches(files, processor, config, jobId, sessionId, progressLabel = 'Processing files', startProgress = 0, endProgress = 100) {
  if (files.length === 0) {
    return [];
  }
  const batchSize = calculateDynamicBatchSize(config, files.length);
  const batches = createLanguageBasedBatches(files, batchSize);
  const totalBatches = batches.length;
  const results = [];
  logger.info(`Processing ${files.length} files in ${totalBatches} language-based batches (batch size: ${batchSize})`);
  const progressIncrement = (endProgress - startProgress) / totalBatches;
  let currentLanguage = '';
  let languageChangeCount = 0;
  for (let i = 0; i < batches.length; i++) {
    const currentBatch = i + 1;
    const batch = batches[i];
    const currentProgress = Math.round(startProgress + (i * progressIncrement));
    // Determine the batch's predominant extension. Batches are built
    // per-language by createLanguageBasedBatches, but count defensively.
    const languageCounts = new Map();
    for (const file of batch) {
      const ext = path.extname(file.path).toLowerCase();
      languageCounts.set(ext, (languageCounts.get(ext) || 0) + 1);
    }
    const predominantLanguage = Array.from(languageCounts.entries())
      .sort((a, b) => b[1] - a[1])[0][0];
    if (predominantLanguage !== currentLanguage) {
      languageChangeCount++;
      currentLanguage = predominantLanguage;
      logger.info(`Switching to processing ${currentLanguage} files (language change #${languageChangeCount})`);
      // Skip cleanup on the very first language — nothing is loaded yet.
      if (languageChangeCount > 1) {
        await performLanguageChangeCleanup();
      }
    }
    // NOTE(review): this updateJobStatus call passes a 4th (progress)
    // argument, unlike the other processors in this file — confirm the
    // jobManager.updateJobStatus signature accepts it.
    jobManager.updateJobStatus(jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${currentLanguage} files)`, currentProgress);
    sseNotifier.sendProgress(sessionId, jobId, JobStatus.RUNNING, `${progressLabel}: batch ${currentBatch} of ${totalBatches} (${currentLanguage} files)`, currentProgress);
    logger.info(`Processing batch ${currentBatch} of ${totalBatches} (Size: ${batch.length}, Language: ${currentLanguage})`);
    if (grammarManager) {
      const memStats = getMemoryStats();
      const memManager = getMemoryManager();
      // Preload grammars only when both the raw heap usage and the
      // MemoryManager's assessment indicate enough headroom.
      let shouldPreloadGrammars = true;
      if (memStats && memStats.memoryUsagePercentage > 0.6) {
        logger.debug(`Memory usage at ${(memStats.memoryUsagePercentage * 100).toFixed(1)}%, skipping grammar preloading for batch ${currentBatch}`);
        shouldPreloadGrammars = false;
      }
      if (memManager && shouldPreloadGrammars) {
        const managerStats = memManager.getMemoryStats();
        if (managerStats.formatted?.memoryStatus === 'high' ||
            managerStats.formatted?.memoryStatus === 'critical') {
          logger.debug(`MemoryManager indicates ${managerStats.formatted.memoryStatus} memory status, skipping grammar preloading`);
          shouldPreloadGrammars = false;
        }
      }
      if (shouldPreloadGrammars) {
        try {
          const fileExtensions = batch.map(file => path.extname(file.path));
          await grammarManager.prepareGrammarsForBatch(fileExtensions);
          logger.debug(`Preloaded grammars for ${fileExtensions.length} files in batch ${currentBatch}`);
        }
        catch (error) {
          logger.warn({ err: error }, 'Error preparing grammars for batch');
        }
      }
      else {
        // Under memory pressure, shed grammars instead of preloading.
        try {
          await grammarManager.unloadUnusedGrammars();
          logger.debug('Unloaded unused grammars to free memory');
        }
        catch {
          logger.debug('Could not unload unused grammars');
        }
      }
    }
    // All files within one batch run concurrently.
    const batchPromises = batch.map(processor);
    const batchResults = await Promise.all(batchPromises);
    results.push(...batchResults);
    // Cheap cleanup after every batch; escalate when heap usage is high.
    await performLightweightCleanup(config);
    const memStats = getMemoryStats();
    if (memStats && memStats.memoryUsagePercentage > 0.7) {
      logger.info(`Memory usage at ${memStats?.memoryUsagePercentage?.toFixed(2)}%, running aggressive cleanup after batch ${currentBatch}/${totalBatches}`);
      await performAggressiveCleanup(config);
    }
    else if (i % 5 === 0 && i > 0) {
      logger.debug(`Performing routine cleanup after batch ${currentBatch}/${totalBatches}`);
      await clearAllCaches();
    }
    if (config.processing?.logMemoryUsage) {
      const memoryUsage = process.memoryUsage();
      logger.debug({
        rss: `${Math.round(memoryUsage.rss / 1024 / 1024)} MB`,
        heapTotal: `${Math.round(memoryUsage.heapTotal / 1024 / 1024)} MB`,
        heapUsed: `${Math.round(memoryUsage.heapUsed / 1024 / 1024)} MB`,
        external: `${Math.round(memoryUsage.external / 1024 / 1024)} MB`,
      }, `Memory usage after batch ${currentBatch}`);
    }
  }
  return results;
}
/**
 * Process items in fixed-size batches, checking heap usage after each
 * batch and running aggressive cleanup (or a caller-supplied override)
 * when usage exceeds `memoryThreshold`.
 *
 * @param {Array} items - Items to process.
 * @param {(item) => Promise} processor - Async mapper applied to each item.
 * @param {number} [batchSize=50] - Fixed number of items per batch.
 * @param {number} [memoryThreshold=0.7] - Heap-usage fraction that triggers cleanup.
 * @param {() => Promise|void} [cleanupFn] - Optional replacement for the
 *   default aggressive cleanup.
 * @returns {Promise<Array>} Results in the same order as `items`.
 */
export async function processBatchesWithMemoryCheck(items, processor, batchSize = 50, memoryThreshold = 0.7, cleanupFn) {
  const batches = splitIntoBatches(items, batchSize);
  const results = [];
  let batchIndex = 0;
  for (const batch of batches) {
    const batchResults = await Promise.all(batch.map((item) => processor(item)));
    results.push(...batchResults);
    await performLightweightCleanup();
    const memStats = getMemoryStats();
    if (memStats && memStats.memoryUsagePercentage > memoryThreshold) {
      logger.info(`Memory usage at ${memStats?.memoryUsagePercentage?.toFixed(2)}%, running aggressive cleanup after batch ${batchIndex + 1}/${batches.length}`);
      if (cleanupFn) {
        await cleanupFn();
      } else {
        await performAggressiveCleanup();
      }
      // global.gc is only defined when Node runs with --expose-gc.
      if (global.gc) {
        global.gc();
      }
    }
    batchIndex += 1;
  }
  return results;
}