vibe-coder-mcp
Version:
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
103 lines (102 loc) • 4.17 kB
JavaScript
import logger from '../../logger.js';
/**
 * Collects per-batch and aggregate performance metrics — wall-clock timing,
 * heap usage, and cache hit/miss counts — for a batched file-processing run.
 *
 * Usage: call startBatch()/completeBatch() around each batch, then
 * finalize() or generateSummary() when the run ends.
 */
export class PerformanceMetricsCollector {
    // Aggregate metrics accumulated across all batches (see constructor for shape).
    metrics;
    // Timestamp (ms since epoch) set by startBatch(); undefined between batches.
    currentBatchStartTime;
    constructor() {
        this.metrics = {
            startTime: Date.now(),
            totalFilesProcessed: 0,
            totalBatchesProcessed: 0,
            batches: [],
            peakMemoryUsageMB: 0,
            averageMemoryUsageMB: 0,
            totalCacheHits: 0,
            totalCacheMisses: 0
        };
    }
    /** Marks the start of a new batch; must be paired with completeBatch(). */
    startBatch() {
        this.currentBatchStartTime = Date.now();
    }
    /**
     * Records metrics for the batch opened by startBatch() and logs them.
     * No-op (with a warning) if startBatch() was not called first.
     *
     * @param {number} filesProcessed - Number of files handled in this batch.
     * @param {number} [cacheHits] - Cache hits during this batch, if tracked.
     * @param {number} [cacheMisses] - Cache misses during this batch, if tracked.
     */
    completeBatch(filesProcessed, cacheHits, cacheMisses) {
        if (!this.currentBatchStartTime) {
            logger.warn('completeBatch called without startBatch');
            return;
        }
        const processingTimeMs = Date.now() - this.currentBatchStartTime;
        const memoryUsage = process.memoryUsage();
        const memoryUsageMB = memoryUsage.heapUsed / (1024 * 1024);
        const batchMetrics = {
            batchNumber: this.metrics.totalBatchesProcessed + 1,
            filesProcessed,
            processingTimeMs,
            memoryUsageMB,
            cacheHits,
            cacheMisses
        };
        this.metrics.batches.push(batchMetrics);
        this.metrics.totalBatchesProcessed++;
        this.metrics.totalFilesProcessed += filesProcessed;
        if (memoryUsageMB > this.metrics.peakMemoryUsageMB) {
            this.metrics.peakMemoryUsageMB = memoryUsageMB;
        }
        if (cacheHits !== undefined) {
            this.metrics.totalCacheHits += cacheHits;
        }
        if (cacheMisses !== undefined) {
            this.metrics.totalCacheMisses += cacheMisses;
        }
        this.currentBatchStartTime = undefined;
        // Guard against a sub-millisecond batch: without this the rate logs
        // "Infinity" (or "NaN" for 0 files / 0 ms).
        const filesPerSecond = processingTimeMs > 0
            ? filesProcessed / (processingTimeMs / 1000)
            : 0;
        logger.debug({
            batch: batchMetrics.batchNumber,
            filesProcessed,
            processingTimeMs,
            memoryUsageMB: memoryUsageMB.toFixed(2),
            filesPerSecond: filesPerSecond.toFixed(2)
        }, 'Batch performance metrics');
    }
    /**
     * Records the end time and computes average memory usage across batches.
     * Safe to call more than once; later calls refresh endTime.
     *
     * @returns {object} The internal aggregate metrics object (live reference).
     */
    finalize() {
        this.metrics.endTime = Date.now();
        if (this.metrics.batches.length > 0) {
            const totalMemory = this.metrics.batches.reduce((sum, batch) => sum + batch.memoryUsageMB, 0);
            this.metrics.averageMemoryUsageMB = totalMemory / this.metrics.batches.length;
        }
        return this.metrics;
    }
    /**
     * Finalizes the run, logs a human-readable summary (including the last
     * five batches), and returns the summary text.
     *
     * @returns {string} Multi-line summary of the run's performance.
     */
    generateSummary() {
        const metrics = this.finalize();
        const totalTimeMs = (metrics.endTime || Date.now()) - metrics.startTime;
        const totalTimeSec = totalTimeMs / 1000;
        // Guard zero-duration runs so the summary never shows Infinity/NaN.
        const filesPerSecond = totalTimeSec > 0
            ? metrics.totalFilesProcessed / totalTimeSec
            : 0;
        const totalCacheLookups = metrics.totalCacheHits + metrics.totalCacheMisses;
        const cacheHitRate = totalCacheLookups > 0
            ? (metrics.totalCacheHits / totalCacheLookups) * 100
            : 0;
        const batchRate = (batch) => batch.processingTimeMs > 0
            ? (batch.filesProcessed / (batch.processingTimeMs / 1000)).toFixed(2)
            : '0.00';
        const summary = [
            '=== Performance Metrics Summary ===',
            `Total execution time: ${totalTimeSec.toFixed(2)} seconds`,
            `Total files processed: ${metrics.totalFilesProcessed}`,
            `Total batches processed: ${metrics.totalBatchesProcessed}`,
            `Average files per second: ${filesPerSecond.toFixed(2)}`,
            `Peak memory usage: ${metrics.peakMemoryUsageMB.toFixed(2)} MB`,
            `Average memory usage: ${metrics.averageMemoryUsageMB.toFixed(2)} MB`,
            `Cache hit rate: ${cacheHitRate.toFixed(1)}% (${metrics.totalCacheHits} hits, ${metrics.totalCacheMisses} misses)`,
            '',
            'Batch Performance:',
            ...metrics.batches.slice(-5).map(batch => `  Batch ${batch.batchNumber}: ${batch.filesProcessed} files in ${batch.processingTimeMs}ms (${batchRate(batch)} files/sec)`)
        ].join('\n');
        logger.info('\n' + summary);
        return summary;
    }
    /**
     * Returns a defensive snapshot of the current metrics. The batches array
     * is copied so callers cannot mutate internal state through the snapshot
     * (the previous shallow spread shared the array).
     *
     * @returns {object} Copy of the aggregate metrics.
     */
    getCurrentMetrics() {
        return { ...this.metrics, batches: [...this.metrics.batches] };
    }
}
/** Lazily-created shared PerformanceMetricsCollector instance. */
let metricsCollector = null;
/**
 * Returns the shared metrics collector, creating it on first use.
 *
 * @returns {PerformanceMetricsCollector} The singleton collector.
 */
export function getMetricsCollector() {
    metricsCollector ??= new PerformanceMetricsCollector();
    return metricsCollector;
}
/**
 * Discards the shared collector so the next getMetricsCollector() call
 * starts from a fresh instance (e.g. between independent runs or tests).
 */
export function resetMetricsCollector() {
    metricsCollector = null;
}