vibe-coder-mcp
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
import { createHash } from 'crypto';
import { promises as fs } from 'fs';
import path from 'path';
import logger from '../../../logger.js';
/**
 * File-backed cache for curated context packages. Entries are keyed by a
 * SHA-256 hash of project path, prompt, and task type; each entry expires
 * after a TTL, and the cache as a whole is bounded by a maximum total size.
 */
export class PackageCache {
    // Cache lives under the configured output directory (or ./VibeCoderOutput).
    static CACHE_DIR = path.join(process.env.VIBE_CODER_OUTPUT_DIR
        ? path.resolve(process.env.VIBE_CODER_OUTPUT_DIR)
        : path.join(process.cwd(), 'VibeCoderOutput'), 'context-curator', 'cache');
    // Bump to invalidate entries written in an incompatible format.
    static CACHE_VERSION = '1.0.0';
    static DEFAULT_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
    static MAX_CACHE_SIZE_MB = 500;
    /** Ensures the cache directory exists. Safe to call repeatedly. */
    static async initialize() {
        try {
            await fs.mkdir(this.CACHE_DIR, { recursive: true });
            logger.info({ cacheDir: this.CACHE_DIR }, 'Package cache initialized');
        }
        catch (error) {
            logger.error({ error: error instanceof Error ? error.message : 'Unknown error' }, 'Failed to initialize package cache');
            throw error;
        }
    }
    /** Derives a deterministic cache key from the inputs that define a package. */
    static generateCacheKey(projectPath, userPrompt, taskType) {
        const content = `${projectPath}:${userPrompt}:${taskType}`;
        return createHash('sha256').update(content).digest('hex');
    }
    /**
     * Returns the cached package and its metadata for `cacheKey`, or null on
     * a miss, an expired entry, a version mismatch, or any read error.
     */
    static async getCachedPackage(cacheKey) {
        try {
            const cacheFilePath = path.join(this.CACHE_DIR, `${cacheKey}.json`);
            const metadataFilePath = path.join(this.CACHE_DIR, `${cacheKey}.meta.json`);
            // Both the payload and its metadata sidecar must be present.
            const [cacheExists, metadataExists] = await Promise.all([
                fs.access(cacheFilePath).then(() => true).catch(() => false),
                fs.access(metadataFilePath).then(() => true).catch(() => false)
            ]);
            if (!cacheExists || !metadataExists) {
                return null;
            }
            const metadataContent = await fs.readFile(metadataFilePath, 'utf-8');
            const metadata = JSON.parse(metadataContent);
            // Evict entries that have outlived their TTL.
            const now = Date.now();
            const cachedAt = new Date(metadata.cachedAt).getTime();
            if (now - cachedAt > metadata.ttlMs) {
                logger.info({ cacheKey }, 'Cache entry expired, removing');
                await this.removeCacheEntry(cacheKey);
                return null;
            }
            // Evict entries written by a different cache version.
            if (metadata.version !== this.CACHE_VERSION) {
                logger.info({ cacheKey, version: metadata.version }, 'Cache version mismatch, removing');
                await this.removeCacheEntry(cacheKey);
                return null;
            }
            const packageContent = await fs.readFile(cacheFilePath, 'utf-8');
            const contextPackage = JSON.parse(packageContent);
            // Record the hit in the sidecar before returning.
            metadata.hitCount++;
            metadata.lastAccessed = new Date();
            await fs.writeFile(metadataFilePath, JSON.stringify(metadata, null, 2), 'utf-8');
            logger.info({
                cacheKey,
                hitCount: metadata.hitCount,
                age: now - cachedAt
            }, 'Cache hit - returning cached package');
            return {
                package: contextPackage,
                metadata
            };
        }
        catch (error) {
            logger.warn({
                cacheKey,
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to retrieve cached package');
            return null;
        }
    }
    /**
     * Writes `contextPackage` and a metadata sidecar under `cacheKey`,
     * evicting old entries first if the new payload would exceed the size cap.
     */
    static async cachePackage(cacheKey, contextPackage, ttlMs = this.DEFAULT_TTL_MS) {
        try {
            await this.initialize();
            const cacheFilePath = path.join(this.CACHE_DIR, `${cacheKey}.json`);
            const metadataFilePath = path.join(this.CACHE_DIR, `${cacheKey}.meta.json`);
            const packageContent = JSON.stringify(contextPackage, null, 2);
            const sizeBytes = Buffer.byteLength(packageContent, 'utf-8');
            const metadata = {
                cacheKey,
                cachedAt: new Date(),
                ttlMs,
                sizeBytes,
                hitCount: 0,
                lastAccessed: new Date(),
                version: this.CACHE_VERSION
            };
            // Make room before writing so the size cap is never exceeded.
            await this.enforceMaxCacheSize(sizeBytes);
            await Promise.all([
                fs.writeFile(cacheFilePath, packageContent, 'utf-8'),
                fs.writeFile(metadataFilePath, JSON.stringify(metadata, null, 2), 'utf-8')
            ]);
            logger.info({
                cacheKey,
                sizeBytes,
                ttlMs
            }, 'Package cached successfully');
        }
        catch (error) {
            logger.error({
                cacheKey,
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to cache package');
            throw error;
        }
    }
    /** Deletes both files for an entry; already-missing files are ignored. */
    static async removeCacheEntry(cacheKey) {
        try {
            const cacheFilePath = path.join(this.CACHE_DIR, `${cacheKey}.json`);
            const metadataFilePath = path.join(this.CACHE_DIR, `${cacheKey}.meta.json`);
            await Promise.all([
                fs.unlink(cacheFilePath).catch(() => { }),
                fs.unlink(metadataFilePath).catch(() => { })
            ]);
            logger.info({ cacheKey }, 'Cache entry removed');
        }
        catch (error) {
            logger.warn({
                cacheKey,
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to remove cache entry');
        }
    }
    /** Removes every file in the cache directory. */
    static async clearCache() {
        try {
            const files = await fs.readdir(this.CACHE_DIR);
            const deletePromises = files.map(file => fs.unlink(path.join(this.CACHE_DIR, file)).catch(() => { }));
            await Promise.all(deletePromises);
            logger.info({ filesRemoved: files.length }, 'Cache cleared successfully');
        }
        catch (error) {
            logger.error({
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to clear cache');
            throw error;
        }
    }
    /** Aggregates entry count, size, hit totals, and age range from the metadata sidecars. */
    static async getCacheStats() {
        try {
            const files = await fs.readdir(this.CACHE_DIR);
            const metadataFiles = files.filter(file => file.endsWith('.meta.json'));
            let totalSizeBytes = 0;
            let totalHits = 0;
            let oldestEntry = null;
            let newestEntry = null;
            for (const metadataFile of metadataFiles) {
                try {
                    const metadataPath = path.join(this.CACHE_DIR, metadataFile);
                    const metadataContent = await fs.readFile(metadataPath, 'utf-8');
                    const metadata = JSON.parse(metadataContent);
                    totalSizeBytes += metadata.sizeBytes;
                    totalHits += metadata.hitCount;
                    const cachedAt = new Date(metadata.cachedAt);
                    if (!oldestEntry || cachedAt < oldestEntry) {
                        oldestEntry = cachedAt;
                    }
                    if (!newestEntry || cachedAt > newestEntry) {
                        newestEntry = cachedAt;
                    }
                }
                catch {
                    // Skip unreadable sidecars rather than failing the whole scan.
                    logger.warn({ metadataFile }, 'Failed to read metadata file');
                }
            }
            return {
                totalEntries: metadataFiles.length,
                totalSizeBytes,
                totalSizeMB: totalSizeBytes / (1024 * 1024),
                oldestEntry,
                newestEntry,
                totalHits
            };
        }
        catch (error) {
            logger.error({
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to get cache stats');
            throw error;
        }
    }
    /**
     * Evicts entries oldest-first until the cache plus the incoming entry
     * fits under MAX_CACHE_SIZE_MB. Failures are logged but not rethrown,
     * so a cleanup problem cannot block a write.
     */
    static async enforceMaxCacheSize(newEntrySizeBytes) {
        try {
            const stats = await this.getCacheStats();
            const maxSizeBytes = this.MAX_CACHE_SIZE_MB * 1024 * 1024;
            if (stats.totalSizeBytes + newEntrySizeBytes <= maxSizeBytes) {
                return;
            }
            logger.info({
                currentSizeMB: stats.totalSizeMB,
                maxSizeMB: this.MAX_CACHE_SIZE_MB
            }, 'Cache size limit exceeded, cleaning up old entries');
            const files = await fs.readdir(this.CACHE_DIR);
            const metadataFiles = files.filter(file => file.endsWith('.meta.json'));
            const entriesWithAge = [];
            for (const metadataFile of metadataFiles) {
                try {
                    const metadataPath = path.join(this.CACHE_DIR, metadataFile);
                    const metadataContent = await fs.readFile(metadataPath, 'utf-8');
                    const metadata = JSON.parse(metadataContent);
                    entriesWithAge.push({
                        cacheKey: metadata.cacheKey,
                        cachedAt: new Date(metadata.cachedAt),
                        sizeBytes: metadata.sizeBytes
                    });
                }
                catch {
                    logger.warn({ metadataFile }, 'Failed to read metadata for cleanup');
                }
            }
            // Oldest entries first, so eviction is FIFO by insertion time.
            entriesWithAge.sort((a, b) => a.cachedAt.getTime() - b.cachedAt.getTime());
            let currentSize = stats.totalSizeBytes;
            let removedCount = 0;
            for (const entry of entriesWithAge) {
                if (currentSize + newEntrySizeBytes <= maxSizeBytes) {
                    break;
                }
                await this.removeCacheEntry(entry.cacheKey);
                currentSize -= entry.sizeBytes;
                removedCount++;
            }
            logger.info({
                removedCount,
                newSizeMB: currentSize / (1024 * 1024)
            }, 'Cache cleanup completed');
        }
        catch (error) {
            logger.warn({
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to enforce cache size limits');
        }
    }
}
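
A minimal usage sketch of the class above, as it might look from a caller's side. The import path, project path, prompt, task type, and payload below are hypothetical placeholders; only the PackageCache API itself comes from this file.

// usage-sketch.mjs (hypothetical file; Node ESM, so top-level await is available)
import { PackageCache } from './package-cache.js'; // assumed path to this module

const key = PackageCache.generateCacheKey(
    '/work/my-app',                  // hypothetical project path
    'summarize the auth module',     // hypothetical user prompt
    'refactoring'                    // hypothetical task type
);

// Try the cache first; on a miss, build the package and store it.
let cached = await PackageCache.getCachedPackage(key);
if (!cached) {
    const contextPackage = { files: [], summary: 'built fresh' }; // placeholder payload
    await PackageCache.cachePackage(key, contextPackage);         // default 24 h TTL
    cached = { package: contextPackage };
}
console.log(cached.package);

Note that cachePackage calls initialize internally, so no explicit setup is required before the first write, and getCachedPackage degrades to a miss (null) if the cache directory does not exist yet.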