vibe-coder-mcp

Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.

import logger from '../../../logger.js';
import { FileCache } from './fileCache.js';
import { MemoryCache } from './memoryCache.js';
import { getMemoryStats } from '../parser.js';

// Two-tier cache: an optional in-memory tier (MemoryCache) sits in front of a
// persistent FileCache. Reads check memory first, fall back to disk, and promote
// disk hits into memory; writes go to both tiers.
export class TieredCache {
  name;
  fileCache;
  memoryCache = null;
  options;
  stats = {
    memoryHits: 0,
    fileHits: 0,
    misses: 0,
    totalGets: 0,
    totalSets: 0
  };

  static DEFAULT_OPTIONS = {
    maxEntries: 10000,
    maxAge: 24 * 60 * 60 * 1000,       // file-tier TTL: 24 hours
    validateOnGet: true,
    pruneOnStartup: true,
    pruneInterval: 60 * 60 * 1000,     // prune the file tier hourly
    serialize: JSON.stringify,
    deserialize: JSON.parse,
    useMemoryCache: true,
    memoryMaxEntries: 1000,
    memoryMaxAge: 10 * 60 * 1000,      // memory-tier TTL: 10 minutes
    memoryThreshold: 0.8,              // skip the memory tier above 80% memory usage
    memorySizeCalculator: (value) => JSON.stringify(value).length
  };

  constructor(options) {
    this.name = options.name;
    this.options = {
      ...TieredCache.DEFAULT_OPTIONS,
      name: options.name,
      cacheDir: options.cacheDir,
      maxEntries: options.maxEntries ?? TieredCache.DEFAULT_OPTIONS.maxEntries,
      maxAge: options.maxAge ?? TieredCache.DEFAULT_OPTIONS.maxAge,
      validateOnGet: options.validateOnGet ?? TieredCache.DEFAULT_OPTIONS.validateOnGet,
      pruneOnStartup: options.pruneOnStartup ?? TieredCache.DEFAULT_OPTIONS.pruneOnStartup,
      pruneInterval: options.pruneInterval ?? TieredCache.DEFAULT_OPTIONS.pruneInterval,
      serialize: options.serialize ?? TieredCache.DEFAULT_OPTIONS.serialize,
      deserialize: options.deserialize ?? TieredCache.DEFAULT_OPTIONS.deserialize,
      useMemoryCache: options.useMemoryCache ?? TieredCache.DEFAULT_OPTIONS.useMemoryCache,
      memoryMaxEntries: options.memoryMaxEntries ?? TieredCache.DEFAULT_OPTIONS.memoryMaxEntries,
      memoryMaxAge: options.memoryMaxAge ?? TieredCache.DEFAULT_OPTIONS.memoryMaxAge,
      memoryThreshold: options.memoryThreshold ?? TieredCache.DEFAULT_OPTIONS.memoryThreshold,
      memorySizeCalculator: options.memorySizeCalculator ?? TieredCache.DEFAULT_OPTIONS.memorySizeCalculator
    };
    this.fileCache = new FileCache({
      name: `${this.name}-file`,
      cacheDir: this.options.cacheDir,
      maxEntries: this.options.maxEntries,
      maxAge: this.options.maxAge,
      validateOnGet: this.options.validateOnGet,
      pruneOnStartup: this.options.pruneOnStartup,
      pruneInterval: this.options.pruneInterval,
      serialize: this.options.serialize,
      deserialize: this.options.deserialize
    });
    if (this.options.useMemoryCache) {
      this.initializeMemoryCache();
    }
    logger.debug(`Created tiered cache "${this.name}" with memory caching ${this.options.useMemoryCache ? 'enabled' : 'disabled'}`);
  }

  initializeMemoryCache() {
    if (this.shouldUseMemoryCache()) {
      this.memoryCache = new MemoryCache({
        name: `${this.name}-memory`,
        maxEntries: this.options.memoryMaxEntries,
        maxAge: this.options.memoryMaxAge,
        sizeCalculator: this.options.memorySizeCalculator
      });
      logger.debug(`Initialized memory cache for "${this.name}"`);
    } else {
      this.memoryCache = null;
      logger.debug(`Memory cache disabled for "${this.name}" due to high memory usage`);
    }
  }

  // Only keep the memory tier while process memory usage is below the configured threshold.
  shouldUseMemoryCache() {
    if (!this.options.useMemoryCache) {
      return false;
    }
    const stats = getMemoryStats();
    const memoryUsage = stats.memoryUsagePercentage;
    return memoryUsage < this.options.memoryThreshold;
  }

  async init() {
    await this.fileCache.init();
    logger.debug(`Initialized tiered cache "${this.name}"`);
  }

  // Read path: memory tier first, then file tier; file hits are promoted into memory.
  async get(key) {
    this.stats.totalGets++;
    if (this.memoryCache) {
      const memoryValue = this.memoryCache.get(key);
      if (memoryValue !== undefined) {
        this.stats.memoryHits++;
        return memoryValue;
      }
    }
    try {
      const fileValue = await this.fileCache.get(key);
      if (fileValue !== undefined) {
        this.stats.fileHits++;
        if (this.memoryCache) {
          this.memoryCache.set(key, fileValue);
        }
        return fileValue;
      }
    } catch (error) {
      logger.warn({ err: error, key }, `Error getting value from file cache for ${key}`);
    }
    this.stats.misses++;
    return undefined;
  }

  // Write path: write-through to both tiers.
  async set(key, value, ttl) {
    this.stats.totalSets++;
    if (this.memoryCache) {
      this.memoryCache.set(key, value, ttl);
    }
    try {
      await this.fileCache.set(key, value, ttl);
    } catch (error) {
      logger.warn({ err: error, key }, `Error setting value in file cache for ${key}`);
    }
  }

  async delete(key) {
    if (this.memoryCache) {
      this.memoryCache.delete(key);
    }
    try {
      await this.fileCache.delete(key);
    } catch (error) {
      logger.warn({ err: error, key }, `Error deleting value from file cache for ${key}`);
    }
  }

  async has(key) {
    if (this.memoryCache && this.memoryCache.has(key)) {
      return true;
    }
    try {
      return await this.fileCache.has(key);
    } catch (error) {
      logger.warn({ err: error, key }, `Error checking if key exists in file cache for ${key}`);
      return false;
    }
  }

  async clear() {
    if (this.memoryCache) {
      this.memoryCache.clear();
    }
    try {
      await this.fileCache.clear();
    } catch (error) {
      logger.warn({ err: error }, `Error clearing file cache for ${this.name}`);
    }
    this.stats = { memoryHits: 0, fileHits: 0, misses: 0, totalGets: 0, totalSets: 0 };
    logger.debug(`Cleared tiered cache "${this.name}"`);
  }

  async prune() {
    let prunedCount = 0;
    try {
      prunedCount += await this.fileCache.prune();
    } catch (error) {
      logger.warn({ err: error }, `Error pruning file cache for ${this.name}`);
    }
    logger.debug(`Pruned ${prunedCount} entries from tiered cache "${this.name}"`);
    return prunedCount;
  }

  async getStats() {
    const fileStats = await this.fileCache.getStats();
    return {
      ...fileStats,
      name: this.name,
      hits: this.stats.memoryHits + this.stats.fileHits,
      misses: this.stats.misses,
      hitRatio: this.stats.totalGets > 0
        ? (this.stats.memoryHits + this.stats.fileHits) / this.stats.totalGets
        : 0,
      totalSize: fileStats.totalSize,
      memoryStats: this.memoryCache
        ? {
            hits: this.stats.memoryHits,
            size: this.memoryCache.getSize(),
            totalSize: this.memoryCache.getTotalSize()
          }
        : undefined
    };
  }

  close() {
    this.fileCache.close();
    if (this.memoryCache) {
      this.memoryCache.clear();
      this.memoryCache = null;
    }
    logger.debug(`Closed tiered cache "${this.name}"`);
  }
}
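
For orientation, here is a minimal usage sketch of the class above. The import path, cache name, cacheDir value, and keys are illustrative only; the real location of this module inside vibe-coder-mcp and the exact FileCache requirements may differ, and cacheDir is assumed to point at a writable directory because it is passed straight through to the file tier.

// Hypothetical import path; adjust to wherever this module lives in the package.
import { TieredCache } from './tieredCache.js';

const cache = new TieredCache({
  name: 'code-map',                // names the "-file" and "-memory" tiers
  cacheDir: '/tmp/vibe-cache',     // assumed writable directory for the file tier
  memoryMaxEntries: 500,           // optional overrides; unset fields use DEFAULT_OPTIONS
  memoryMaxAge: 5 * 60 * 1000
});

await cache.init();                                    // prepares the file tier
await cache.set('file:src/index.ts', { symbols: 3 });  // write-through to memory and disk
const hit = await cache.get('file:src/index.ts');      // served from memory when available
console.log(hit, await cache.getStats());              // hits, misses, hitRatio, memoryStats
cache.close();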