/*
 * @simonecoelhosfo/optimizely-mcp-server
 * Version: (unspecified)
 * Optimizely MCP Server for AI assistants with integrated CLI tools
 * 260 lines / 6.96 kB — JavaScript (compiled output)
 */
/**
* PatternCache - High-performance caching for parsed queries
*
* IMPLEMENTATION STATUS:
* COMPLETE: LRU cache with TTL and performance metrics
*
* Last Updated: July 3, 2025
*/
import { createHash } from 'crypto';
export class PatternCache {
    // Map<hashKey, entry>; Map insertion order doubles as the LRU order
    // (oldest entry first, most-recently-used last).
    cache;
    // Maximum number of entries before eviction kicks in.
    maxSize;
    // Time-to-live for each entry, in milliseconds.
    ttl;
    // Public statistics snapshot, kept current by updateStats().
    stats;
    parseTimeSum = 0;   // accumulated parseTime across set() calls
    parseCount = 0;     // number of set() calls contributing to parseTimeSum
    cachedTimeSum = 0;  // accumulated lookup duration across cache hits
    cachedCount = 0;    // number of hits contributing to cachedTimeSum
    /**
     * @param {{maxSize?: number, ttl?: number}} [config] - optional limits;
     *   maxSize defaults to 1000 entries, ttl to 3600000 ms (1 hour).
     */
    constructor(config = {}) {
        this.maxSize = config.maxSize || 1000;
        this.ttl = config.ttl || 3600000; // 1 hour default
        this.cache = new Map();
        this.stats = {
            hits: 0,
            misses: 0,
            evictions: 0,
            avgParseTime: 0,
            avgCachedTime: 0,
            cacheSize: 0,
            hitRate: 0
        };
    }
    /**
     * Get a cached parse result.
     *
     * @param {string} query - raw query text; normalized before lookup.
     * @returns {*} the cached result, or null on miss / expiry.
     */
    get(query) {
        const lookupStart = Date.now();
        const key = this.generateKey(query);
        const entry = this.cache.get(key);
        if (!entry) {
            this.stats.misses++;
            this.updateStats();
            return null;
        }
        // Expired entries are removed eagerly and counted as misses.
        if (lookupStart - entry.timestamp > this.ttl) {
            this.cache.delete(key);
            this.stats.misses++;
            this.updateStats();
            return null;
        }
        // Update hit count and move to end of the Map (LRU recency).
        entry.hits++;
        this.cache.delete(key);
        this.cache.set(key, entry);
        this.stats.hits++;
        // BUGFIX: previously accumulated `Date.now() - entry.timestamp`
        // (the entry's AGE), so avgCachedTime reported average entry age
        // rather than average lookup duration. Measure the lookup itself.
        this.cachedTimeSum += Date.now() - lookupStart;
        this.cachedCount++;
        this.updateStats();
        return entry.result;
    }
    /**
     * Store a parse result in the cache.
     *
     * @param {string} query - raw query text (stored verbatim; keyed on its
     *   normalized form, so "A  B" and "a b" share one slot).
     * @param {*} result - parsed result to cache.
     * @param {number} parseTime - time (ms) the parse took; feeds avgParseTime.
     */
    set(query, result, parseTime) {
        const key = this.generateKey(query);
        if (this.cache.has(key)) {
            // BUGFIX: an overwrite does not grow the map, so it must not
            // trigger eviction. Delete first so re-insertion refreshes the
            // entry's LRU position.
            this.cache.delete(key);
        }
        else if (this.cache.size >= this.maxSize) {
            // Only evict when inserting a genuinely new key at capacity.
            this.evictOldest();
        }
        const entry = {
            key,
            query,
            result,
            timestamp: Date.now(),
            hits: 0,
            parseTime
        };
        this.cache.set(key, entry);
        this.parseTimeSum += parseTime;
        this.parseCount++;
        this.updateStats();
    }
    /**
     * Remove all entries and reset statistics.
     */
    clear() {
        this.cache.clear();
        this.resetStats();
    }
    /**
     * Get a defensive copy of the current cache statistics.
     * @returns {{hits: number, misses: number, evictions: number,
     *   avgParseTime: number, avgCachedTime: number, cacheSize: number,
     *   hitRate: number}}
     */
    getStats() {
        return { ...this.stats };
    }
    /**
     * Get the most frequently hit queries currently in the cache.
     *
     * @param {number} [limit=10] - maximum number of queries to return.
     * @returns {Array<{query: string, hits: number, lastAccessed: Date}>}
     *   sorted by hit count, descending. Note: `lastAccessed` is derived
     *   from the entry's insertion timestamp, which get() does not refresh.
     */
    getTopQueries(limit = 10) {
        const entries = Array.from(this.cache.values())
            .sort((a, b) => b.hits - a.hits)
            .slice(0, limit);
        return entries.map(entry => ({
            query: entry.query,
            hits: entry.hits,
            lastAccessed: new Date(entry.timestamp)
        }));
    }
    /**
     * Pre-populate the cache with known query/result pairs.
     * Entries are stored with parseTime 0 so they do not skew avgParseTime
     * totals meaningfully.
     *
     * @param {Array<{query: string, result: *}>} commonQueries
     */
    warmCache(commonQueries) {
        for (const { query, result } of commonQueries) {
            this.set(query, result, 0);
        }
    }
    /**
     * Generate a cache key from a query: lowercase, trim, collapse runs of
     * whitespace, then MD5 the normalized text. MD5 is fine here — the hash
     * is a cache key, not a security boundary.
     *
     * @param {string} query
     * @returns {string} hex digest key
     */
    generateKey(query) {
        const normalized = query.toLowerCase().trim().replace(/\s+/g, ' ');
        return createHash('md5').update(normalized).digest('hex');
    }
    /**
     * Evict the least-recently-used entry (the Map's first key, since get()
     * re-inserts entries on hit).
     */
    evictOldest() {
        const firstKey = this.cache.keys().next().value;
        if (firstKey) {
            this.cache.delete(firstKey);
            this.stats.evictions++;
        }
    }
    /**
     * Recompute the derived statistics (size, hit rate, averages).
     */
    updateStats() {
        this.stats.cacheSize = this.cache.size;
        this.stats.hitRate = this.stats.hits + this.stats.misses > 0
            ? this.stats.hits / (this.stats.hits + this.stats.misses)
            : 0;
        this.stats.avgParseTime = this.parseCount > 0
            ? this.parseTimeSum / this.parseCount
            : 0;
        this.stats.avgCachedTime = this.cachedCount > 0
            ? this.cachedTimeSum / this.cachedCount
            : 0;
    }
    /**
     * Reset statistics and internal accumulators to zero.
     */
    resetStats() {
        this.stats = {
            hits: 0,
            misses: 0,
            evictions: 0,
            avgParseTime: 0,
            avgCachedTime: 0,
            cacheSize: 0,
            hitRate: 0
        };
        this.parseTimeSum = 0;
        this.parseCount = 0;
        this.cachedTimeSum = 0;
        this.cachedCount = 0;
    }
    /**
     * Export cache contents for persistence.
     * @returns {Array<{query: string, result: *, hits: number}>}
     */
    export() {
        return Array.from(this.cache.values()).map(entry => ({
            query: entry.query,
            result: entry.result,
            hits: entry.hits
        }));
    }
    /**
     * Replace cache contents from persisted data. Timestamps are reset to
     * now (imported entries get a fresh TTL) and parseTime to 0.
     *
     * @param {Array<{query: string, result: *, hits?: number}>} data
     */
    import(data) {
        this.clear();
        for (const item of data) {
            const key = this.generateKey(item.query);
            const entry = {
                key,
                query: item.query,
                result: item.result,
                timestamp: Date.now(),
                hits: item.hits || 0,
                parseTime: 0
            };
            this.cache.set(key, entry);
        }
        this.updateStats();
    }
}
/**
* Singleton instance for global cache
*/
let globalCache = null;
/**
 * Return the process-wide PatternCache singleton, lazily constructing it on
 * first use (5000 entries, 1-hour TTL).
 *
 * @returns {PatternCache} the shared cache instance
 */
export function getGlobalCache() {
    globalCache ??= new PatternCache({
        maxSize: 5000,
        ttl: 3600000 // 1 hour
    });
    return globalCache;
}
/**
* Pre-warm cache with common patterns
*/
/**
 * Seed the global cache with pre-parsed results for a couple of very common
 * queries, so first-time users skip the parse step entirely.
 */
export function warmCacheWithCommonPatterns() {
    // Build a fully-populated parse result; only the fields that vary per
    // pattern are parameterized, everything else is the empty/default shape.
    const makeResult = (query, primaryEntity, action, fields, platform) => ({
        primaryEntity,
        action,
        relatedEntities: [],
        fields,
        filters: [],
        joins: [],
        groupBy: [],
        orderBy: [],
        aggregations: [],
        confidence: 1.0,
        platform,
        originalQuery: query,
        normalizedQuery: query,
        transformations: [],
        matchedPatterns: []
    });
    getGlobalCache().warmCache([
        {
            query: "show all flags",
            result: makeResult("show all flags", 'flags', 'show', ['name', 'key', 'status', 'description'], 'feature')
        },
        {
            query: "list experiments",
            result: makeResult("list experiments", 'experiments', 'list', ['name', 'key', 'status', 'created_time'], 'web')
        }
    ]);
}
//# sourceMappingURL=PatternCache.js.map