@simonecoelhosfo/optimizely-mcp-server
Optimizely MCP Server for AI assistants with integrated CLI tools
/**
* QueryCache - Intelligent caching layer for analytics queries
*
* Features:
* - LRU cache with configurable size
* - Query normalization for better hit rates
* - TTL-based expiration
* - Cache key generation from query + params
* - Memory usage tracking
*/
import crypto from 'crypto';
import { getLogger } from '../logging/Logger.js';
export class QueryCache {
cache = new Map();
accessOrder = [];
stats = {
hits: 0,
misses: 0,
evictions: 0,
hitRate: 0,
memoryUsage: 0,
entries: 0
};
// Configuration
maxSize;
defaultTTL;
maxMemory;
// Handle for the periodic cleanup timer
cleanupInterval = null;
constructor(config) {
this.maxSize = config?.maxSize || 1000;
this.defaultTTL = config?.defaultTTL || 300000; // 5 minutes default
this.maxMemory = config?.maxMemory || 100 * 1024 * 1024; // 100MB default
// Start cleanup interval
this.startCleanupInterval();
}
/**
* Get cached result for a query
*/
get(query, params) {
const key = this.generateCacheKey(query, params);
const entry = this.cache.get(key);
if (!entry) {
this.stats.misses++;
this.updateHitRate();
return null;
}
// Check if expired
if (this.isExpired(entry)) {
this.evict(key);
this.stats.misses++;
this.updateHitRate();
return null;
}
// Update access order (LRU)
this.updateAccessOrder(key);
// Update stats
entry.hits++;
this.stats.hits++;
this.updateHitRate();
getLogger().debug({
cacheKey: key,
hits: entry.hits,
age: Date.now() - entry.timestamp
}, 'QueryCache: Cache hit');
return entry.result;
}
/**
* Store query result in cache
*/
set(query, result, params, ttl) {
const key = this.generateCacheKey(query, params);
const size = this.estimateSize(result);
// If this key is already cached, evict the old entry first so memory
// accounting and LRU order stay accurate when the value is overwritten
if (this.cache.has(key)) {
this.evict(key);
}
// Check memory limit
if (this.stats.memoryUsage + size > this.maxMemory) {
this.evictUntilMemoryAvailable(size);
}
// Check size limit
if (this.cache.size >= this.maxSize) {
this.evictLRU();
}
const entry = {
key,
query,
result,
timestamp: Date.now(),
hits: 0,
size,
ttl: ttl || this.defaultTTL
};
this.cache.set(key, entry);
this.updateAccessOrder(key);
this.stats.memoryUsage += size;
this.stats.entries = this.cache.size;
getLogger().debug({
cacheKey: key,
size,
ttl: entry.ttl,
totalEntries: this.cache.size,
memoryUsage: this.stats.memoryUsage
}, 'QueryCache: Cached query result');
}
/**
* Clear entire cache
*/
clear() {
this.cache.clear();
this.accessOrder = [];
this.stats.memoryUsage = 0;
this.stats.entries = 0;
getLogger().info('QueryCache: Cache cleared');
}
/**
* Get cache statistics
*/
getStats() {
return { ...this.stats };
}
/**
* Generate normalized cache key from query
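* Explicit params take precedence over query.params.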
*/
generateCacheKey(query, params) {
// Normalize the query for better cache hits
const normalized = {
sql: this.normalizeSQL(query.sql),
params: params || query.params,
jsonata: query.jsonataExpression,
pipeline: query.processingPipeline
};
const json = JSON.stringify(normalized, this.sortReplacer);
return crypto.createHash('md5').update(json).digest('hex');
}
/**
* Normalize SQL for better cache hits
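* e.g. "select *  from events ;" becomes "SELECT * FROM events"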
*/
normalizeSQL(sql) {
return sql
// Remove extra whitespace
.replace(/\s+/g, ' ')
// Normalize case for keywords
.replace(/\b(select|from|where|join|on|and|or|group by|order by|limit)\b/gi, match => match.toUpperCase())
// Remove trailing semicolon
.replace(/;\s*$/, '')
.trim();
}
/**
* JSON replacer that sorts object keys for consistent hashing
*/
sortReplacer(key, value) {
if (value && typeof value === 'object' && !Array.isArray(value)) {
return Object.keys(value).sort().reduce((sorted, k) => {
sorted[k] = value[k];
return sorted;
}, {});
}
return value;
}
/**
* Estimate memory size of result
*/
estimateSize(result) {
// Rough estimate: serialize to JSON and assume ~2 bytes per character
// (UTF-16 string storage); adequate for relative memory accounting
const json = JSON.stringify(result);
return json.length * 2;
}
/**
* Check if cache entry is expired
*/
isExpired(entry) {
return Date.now() - entry.timestamp > entry.ttl;
}
/**
* Update access order for LRU
*/
updateAccessOrder(key) {
const index = this.accessOrder.indexOf(key);
if (index > -1) {
this.accessOrder.splice(index, 1);
}
this.accessOrder.push(key);
}
/**
* Evict least recently used entry
*/
evictLRU() {
if (this.accessOrder.length === 0) {
return;
}
const key = this.accessOrder.shift();
this.evict(key);
}
/**
* Evict entries until enough memory is available
*/
evictUntilMemoryAvailable(requiredSize) {
while (this.stats.memoryUsage + requiredSize > this.maxMemory &&
this.accessOrder.length > 0) {
this.evictLRU();
}
}
/**
* Evict a specific entry
*/
evict(key) {
const entry = this.cache.get(key);
if (!entry) {
return;
}
this.cache.delete(key);
this.stats.memoryUsage -= entry.size;
this.stats.evictions++;
this.stats.entries = this.cache.size;
const index = this.accessOrder.indexOf(key);
if (index > -1) {
this.accessOrder.splice(index, 1);
}
getLogger().debug({
cacheKey: key,
size: entry.size,
hits: entry.hits,
age: Date.now() - entry.timestamp
}, 'QueryCache: Evicted entry');
}
/**
* Update hit rate statistic
*/
updateHitRate() {
const total = this.stats.hits + this.stats.misses;
this.stats.hitRate = total > 0 ? this.stats.hits / total : 0;
}
/**
* Start periodic cleanup of expired entries
*/
startCleanupInterval() {
this.cleanupInterval = setInterval(() => {
this.cleanupExpired();
}, 60000); // Run every minute
// Unref the timer so it doesn't keep the Node.js process alive
this.cleanupInterval.unref?.();
}
/**
* Remove all expired entries
*/
cleanupExpired() {
let cleaned = 0;
const now = Date.now();
for (const [key, entry] of this.cache.entries()) {
if (now - entry.timestamp > entry.ttl) {
this.evict(key);
cleaned++;
}
}
if (cleaned > 0) {
getLogger().debug({
cleaned,
remaining: this.cache.size
}, 'QueryCache: Cleaned expired entries');
}
}
}
//# sourceMappingURL=QueryCache.js.map
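
A minimal usage sketch (separate from the module above), assuming the file is importable as ./QueryCache.js and the class is used as a cache-aside layer around an analytics query runner. The query shape ({ sql }) mirrors the fields read by generateCacheKey; executeQuery is a caller-supplied stand-in for the real query executor and is not part of this package.

import { QueryCache } from './QueryCache.js';

const cache = new QueryCache({ maxSize: 500, defaultTTL: 60_000 });

// Cache-aside helper: serve from cache when possible, otherwise execute
// the query and store the result for subsequent calls.
async function runCached(query, params, executeQuery) {
  const cached = cache.get(query, params);
  if (cached !== null) {
    return cached;
  }
  const result = await executeQuery(query, params);
  cache.set(query, result, params);
  return result;
}

const rows = await runCached(
  { sql: 'select id from experiments where status = ?' },
  ['running'],
  () => Promise.resolve([{ id: 1 }]) // illustrative executor
);
console.log(rows, cache.getStats());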