@simonecoelhosfo/optimizely-mcp-server
Optimizely MCP Server for AI assistants with integrated CLI tools
/**
* Cache Orchestrator for Intelligent Query Engine
*
* Coordinates all caching components and provides a unified interface
* for the IntelligentQueryEngine to interact with the caching layer.
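*
* A minimal usage sketch (the surrounding wiring is an assumption, not part of
* this module):
*
* @example
* const orchestrator = new CacheOrchestrator({ enabled: true, statsInterval: 60000 });
* orchestrator.on('stats', (s) => console.log('cache stats', s));
* // ...hand the orchestrator to the IntelligentQueryEngine...
* orchestrator.shutdown(); // on service teardown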
*/
import { QueryNormalizer } from './QueryNormalizer.js';
import { CacheKeyGenerator } from './CacheKeyGenerator.js';
import { InMemoryCache } from './InMemoryCache.js';
import { TTLStrategy } from './TTLStrategy.js';
import { getLogger } from '../../../logging/Logger.js';
import EventEmitter from 'events';
export class CacheOrchestrator extends EventEmitter {
logger = getLogger();
enabled;
normalizer;
keyGenerator;
memoryCache;
ttlStrategy;
statsTimer;
statsInterval;
constructor(config = { enabled: true }) {
super();
this.enabled = config.enabled;
this.normalizer = config.normalizer || new QueryNormalizer();
this.keyGenerator = config.keyGenerator || new CacheKeyGenerator();
this.memoryCache = config.memoryCache || new InMemoryCache();
this.ttlStrategy = config.ttlStrategy || new TTLStrategy();
this.statsInterval = config.statsInterval || 300000; // 5 minutes
if (this.enabled) {
this.startStatsReporting();
if (config.warmupOnStart) {
this.warmupCache().catch(err => this.logger.error({ error: err }, 'Cache warmup failed'));
}
}
this.logger.info({
enabled: this.enabled,
warmupOnStart: config.warmupOnStart
}, 'CacheOrchestrator initialized');
}
/**
* Execute a query with caching.
*
* Normalizes the query, derives a cache key, and serves cached results on a
* hit (scheduling a background refresh when the TTL strategy flags the entry
* as stale). On a miss, or when a refresh is requested, the executor runs with
* the original query and the result is cached with a calculated TTL. Cache
* failures fall back to direct execution; disabling or bypassing the cache
* runs the executor directly.
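*
* Illustrative call (the query shape, executor, and context values below are
* assumptions, not defined by this module):
*
* @example
* const result = await orchestrator.executeWithCache(
*   { operation: 'list', entity: 'experiments' },
*   async (q) => fetchFromOptimizely(q), // hypothetical executor
*   { syncVersion: 42 },
*   { refreshCache: false }
* );
* if (result.cached) console.log('cache hit, ttl:', result.ttl);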
*/
async executeWithCache(query, queryExecutor, context, options) {
const startTime = Date.now();
// If caching is disabled or bypassed, execute directly
if (!this.enabled || options?.bypassCache) {
try {
const data = await queryExecutor(query);
return {
data,
cached: false,
executionTime: Date.now() - startTime,
};
}
catch (error) {
this.emitEvent({ type: 'error', query, error });
throw error;
}
}
try {
// Normalize the query
const normalizedQuery = this.normalizer.normalize(query);
// Generate cache key
const cacheKey = this.keyGenerator.generateKey(normalizedQuery, context);
// Check cache first (unless refresh requested)
if (!options?.refreshCache) {
const cached = await this.memoryCache.get(cacheKey);
if (cached) {
this.emitEvent({
type: 'hit',
key: cacheKey,
query: normalizedQuery,
metadata: cached.metadata
});
// Check if soft refresh needed
const ttl = cached.metadata.ttl;
if (this.ttlStrategy.shouldRefresh(cached.metadata.cachedAt, ttl)) {
// Trigger background refresh (don't wait)
this.refreshInBackground(query, normalizedQuery, cacheKey, queryExecutor, context, options?.ttlFactors);
}
return {
data: cached.data,
cached: true,
executionTime: Date.now() - startTime,
cacheKey,
ttl,
};
}
}
// Cache miss or refresh requested - execute query
this.emitEvent({ type: 'miss', key: cacheKey, query: normalizedQuery });
const queryStartTime = Date.now();
// Pass original query to executor, not normalized
const data = await queryExecutor(query);
const queryExecutionTime = Date.now() - queryStartTime;
// Calculate TTL
const ttl = this.ttlStrategy.calculateTTL(normalizedQuery, context, options?.ttlFactors);
// Store in cache
await this.memoryCache.set(cacheKey, data, ttl, {
executionTime: queryExecutionTime,
syncVersion: context?.syncVersion,
});
this.emitEvent({
type: 'set',
key: cacheKey,
query: normalizedQuery,
metadata: { ttl, executionTime: queryExecutionTime }
});
return {
data,
cached: false,
executionTime: Date.now() - startTime,
cacheKey,
ttl,
};
}
catch (error) {
this.logger.error({ error, query }, 'Cache execution failed');
this.emitEvent({ type: 'error', query, error });
// Fallback to direct execution with original query
const data = await queryExecutor(query);
return {
data,
cached: false,
executionTime: Date.now() - startTime,
};
}
}
/**
* Invalidate cache entries matching the given key patterns; returns the number of entries removed
*/
async invalidate(patterns) {
if (!this.enabled)
return 0;
const invalidated = await this.memoryCache.invalidate(patterns);
this.emitEvent({
type: 'invalidate',
metadata: { patterns, count: invalidated }
});
return invalidated;
}
/**
* Invalidate cached entries for a specific entity, typically after a write operation; returns the number of entries removed
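*
* @example
* // Illustrative entity/operation names; the real values come from the caller:
* const removed = await orchestrator.invalidateEntity('experiment', 'update');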
*/
async invalidateEntity(entity, operation, context) {
if (!this.enabled)
return 0;
const pattern = this.keyGenerator.generateInvalidationPattern(entity, operation, context);
return this.invalidate([pattern]);
}
/**
* Clear all cache entries
*/
clearCache() {
if (this.enabled) {
this.memoryCache.clear();
}
}
/**
* Get cache statistics
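*
* @example
* const stats = orchestrator.getStats(); // null while caching is disabled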
*/
getStats() {
return this.enabled ? this.memoryCache.getStats() : null;
}
/**
* Preload the cache with a single query by forcing execution and storing the result; failures are logged as warnings and not rethrown
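*
* @example
* // Warm a frequently used query at startup (query shape and executor are assumptions):
* await orchestrator.preloadQuery({ operation: 'list', entity: 'projects' }, async (q) => fetchFromOptimizely(q));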
*/
async preloadQuery(query, queryExecutor, context, ttlFactors) {
if (!this.enabled)
return;
try {
await this.executeWithCache(query, queryExecutor, context, {
refreshCache: true,
ttlFactors,
});
}
catch (error) {
this.logger.warn({ error, query }, 'Cache preload failed');
}
}
/**
* Warm up cache with common queries
*/
async warmupCache() {
this.logger.info('Starting cache warmup');
// Placeholder: a full implementation would call preloadQuery() for a curated
// set of common queries; no entries are preloaded yet.
this.logger.info('Cache warmup completed');
}
/**
* Refresh a cache entry in the background (soft refresh); errors are logged and never propagated to callers
*/
async refreshInBackground(originalQuery, normalizedQuery, cacheKey, queryExecutor, context, ttlFactors) {
try {
const startTime = Date.now();
// As in executeWithCache, the executor receives the original query; the normalized query only drives TTL calculation
const data = await queryExecutor(originalQuery);
const executionTime = Date.now() - startTime;
const ttl = this.ttlStrategy.calculateTTL(normalizedQuery, context, ttlFactors);
await this.memoryCache.set(cacheKey, data, ttl, {
executionTime,
syncVersion: context?.syncVersion,
});
this.logger.debug({
cacheKey,
ttl,
executionTime
}, 'Background cache refresh completed');
}
catch (error) {
this.logger.error({ error, cacheKey }, 'Background refresh failed');
}
}
/**
* Start periodic stats reporting
*/
startStatsReporting() {
this.statsTimer = setInterval(() => {
const stats = this.getStats();
if (stats) {
this.logger.info({ cacheStats: stats }, 'Cache statistics');
this.emit('stats', stats);
}
}, this.statsInterval);
}
/**
* Emit cache event
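*
* @example
* // Consumers may subscribe to observe cache activity (listener is illustrative):
* orchestrator.on('cache-event', (evt) => console.debug('cache', evt.type, evt.key));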
*/
emitEvent(event) {
this.emit('cache-event', event);
// Log significant events
if (event.type === 'error') {
this.logger.error({ event }, 'Cache error event');
}
else if (event.type === 'invalidate') {
this.logger.info({ event }, 'Cache invalidation event');
}
}
/**
* Check if a query would be cached
*/
wouldBeCached(query) {
if (!this.enabled)
return false;
try {
const normalized = this.normalizer.normalize(query);
// Simple heuristic - could be expanded
return normalized.operation !== 'realtime';
}
catch {
return false;
}
}
/**
* Get recommended TTL for a scenario
*/
getRecommendedTTL(scenario) {
return this.ttlStrategy.getRecommendedTTL(scenario);
}
/**
* Shutdown orchestrator
*/
shutdown() {
if (this.statsTimer) {
clearInterval(this.statsTimer);
}
this.memoryCache.shutdown();
this.removeAllListeners();
this.logger.info('CacheOrchestrator shutdown');
}
}
//# sourceMappingURL=CacheOrchestrator.js.map