okta-mcp-server
Version:
Model Context Protocol (MCP) server for Okta API operations with support for bulk operations and caching
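A minimal usage sketch of the cache factory defined below (the import path, the config values, and the `{ ttl: 60 }` set option are assumptions; the method and config field names come from the code itself):

// Hypothetical usage of CacheFactory (import path is an assumption)
import { CacheFactory } from './cache/cache-factory.js';

// Build a single-layer cache from configuration
const cache = await CacheFactory.create({
  enabled: true,
  type: 'sqlite', // or 'memory' (the default)
  ttl: 300,
  maxSize: 5000,
});

await cache.set('okta:users', [{ id: '00u1' }], { ttl: 60 }); // options shape is assumed
const users = await cache.get('okta:users');

// Or a two-level cache: a small in-memory L1 in front of the configured L2
const layered = await CacheFactory.createMultiLayer({ enabled: true, type: 'sqlite', maxSize: 5000 });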
JavaScript
/**
* Factory for creating cache instances based on configuration
*/
import { MemoryCache } from './memory-cache.js';
import { SqliteCache } from './sqlite-cache.js';
import { logger as baseLogger } from '../../utils/logger.js';
const logger = baseLogger.child({ module: 'CacheFactory' });
export class CacheFactory {
/**
* Create a cache instance based on configuration
*/
static async create(config, eventBus) {
if (!config.enabled) {
logger.info('Cache disabled, using memory cache with minimal size');
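// A single-entry memory cache keeps the cache API available while storing effectively nothing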
return new MemoryCache({ maxSize: 1, ...(eventBus && { eventBus }) });
}
const cacheType = config.type || 'memory';
logger.info(`Creating ${cacheType} cache with config:`, {
type: cacheType,
ttl: config.ttl,
maxSize: config.maxSize,
});
switch (cacheType) {
case 'sqlite':
return this.createSqliteCache(config, eventBus);
// Redis support removed in v2.0
case 'memory':
default:
return new MemoryCache({
maxSize: config.maxSize || 1000,
...(eventBus && { eventBus }),
});
}
}
/**
* Create SQLite cache instance
*/
static createSqliteCache(config, eventBus) {
logger.info('Creating SQLite cache');
return new SqliteCache({
maxSize: config.maxSize || 10000,
...(eventBus && { eventBus }),
});
}
/**
* Create a multi-layer cache with L1 (memory) and L2 (SQLite or memory, per configuration)
*/
static async createMultiLayer(config, eventBus) {
if (!config.enabled) {
return this.create(config, eventBus);
}
// Create L1 memory cache with smaller size
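// (at most 100 entries, or 10% of the configured max size, whichever is smaller)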
const l1Size = Math.min(100, Math.floor((config.maxSize || 1000) / 10));
const l1Cache = new MemoryCache({
maxSize: l1Size,
...(eventBus && { eventBus }),
});
// Create L2 cache based on configuration
const l2Cache = await this.create(config, eventBus);
// Return multi-layer cache wrapper
return new MultiLayerCache(l1Cache, l2Cache, eventBus);
}
}
/**
* Multi-layer cache implementation
*/
class MultiLayerCache {
l1;
l2;
eventBus;
constructor(l1, l2, eventBus) {
this.l1 = l1;
this.l2 = l2;
this.eventBus = eventBus;
}
async get(key) {
// Try L1 first
const l1Value = await this.l1.get(key);
if (l1Value !== undefined) {
this.eventBus?.emit('cache:hit', { key });
return l1Value;
}
// Try L2
const l2Value = await this.l2.get(key);
if (l2Value !== undefined) {
this.eventBus?.emit('cache:hit', { key });
// Populate L1
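// Note: the value is re-set without the original options, so any per-entry TTL is not carried over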
await this.l1.set(key, l2Value);
return l2Value;
}
this.eventBus?.emit('cache:miss', { key });
return undefined;
}
async set(key, value, options) {
// Set in both layers
await Promise.all([this.l1.set(key, value, options), this.l2.set(key, value, options)]);
}
async delete(key) {
const [l1Result, l2Result] = await Promise.all([this.l1.delete(key), this.l2.delete(key)]);
return l1Result || l2Result;
}
async has(key) {
const [l1Has, l2Has] = await Promise.all([this.l1.has(key), this.l2.has(key)]);
return l1Has || l2Has;
}
async clear() {
await Promise.all([this.l1.clear(), this.l2.clear()]);
}
async clearByTag(tag) {
await Promise.all([this.l1.clearByTag(tag), this.l2.clearByTag(tag)]);
}
async size() {
// Return L2 size as it's the authoritative cache
return this.l2.size();
}
}
// Convenience function export (bound so `this` inside create() still refers to CacheFactory)
export const createCache = CacheFactory.create.bind(CacheFactory);
//# sourceMappingURL=cache-factory.js.map