@simonecoelhosfo/optimizely-mcp-server
Version:
Optimizely MCP Server for AI assistants with integrated CLI tools
146 lines (143 loc) • 6.35 kB
JavaScript
/**
 * Test script: sync / cache integration.
 * Verifies that cached query results are properly invalidated when sync
 * events (entity updates, bulk syncs) are reported to the engine's
 * sync-cache handler, and that subsequent queries fetch fresh data.
 */
// Project-local modules: the query engine under test and its SQLite adapter.
import { IntelligentQueryEngine } from './IntelligentQueryEngine.js';
import { OptimizelyAdapter } from './adapters/OptimizelyAdapter.js';
// better-sqlite3: synchronous SQLite driver; backs the in-memory test DB.
import Database from 'better-sqlite3';
console.log('Testing Sync Cache Integration\n');
/**
 * Create the `flags` and `experiments` tables in the given SQLite handle
 * and seed each with two rows for project 'proj1'.
 *
 * Kept `async` so callers can uniformly `await` it, although better-sqlite3's
 * `exec` is synchronous.
 *
 * @param {object} db - open better-sqlite3 database handle (needs `.exec`)
 */
async function setupDatabase(db) {
  // Schema + seed data executed in a single batch.
  const seedSql = `
CREATE TABLE IF NOT EXISTS flags (
id TEXT PRIMARY KEY,
key TEXT NOT NULL,
name TEXT,
enabled INTEGER DEFAULT 1,
project_id TEXT
);
CREATE TABLE IF NOT EXISTS experiments (
id TEXT PRIMARY KEY,
key TEXT NOT NULL,
name TEXT,
status TEXT,
project_id TEXT
);
-- Insert initial data
INSERT INTO flags VALUES ('1', 'flag_a', 'Flag A', 1, 'proj1');
INSERT INTO flags VALUES ('2', 'flag_b', 'Flag B', 0, 'proj1');
INSERT INTO experiments VALUES ('1', 'exp_a', 'Experiment A', 'running', 'proj1');
INSERT INTO experiments VALUES ('2', 'exp_b', 'Experiment B', 'paused', 'proj1');
`;
  db.exec(seedSql);
}
/**
 * End-to-end exercise of the sync -> cache-invalidation pipeline.
 *
 * Flow: populate the cache with two queries, verify a cache hit, simulate a
 * sync (start -> DB updates -> per-entity notifications -> complete), wait out
 * the batch-invalidation delay, then confirm follow-up queries return fresh
 * data. Repeats the cycle with a bulk sync and finally dumps statistics.
 *
 * Fixes vs. previous revision: the two console strings at "Step 2" and the
 * summary header contained mojibake (a mis-encoded emoji split the template
 * literal across lines); restored to valid characters.
 *
 * @returns {Promise<void>} resolves when the scenario (or an early bail-out
 *   due to a missing sync handler) has finished; the engine is shut down.
 */
async function testSyncCacheIntegration() {
  // In-memory SQLite DB seeded with 2 flags and 2 experiments (see setupDatabase).
  const db = new Database(':memory:');
  await setupDatabase(db);

  // Engine with caching enabled and sync-driven batch invalidation.
  const engine = new IntelligentQueryEngine({
    cache: {
      enabled: true,
      defaultTTL: 300000, // 5 minutes
      syncIntegration: {
        enabled: true,
        batchInvalidation: true,
        batchDelay: 500, // 500ms for testing
      }
    }
  });

  // Register adapter backing the engine's queries with the test DB.
  const adapter = new OptimizelyAdapter({ database: db });
  engine.registerAdapter(adapter);

  // Sync handler is the entry point for sync lifecycle notifications;
  // without it the rest of the scenario is meaningless, so bail out early.
  const syncHandler = engine.getSyncCacheHandler();
  if (!syncHandler) {
    console.error('Sync cache handler not available');
    return;
  }

  // Queries reused throughout: enabled flags and running experiments.
  const flagQuery = {
    find: 'flags',
    select: ['key', 'name', 'enabled'],
    where: [{ field: 'enabled', operator: '=', value: 1 }],
  };
  const expQuery = {
    find: 'experiments',
    select: ['key', 'status'],
    where: [{ field: 'status', operator: '=', value: 'running' }],
  };

  console.log('=== Step 1: Initial Queries (populate cache) ===');
  let result = await engine.query(flagQuery);
  console.log(`Flags: ${result.data.length} found, Cached: ${result.metadata.cached}`);
  result = await engine.query(expQuery);
  console.log(`Experiments: ${result.data.length} found, Cached: ${result.metadata.cached}`);

  console.log('\n=== Step 2: Verify Cache Hit ===');
  result = await engine.query(flagQuery);
  console.log(`Flags query - Cached: ${result.metadata.cached} ✅`);

  console.log('\n=== Step 3: Simulate Sync Start ===');
  await syncHandler.onSyncStart('proj1');
  console.log('Sync started for proj1');

  console.log('\n=== Step 4: Update Database (simulate sync) ===');
  // After these updates both flags are enabled and both experiments running,
  // so the cached query results (1 row each) are stale.
  db.prepare('UPDATE flags SET enabled = 1 WHERE key = ?').run('flag_b');
  db.prepare('UPDATE experiments SET status = ? WHERE key = ?').run('running', 'exp_b');
  console.log('Updated flag_b and exp_b');

  console.log('\n=== Step 5: Notify Cache of Changes ===');
  // Note: flags are identified by entityKey, experiments by entityId here —
  // exercising both identifier forms of the event payload.
  await syncHandler.onEntitySynced({
    type: 'entity_synced',
    entity: 'flags',
    entityKey: 'flag_b',
    projectId: 'proj1',
    operation: 'update',
  });
  await syncHandler.onEntitySynced({
    type: 'entity_synced',
    entity: 'experiments',
    entityId: '2',
    projectId: 'proj1',
    operation: 'update',
  });

  console.log('\n=== Step 6: Complete Sync (triggers batch invalidation) ===');
  await syncHandler.onSyncComplete('proj1');
  // Wait past the 500ms batchDelay configured above.
  await new Promise(resolve => setTimeout(resolve, 600));

  console.log('\n=== Step 7: Query Again (should fetch fresh data) ===');
  result = await engine.query(flagQuery);
  console.log(`Flags: ${result.data.length} found (should be 2), Cached: ${result.metadata.cached}`);
  console.log('Flag keys:', result.data.map(f => f.key).join(', '));
  result = await engine.query(expQuery);
  console.log(`Experiments: ${result.data.length} found (should be 2), Cached: ${result.metadata.cached}`);
  console.log('Experiment keys:', result.data.map(e => e.key).join(', '));

  console.log('\n=== Step 8: Test Bulk Sync ===');
  await syncHandler.onSyncStart('proj1');
  // Add new entities (flag_c enabled, exp_c running — both match the queries).
  db.prepare('INSERT INTO flags VALUES (?, ?, ?, ?, ?)').run('3', 'flag_c', 'Flag C', 1, 'proj1');
  db.prepare('INSERT INTO experiments VALUES (?, ?, ?, ?, ?)').run('3', 'exp_c', 'Experiment C', 'running', 'proj1');
  // Bulk invalidation covers both new entities in one call.
  await syncHandler.onBulkSync([
    { entity: 'flags', entityId: '3' },
    { entity: 'experiments', entityId: '3' },
  ], 'proj1');
  await syncHandler.onSyncComplete('proj1');
  await new Promise(resolve => setTimeout(resolve, 600));

  console.log('\n=== Step 9: Final Queries ===');
  result = await engine.query(flagQuery);
  console.log(`Flags: ${result.data.length} found (should be 3)`);
  console.log('Flag keys:', result.data.map(f => f.key).join(', '));
  result = await engine.query(expQuery);
  console.log(`Experiments: ${result.data.length} found (should be 3)`);
  console.log('Experiment keys:', result.data.map(e => e.key).join(', '));

  console.log('\n=== Cache & Invalidation Statistics ===');
  const cacheStats = engine.getCacheMetrics();
  console.log('Cache Stats:', cacheStats);
  const invalidationStats = engine.getInvalidationEngine().getStats();
  console.log('Invalidation Stats:', invalidationStats);
  // syncCacheIntegration may be absent depending on engine config — hence `?.`.
  const syncStats = engine.syncCacheIntegration?.getStats();
  console.log('Sync Integration Stats:', syncStats);

  await engine.shutdown();
  console.log('\nSync cache integration test completed!');
  console.log('\n📊 Summary:');
  console.log('  - Cache invalidation triggered by sync events');
  console.log('  - Batch invalidation reduces overhead during sync');
  console.log('  - Fresh data fetched after invalidation');
  console.log('  - Dependency tracking ensures related entities are invalidated');
}
// Run the scenario. Log failures AND set a non-zero exit code so CI notices;
// the previous `.catch(console.error)` swallowed errors with exit status 0.
testSyncCacheIntegration().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
//# sourceMappingURL=test-sync-cache-integration.js.map