@simonecoelhosfo/optimizely-mcp-server
Optimizely MCP Server for AI assistants with integrated CLI tools
/**
* Cache Manager for Optimizely MCP Server
* @description High-performance caching layer that synchronizes Optimizely data to local
* SQLite storage with comprehensive pagination, error handling, and data integrity features
*
* This manager orchestrates data synchronization between the Optimizely API and local cache,
* providing fast access to projects, flags, experiments, audiences, and related entities
* while maintaining data consistency and handling rate limits gracefully.
*
* Key capabilities:
* - Full and incremental data synchronization
* - Advanced pagination with retry logic
* - Complex filtering and querying of cached data
* - Transactional integrity for multi-entity operations
* - Comprehensive error handling and recovery
* - Performance optimized with indexes and aggregations
*
* @author Optimizely MCP Server
* @version 1.0.0
*/
import { getLogger } from '../logging/Logger.js';
import { MCPErrorMapper } from '../errors/MCPErrorMapping.js';
import { ChangeHistoryTracker } from '../sync/ChangeHistoryTracker.js';
import { IncrementalSyncManager } from '../sync/IncrementalSyncManager.js';
import { createEnvironmentFilterFromEnv } from '../config/EnvironmentFilter.js';
import { safeIdToString } from '../utils/SafeIdConverter.js';
import ProgressEventEmitter from '../events/ProgressEventEmitter.js';
// CRITICAL: SafeIdConverter prevents scientific notation in large IDs
// DO NOT REMOVE OR REPLACE WITH String() - This caused duplicate records bug
// Evidence: 5.6006992878633e+15 vs 5600699287863296 for same entity
// Fixed: January 26, 2025 - See CONTEXT-RECOVERY-V2.md for details
/**
* Cache Manager Class
* @description Manages synchronization and caching of Optimizely data with local SQLite storage.
* Provides high-level operations for data sync, querying, and cache management while handling
* the complexity of API pagination, rate limiting, and error recovery.
*
* Architecture:
* - Uses SQLiteEngine for persistent storage with transaction support
* - Integrates with OptimizelyAPIHelper for API communication
* - Implements comprehensive error handling with MCP error mapping
* - Supports both full and targeted synchronization strategies
*/
export class CacheManager {
/** SQLite storage engine for persistent data storage */
storage;
/** Optimizely API client for data synchronization */
client;
/** Project filter for applying configuration-based filtering */
projectFilter;
/** Configuration for cache manager behavior */
config;
/** Change history tracker for incremental sync */
changeTracker;
/** Incremental sync manager */
incrementalSyncManager;
/** Environment filter for per-project environment syncing */
environmentFilter;
/** Sync lock to prevent concurrent sync operations */
static syncLock = new Map();
/**
* Creates a new Cache Manager instance
* @param storageEngine - Initialized SQLite storage engine
* @param optimizelyClient - Configured Optimizely API client
* @param projectFilter - Optional project filter for configuration-based filtering
* @param config - Optional configuration controlling cache manager behavior
* @description Initializes the cache manager with storage and API dependencies.
* Both dependencies should be properly configured before passing to the constructor.
* Call init() after construction to initialize the storage layer.
*
* @example
* ```typescript
* const storage = new SQLiteEngine({ path: './cache.db' });
* const apiClient = new OptimizelyAPIHelper(apiToken);
* const cache = new CacheManager(storage, apiClient);
* await cache.init();
* ```
*/
constructor(storageEngine, optimizelyClient, projectFilter, config) {
this.storage = storageEngine;
this.client = optimizelyClient;
this.projectFilter = projectFilter;
this.config = config;
this.environmentFilter = createEnvironmentFilterFromEnv();
}
/**
* Initializes the cache manager and underlying storage
* @param options - Initialization options
* @returns Promise that resolves when initialization is complete
* @description Initializes the SQLite storage engine, creates database schema,
* and prepares the cache for data operations. Must be called before using
* any other cache operations.
*
* @throws {McpError} When storage initialization fails
* @throws {Error} With code 'DATABASE_RESET_REQUIRED' when database needs reset
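*
* @example
* A minimal recovery sketch; the reset step shown is hypothetical:
* ```typescript
* try {
*   await cache.init();
* } catch (error) {
*   if (error.code === 'DATABASE_RESET_REQUIRED') {
*     // e.g. delete or rebuild the SQLite cache file, then retry init()
*   } else {
*     throw error;
*   }
* }
* ```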
*/
async init(options) {
try {
await this.storage.init(options);
// Initialize change tracking components
this.changeTracker = new ChangeHistoryTracker(this.storage, this.client);
this.incrementalSyncManager = new IncrementalSyncManager(this.changeTracker, this, this.storage, this.client);
}
catch (error) {
// Re-throw DATABASE_RESET_REQUIRED errors to be handled by the caller
if (error.code === 'DATABASE_RESET_REQUIRED') {
throw error;
}
throw MCPErrorMapper.toMCPError(error, 'Cache manager initialization failed');
}
}
/**
* Gracefully closes the cache manager and storage connections
* @returns Promise that resolves when cleanup is complete
* @description Closes database connections and cleans up resources.
* Should be called during application shutdown to ensure proper cleanup.
* After calling close(), the cache manager will need to be reinitialized
* with init() before it can be used again.
*/
async close() {
await this.storage.close();
}
/**
* Serializes sync operations to prevent concurrent database lock issues
* @param lockKey - Unique key for the lock (typically project ID)
* @param operation - The async operation to execute
* @returns Promise resolving to the operation result
* @description Ensures only one sync operation runs at a time for each lock key,
* preventing SQLite database lock errors during concurrent sync operations.
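*
* @example
* A usage sketch; the transactional body shown is illustrative:
* ```typescript
* // Concurrent calls sharing a lock key execute one after another
* await cache.serializeSync(`sync_state_${projectId}`, async () => {
*   await storage.transaction(async () => {
*     // ... write sync state for projectId ...
*   });
* });
* ```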
*/
async serializeSync(lockKey, operation) {
// Check if there's already a sync operation running for this key
const existingLock = CacheManager.syncLock.get(lockKey);
if (existingLock) {
getLogger().debug({ lockKey }, 'CacheManager: Waiting for existing sync operation to complete');
// Wait for the existing operation to complete before proceeding
await existingLock;
}
// Create a new lock for this operation
const currentOperation = (async () => {
try {
getLogger().debug({ lockKey }, 'CacheManager: Starting serialized sync operation');
const result = await operation();
getLogger().debug({ lockKey }, 'CacheManager: Completed serialized sync operation');
return result;
}
catch (error) {
getLogger().error({ lockKey, error: error.message }, 'CacheManager: Serialized sync operation failed');
throw error;
}
finally {
// Clean up the lock when operation completes
CacheManager.syncLock.delete(lockKey);
}
})();
// Store the lock
CacheManager.syncLock.set(lockKey, currentOperation);
return currentOperation;
}
/**
* Gets the project filter instance for configuration-based filtering
* @returns The project filter instance, or undefined if not configured
*/
getProjectFilter() {
return this.projectFilter;
}
/**
* Syncs ALL projects to the database without filtering
* This ensures every project is available in the database for entity operations
* @param allProjects - Array of all projects from the API
*/
async syncAllProjectsToDatabase(allProjects) {
getLogger().info({
projectCount: allProjects.length
}, 'CacheManager: Syncing ALL projects to database (no filtering)');
for (const project of allProjects) {
try {
const projectId = String(project.id || project.project_id);
// Insert or update project in database
await this.storage.run(`
INSERT OR REPLACE INTO projects (
id, name, account_id, created, status, platform, description,
is_flags_enabled, project_created, project_modified, last_modified
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`, [
projectId,
project.name || null,
safeIdToString(project.account_id || ''),
project.created || null,
project.status || 'active',
project.platform || (project.is_flags_enabled ? 'custom' : 'web'),
project.description || null,
project.is_flags_enabled ? 1 : 0,
project.created || null,
project.last_modified || null,
new Date().toISOString()
]);
getLogger().debug({
projectId,
projectName: project.name,
platform: project.platform || (project.is_flags_enabled ? 'custom' : 'web')
}, 'CacheManager: Saved project to database');
}
catch (error) {
getLogger().error({
projectId: project.id,
projectName: project.name,
error: error.message
}, 'CacheManager: Failed to save project to database');
}
}
getLogger().info({
projectCount: allProjects.length
}, 'CacheManager: Completed saving ALL projects to database');
}
/**
* Fetches all pages of data from a paginated API endpoint with retry logic
* @param fetchFunction - Callback that accepts (page, pageSize) and returns a Promise for one page
* @param maxRecords - Optional maximum number of records to fetch across all pages
* @param options - Configuration options for pagination and retry behavior
* @returns Promise resolving to array of all fetched items across all pages
* @description Advanced pagination utility that handles multiple response formats,
* implements exponential backoff retry logic, and provides comprehensive error handling.
*
* Supported Response Formats:
* - Direct array: `[item1, item2, ...]`
* - Object with data: `{ data: [item1, item2, ...] }`
* - Object with results: `{ results: [item1, item2, ...] }`
* - Object with items: `{ items: [item1, item2, ...] }`
*
* Retry Logic:
* - HTTP 429: Rate limit - exponential backoff with Retry-After header support
* - HTTP 500-599: Server errors - exponential backoff retry
* - Network errors: Automatic retry with backoff
* - Configurable retry attempts with failure logging
*
* @param options.maxPages - Maximum pages to fetch (default: 1000, safety limit)
* @param options.pageSize - Items per page (default: 100)
* @param options.startPage - Starting page number (default: 1)
* @param options.delay - Base delay between requests in ms (default: 1)
* @param options.retryAttempts - Maximum retry attempts per page (default: 3)
* @param options.progressCallback - Optional callback invoked as (fetched, estimatedTotal)
*
* @example
* ```typescript
* const allFlags = await cache.fetchAllPages(
*   (page, pageSize) => apiClient.getFlags({ page, per_page: pageSize }),
*   undefined, // no maxRecords limit
*   { maxPages: 50, pageSize: 100, retryAttempts: 3 }
* );
* ```
*
* @throws {Error} When all retry attempts are exhausted
* @throws {Error} When response format is unrecognized
*/
async fetchAllPages(fetchFunction, maxRecords, options = {}) {
// delay defaults to 1 ms, a very minimal delay to avoid rate limiting
const { maxPages = 1000, pageSize = 100, startPage = 1, delay = 1, retryAttempts = 3, progressCallback } = options;
const allResults = [];
let currentPage = startPage;
let hasMore = true;
let totalFetched = 0;
while (hasMore && currentPage <= maxPages && (!maxRecords || totalFetched < maxRecords)) {
let currentAttempt = 1;
let success = false;
while (currentAttempt <= retryAttempts && !success) {
try {
// Adjust page size if we're close to maxRecords limit
let adjustedPageSize = pageSize;
if (maxRecords && (totalFetched + pageSize > maxRecords)) {
adjustedPageSize = maxRecords - totalFetched;
}
const response = await fetchFunction(currentPage, adjustedPageSize);
getLogger().debug({
page: currentPage,
responseType: Array.isArray(response) ? 'array' : typeof response,
responseLength: Array.isArray(response) ? response.length : 'N/A'
}, 'CacheManager: fetchAllPages response');
let items = [];
if (Array.isArray(response)) {
items = response;
}
else if (response && Array.isArray(response.data)) {
items = response.data;
}
else if (response && Array.isArray(response.results)) {
items = response.results;
}
else if (response && response.items) {
items = Array.isArray(response.items) ? response.items : [response.items];
}
else if (response === null || response === undefined) {
items = [];
}
if (items.length === 0) {
hasMore = false;
}
else {
// Limit items to maxRecords if necessary
if (maxRecords && totalFetched + items.length > maxRecords) {
items = items.slice(0, maxRecords - totalFetched);
hasMore = false; // Stop fetching after hitting the limit
}
allResults.push(...items);
totalFetched += items.length;
// Report progress if callback provided
if (progressCallback) {
progressCallback(totalFetched, maxRecords || totalFetched);
}
if (items.length < adjustedPageSize) {
hasMore = false;
}
}
success = true;
}
catch (error) {
getLogger().error({ currentPage, currentAttempt, error: error.message }, 'CacheManager.fetchAllPages: Error fetching page');
if (error.status === 404 || error.status === 400) {
hasMore = false;
success = true;
}
else if (currentAttempt < retryAttempts) {
const retryDelay = delay * Math.pow(2, currentAttempt - 1);
getLogger().warn({ currentPage, retryDelay }, 'CacheManager.fetchAllPages: Retrying page');
await new Promise(resolve => setTimeout(resolve, retryDelay));
currentAttempt++;
}
else {
getLogger().error({ currentPage, retryAttempts }, 'CacheManager.fetchAllPages: Failed to fetch page after all attempts');
throw error;
}
}
}
if (!hasMore)
break;
currentPage++;
if (hasMore && delay > 0) {
await new Promise(resolve => setTimeout(resolve, delay));
}
}
return allResults;
}
/**
* Enhanced pagination method with progress reporting for performance optimization
* @description Extends fetchAllPages with real-time progress callbacks for batch operations.
* Maintains identical pagination behavior while adding progress tracking capabilities
* for high-performance synchronization with user feedback.
*
* This method provides the foundation for progress reporting in batch operations,
* allowing users to track sync progress in real-time during large data operations.
*
* @param fetchFunction - Callback function that accepts (page, pageSize) and returns Promise
* @param progressCallback - Progress callback function (fetched: number, estimated?: number)
* @param maxRecords - Maximum number of records to fetch across all pages
* @param options - Pagination configuration identical to fetchAllPages
* @param options.maxPages - Maximum pages to fetch (default: 1000, safety limit)
* @param options.pageSize - Items per page (default: 100)
* @param options.startPage - Starting page number (default: 1)
* @param options.delay - Base delay between requests in ms (default: 1)
* @param options.retryAttempts - Maximum retry attempts per page (default: 3)
* @param options.entityName - Optional entity name used for progress event emission
* @param options.projectName - Optional project name used for progress event emission
* @returns Promise resolving to array of all fetched items
* @throws {Error} When all retry attempts are exhausted
* @throws {Error} When response format is unrecognized
*
* @example
* ```typescript
* const allAudiences = await cache.fetchAllPagesWithProgress(
* (page, pageSize) => apiClient.listAudiences(projectId, { page, per_page: pageSize }),
* (fetched, estimated) => this.logger.log(`Progress: ${fetched}/${estimated || '?'} audiences`),
* undefined, // no maxRecords limit
* { pageSize: 100, retryAttempts: 3 }
* );
* ```
*/
async fetchAllPagesWithProgress(fetchFunction, progressCallback, maxRecords, options = {}) {
// delay defaults to 1 ms, a very minimal delay to avoid rate limiting
const { maxPages = 1000, pageSize = 100, startPage = 1, delay = 1, retryAttempts = 3, entityName, projectName } = options;
const allResults = [];
let currentPage = startPage;
let hasMore = true;
let totalFetched = 0;
let estimatedTotal;
// Skip initial progress callback with 0 - it's confusing to show "0 records"
// The first real progress will come after fetching the first page
while (hasMore && currentPage <= maxPages && (!maxRecords || totalFetched < maxRecords)) {
let currentAttempt = 1;
let success = false;
while (currentAttempt <= retryAttempts && !success) {
try {
// Adjust page size if we're close to maxRecords limit
let adjustedPageSize = pageSize;
if (maxRecords && (totalFetched + pageSize > maxRecords)) {
adjustedPageSize = maxRecords - totalFetched;
}
const response = await fetchFunction(currentPage, adjustedPageSize);
let items = [];
// Extract items from response (identical to fetchAllPages logic)
if (Array.isArray(response)) {
items = response;
}
else if (response && Array.isArray(response.data)) {
items = response.data;
}
else if (response && Array.isArray(response.results)) {
items = response.results;
}
else if (response && response.items) {
items = Array.isArray(response.items) ? response.items : [response.items];
}
else if (response === null || response === undefined) {
items = [];
}
// Update estimated total if available from response metadata
if (response && typeof response === 'object' && !Array.isArray(response)) {
const responseWithMetadata = response;
if (responseWithMetadata.total_count !== undefined) {
estimatedTotal = responseWithMetadata.total_count;
}
else if (responseWithMetadata.count !== undefined) {
estimatedTotal = responseWithMetadata.count;
}
else if (responseWithMetadata.total !== undefined) {
estimatedTotal = responseWithMetadata.total;
}
}
if (items.length === 0) {
hasMore = false;
}
else {
// Limit items to maxRecords if necessary
if (maxRecords && totalFetched + items.length > maxRecords) {
items = items.slice(0, maxRecords - totalFetched);
hasMore = false; // Stop fetching after hitting the limit
}
allResults.push(...items);
totalFetched += items.length;
// Determine if there might be more pages
const mightHaveMore = items.length === adjustedPageSize;
if (items.length < adjustedPageSize) {
hasMore = false;
}
// Progress callback after each successful page
if (progressCallback) {
// Pass undefined as estimated if we might have more pages (avoid -1 which causes validation errors)
progressCallback(totalFetched, mightHaveMore ? undefined : totalFetched);
}
// Emit progress event if entity info is available
if (entityName && projectName) {
ProgressEventEmitter.emitEntityProgress(entityName, totalFetched, 'records', mightHaveMore, projectName);
}
}
success = true;
}
catch (error) {
getLogger().error({
currentPage,
currentAttempt,
error: error.message
}, 'CacheManager.fetchAllPagesWithProgress: Error fetching page');
if (error.status === 404 || error.status === 400) {
hasMore = false;
success = true;
}
else if (currentAttempt < retryAttempts) {
const retryDelay = delay * Math.pow(2, currentAttempt - 1);
getLogger().warn({
currentPage,
retryDelay
}, 'CacheManager.fetchAllPagesWithProgress: Retrying page');
await new Promise(resolve => setTimeout(resolve, retryDelay));
currentAttempt++;
}
else {
getLogger().error({
currentPage,
retryAttempts
}, 'CacheManager.fetchAllPagesWithProgress: Failed to fetch page after all attempts');
throw error;
}
}
}
if (!hasMore)
break;
currentPage++;
if (hasMore && delay > 0) {
await new Promise(resolve => setTimeout(resolve, delay));
}
}
// Final progress callback
if (progressCallback) {
progressCallback(totalFetched, estimatedTotal || totalFetched);
}
return allResults;
}
/**
* Fetches all pages of data from a cursor-based paginated API endpoint
* @param fetchFunction - Function that fetches a page of data using cursor
* @param options - Configuration options: pageSize (default: 100), delay (default: 5), retryAttempts (default: 3), progressCallback
* @returns Promise resolving to array of all fetched items across all pages
* @description Handles Feature Experimentation API's cursor-based pagination
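*
* @example
* A usage sketch; `apiClient.listFlags` and its query parameters are assumptions, not part of this class:
* ```typescript
* const allFlags = await cache.fetchAllPagesWithCursor(
*   (pageToken, pageSize) => apiClient.listFlags(projectId, { page_token: pageToken, per_page: pageSize }),
*   { pageSize: 100, retryAttempts: 3 }
* );
* ```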
*/
async fetchAllPagesWithCursor(fetchFunction, options = {}) {
const { pageSize = 100, delay = 5, retryAttempts = 3, progressCallback } = options;
const allResults = [];
let pageToken = null;
let hasMore = true;
getLogger().info({ pageSize, delay, retryAttempts }, 'CacheManager.fetchAllPagesWithCursor: Starting pagination');
while (hasMore) {
let currentAttempt = 1;
let success = false;
getLogger().info({ pageToken, hasMore, currentPage: Math.floor(allResults.length / pageSize) + 1 }, 'CacheManager.fetchAllPagesWithCursor: Fetching page');
while (currentAttempt <= retryAttempts && !success) {
try {
const response = await fetchFunction(pageToken || undefined, pageSize);
getLogger().info({
pageToken,
responseType: typeof response,
responseKeys: response ? Object.keys(response) : 'null',
hasItems: response && response.items !== undefined,
itemsCount: response && response.items ? response.items.length : 'N/A',
hasNextUrl: response && response.next_url !== undefined,
nextUrlType: response && response.next_url ? typeof response.next_url : 'N/A',
nextUrlValue: response && response.next_url ? JSON.stringify(response.next_url) : 'N/A'
}, 'CacheManager.fetchAllPagesWithCursor: Response received');
// Handle paginated response format
if (response && response.items !== undefined) {
const items = Array.isArray(response.items) ? response.items : [];
allResults.push(...items);
getLogger().info({
itemsAdded: items.length,
totalItems: allResults.length
}, 'CacheManager.fetchAllPagesWithCursor: Items added to results');
// Check for next page first to determine if there might be more
const hasNextPage = !!response.next_url;
// Report progress if callback provided
if (progressCallback) {
// Pass -1 as estimated if there's a next page (more data coming)
const estimatedParam = hasNextPage ? -1 : allResults.length;
progressCallback(allResults.length, estimatedParam);
}
// Process next page
if (hasNextPage) {
// next_url can be a string or array - handle both
const nextUrl = Array.isArray(response.next_url) ? response.next_url[0] : response.next_url;
getLogger().info({
nextUrlRaw: response.next_url,
nextUrlExtracted: nextUrl
}, 'CacheManager.fetchAllPagesWithCursor: Processing next_url');
if (nextUrl) {
// Extract page_token from next_url
const match = nextUrl.match(/page_token=([^&]+)/);
getLogger().info({
nextUrl,
regexMatch: match ? match[0] : 'no match',
extractedToken: match ? match[1] : 'no token'
}, 'CacheManager.fetchAllPagesWithCursor: Token extraction');
pageToken = match ? decodeURIComponent(match[1]) : null;
hasMore = pageToken !== null;
getLogger().info({
pageToken,
hasMore
}, 'CacheManager.fetchAllPagesWithCursor: Pagination state updated');
}
else {
getLogger().info('CacheManager.fetchAllPagesWithCursor: nextUrl is falsy, stopping pagination');
hasMore = false;
}
}
else {
getLogger().info('CacheManager.fetchAllPagesWithCursor: No next_url, stopping pagination');
hasMore = false;
}
}
else if (Array.isArray(response)) {
// Direct array response (shouldn't happen with Feature Experimentation API)
getLogger().info({ responseLength: response.length }, 'CacheManager.fetchAllPagesWithCursor: Direct array response');
allResults.push(...response);
hasMore = false;
}
else {
getLogger().warn({ response }, 'CacheManager.fetchAllPagesWithCursor: Unexpected response format');
hasMore = false;
}
success = true;
}
catch (error) {
getLogger().error({ pageToken, currentAttempt, error: error.message }, 'CacheManager.fetchAllPagesWithCursor: Error fetching page');
if (error.status === 404 || error.status === 400) {
hasMore = false;
success = true;
}
else if (currentAttempt < retryAttempts) {
const retryDelay = delay * Math.pow(2, currentAttempt - 1);
getLogger().warn({ pageToken, retryDelay }, 'CacheManager.fetchAllPagesWithCursor: Retrying page');
await new Promise(resolve => setTimeout(resolve, retryDelay));
currentAttempt++;
}
else {
getLogger().error({ pageToken, retryAttempts }, 'CacheManager.fetchAllPagesWithCursor: Failed to fetch page after all attempts');
throw error;
}
}
}
if (hasMore && delay > 0) {
getLogger().info({ delay }, 'CacheManager.fetchAllPagesWithCursor: Applying delay before next page');
await new Promise(resolve => setTimeout(resolve, delay));
}
}
getLogger().info({ totalResults: allResults.length }, 'CacheManager.fetchAllPagesWithCursor: Pagination completed');
return allResults;
}
/**
* Performs a complete synchronization of Optimizely data to local cache
* @param projectIdInput - Optional project ID to sync a specific project (if null/undefined, syncs all projects)
* @param progressCallback - Optional callback receiving { phase, current, total, message, percent } updates
* @returns Promise resolving to sync result with success status, duration, and project count
* @description Executes a comprehensive data synchronization within a database transaction.
* Syncs projects and all associated entities including environments, flags, experiments,
* audiences, attributes, and events. Updates sync metadata for tracking purposes.
*
* Synchronization Process:
* 1. Fetches project(s) from Optimizely API
* 2. For each project, syncs all associated data:
* - Project metadata and configuration
* - Environment definitions
* - Feature flags with environment configurations
* - Experiments and A/B tests
* - Audience definitions and targeting rules
* - Custom attributes for targeting
* - Event definitions for metrics
* 3. Updates sync metadata timestamp
* 4. All operations wrapped in transaction for data integrity
*
* Performance Considerations:
* - Uses transactions for atomicity (all-or-nothing)
* - Implements pagination for large datasets
* - Handles rate limiting with retry logic
* - Provides detailed logging for monitoring
*
* @returns Promise resolving to sync result object:
* ```typescript
* {
* success: boolean;
* duration: number; // milliseconds
* projectsSynced: number;
* timestamp: string; // ISO timestamp
* }
* ```
*
* @example
* ```typescript
* // Sync all projects
* const result = await cache.fullSync();
* this.logger.log(`Synced ${result.projectsSynced} projects in ${result.duration}ms`);
*
* // Sync specific project
* const specificResult = await cache.fullSync('12345');
* ```
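*
* A progress-reporting sketch; the callback fields mirror those emitted by this method:
* ```typescript
* await cache.fullSync(undefined, ({ phase, current, total, percent, message }) => {
*   console.error(`[${phase}] ${current}/${total} (${percent}%) ${message}`);
* });
* ```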
*
* @throws {McpError} When project is not found or sync operation fails
* @throws {McpError} When database transaction fails
*/
async fullSync(projectIdInput, progressCallback) {
getLogger().info('CacheManager: Starting full sync');
const startTime = Date.now();
// Helper to report progress
const reportProgress = (phase, current, total, message) => {
const percent = total > 0 ? Math.round((current / total) * 100) : 0;
getLogger().debug({ phase, current, total, percent, message }, 'CacheManager: Sync progress');
if (progressCallback) {
progressCallback({ phase, current, total, message, percent });
}
};
try {
// Fetch projects outside of transaction to avoid holding locks during API calls
let projectsToSync;
if (projectIdInput) {
const project = await this.client.getProject(projectIdInput);
if (!project)
throw MCPErrorMapper.toMCPError(new Error(`Project ${projectIdInput} not found via API for sync`), { operation: 'Sync project', metadata: { project_id: projectIdInput } });
projectsToSync = [project];
}
else {
getLogger().info('CacheManager: Fetching all projects from API');
const allProjects = await this.fetchAllPages((page, perPage) => {
getLogger().debug({ page, perPage }, 'CacheManager: Fetching projects page');
return this.client.listProjects({ page, per_page: perPage });
});
getLogger().info({ fetchedCount: allProjects.length }, 'CacheManager: Fetched projects from API');
// CRITICAL CHANGE: Save ALL projects to database, but filter entities by project
// Step 1: Save ALL projects to the database (no filtering)
await this.syncAllProjectsToDatabase(allProjects);
// Step 2: Apply project filter only for entity synchronization
if (this.projectFilter) {
projectsToSync = await this.projectFilter.filterProjects(allProjects);
getLogger().info({
totalProjects: allProjects.length,
projectsSavedToDatabase: allProjects.length,
filteredProjectsForEntitySync: projectsToSync.length
}, 'CacheManager: Saved ALL projects to database, filtering entity sync by configuration');
}
else {
projectsToSync = allProjects;
getLogger().info({ projectCount: allProjects.length }, 'CacheManager: No project filter configured, syncing all projects and entities');
}
}
getLogger().info({ projectCount: projectsToSync.length }, 'CacheManager: Syncing projects');
// Report initial progress
reportProgress('projects', 0, projectsToSync.length, 'Starting project synchronization');
// Sync each project individually (not in a long transaction)
let syncedCount = 0;
for (const project of projectsToSync) {
try {
getLogger().info({ projectId: project.id, projectName: project.name }, 'CacheManager: Starting sync for project');
reportProgress('projects', syncedCount, projectsToSync.length, `Syncing project: ${project.name}`);
// Emit project start event
ProgressEventEmitter.emitProjectStart(project.name || `Project ${project.id}`);
await this.syncProject(project, (entityProgress) => {
// Forward project-level progress
reportProgress(entityProgress.phase, entityProgress.current, entityProgress.total, `${project.name}: ${entityProgress.message}`);
});
// Initialize sync state for incremental sync with serialization to prevent locks
if (this.changeTracker) {
await this.serializeSync(`sync_state_${project.id}`, async () => {
await this.storage.transaction(async () => {
await this.changeTracker.updateSyncState(String(project.id), {
last_sync_time: new Date().toISOString(),
last_successful_sync: new Date().toISOString(),
sync_in_progress: false,
error_count: 0,
last_error: undefined
});
});
});
getLogger().info({ projectId: project.id }, 'CacheManager: Initialized sync state for project');
}
syncedCount++;
reportProgress('projects', syncedCount, projectsToSync.length, `Completed project: ${project.name}`);
getLogger().info({ projectId: project.id }, 'CacheManager: Successfully synced project');
}
catch (error) {
getLogger().error({
projectId: project.id,
error: error.message,
stack: error.stack
}, 'CacheManager: Failed to sync project, continuing with others');
reportProgress('projects', syncedCount, projectsToSync.length, `Failed to sync project: ${project.name}`);
// Continue with other projects even if one fails
}
}
// Update sync metadata in a small transaction
await this.storage.run('INSERT OR REPLACE INTO sync_metadata (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP)', ['last_full_sync', new Date().toISOString()]);
const duration = Date.now() - startTime;
getLogger().info({ duration, projectCount: syncedCount }, 'CacheManager: Full sync completed');
return {
success: true,
duration,
projectsSynced: syncedCount,
timestamp: new Date().toISOString()
};
}
catch (error) {
getLogger().error({ error: error.message, stack: error.stack }, 'CacheManager: Full sync failed');
throw MCPErrorMapper.toMCPError(error, 'Cache full sync failed');
}
}
/**
* Synchronizes a single project and all its associated entities
* @param projectFromAPI - Complete project object from the Optimizely API
* @param progressCallback - Optional callback for entity-level progress updates
* @returns Promise that resolves when project sync is complete
* @description Comprehensive project synchronization that handles all related entities.
* Fetches and stores project data, environments, flags, experiments, audiences,
* attributes, and events with proper error handling and transaction safety.
*
* Synchronization Process:
* 1. Stores project metadata and configuration
* 2. Fetches and stores all project environments
* 3. Fetches flags using appropriate pagination method based on project type
* 4. For each flag, fetches environment-specific rulesets
* 5. Fetches experiments with proper archival filtering
* 6. Synchronizes audiences, attributes, and events
*
* Error Handling:
* - Individual entity sync failures don't stop the entire process
* - Logs warnings for failed operations
* - Provides detailed context for troubleshooting
*
* Performance Features:
* - Uses prepared statements for database operations
* - Implements proper pagination for large datasets
* - Handles rate limiting with retry logic
*
* @throws {Error} When critical operations like project storage fail
* @throws {McpError} When project data is invalid or malformed
* @private
*/
async syncProject(projectFromAPI, progressCallback) {
const projectId = String(projectFromAPI.id);
getLogger().info({ projectId }, 'CacheManager.syncProject: Starting sync');
// Helper to report entity-level progress using event emitter
const reportEntityProgress = (entity, current, total, message, count, countType, isOngoing, totalCount) => {
const projectName = projectFromAPI.name || `Project ${projectId}`;
getLogger().debug({ entity, current, total, message, count, countType, isOngoing, totalCount }, 'CacheManager.syncProject: Entity progress');
// Emit events based on state
if (current === 0 && !count) {
// Entity starting
ProgressEventEmitter.emitEntityStart(entity, projectName);
}
else if (current >= total && total > 0) {
// Entity completed - check this BEFORE progress update
ProgressEventEmitter.emitEntityComplete(entity, count, countType, projectName);
}
else if (count !== undefined) {
// Progress update with count - pass totalCount for entities like flag_rulesets
ProgressEventEmitter.emitEntityProgress(entity, count, countType, isOngoing, projectName, totalCount);
}
// Also maintain callback compatibility
if (progressCallback) {
const phase = `project_${projectId}_${entity}`;
const percent = total > 0 ? Math.round((current / total) * 100) : 0;
progressCallback({
phase,
current,
total,
message,
percent,
entityData: {
entity,
count,
countType: countType || 'records',
isOngoing: isOngoing ?? (total === -1 || (total > 0 && current < total))
}
});
}
};
// Store project data
await this.storage.run(`
INSERT OR REPLACE INTO projects
(id, name, platform, is_flags_enabled, account_id, last_modified, data_json, synced_at)
VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
`, [
projectId,
projectFromAPI.name || '',
projectFromAPI.platform || 'web',
projectFromAPI.is_flags_enabled ? 1 : 0,
safeIdToString(projectFromAPI.account_id || ''),
projectFromAPI.last_modified,
JSON.stringify(projectFromAPI)
]);
// Determine project platform and entity filter first
const isFeatureExperimentation = projectFromAPI.is_flags_enabled || false;
const platform = isFeatureExperimentation ? 'feature' : 'web';
// Get allowed entities from environment variables
const webEntities = process.env.WEB_ENTITIES?.split(',').map(e => e.trim()) || [
'experiments', 'campaigns', 'pages', 'audiences', 'events', 'attributes',
'experiment_results', 'groups', 'extensions', 'webhooks', 'list_attributes',
'web_environments', 'change_history'
];
const featureEntities = process.env.FEATURE_ENTITIES?.split(',').map(e => e.trim()) || [
'flags', 'environments', 'features', 'audiences', 'events', 'attributes',
'groups', 'webhooks', 'change_history'
// Note: No 'extensions' or 'list_attributes' - Feature Experimentation doesn't support these endpoints
];
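// Example overrides (hypothetical .env values; names must come from the lists above):
//   WEB_ENTITIES=experiments,audiences,events,pages
//   FEATURE_ENTITIES=flags,environments,audiences,events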
const allowedEntities = isFeatureExperimentation ? featureEntities : webEntities;
// Only sync built-in environments if 'environments' is in allowed entities
let environments = [];
if (allowedEntities.includes('environments')) {
getLogger().debug({ projectId }, 'CacheManager.syncProject: Syncing environments');
try {
// Project platform was already determined above from projectFromAPI.is_flags_enabled
if (isFeatureExperimentation) {
// Feature Experimentation project - use existing logic
const envResponse = await this.client.listEnvironments(projectId);
// Feature Experimentation API returns paginated response with items array
environments = Array.isArray(envResponse) ? envResponse : (envResponse?.items || []);
}
else {
// Web Experimentation project - use new Web environments API
environments = await this.fetchAllPages((page, perPage) => this.client.listWebEnvironments(projectId, {
page,
per_page: perPage
}));
}
// Store ALL environments - filtering happens later at ruleset call level
getLogger().info({
projectId,
environmentCount: environments.length,
environmentNames: environments.map((env) => env.key || env.name)
}, 'CacheManager.syncProject: Downloaded all environments for storage');
}
catch (error) {
if (error.status === 404) {
getLogger().info({ projectId }, 'CacheManager.syncProject: No environments endpoint for this project, using defaults');
// Use default development and production environments for projects without environments API
environments = [
{ key: 'production', name: 'Production', is_primary: true },
{ key: 'development', name: 'Development', is_primary: false }
];
}
else {
throw error;
}
}
for (const env of environments) {
await this.storage.run(`
INSERT OR REPLACE INTO environments
(project_id, key, name, is_primary, data_json, synced_at)
VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
`, [
projectId,
env.key,
env.name || '',
env.is_primary ? 1 : 0,
JSON.stringify(env)
]);
}
// Report progress for environments
// For Feature Experimentation, don't mark as complete yet since environments are used by flags
if (isFeatureExperimentation) {
reportEntityProgress('environments', 0, 1, 'Downloaded environments for flag sync', environments.length, 'records');
}
else {
// For Web Experimentation, environments are a standalone entity that's now complete
reportEntityProgress('environments', environments.length, environments.length, 'Environments synced', environments.length, 'records');
}
}
else {
getLogger().info({ projectId, platform }, 'CacheManager.syncProject: Skipping built-in environments sync (not in allowed entities)');
// Still need default environments for flags sync if this is Feature Experimentation
if (isFeatureExperimentation) {
environments = [
{ key: 'production', name: 'Production', is_primary: true },
{ key: 'development', name: 'Development', is_primary: false }
];
}
}
getLogger().info({
projectId,
platform,
isFeatureExperimentation,
allowedEntities
}, 'CacheManager.syncProject: Determined platform and entity filter');