@simonecoelhosfo/optimizely-mcp-server

Optimizely MCP Server for AI assistants with integrated CLI tools

#!/usr/bin/env node
/**
 * Enhanced Optimizely Cache Sync CLI with Progress Bars
 * @description Advanced CLI tool that leverages the Phase 3 progress callback infrastructure
 * to provide real-time progress bars and detailed performance metrics during sync operations.
 *
 * Features:
 * - Real-time progress bars for each sync phase
 * - SQL operation reduction metrics
 * - Entity-specific progress tracking
 * - Verbose mode for debugging
 * - Multi-project progress aggregation
 * - Performance optimization metrics display
 *
 * Usage:
 *   npm run cache-sync:enhanced                    # Full sync with progress bars
 *   npm run cache-sync:enhanced -- --verbose       # Verbose output
 *   npm run cache-sync:enhanced -- --project 123   # Specific project
 *   npm run cache-sync:enhanced -- --force         # Force refresh
 *   npm run cache-sync:enhanced -- --incremental   # Incremental sync
 *
 * @author Optimizely MCP Server
 * @version 2.0.0
 */
import * as dotenv from 'dotenv';
import { Command } from 'commander';
import chalk from 'chalk';
// Load environment variables
dotenv.config();
// Import required components
import { ConfigManager } from '../dist/config/ConfigManager.js';
import { ProjectFilter } from '../dist/config/ProjectFilter.js';
import { OptimizelyAPIHelper } from '../dist/api/OptimizelyAPIHelper.js';
import { SQLiteEngine } from '../dist/storage/SQLiteEngine.js';
import { CacheManager } from '../dist/cache/CacheManager.js';
import { OptimizelyMCPTools } from '../dist/tools/OptimizelyMCPTools.js';
import { EntityRouter } from '../dist/tools/EntityRouter.js';
import { createLogger } from '../dist/logging/Logger.js';
import { StaticProgressReporter } from '../dist/cli/StaticProgressReporter.js';
import { FlickerFreeProgressReporter } from '../dist/cli/FlickerFreeProgressReporter.js';
import { SummaryReporter } from '../dist/cli/SummaryReporter.js';
import { DatabaseCleanupManager } from '../dist/utils/DatabaseCleanupManager.js';
import path from 'path';
/**
 * Enhanced CLI Application with Progress Reporting
 */
class EnhancedCacheSyncCLI {
    config;
    apiHelper;
    cacheManager;
    optimizelyTools;
    entityRouter;
    progressReporter;
    summaryReporter;
    phaseStartTimes = new Map();
    entityMetrics = new Map();
    async initialize(options) {
        console.log(chalk.blue('šŸ”§ Initializing Enhanced Cache Sync CLI...'));
        // Initialize configuration
        this.config = new ConfigManager();
        await this.config.loadConfig();
        const serverConfig = this.config.getConfig();
        // Initialize logging - use file logging to avoid console conflicts
        createLogger({
            logLevel: options.verbose ? 'debug' : 'info',
            logFile: serverConfig.logging.logFile,
            consoleLogging: false, // Disable console logging for CLI
            prettyPrint: false,
            maxFileSize: serverConfig.logging.maxFileSize,
            maxFiles: serverConfig.logging.maxFiles
        });
        // Validate API token
        if (!serverConfig.optimizely.apiToken) {
            throw new Error('Optimizely API token is required. Set OPTIMIZELY_API_TOKEN environment variable.');
        }
        // Initialize progress reporter and summary reporter
        // Use flicker-free reporter with original design
        this.progressReporter = options.classic
            ? new StaticProgressReporter()
            : new FlickerFreeProgressReporter();
        this.summaryReporter = new SummaryReporter();
        // Initialize API helper
        console.log(chalk.cyan('šŸ”Œ Connecting to Optimizely API...'));
        this.apiHelper = new OptimizelyAPIHelper(serverConfig.optimizely.apiToken, {
            baseUrl: serverConfig.optimizely.baseUrl,
            flagsUrl: serverConfig.optimizely.flagsUrl,
            requestsPerMinute: serverConfig.optimizely.rateLimits?.requestsPerMinute,
            requestsPerSecond: serverConfig.optimizely.rateLimits?.requestsPerSecond,
            retryAttempts: serverConfig.optimizely.retries?.maxAttempts,
            retryDelay: serverConfig.optimizely.retries?.baseDelay
        });
        // Test API connection
        const healthCheck = await this.apiHelper.healthCheck();
        if (healthCheck.status !== 'healthy') {
            throw new Error(`Optimizely API health check failed: ${healthCheck.error}`);
        }
        console.log(chalk.green('āœ“ API connection successful'));
        // Initialize storage
        console.log(chalk.cyan('šŸ’¾ Initializing database...'));
        const defaultDbPath = './data/optimizely-cache.db';
        // Priority: CLI option > config > default
        let dbPath;
        if (options.databasePath) {
            // CLI option provided - use it directly
            dbPath = path.isAbsolute(options.databasePath)
                ? options.databasePath
                : path.join(process.cwd(), options.databasePath);
            console.log(chalk.green(`Using CLI-specified database path: ${dbPath}`));
        }
        else {
            // Fall back to config or default
            const configDbPath = serverConfig.storage.databasePath || defaultDbPath;
            dbPath = path.isAbsolute(configDbPath)
                ? configDbPath
                : path.join(process.cwd(), configDbPath);
            console.log(chalk.gray(`Database path: ${dbPath}`));
        }
        // Optimize database path for WSL2
        dbPath = DatabaseCleanupManager.optimizeDatabasePath(dbPath);
        // Debug the actual path being used
        console.log(chalk.yellow(`šŸ” Debug - Final database path: ${dbPath}`));
        console.log(chalk.yellow(`šŸ” Debug - Path type: ${path.isAbsolute(dbPath) ? 'absolute' : 'relative'}`));
        // Ensure the directory exists
        const dbDir = path.dirname(dbPath);
        console.log(chalk.yellow(`šŸ” Debug - Database directory: ${dbDir}`));
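        // Resolution examples for the priority logic above (illustrative paths,
        // not from a real run; relative paths are joined to process.cwd()):
        //   --database-path ./my-cache.db  -> <cwd>/my-cache.db
        //   --database-path /tmp/opti.db   -> /tmp/opti.db (absolute, used as-is)
        //   no option, no config value     -> <cwd>/data/optimizely-cache.db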
        // CRITICAL: Clean up orphaned database connections before opening
        console.log(chalk.cyan('šŸ”§ Checking for orphaned database connections...'));
        const cleanupManager = new DatabaseCleanupManager({
            dbPath,
            timeoutMs: 10000,
            forceCleanup: options.force,
            verbose: options.verbose
        });
        const cleanupSuccess = await cleanupManager.cleanup();
        if (!cleanupSuccess && !options.force) {
            console.log(chalk.red('āŒ Database cleanup failed. Use --force to attempt aggressive cleanup.'));
            process.exit(1);
        }
        const fs = await import('fs/promises');
        try {
            await fs.mkdir(dbDir, { recursive: true });
            console.log(chalk.green(`āœ“ Directory created/verified: ${dbDir}`));
        }
        catch (mkdirError) {
            console.log(chalk.red(`āŒ Failed to create directory: ${mkdirError.message}`));
            throw mkdirError;
        }
        // Check if file exists and permissions
        try {
            await fs.access(dbPath, fs.constants.W_OK | fs.constants.R_OK);
            console.log(chalk.green(`āœ“ Database file exists and is writable: ${dbPath}`));
        }
        catch (accessError) {
            if (accessError.code === 'ENOENT') {
                console.log(chalk.yellow(`āš ļø Database file does not exist, will be created: ${dbPath}`));
            }
            else {
                console.log(chalk.red(`āŒ Database file access error: ${accessError.message}`));
            }
        }
        const storageEngine = new SQLiteEngine({
            path: dbPath,
            backupDir: serverConfig.storage.backupDir,
            verbose: undefined // Remove verbose logging for CLI
        });
        try {
            await storageEngine.init();
            console.log(chalk.green('āœ“ Database initialized'));
            // CRITICAL: Explicitly ensure views are created
            // The init() should have created them, but let's verify and recreate if needed
            const db = storageEngine.db; // Access the database instance
            if (db) {
                const viewCount = db.prepare("SELECT COUNT(*) as count FROM sqlite_master WHERE type='view'").get();
                console.log(chalk.gray(`  Views in database: ${viewCount.count}`));
                if (viewCount.count === 0) {
                    console.log(chalk.yellow('āš ļø No views found, creating views...'));
                    // Fix path resolution for npm package
                    const path = await import('path');
                    const { fileURLToPath } = await import('url');
                    const __filename = fileURLToPath(import.meta.url);
                    const __dirname = path.dirname(__filename);
                    const viewManagerPath = path.join(__dirname, '..', 'dist', 'storage', 'ViewManager.js');
                    const { ViewManager } = await import(viewManagerPath);
                    const viewManager = new ViewManager(db);
                    await viewManager.createAllViews(true);
                    // Verify views were created
                    const newViewCount = db.prepare("SELECT COUNT(*) as count FROM sqlite_master WHERE type='view'").get();
                    console.log(chalk.green(`āœ“ Created ${newViewCount.count} views`));
                }
            }
        }
        catch (error) {
            // Handle DATABASE_RESET_REQUIRED error
            if (error.code === 'DATABASE_RESET_REQUIRED' || error.message?.includes('DATABASE_RESET_REQUIRED')) {
                console.log(chalk.yellow('\nāš ļø Database schema is outdated and needs to be reset.'));
                // Check if error has details in various locations
                const details = error.details || error.data || {};
                console.log(chalk.gray(`  Current version: ${details.currentVersion ?? 0}`));
                console.log(chalk.gray(`  Target version: ${details.targetVersion ?? 10}`));
                console.log(chalk.gray(`  Reason: ${details.reason || 'Schema migration required'}`));
                // Check if --force or --reset option was provided
                if (options.force || options.reset) {
                    console.log(chalk.cyan('\nšŸ”„ Reset option detected, automatically resetting database...'));
                    // Reinitialize with confirmReset option
                    await storageEngine.init({ confirmReset: true });
                    console.log(chalk.green('āœ“ Database reset and initialized successfully'));
                    // CRITICAL: Ensure views are created after reset
                    const db = storageEngine.db;
                    if (db) {
                        const viewCount = db.prepare("SELECT COUNT(*) as count FROM sqlite_master WHERE type='view'").get();
                        console.log(chalk.gray(`  Views in database: ${viewCount.count}`));
                        if (viewCount.count === 0) {
                            console.log(chalk.yellow('āš ļø No views found after reset, creating views...'));
                            // Fix path resolution for npm package
                            const path = await import('path');
                            const { fileURLToPath } = await import('url');
                            const __filename = fileURLToPath(import.meta.url);
                            const __dirname = path.dirname(__filename);
                            const viewManagerPath = path.join(__dirname, '..', 'dist', 'storage', 'ViewManager.js');
                            const { ViewManager } = await import(viewManagerPath);
                            const viewManager = new ViewManager(db);
                            await viewManager.createAllViews(true);
                            // Verify views were created
                            const newViewCount = db.prepare("SELECT COUNT(*) as count FROM sqlite_master WHERE type='view'").get();
                            console.log(chalk.green(`āœ“ Created ${newViewCount.count} views`));
                        }
                    }
                }
                else {
                    console.log(chalk.red('\nāŒ Database schema is outdated and needs to be reset.'));
                    console.log(chalk.yellow('\nšŸŽÆ EASIEST SOLUTION:'));
                    console.log(chalk.green.bold('   npm run reset-sync'));
                    console.log(chalk.gray('\n  This will automatically reset the database and sync your data.'));
                    console.log(chalk.gray('\n  Alternative (if you prefer command line options):'));
                    console.log(chalk.gray('   npm run cache-sync:enhanced -- --force'));
                    console.log(chalk.gray('   npm run cache-sync:enhanced -- --reset'));
                    throw new Error('Database reset required. Run "npm run reset-sync" for the easiest solution.');
                }
            }
            else {
                throw error;
            }
        }
        // Initialize project filter
        const projectFilter = new ProjectFilter(serverConfig.optimizely.projects);
        // Initialize cache manager
        this.cacheManager = new CacheManager(storageEngine, this.apiHelper, projectFilter, serverConfig);
        await this.cacheManager.init();
        // Initialize Optimizely tools with cache manager
        this.optimizelyTools = new OptimizelyMCPTools(this.cacheManager);
        // Initialize EntityRouter for targeted sync
        this.entityRouter = new EntityRouter(this.apiHelper, this.cacheManager, storageEngine);
        console.log(chalk.green('āœ“ Initialization complete\n'));
    }
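    /**
     * Shape of the progress updates consumed below. This is a sketch inferred
     * from how the callback in syncCache() reads and constructs these objects;
     * the authoritative definition lives in the compiled dist/ modules:
     *
     *   {
     *     phase: 'Syncing experiments',   // current sync phase label
     *     current: 3,                     // steps completed in this phase
     *     total: 10,                      // total steps in this phase
     *     message: 'Saved 76 events',     // human-readable status line
     *     percent: 30                     // 0-100 completion for the phase
     *   }
     */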
    /**
     * Perform cache sync with progress reporting
     */
    async syncCache(options) {
        const syncStartTime = Date.now();
        try {
            const operation = options.incremental ? 'Incremental Sync' : 'Full Sync';
            this.progressReporter.startSync(operation, options);
            // Calculate total steps based on actual projects and their entity counts
            const totalSteps = await this.calculateTotalSteps(options.projectId);
            this.progressReporter.setTotalSteps(totalSteps.projectCount, totalSteps.avgEntitiesPerProject, totalSteps.totalSteps);
            // Create enhanced progress callback that tracks metrics
            const progressCallback = (progress) => {
                // Track phase timings
                if (!this.phaseStartTimes.has(progress.phase)) {
                    this.phaseStartTimes.set(progress.phase, Date.now());
                }
                // Track entity metrics from progress messages
                this.extractEntityMetrics(progress);
                // Update progress display
                this.progressReporter.updateProgress(progress);
                // When phase completes, record timing
                if (progress.percent >= 100 && this.phaseStartTimes.has(progress.phase)) {
                    const startTime = this.phaseStartTimes.get(progress.phase);
                    const duration = Date.now() - startTime;
                    this.summaryReporter.addPhaseTiming(progress.phase, duration);
                }
            };
            // Use targeted sync if tables specified, otherwise full sync
            let result;
            if (options.targetTables && options.targetTables.length > 0) {
                console.log(chalk.yellow(`šŸŽÆ Performing targeted sync for tables: ${options.targetTables.join(', ')}`));
                result = await this.performTargetedSync({
                    targetTables: options.targetTables,
                    projectId: options.projectId,
                    force: options.force,
                    progressCallback
                });
            }
            else {
                // Use OptimizelyMCPTools.refreshCache which now supports progress callbacks
                result = await this.optimizelyTools.refreshCache({
                    projectId: options.projectId,
                    force: options.force,
                    incremental: options.incremental,
                    progressCallback
                });
            }
            // Process entity metrics for summary
            this.entityMetrics.forEach((metrics, entityType) => {
                const duration = Date.now() - metrics.startTime;
                const entityPerf = SummaryReporter.createEntityMetrics(
                    entityType,
                    metrics.count,
                    duration,
                    1 // Batch operations count (from our optimization)
                );
                this.summaryReporter.addEntityMetrics(entityPerf);
            });
            // Finalize summary report
            const totalDuration = Date.now() - syncStartTime;
            const projectCount = result.projectsSynced || 1;
            this.summaryReporter.finalize(totalDuration, projectCount);
            // Extract progress data from result if available
            if (result.progress_summary) {
                console.log(chalk.bold('\nšŸ“ˆ Progress Summary:'));
                console.log(chalk.white(`  Total phases: ${result.progress_summary.total_phases}`));
                console.log(chalk.white(`  Total updates: ${result.progress_summary.total_updates}`));
                if (options.verbose && result.progress_updates) {
                    console.log(chalk.bold('\nšŸ“‹ Detailed Progress Log:'));
                    result.progress_updates.forEach((update) => {
                        console.log(chalk.gray(`  [${update.timestamp}] ${update.phase}: ${update.message}`));
                    });
                }
            }
            this.progressReporter.completeSync(result);
            // Generate summary report if requested
            if (options.summary === true) {
                // Only show when explicitly requested
                this.summaryReporter.toConsole();
                // Export reports if path provided
                if (options.exportPath) {
                    const basePath = options.exportPath;
                    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
                    // Export JSON
                    const jsonPath = path.join(basePath, `performance-report-${timestamp}.json`);
                    this.summaryReporter.toJSON(jsonPath);
                    // Export Markdown
                    const mdPath = path.join(basePath, `performance-report-${timestamp}.md`);
                    this.summaryReporter.toMarkdown(mdPath);
                }
            }
        }
        catch (error) {
            this.progressReporter.error(error);
            throw error;
        }
    }
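    /**
     * Options accepted by syncCache() (an illustrative sketch assembled from
     * the CLI flags parsed in createCommand() below; the values are hypothetical):
     *
     *   await cli.syncCache({
     *     projectId: '12345',            // --project
     *     force: true,                   // --force
     *     incremental: false,            // --incremental
     *     targetTables: ['experiments'], // --tables / --entities
     *     summary: true,                 // --summary
     *     exportPath: './reports',       // --export
     *     verbose: false                 // --verbose
     *   });
     */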
    /**
     * Calculate total steps based on actual projects and entity configuration
     */
    async calculateTotalSteps(specificProjectId) {
        try {
            // Get project IDs from environment or use specific project
            const projectIds = specificProjectId
                ? [specificProjectId]
                : (process.env.OPTIMIZELY_PROJECT_IDS?.split(',').map(id => id.trim()).filter(id => id.length > 0) || []);
            if (projectIds.length === 0) {
                return { projectCount: 1, avgEntitiesPerProject: 7, totalSteps: 7 };
            }
            // Get entity counts from environment variables or use defaults
            const webEntities = process.env.WEB_ENTITIES?.split(',').map(e => e.trim()).filter(e => e.length > 0) || [
                'experiments', 'campaigns', 'pages', 'audiences', 'events', 'attributes', 'extensions', 'change_history'
            ];
            const featureEntities = process.env.FEATURE_ENTITIES?.split(',').map(e => e.trim()).filter(e => e.length > 0) || [
                'flags', 'environments', 'features', 'audiences', 'events', 'attributes', 'change_history'
            ];
            console.log(chalk.gray(`Calculating steps: ${projectIds.length} projects, ${webEntities.length} web entities, ${featureEntities.length} feature entities`));
            // Determine platform for each project by checking API
            let webProjectCount = 0;
            let featureProjectCount = 0;
            for (const projectId of projectIds) {
                try {
                    const project = await this.apiHelper.getProject(projectId);
                    if (project.is_flags_enabled) {
                        featureProjectCount++;
                    }
                    else {
                        webProjectCount++;
                    }
                }
                catch (error) {
                    console.log(chalk.yellow(`āš ļø Could not determine platform for project ${projectId}, assuming Web`));
                    webProjectCount++;
                }
            }
            // Calculate total steps
            const webSteps = webProjectCount * webEntities.length;
            // For Feature projects, add 1 extra step for flag_rulesets which is reported separately
            const featureSteps = featureProjectCount * (featureEntities.length + (featureEntities.includes('flags') ? 1 : 0));
            const totalSteps = webSteps + featureSteps;
            const avgEntitiesPerProject = totalSteps / projectIds.length;
            console.log(chalk.gray(`Step calculation: ${webProjectCount} web projects Ɨ ${webEntities.length} entities = ${webSteps}, ${featureProjectCount} feature projects Ɨ ${featureEntities.length} entities${featureEntities.includes('flags') ? ' (+1 for flag_rulesets)' : ''} = ${featureSteps}, total: ${totalSteps}`));
            return {
                projectCount: projectIds.length,
                avgEntitiesPerProject: Math.round(avgEntitiesPerProject),
                totalSteps
            };
        }
        catch (error) {
            console.log(chalk.yellow(`āš ļø Could not calculate exact steps, using estimate`));
            return { projectCount: 1, avgEntitiesPerProject: 7, totalSteps: 7 };
        }
    }
    /**
     * Extract entity metrics from progress updates
     */
    extractEntityMetrics(progress) {
        // Match patterns like "Syncing 17 audiences" or "Saved 76 events"
        const match = progress.message.match(/(?:Syncing|Saved|Processing)\s+(\d+)\s+(\w+)/i);
        if (match) {
            const count = parseInt(match[1]);
            const entityType = match[2].toLowerCase();
            if (!isNaN(count) && entityType) {
                if (!this.entityMetrics.has(entityType)) {
                    this.entityMetrics.set(entityType, { count: 0, startTime: Date.now() });
                }
                const metrics = this.entityMetrics.get(entityType);
                metrics.count = Math.max(metrics.count, count); // Use max to avoid double counting
            }
        }
    }
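    /**
     * Regex behaviour sketch (illustrative inputs against the pattern above):
     *
     *   "Syncing 17 audiences"  -> count 17, entityType "audiences"
     *   "Saved 76 events"       -> count 76, entityType "events"
     *   "Fetching flag data"    -> no match, ignored
     *
     * Repeated messages for the same entity keep the maximum count seen, so
     * "Syncing 10 events" followed by "Saved 76 events" records 76.
     */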
    /**
     * Sync multiple projects
     */
    async syncMultipleProjects(projectIds, options) {
        console.log(chalk.bold.blue(`šŸ”„ Syncing ${projectIds.length} projects...\n`));
        let successCount = 0;
        let failureCount = 0;
        for (const projectId of projectIds) {
            console.log(chalk.cyan(`\nšŸ“¦ Project ${projectId}:`));
            try {
                await this.syncCache({ ...options, projectId });
                successCount++;
            }
            catch (error) {
                failureCount++;
                console.log(chalk.red(`  āœ— Failed: ${error.message}`));
                if (!options.continueOnError) {
                    throw error;
                }
            }
        }
        // Display multi-project summary
        console.log(chalk.bold('\nšŸ“Š Multi-Project Summary:'));
        console.log(chalk.green(`  āœ“ Successful: ${successCount} projects`));
        if (failureCount > 0) {
            console.log(chalk.red(`  āœ— Failed: ${failureCount} projects`));
        }
    }
    /**
     * Get the summary reporter instance
     */
    getSummaryReport() {
        return this.summaryReporter;
    }
    /**
     * List all available database tables
     */
    async listTables() {
        if (!this.cacheManager) {
            throw new Error('Cache manager not initialized');
        }
        const query = `
            SELECT name FROM sqlite_master
            WHERE type='table'
            AND name NOT LIKE 'sqlite_%'
            AND name != 'schema_info'
            ORDER BY name
        `;
        const tables = await this.cacheManager.storage.query(query);
        return tables.map((row) => row.name);
    }
    /**
     * List all available entity types based on EntityRouter configuration
     */
    getAvailableEntityTypes() {
        // Standard entity types supported by the system
        const entityTypes = [
            'project', 'flag', 'experiment', 'campaign', 'page', 'audience',
            'attribute', 'event', 'variation', 'variable_definition', 'rule',
            'ruleset', 'environment', 'collaborator', 'group', 'extension',
            'webhook', 'list_attribute', 'results'
        ];
        return entityTypes.sort();
    }
    /**
     * Map entity types to their corresponding database tables
     */
    getEntityTableMapping() {
        return {
            'project': 'projects',
            'flag': 'flags',
            'experiment': 'experiments',
            'campaign': 'campaigns',
            'page': 'pages',
            'audience': 'audiences',
            'attribute': 'attributes',
            'event': 'events',
            'variation': 'variations',
            'variable_definition': 'variable_definitions',
            'rule': 'rules',
            'ruleset': 'rulesets',
            'environment': 'environments',
            'collaborator': 'collaborators',
            'group': 'groups',
            'extension': 'extensions',
            'webhook': 'webhooks',
            'list_attribute': 'list_attributes',
            'results': 'experiment_results'
        };
    }
    /**
     * Validate and convert entity types to table names
     */
    convertEntitiesToTables(entities) {
        const mapping = this.getEntityTableMapping();
        const tables = [];
        const invalid = [];
        for (const entity of entities) {
            const tableName = mapping[entity];
            if (tableName) {
                tables.push(tableName);
            }
            else {
                invalid.push(entity);
            }
        }
        if (invalid.length > 0) {
            throw new Error(`Invalid entity types: ${invalid.join(', ')}. Use --list-entities to see available types.`);
        }
        return tables;
    }
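    /**
     * Usage sketch for the entity-to-table conversion above (illustrative):
     *
     *   convertEntitiesToTables(['flag', 'experiment'])
     *     // -> ['flags', 'experiments']
     *   convertEntitiesToTables(['flag', 'banana'])
     *     // -> throws: Invalid entity types: banana. Use --list-entities ...
     */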
    /**
     * Perform targeted synchronization for specific tables
     */
    async performTargetedSync(options) {
        const startTime = Date.now();
        const { targetTables, projectId, force, progressCallback } = options;
        // For targeted sync of entity tables (not projects table), we MUST have a project ID
        const entityTables = ['experiments', 'campaigns', 'pages', 'audiences', 'events', 'attributes', 'extensions', 'webhooks', 'flags', 'features'];
        const requiresProject = targetTables.some(table => entityTables.includes(table));
        if (requiresProject && !projectId) {
            throw new Error('Project ID is required for syncing entity tables. Use --project <id> to specify which project to sync.');
        }
        // Get table to entity mapping (reverse of entity to table mapping)
        const tableToEntityMapping = {};
        const entityMapping = this.getEntityTableMapping();
        Object.entries(entityMapping).forEach(([entity, table]) => {
            tableToEntityMapping[table] = entity;
        });
        // If force option is enabled, clear target tables first
        if (force) {
            progressCallback?.({
                phase: 'Clearing targeted tables',
                current: 0,
                total: targetTables.length,
                message: `Clearing ${targetTables.length} tables...`,
                percent: 0
            });
            await this.cacheManager.storage.run('BEGIN TRANSACTION');
            try {
                await this.cacheManager.storage.run('PRAGMA foreign_keys = OFF');
                for (let i = 0; i < targetTables.length; i++) {
                    const table = targetTables[i];
                    await this.cacheManager.storage.run(`DELETE FROM ${table}`);
                    progressCallback?.({
                        phase: 'Clearing targeted tables',
                        current: i + 1,
                        total: targetTables.length,
                        message: `Cleared table: ${table}`,
                        percent: Math.round(((i + 1) / targetTables.length) * 100)
                    });
                }
                await this.cacheManager.storage.run('PRAGMA foreign_keys = ON');
                await this.cacheManager.storage.run('COMMIT');
            }
            catch (error) {
                await this.cacheManager.storage.run('ROLLBACK');
                throw error;
            }
        }
        // Sync each target table by entity type
        let totalSynced = 0;
        const results = [];
        for (let i = 0; i < targetTables.length; i++) {
            const table = targetTables[i];
            const entityType = tableToEntityMapping[table];
            if (!entityType) {
                console.log(chalk.yellow(`āš ļø Warning: No entity mapping found for table '${table}', skipping...`));
                continue;
            }
            progressCallback?.({
                phase: `Syncing ${entityType}`,
                current: i,
                total: targetTables.length,
                message: `Fetching ${entityType} data from API...`,
                percent: Math.round((i / targetTables.length) * 100)
            });
            try {
                // Use CacheManager's direct methods to fetch from API
                let allEntities = [];
                // Call the appropriate API method based on entity type
                switch (entityType) {
                    case 'project':
                        // Projects require special handling - list all then filter
                        const allProjects = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listProjects({ page, per_page: perPage }));
                        // Import safeIdToString for project filtering
                        const { safeIdToString: safeId } = await import('../dist/utils/SafeIdConverter.js');
                        // Filter to specific project if projectId is provided
                        if (projectId) {
                            allEntities = allProjects.filter(p => safeId(p.id) === safeId(projectId));
                        }
                        else {
                            allEntities = allProjects;
                        }
                        break;
                    case 'page':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listPages(projectId, { page, per_page: perPage }));
                        break;
                    case 'event':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listEvents(projectId, { page, per_page: perPage }));
                        break;
                    case 'audience':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listAudiences(projectId, { page, per_page: perPage }));
                        break;
                    case 'experiment':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listExperiments(projectId, { page, per_page: perPage }));
                        break;
                    case 'attribute':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listAttributes(projectId, { page, per_page: perPage }));
                        break;
                    case 'extension':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listExtensions(projectId, { page, per_page: perPage }));
                        break;
                    case 'webhook':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listWebhooks(projectId, { page, per_page: perPage }));
                        break;
                    case 'campaign':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listCampaigns(projectId, { page, per_page: perPage }));
                        break;
                    case 'flag':
                        // Feature flags require special handling
                        const flags = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listFlags(projectId, { page, per_page: perPage }));
                        allEntities = flags;
                        break;
                    case 'group':
                        allEntities = await this.cacheManager.fetchAllPages((page, perPage) => this.apiHelper.listGroups({ page, per_page: perPage, project_id: projectId }));
                        break;
                    case 'environment':
                        // Environments don't support pagination
                        allEntities = await this.apiHelper.listEnvironments(projectId);
                        break;
                    case 'collaborator':
                        // Collaborators use different endpoint
                        allEntities = await this.apiHelper.listCollaborators(projectId);
                        break;
                    default:
                        console.log(chalk.yellow(`āš ļø Warning: No API method found for entity type '${entityType}', skipping...`));
                        continue;
                }
                totalSynced += allEntities.length;
                // Update progress after fetching
                progressCallback?.({
                    phase: `Syncing ${entityType}`,
                    current: i,
                    total: targetTables.length,
                    message: `Fetched ${allEntities.length} ${entityType}(s), saving to cache...`,
                    percent: Math.round((i / targetTables.length) * 100)
                });
                // Store entities in cache using the cache manager's batch methods
                if (allEntities.length > 0) {
                    await this.saveEntitiesToCacheProper(table, allEntities, entityType, projectId);
                }
                results.push({ table, entityType, count: allEntities.length, success: true });
                progressCallback?.({
                    phase: `Syncing ${entityType}`,
                    current: i + 1,
                    total: targetTables.length,
                    message: `Synced ${allEntities.length} ${entityType}(s)`,
                    percent: Math.round(((i + 1) / targetTables.length) * 100)
                });
            }
            catch (error) {
                console.log(chalk.red(`āŒ Error syncing ${entityType}: ${error.message}`));
                results.push({ table, entityType, count: 0, success: false, error: error.message });
            }
        }
        const duration = Date.now() - startTime;
        return {
            success: true,
            projectsSynced: projectId ? 1 : 0,
            duration,
            timestamp: new Date().toISOString(),
            message: `Targeted sync completed for ${targetTables.length} tables`,
            totalChanges: totalSynced,
            totalCreated: totalSynced,
            totalUpdated: 0,
            totalDeleted: force ? targetTables.length : 0,
            targetedSync: true,
            tables: results
        };
    }
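    /**
     * Example of the per-table result entries collected above (illustrative
     * values; the error string is whatever the failed API call reports):
     *
     *   { table: 'experiments', entityType: 'experiment', count: 42, success: true }
     *   { table: 'webhooks',    entityType: 'webhook',    count: 0,  success: false,
     *     error: '<API error message>' }
     */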
    /**
     * Save entities to cache using CacheManager's proper batch insert methods
     */
    async saveEntitiesToCacheProper(table, entities, entityType, projectId) {
        if (entities.length === 0)
            return;
        // Use CacheManager's batchInsert method which handles all the complexity
        // Need to import safeIdToString from utils
        const { safeIdToString } = await import('../dist/utils/SafeIdConverter.js');
        // Define field mappers based on entity type
        switch (entityType) {
            case 'project': {
                const fieldMapper = (project) => [
                    safeIdToString(project.id),
                    project.name || '',
                    project.description || '',
                    project.platform || '',
                    project.status || '',
                    safeIdToString(project.account_id),
                    project.is_flags_enabled ? 1 : 0,
                    project.archived ? 1 : 0,
                    project.created || project.created_at || new Date().toISOString(),
                    project.last_modified || project.updated_at || new Date().toISOString(),
                    JSON.stringify(project),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO projects (id, name, description, platform, status, account_id, is_flags_enabled, archived, created_at, last_modified, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            case 'page': {
                const fieldMapper = (page) => [
                    safeIdToString(page.id),
                    safeIdToString(page.project_id || projectId),
                    page.key || '',
                    page.name || '',
                    page.edit_url || '',
                    page.activation_type || '',
                    page.activation_code || '',
                    page.category || '',
                    page.page_type || '',
                    typeof page.conditions === 'string' ? page.conditions : JSON.stringify(page.conditions || {}),
                    page.archived ? 1 : 0,
                    page.created || page.created_time || new Date().toISOString(),
                    page.last_modified || page.updated_time || new Date().toISOString(),
                    JSON.stringify(page),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO pages (id, project_id, key, name, edit_url, activation_type, activation_code, category, page_type, conditions, archived, created_time, updated_time, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            case 'event': {
                const fieldMapper = (event) => [
                    safeIdToString(event.id),
                    safeIdToString(event.project_id || projectId),
                    event.key || '',
                    event.name || '',
                    event.description || '',
                    event.event_type || 'custom',
                    event.category || '',
                    event.archived ? 1 : 0,
                    event.created || event.created_time || new Date().toISOString(),
                    JSON.stringify(event),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO events (id, project_id, key, name, description, event_type, category, archived, created_time, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            case 'audience': {
                const fieldMapper = (audience) => [
                    safeIdToString(audience.id),
                    safeIdToString(audience.project_id || projectId),
                    audience.name || '',
                    audience.description || '',
                    JSON.stringify(audience.conditions || []),
                    audience.archived ? 1 : 0,
                    audience.created || audience.created_time || new Date().toISOString(),
                    audience.last_modified || audience.updated_time || new Date().toISOString(),
                    JSON.stringify(audience),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO audiences (id, project_id, name, description, conditions, archived, created_time, last_modified, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            case 'experiment': {
                const fieldMapper = (experiment) => [
                    safeIdToString(experiment.id),
                    safeIdToString(experiment.project_id || projectId),
                    experiment.name || '',
                    experiment.description || '',
                    experiment.status || 'not_started',
                    experiment.flag_key || '',
                    experiment.environment || '',
                    experiment.type || 'a/b',
                    experiment.archived ? 1 : 0,
                    experiment.created || experiment.created_time || new Date().toISOString(),
                    experiment.last_modified || experiment.updated_time || new Date().toISOString(),
                    JSON.stringify(experiment),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO experiments (id, project_id, name, description, status, flag_key, environment, type, archived, created_time, updated_time, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            case 'attribute': {
                const fieldMapper = (attribute) => [
                    safeIdToString(attribute.id),
                    safeIdToString(attribute.project_id || projectId),
                    attribute.key || '',
                    attribute.name || '',
                    attribute.condition_type || '',
                    attribute.archived ? 1 : 0,
                    attribute.last_modified || attribute.updated_time || new Date().toISOString(),
                    JSON.stringify(attribute),
                    new Date().toISOString()
                ];
                await this.cacheManager.storage.runBatch(`INSERT OR REPLACE INTO attributes (id, project_id, key, name, condition_type, archived, last_modified, data_json, synced_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, entities.map(fieldMapper));
                break;
            }
            default:
                // For other entity types, fall back to generic handling
                // Note: This is less reliable and should be expanded for all entity types
                console.log(chalk.yellow(`āš ļø Using generic handler for entity type '${entityType}'`));
                // Get column names from the first entity to build dynamic SQL
                const sampleEntity = entities[0];
                const columns = Object.keys(sampleEntity);
                // Build SQL for batch insert
                const placeholders = columns.map(() => '?').join(', ');
                const sql = `INSERT OR REPLACE INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
                // Prepare batch parameters
                const batchParams = entities.map(entity => columns.map(col => {
                    const value = entity[col];
                    // Handle complex objects by JSON stringifying them
                    return (typeof value === 'object' && value !== null) ? JSON.stringify(value) : value;
                }));
                // Use batch insert for performance
                await this.cacheManager.storage.runBatch(sql, batchParams, 100);
        }
    }
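    /**
     * Generic-handler sketch (illustrative): for an entity like
     *   { id: 1, name: 'Homepage', conditions: ['and', { ... }] }
     * the fallback above derives columns from the first object's keys and builds
     *   INSERT OR REPLACE INTO <table> (id, name, conditions) VALUES (?, ?, ?)
     * with the `conditions` array JSON-stringified before binding. It assumes
     * every entity in the batch shares the first entity's key set.
     */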
    async cleanup() {
        if (this.progressReporter) {
            this.progressReporter.dispose();
        }
        if (this.cacheManager) {
            await this.cacheManager.close();
        }
    }
}
/**
 * Create CLI command
 */
function createCommand() {
    const program = new Command();
    program
        .name('cache-sync-enhanced')
        .description('Enhanced Optimizely Cache Sync with real-time progress bars')
        .version('2.0.0')
        .option('-p, --project <id>', 'sync specific project')
        .option('-m, --multi-project <ids...>', 'sync multiple projects (space-separated IDs)')
        .option('-f, --force', 'clear existing data before sync (auto-resets database if schema outdated)')
        .option('-r, --reset', 'reset database if schema is outdated (alias for --force)')
        .option('-i, --incremental', 'use incremental sync instead of full sync')
        .option('-v, --verbose', 'enable verbose output')
        .option('-c, --continue-on-error', 'continue syncing other projects if one fails (multi-project mode)')
        .option('-s, --summary', 'enable performance summary report generation')
        .option('-e, --export <path>', 'export performance reports to specified directory')
        .option('--json', 'output summary in JSON format')
        .option('--markdown', 'output summary in Markdown format')
        .option('--compact', 'use compact progress display (default)')
        .option('--classic', 'use classic verbose progress display')
        .option('--tables <tables...>', 'sync only specific database tables (space-separated table names)')
        .option('--entities <entities...>', 'sync only specific entity types (space-separated: flags, experiments, audiences, etc.)')
        .option('--database-path <path>', 'specify custom database file path (defaults to ./data/optimizely-cache.db)')
        .option('--list-tables', 'list all available database tables and exit')
        .option('--list-entities', 'list all available entity types and exit')
        .addHelpText('after', `
Examples:
  $ npm run cache-sync:enhanced                                    # Full sync with progress bars
  $ npm run cache-sync:enhanced -- --verbose                       # Verbose output with detailed logs
  $ npm run cache-sync:enhanced -- --project 12345                 # Sync specific project
  $ npm run cache-sync:enhanced -- --force                         # Force refresh (auto-resets if needed)
  $ npm run cache-sync:enhanced -- --reset                         # Reset database if schema is outdated
  $ npm run cache-sync:enhanced -- --incremental                   # Incremental sync
  $ npm run cache-sync:enhanced -- --multi-project 123 456 789     # Sync multiple projects
  $ npm run cache-sync:enhanced -- --export ./reports              # Export performance reports
  $ npm run cache-sync:enhanced -- --summary                       # Show performance summary report
  $ npm run cache-sync:enhanced -- --database-path ./my-cache.db   # Use custom database path
  $ npm run cache-sync:enhanced -- --database-path /tmp/optimizely.db  # Use absolute path

Targeted Sync Examples:
  $ npm run cache-sync:enhanced -- --tables experiments flags      # Sync only experiments and flags tables
  $ npm run cache-sync:enhanced -- --entities experiment flag      # Sync only experiment and flag entities
  $ npm run cache-sync:enhanced -- --project 12345 --tables experiments  # Sync experiments table for specific project
  $ npm run cache-sync:enhanced -- --list-tables                   # Show all available database tables
  $ npm run cache-sync:enhanced -- --list-entities                 # Show all available entity types

Targeted Sync Features:
  - Sync only specific database tables to recover from database locking issues
  - Entity-level targeting with automatic table mapping
  - Preserve foreign key relationships during targeted operations
  - Force clear specific tables before sync for complete refresh
  - Perfect for fixing timestamp-related sync conflicts

Performance Features:
  - Real-time progress bars for each sync phase
  - SQL operation reduction metrics (target: 90%+ reduction)
  - Entity-specific performance breakdowns
  - Phase timing analysis
  - Optimization recommendations
  - Export reports in JSON/Markdown formats
  - Multi-project progress aggregation
  - Verbose debugging mode

Recovery Use Cases:
  - Database lock errors left some tables out of sync
  - Change history timestamps don't match (use targeted sync to force refresh)
`);
    return program;
}