@simonecoelhosfo/optimizely-mcp-server

Optimizely MCP Server for AI assistants with integrated CLI tools

import Database from '../database/better-sqlite3-loader.js';
import { promises as fs } from 'fs';
import path from 'path';
import { getLogger } from '../logging/Logger.js';
import { MCPErrorMapper } from '../errors/MCPErrorMapping.js';
import { ViewManager } from './ViewManager.js';

export class SQLiteEngine {
  db = null;
  config;
  isInitialized = false;
  SCHEMA_VERSION = 12; // Removed incorrect UNIQUE(project_id, edit_url) constraint from pages table
  viewManager = null;

  // Enhanced concurrency management
  operationQueue = [];
  isProcessingQueue = false;
  connectionPool = [];
  activeConnections = 0;
  maxConnections;

  // Retry configuration
  retryConfig = {
    maxRetries: 5,
    baseDelay: 100, // milliseconds
    maxDelay: 5000, // milliseconds
    exponentialBase: 2
  };

  // Connection configuration
  connectionConfig = {
    busyTimeout: 30000, // 30 seconds
    cacheSize: 2000,
    maxConnections: 10
  };

  constructor(config) {
    this.config = {
      backupDir: './data/backups',
      ...config
    };
    // Apply user configuration overrides
    if (config.retryConfig) {
      this.retryConfig = { ...this.retryConfig, ...config.retryConfig };
    }
    if (config.connectionConfig) {
      this.connectionConfig = { ...this.connectionConfig, ...config.connectionConfig };
    }
    this.maxConnections = this.connectionConfig.maxConnections;
  }

  /**
   * Get the database file path
   */
  get dbPath() {
    return this.config.path;
  }

  async init(options) {
    if (this.isInitialized) return;
    try {
      const dbDir = path.dirname(this.config.path);
      await fs.mkdir(dbDir, { recursive: true });

      // Check if database file exists before creating it
      const dbExists = await fs.access(this.config.path).then(() => true).catch(() => false);
      const isNewDatabase = !dbExists;
      getLogger().info({ dbPath: this.config.path, dbExists, isNewDatabase }, 'Database initialization check');

      this.db = new Database(this.config.path);

      // Enhanced production-ready SQLite configuration for concurrency
      // WAL mode with enhanced configuration
      this.db.pragma('journal_mode = WAL');
      // Enhanced WAL configuration for better concurrency
      this.db.pragma('wal_autocheckpoint = 1000'); // Checkpoint every 1000 pages
      this.db.pragma('wal_checkpoint_mode = TRUNCATE'); // Aggressive checkpointing

      // Synchronization and durability settings
      this.db.pragma('synchronous = NORMAL'); // Balance between performance and safety
      this.db.pragma('journal_size_limit = 67108864'); // 64MB WAL file limit

      // Memory and cache settings
      this.db.pragma(`cache_size = ${this.connectionConfig.cacheSize}`);
      this.db.pragma('temp_store = MEMORY');
      this.db.pragma('mmap_size = 268435456'); // 256MB memory mapping

      // Connection and locking settings
      this.db.pragma(`busy_timeout = ${this.connectionConfig.busyTimeout}`);
      this.db.pragma('foreign_keys = ON');

      // Transaction settings for better concurrency
      this.db.pragma('read_uncommitted = OFF'); // Ensure consistency
      this.db.pragma('locking_mode = NORMAL'); // Allow multiple connections

      // Force immediate WAL checkpoint to establish clean state
      this.db.pragma('wal_checkpoint(TRUNCATE)');

      getLogger().info({
        journalMode: this.db.pragma('journal_mode', { simple: true }),
        cacheSize: this.db.pragma('cache_size', { simple: true }),
        busyTimeout: this.db.pragma('busy_timeout', { simple: true }),
        walMode: this.db.pragma('wal_autocheckpoint', { simple: true })
      }, 'SQLiteEngine: Enhanced database configuration applied');

      // For new databases, auto-create schema without requiring reset
      if (isNewDatabase) {
        getLogger().info('Creating new database with fresh schema...');
        await this.createSchema();
        await this.createViews(); // Create all views for new database
        await this.setSchemaVersion();
      } else {
        // For existing databases, check if migration is needed
        const migrationCheck = await this.checkMigrationStatus();
        if (migrationCheck.needsMigration) {
          if (options?.skipMigration) {
            // Return early without initializing if we're just checking
            return;
          }
          if (!options?.confirmReset) {
            // Throw a special error that can be caught by the caller
            const error = new Error('DATABASE_RESET_REQUIRED');
            error.code = 'DATABASE_RESET_REQUIRED';
            error.details = migrationCheck;
            throw error;
          }
          getLogger().info('Database schema outdated, rebuilding with user confirmation...');
          await this.dropAllTables();
          await this.createSchema();
          await this.createViews(); // Create all views after schema
          await this.setSchemaVersion();
        } else {
          await this.createSchema(); // This only creates tables if they don't exist
          await this.createViews(); // Create all views
          await this.migrateSchema(); // This adds missing columns
        }
      }
      this.isInitialized = true;
      getLogger().info('SQLiteEngine initialized successfully');
    } catch (error) {
      throw MCPErrorMapper.toMCPError(error, 'SQLiteEngine initialization failed');
    }
  }

  /**
   * Check migration status without modifying the database
   * @returns Migration status information
   */
  async checkMigrationStatus() {
    if (!this.db) {
      return { needsMigration: false, currentVersion: 0, targetVersion: this.SCHEMA_VERSION };
    }
    try {
      // First check if schema_info table exists
      const tableExists = this.db.prepare(`
        SELECT name FROM sqlite_master WHERE type='table' AND name='schema_info'
      `).get();
      if (!tableExists) {
        return {
          needsMigration: true,
          currentVersion: 0,
          targetVersion: this.SCHEMA_VERSION,
          reason: 'No schema version table found - database needs initialization'
        };
      }
      // Get current schema version
      const result = this.db.prepare('SELECT version FROM schema_info ORDER BY version DESC LIMIT 1').get();
      const currentVersion = result?.version || 0;
      if (currentVersion < this.SCHEMA_VERSION) {
        return {
          needsMigration: true,
          currentVersion,
          targetVersion: this.SCHEMA_VERSION,
          reason: `Schema version ${currentVersion} is outdated (target: ${this.SCHEMA_VERSION})`
        };
      }
      return { needsMigration: false, currentVersion, targetVersion: this.SCHEMA_VERSION };
    } catch (error) {
      // If there's any error, assume we need migration
      getLogger().warn({ error: error.message || error }, 'Error checking schema version, assuming migration needed');
      return {
        needsMigration: true,
        currentVersion: 0,
        targetVersion: this.SCHEMA_VERSION,
        reason: 'Error checking schema version'
      };
    }
  }

  async checkNeedsMigration() {
    const status = await this.checkMigrationStatus();
    return status.needsMigration;
  }

  async setSchemaVersion() {
    if (!this.db) return;
    // Ensure schema_info table exists
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS schema_info (
        version INTEGER PRIMARY KEY,
        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
      )
    `);
    this.db.prepare('INSERT OR REPLACE INTO schema_info (version) VALUES (?)').run(this.SCHEMA_VERSION);
    getLogger().info(`Schema version set to ${this.SCHEMA_VERSION}`);
  }

  async dropAllTables() {
    if (!this.db) return;
    try {
      // Disable foreign keys temporarily
      this.db.pragma('foreign_keys = OFF');
      // Get all table names except schema_info
      const tables = this.db.prepare(`
        SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name != 'schema_info'
      `).all();
      // Drop each table
      for (const table of tables) {
        this.db.exec(`DROP TABLE IF EXISTS ${table.name}`);
        getLogger().info(`Dropped table: ${table.name}`);
      }
      // Re-enable foreign keys
      this.db.pragma('foreign_keys = ON');
      getLogger().info('All tables dropped successfully');
    } catch (error) {
      getLogger().error({ error: error.message || error }, 'Failed to drop tables');
    }
  }
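
  // Illustrative usage sketch (not part of this file; identifiers such as `engine`
  // and the database path are hypothetical). It follows the constructor and init()
  // contract above: config overrides are shallow-merged into retryConfig and
  // connectionConfig, and init() throws DATABASE_RESET_REQUIRED when an existing
  // database needs a destructive schema rebuild.
  //
  //   const engine = new SQLiteEngine({
  //     path: './data/cache.db',                  // hypothetical location
  //     retryConfig: { maxRetries: 3 },           // merged over the defaults above
  //     connectionConfig: { busyTimeout: 10000 }  // merged over the defaults above
  //   });
  //   try {
  //     await engine.init();
  //   } catch (err) {
  //     if (err.code === 'DATABASE_RESET_REQUIRED') {
  //       await engine.init({ confirmReset: true }); // destructive rebuild
  //     } else {
  //       throw err;
  //     }
  //   }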

  async createSchema() {
    if (!this.db) throw new Error('Database not initialized');
    const schema = `
      -- Projects table
      CREATE TABLE IF NOT EXISTS projects (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        description TEXT,
        platform TEXT,
        status TEXT,
        account_id TEXT,
        is_flags_enabled BOOLEAN,
        archived BOOLEAN DEFAULT FALSE,
        created_at TEXT,
        last_modified TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP
      );

      -- Environments table
      CREATE TABLE IF NOT EXISTS environments (
        project_id TEXT,
        key TEXT,
        name TEXT,
        is_primary BOOLEAN DEFAULT FALSE,
        priority INTEGER,
        archived BOOLEAN DEFAULT FALSE,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, key),
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Feature flags table (complete with denormalized fields)
      CREATE TABLE IF NOT EXISTS flags (
        project_id TEXT,
        key TEXT,
        id TEXT,
        name TEXT,
        description TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, key),
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Flag environments (for cross-environment queries)
      CREATE TABLE IF NOT EXISTS flag_environments (
        project_id TEXT,
        flag_key TEXT,
        environment_key TEXT,
        enabled BOOLEAN,
        archived BOOLEAN DEFAULT FALSE,
        rules_summary TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, flag_key, environment_key),
        FOREIGN KEY (project_id, flag_key) REFERENCES flags(project_id, key)
      );

      -- Variations table (Feature Experimentation)
      -- CRITICAL: flag_key is REQUIRED for variations to be cached
      -- The composite primary key (project_id, flag_key, key) requires all three fields
      -- Cache persistence checks: if (entityForStorage.key && entityForStorage.flag_key)
      -- Without flag_key in variation data, cache update is silently skipped
      -- Fixed: January 27, 2025 - Variations not persisting to cache
      CREATE TABLE IF NOT EXISTS variations (
        id TEXT,
        key TEXT,
        project_id TEXT,
        flag_key TEXT, -- CRITICAL: Required for cache persistence
        name TEXT,
        description TEXT,
        enabled BOOLEAN DEFAULT TRUE,
        archived BOOLEAN DEFAULT FALSE,
        variables TEXT, -- JSON object of variable values
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, flag_key, key),
        FOREIGN KEY (project_id, flag_key) REFERENCES flags(project_id, key)
      );

      -- Variable Definitions table (Feature Experimentation)
      CREATE TABLE IF NOT EXISTS variable_definitions (
        key TEXT,
        project_id TEXT,
        flag_key TEXT,
        description TEXT,
        type TEXT, -- string, boolean, integer, double, json
        default_value TEXT,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, flag_key, key),
        FOREIGN KEY (project_id, flag_key) REFERENCES flags(project_id, key)
      );

      -- Rulesets table (Feature Experimentation)
      CREATE TABLE IF NOT EXISTS rulesets (
        id TEXT,
        project_id TEXT,
        flag_key TEXT,
        environment_key TEXT,
        enabled BOOLEAN DEFAULT FALSE,
        rules_count INTEGER DEFAULT 0,
        revision INTEGER,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (project_id, flag_key, environment_key),
        FOREIGN KEY (project_id, flag_key) REFERENCES flags(project_id, key)
      );

      -- Rules table (Feature Experimentation)
      CREATE TABLE IF NOT EXISTS rules (
        id TEXT,
        key TEXT,
        project_id TEXT,
        flag_key TEXT,
        environment_key TEXT,
        type TEXT, -- a/b, rollout, targeted_delivery
        name TEXT,
        audience_conditions TEXT, -- JSON array
        percentage_included INTEGER, -- 0-10000
        variations TEXT, -- JSON array of variation allocations
        metrics TEXT, -- JSON array of metrics
        enabled BOOLEAN DEFAULT TRUE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (id),
        FOREIGN KEY (project_id, flag_key, environment_key) REFERENCES rulesets(project_id, flag_key, environment_key)
      );

      -- Changes table (Feature Experimentation)
      CREATE TABLE IF NOT EXISTS changes (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        entity_type TEXT,
        entity_id TEXT,
        entity_key TEXT,
        action TEXT, -- created, updated, deleted, archived
        user_id TEXT,
        user_email TEXT,
        timestamp TEXT,
        details TEXT, -- JSON object with change details
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Reports table (Feature Experimentation)
      CREATE TABLE IF NOT EXISTS reports (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        flag_key TEXT,
        rule_key TEXT,
        environment_key TEXT,
        report_type TEXT, -- experiment_results, flag_usage, etc.
        start_time TEXT,
        end_time TEXT,
        data_json TEXT, -- Store complete report data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id, flag_key) REFERENCES flags(project_id, key)
      );

      -- Features table (Feature Experimentation only)
      CREATE TABLE IF NOT EXISTS features (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        key TEXT NOT NULL,
        name TEXT NOT NULL,
        description TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created TEXT,
        last_modified TEXT,
        variable_definitions TEXT, -- JSON array of variable definitions
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, key)
      );

      -- Experiments table
      CREATE TABLE IF NOT EXISTS experiments (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        name TEXT,
        description TEXT,
        status TEXT,
        flag_key TEXT,
        environment TEXT,
        type TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Experiment Results table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS experiment_results (
        id TEXT PRIMARY KEY, -- experiment_id + timestamp as composite key
        experiment_id TEXT NOT NULL,
        project_id TEXT,
        confidence_level REAL,
        use_stats_engine BOOLEAN,
        stats_engine_version TEXT,
        baseline_count INTEGER,
        treatment_count INTEGER,
        total_count INTEGER,
        start_time TEXT,
        last_update TEXT,
        results_json TEXT, -- Store detailed results array as JSON
        reach_json TEXT, -- Store reach data as JSON
        stats_config_json TEXT, -- Store stats config as JSON
        data_json TEXT, -- Store complete response as JSON
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (experiment_id) REFERENCES experiments(id),
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Campaigns table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS campaigns (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        name TEXT NOT NULL,
        description TEXT,
        holdback REAL,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, name)
      );

      -- Pages table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS pages (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        key TEXT,
        name TEXT NOT NULL,
        edit_url TEXT,
        activation_type TEXT,
        activation_code TEXT,
        category TEXT,
        page_type TEXT,
        conditions TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Audiences table
      CREATE TABLE IF NOT EXISTS audiences (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        name TEXT,
        description TEXT,
        conditions TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        last_modified TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, name)
      );

      -- Attributes table
      -- CRITICAL: DO NOT REMOVE UNIQUE CONSTRAINTS
      -- These prevent duplicate entities with same business key
      -- Without them, we get duplicate records like:
      -- 5.6006992878633e+15 and 5600699287863296 for same attribute
      -- Fixed: January 26, 2025 - Database integrity issue
      CREATE TABLE IF NOT EXISTS attributes (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        key TEXT,
        name TEXT,
        condition_type TEXT,
        archived BOOLEAN DEFAULT FALSE,
        last_modified TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, key) -- CRITICAL: Prevents duplicate attributes per project
      );

      -- Events table
      CREATE TABLE IF NOT EXISTS events (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        key TEXT,
        name TEXT,
        description TEXT,
        event_type TEXT,
        category TEXT,
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        data_json TEXT,
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, key)
      );

      -- Collaborators table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS collaborators (
        user_id TEXT NOT NULL,
        project_id TEXT NOT NULL,
        email TEXT,
        name TEXT,
        role TEXT,
        permissions_json TEXT, -- Store permissions array as JSON
        invited_at TEXT,
        last_seen_at TEXT,
        data_json TEXT, -- Store complete collaborator data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (user_id, project_id),
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Groups table (Both platforms - mutual exclusion)
      CREATE TABLE IF NOT EXISTS groups (
        id TEXT PRIMARY KEY,
        project_id TEXT,
        name TEXT NOT NULL,
        description TEXT,
        type TEXT, -- 'mutually_exclusive' or other types
        policy TEXT, -- 'random' or other allocation policies
        traffic_allocation INTEGER, -- Percentage (0-10000)
        archived BOOLEAN DEFAULT FALSE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT, -- Store complete group data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, name)
      );

      -- Extensions table (Web Experimentation - custom JavaScript)
      CREATE TABLE IF NOT EXISTS extensions (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        name TEXT NOT NULL,
        description TEXT,
        extension_type TEXT, -- 'analytics_integration', 'custom_code', etc.
        implementation TEXT, -- JavaScript code
        enabled BOOLEAN DEFAULT TRUE,
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT, -- Store complete extension data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, name)
      );

      -- Webhooks table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS webhooks (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        name TEXT NOT NULL,
        url TEXT NOT NULL,
        event_types TEXT, -- JSON array of event types
        enabled BOOLEAN DEFAULT TRUE,
        headers TEXT, -- JSON object of headers
        secret TEXT, -- Webhook secret for validation
        created_time TEXT,
        updated_time TEXT,
        data_json TEXT, -- Store complete webhook data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, name)
      );

      -- List Attributes table (Web Experimentation)
      CREATE TABLE IF NOT EXISTS list_attributes (
        id TEXT PRIMARY KEY,
        project_id TEXT NOT NULL,
        name TEXT NOT NULL,
        description TEXT,
        key_field TEXT NOT NULL,
        list_type TEXT NOT NULL, -- 'cookies', 'zip_codes', 'query_parameter_values', etc.
        list_content TEXT, -- JSON array of list items
        created_time TEXT,
        updated_time TEXT,
        archived BOOLEAN DEFAULT FALSE,
        data_json TEXT, -- Store complete list attribute data
        synced_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id),
        UNIQUE(project_id, key_field)
      );

      -- Change history table (for incremental sync)
      CREATE TABLE IF NOT EXISTS change_history (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        project_id TEXT NOT NULL,
        entity_type TEXT NOT NULL,
        entity_id TEXT NOT NULL,
        entity_name TEXT,
        action TEXT NOT NULL,
        timestamp TEXT NOT NULL,
        changed_by TEXT,
        change_summary TEXT,
        archived BOOLEAN DEFAULT FALSE,
        synced_at TEXT,
        created_at TEXT DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (project_id) REFERENCES projects(id)
      );

      -- Sync state table (for tracking sync progress)
      CREATE TABLE IF NOT EXISTS sync_state (
        project_id TEXT PRIMARY KEY,
        last_sync_time TEXT NOT NULL,
        last_successful_sync TEXT,
        sync_in_progress BOOLEAN DEFAULT FALSE,
        error_count INTEGER DEFAULT 0,
        last_error TEXT,
        created_at TEXT DEFAULT CURRENT_TIMESTAMP,
        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
      );

      -- Sync metadata
      CREATE TABLE IF NOT EXISTS sync_metadata (
        key TEXT PRIMARY KEY,
        value TEXT,
        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
      );

      -- Entity snapshots table for comprehensive change detection
      CREATE TABLE IF NOT EXISTS entity_snapshots (
        project_id TEXT NOT NULL,
        entity_type TEXT NOT NULL,
        entity_id TEXT NOT NULL,
        snapshot_hash TEXT NOT NULL,
        snapshot_data TEXT NOT NULL,
        snapshot_time TEXT NOT NULL,
        PRIMARY KEY (project_id, entity_type, entity_id)
      );

      -- Performance indexes
      CREATE INDEX IF NOT EXISTS idx_projects_archived ON projects(archived);
      CREATE INDEX IF NOT EXISTS idx_environments_project_archived ON environments(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_flags_project_archived ON flags(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_flags_name ON flags(name);
      CREATE INDEX IF NOT EXISTS idx_flags_key ON flags(key);
      CREATE INDEX IF NOT EXISTS idx_flag_environments_enabled ON flag_environments(enabled);
      CREATE INDEX IF NOT EXISTS idx_flag_environments_archived ON flag_environments(archived);
      CREATE INDEX IF NOT EXISTS idx_flag_environments_project_env ON flag_environments(project_id, environment_key);

      -- Variations indexes
      CREATE INDEX IF NOT EXISTS idx_variations_project_flag ON variations(project_id, flag_key);
      CREATE INDEX IF NOT EXISTS idx_variations_key ON variations(key);
      CREATE INDEX IF NOT EXISTS idx_variations_archived ON variations(archived);
      CREATE INDEX IF NOT EXISTS idx_variations_enabled ON variations(enabled);

      -- Variable Definitions indexes
      CREATE INDEX IF NOT EXISTS idx_variable_definitions_project_flag ON variable_definitions(project_id, flag_key);
      CREATE INDEX IF NOT EXISTS idx_variable_definitions_key ON variable_definitions(key);
      CREATE INDEX IF NOT EXISTS idx_variable_definitions_type ON variable_definitions(type);

      -- Rulesets indexes
      CREATE INDEX IF NOT EXISTS idx_rulesets_project_flag ON rulesets(project_id, flag_key);
      CREATE INDEX IF NOT EXISTS idx_rulesets_environment ON rulesets(environment_key);
      CREATE INDEX IF NOT EXISTS idx_rulesets_enabled ON rulesets(enabled);

      -- Rules indexes
      CREATE INDEX IF NOT EXISTS idx_rules_project_flag_env ON rules(project_id, flag_key, environment_key);
      CREATE INDEX IF NOT EXISTS idx_rules_type ON rules(type);
      CREATE INDEX IF NOT EXISTS idx_rules_enabled ON rules(enabled);

      -- Changes indexes
      CREATE INDEX IF NOT EXISTS idx_changes_project ON changes(project_id);
      CREATE INDEX IF NOT EXISTS idx_changes_entity ON changes(entity_type, entity_id);
      CREATE INDEX IF NOT EXISTS idx_changes_timestamp ON changes(timestamp);
      CREATE INDEX IF NOT EXISTS idx_changes_user ON changes(user_email);

      -- Reports indexes
      CREATE INDEX IF NOT EXISTS idx_reports_project_flag ON reports(project_id, flag_key);
      CREATE INDEX IF NOT EXISTS idx_reports_type ON reports(report_type);
      CREATE INDEX IF NOT EXISTS idx_reports_time_range ON reports(start_time, end_time);

      CREATE INDEX IF NOT EXISTS idx_experiments_project_status ON experiments(project_id, status, archived);
      CREATE INDEX IF NOT EXISTS idx_experiments_flag ON experiments(flag_key);
      CREATE INDEX IF NOT EXISTS idx_experiments_status ON experiments(status);
      CREATE INDEX IF NOT EXISTS idx_experiment_results_experiment ON experiment_results(experiment_id);
      CREATE INDEX IF NOT EXISTS idx_experiment_results_project ON experiment_results(project_id);
      CREATE INDEX IF NOT EXISTS idx_experiment_results_last_update ON experiment_results(last_update);
      CREATE INDEX IF NOT EXISTS idx_campaigns_project_archived ON campaigns(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_campaigns_name ON campaigns(name);
      CREATE INDEX IF NOT EXISTS idx_pages_project_archived ON pages(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_pages_name ON pages(name);
      CREATE INDEX IF NOT EXISTS idx_pages_activation_type ON pages(activation_type);
      CREATE INDEX IF NOT EXISTS idx_pages_key ON pages(key);
      CREATE INDEX IF NOT EXISTS idx_pages_category ON pages(category);
      CREATE INDEX IF NOT EXISTS idx_pages_page_type ON pages(page_type);
      CREATE INDEX IF NOT EXISTS idx_audiences_project_archived ON audiences(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_attributes_project_archived ON attributes(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_attributes_project_key ON attributes(project_id, key);
      CREATE INDEX IF NOT EXISTS idx_events_project_archived ON events(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_events_project_type ON events(project_id, event_type);
      CREATE INDEX IF NOT EXISTS idx_collaborators_project ON collaborators(project_id);
      CREATE INDEX IF NOT EXISTS idx_collaborators_email ON collaborators(email);
      CREATE INDEX IF NOT EXISTS idx_collaborators_role ON collaborators(role);
      CREATE INDEX IF NOT EXISTS idx_groups_project_archived ON groups(project_id, archived);
      CREATE INDEX IF NOT EXISTS idx_groups_name ON groups(name);
      CREATE INDEX IF NOT EXISTS idx_groups_type ON groups(type);
      CREATE INDEX IF NOT EXISTS idx_extensions_project ON extensions(project_id);
      CREATE INDEX IF NOT EXISTS idx_extensions_name ON extensions(name);
      CREATE INDEX IF NOT EXISTS idx_extensions_enabled ON extensions(enabled);
      CREATE INDEX IF NOT EXISTS idx_extensions_type ON extensions(extension_type);
      CREATE INDEX IF NOT EXISTS idx_webhooks_project ON webhooks(project_id);
      CREATE INDEX IF NOT EXISTS idx_webhooks_enabled ON webhooks(enabled);
      CREATE INDEX IF NOT EXISTS idx_list_attributes_project ON list_attributes(project_id);
      CREATE INDEX IF NOT EXISTS idx_list_attributes_name ON list_attributes(name);
      CREATE INDEX IF NOT EXISTS idx_list_attributes_type ON list_attributes(list_type);
      CREATE INDEX IF NOT EXISTS idx_list_attributes_archived ON list_attributes(archived);
      CREATE INDEX IF NOT EXISTS idx_change_history_archived ON change_history(archived);
      CREATE INDEX IF NOT EXISTS idx_change_history_project_timestamp ON change_history(project_id, timestamp);
      CREATE INDEX IF NOT EXISTS idx_change_history_entity ON change_history(entity_type, entity_id);
      CREATE INDEX IF NOT EXISTS idx_change_history_sync ON change_history(project_id, synced_at);
      CREATE INDEX IF NOT EXISTS idx_sync_state_project ON sync_state(project_id, sync_in_progress);
      CREATE INDEX IF NOT EXISTS idx_entity_snapshots_type ON entity_snapshots(entity_type, project_id);
    `;
    this.db.exec(schema);
  }

  /**
   * Create all database views
   * This ensures views are always created during initialization
   */
  async createViews() {
    if (!this.db) throw new Error('Database not initialized');
    const logger = getLogger();
    try {
      // Initialize ViewManager if not already done
      if (!this.viewManager) {
        this.viewManager = new ViewManager(this.db);
      }
      logger.info('Creating database views...');
      // Create all views (including discovered ones)
      await this.viewManager.createAllViews(true);
      // Validate views were created correctly
      const validation = this.viewManager.validateViews();
      if (!validation.valid) {
        logger.warn(`View validation failed: ${validation.missing.length} missing views`);
        logger.warn(`Missing views: ${validation.missing.join(', ')}`);
        // Don't throw error - allow system to continue with missing views
        // but log warning for investigation
      } else {
        logger.info('All database views created successfully');
      }
    } catch (error) {
      logger.error('Failed to create views:', error.message);
      logger.error('View creation error stack:', error.stack);
      // Re-throw the error to ensure it's visible during initialization
      throw new Error(`Failed to create database views: ${error.message}`);
    }
  }

  async migrateSchema() {
    if (!this.db) throw new Error('Database not initialized');
    // Add missing columns to existing tables if they don't exist
    const migrations = [
      { table: 'projects', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'environments', column: 'is_primary', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'environments', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'flags', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'flag_environments', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'experiments', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'audiences', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'attributes', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'events', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'list_attributes', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'change_history', column: 'archived', type: 'BOOLEAN DEFAULT FALSE' },
      { table: 'change_history', column: 'entity_type', type: 'TEXT' },
      { table: 'change_history', column: 'entity_id', type: 'TEXT' },
      { table: 'change_history', column: 'entity_name', type: 'TEXT' },
      { table: 'change_history', column: 'changed_by', type: 'TEXT' },
      { table: 'change_history', column: 'change_summary', type: 'TEXT' },
      { table: 'change_history', column: 'synced_at', type: 'TEXT' }
    ];
    for (const migration of migrations) {
      try {
        // Check if column exists
        const columns = this.db.prepare(`PRAGMA table_info(${migration.table})`).all();
        const hasColumn = columns.some((col) => col.name === migration.column);
        if (!hasColumn) {
          const alterSql = `ALTER TABLE ${migration.table} ADD COLUMN ${migration.column} ${migration.type}`;
          this.db.exec(alterSql);
          getLogger().info(`Added ${migration.column} column to ${migration.table} table`);
        }
      } catch (error) {
        // Table might not exist yet, which is fine
        if (!error.message.includes('no such table')) {
          getLogger().warn(`Migration failed for ${migration.table}.${migration.column}: ${error.message}`);
        }
      }
    }
  }
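
  // Illustrative note (comment only): each entry in the migrations array above is
  // applied as a single additive ALTER TABLE statement when PRAGMA table_info()
  // shows the column is missing. For example, the first entry produces:
  //
  //   ALTER TABLE projects ADD COLUMN archived BOOLEAN DEFAULT FALSE
  //
  // Because columns are only added when absent, migrateSchema() is safe to run
  // repeatedly against an already-migrated database.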

  async query(sql, params = []) {
    if (!this.db) throw new Error('Database not initialized');
    // 🔍 CACHE DEBUG: Track verification queries
    const isVerificationQuery = (sql.includes('events') || sql.includes('pages') || sql.includes('campaigns')) &&
      (sql.includes('COUNT') || sql.includes('WHERE id'));
    if (isVerificationQuery) {
      getLogger().error({
        '🔍 VERIFICATION QUERY': new Date().toISOString(),
        '📝 SQL': sql,
        '🔢 Params': params,
        '🔗 In Transaction': this.db.inTransaction
      }, '🚨 SQLiteEngine: Verification query starting');
    }
    const stmt = this.db.prepare(sql);
    const results = stmt.all(params);
    if (isVerificationQuery) {
      getLogger().error({
        '🔍 VERIFICATION RESULT': new Date().toISOString(),
        '📊 Row Count': results.length,
        '📋 First Row': results[0] || null,
        '✅ Found Data': results.length > 0
      }, '🚨 SQLiteEngine: Verification query completed');
    }
    return results;
  }

  async get(sql, params = []) {
    if (!this.db) throw new Error('Database not initialized');
    const stmt = this.db.prepare(sql);
    return stmt.get(params);
  }

  /**
   * Retry mechanism with exponential backoff for database operations
   * Handles SQLite BUSY, LOCKED, and other transient errors
   */
  async retryOperation(operation, operationName) {
    let lastError;
    for (let attempt = 0; attempt <= this.retryConfig.maxRetries; attempt++) {
      try {
        const result = await operation();
        if (attempt > 0) {
          getLogger().info({
            operationName,
            attempt: attempt + 1,
            totalAttempts: this.retryConfig.maxRetries + 1
          }, 'SQLiteEngine: Operation succeeded after retry');
        }
        return result;
      } catch (error) {
        lastError = error;
        const isRetryableError = this.isRetryableError(error);
        if (!isRetryableError || attempt === this.retryConfig.maxRetries) {
          getLogger().error({
            operationName,
            attempt: attempt + 1,
            error: error.message,
            isRetryableError,
            finalAttempt: attempt === this.retryConfig.maxRetries
          }, 'SQLiteEngine: Operation failed (no more retries)');
          throw error;
        }
        // Calculate delay with exponential backoff and jitter
        const delay = Math.min(
          this.retryConfig.baseDelay * Math.pow(this.retryConfig.exponentialBase, attempt),
          this.retryConfig.maxDelay
        );
        // Add jitter to prevent thundering herd
        const jitteredDelay = delay + Math.random() * delay * 0.1;
        getLogger().warn({
          operationName,
          attempt: attempt + 1,
          error: error.message,
          nextRetryIn: Math.round(jitteredDelay),
          isRetryableError
        }, 'SQLiteEngine: Operation failed, retrying with backoff');
        await this.sleep(jitteredDelay);
      }
    }
    throw lastError;
  }
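
  // Illustrative note on retryOperation() timing above, using the default
  // retryConfig (actual delays also get up to 10% random jitter added):
  //
  //   attempt 0 fails -> wait min(100 * 2^0, 5000) = 100 ms
  //   attempt 1 fails -> wait min(100 * 2^1, 5000) = 200 ms
  //   attempt 2 fails -> wait min(100 * 2^2, 5000) = 400 ms
  //   attempt 3 fails -> wait min(100 * 2^3, 5000) = 800 ms
  //   attempt 4 fails -> wait min(100 * 2^4, 5000) = 1600 ms
  //   attempt 5 fails -> no more retries; the last error is re-thrown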

  /**
   * Determines if an error is retryable based on SQLite error codes and patterns
   */
  isRetryableError(error) {
    const errorMessage = error.message?.toLowerCase() || '';
    const errorCode = error.code;
    // SQLite error codes that are retryable
    const retryableCodes = [
      'SQLITE_BUSY',
      'SQLITE_LOCKED',
      'SQLITE_PROTOCOL',
      'SQLITE_IOERR',
      'SQLITE_FULL'
    ];
    // Error message patterns that indicate retryable conditions
    const retryablePatterns = [
      'database is locked',
      'database table is locked',
      'database disk image is malformed',
      'disk i/o error',
      'database or disk is full',
      'busy',
      'locked',
      'step'
    ];
    return retryableCodes.includes(errorCode) ||
      retryablePatterns.some(pattern => errorMessage.includes(pattern));
  }

  /**
   * Enhanced transaction wrapper with IMMEDIATE locking
   * Prevents most deadlock scenarios by acquiring locks early
   */
  async withTransaction(operation) {
    if (!this.db) throw new Error('Database not initialized');
    return this.retryOperation(async () => {
      // Use IMMEDIATE transaction to acquire locks early
      this.db.exec('BEGIN IMMEDIATE');
      try {
        const result = await operation();
        this.db.exec('COMMIT');
        // Force WAL checkpoint after successful transaction
        this.db.pragma('wal_checkpoint(PASSIVE)');
        return result;
      } catch (error) {
        this.db.exec('ROLLBACK');
        throw error;
      }
    }, 'transaction');
  }
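
  // Illustrative usage sketch for withTransaction() above (hypothetical statements
  // and identifiers, not taken from this package). Any error thrown inside the
  // callback triggers ROLLBACK, and retryable errors cause the whole BEGIN/COMMIT
  // block to be retried by retryOperation():
  //
  //   await engine.withTransaction(async () => {
  //     await engine.run('DELETE FROM flags WHERE project_id = ?', [projectId]);
  //     await engine.run('INSERT INTO flags (project_id, key) VALUES (?, ?)', [projectId, 'checkout_redesign']);
  //   });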

  /**
   * Sleep utility for retry delays
   */
  async sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }

  async run(sql, params = []) {
    if (!this.db) throw new Error('Database not initialized');
    // 🔍 CACHE DEBUG: Track operations by entity type
    const isEventOperation = sql.includes('events');
    const isPageOperation = sql.includes('pages');
    const isCampaignOperation = sql.includes('campaigns');
    const operationType = sql.trim().toUpperCase().split(' ')[0];
    const debugTimestamp = new Date().toISOString();
    if (isEventOperation || isPageOperation || isCampaignOperation) {
      getLogger().error({
        '🔍 CACHE SQL DEBUG': debugTimestamp,
        '📊 Entity Type': isEventOperation ? 'EVENT' : isPageOperation ? 'PAGE' : 'CAMPAIGN',
        '🎯 Operation': operationType,
        '📝 SQL': sql.substring(0, 200),
        '🔢 Params Count': params.length,
        '🔗 In Transaction': this.db.inTransaction
      }, '🚨 SQLiteEngine: Entity-specific SQL operation starting');
    }
    return this.retryOperation(async () => {
      // Debug logging for SQLite binding errors
      if (params.length > 0) {
        params.forEach((param, index) => {
          const paramType = typeof param;
          const isBigInt = paramType === 'bigint';
          const isObject = paramType === 'object' && param !== null && !Buffer.isBuffer(param);
          const isProblematic = isBigInt || isObject || param === undefined;
          if (isProblematic) {
            getLogger().error({
              sql: sql.substring(0, 100) + '...',
              paramIndex: index,
              paramType,
              isBigInt,
              isObject,
              isUndefined: param === undefined,
              constructor: param?.constructor?.name,
              value: isObject ? JSON.stringify(param) : String(param)
            }, 'SQLiteEngine: SQLITE BINDING ERROR DETECTED');
          }
        });
      }
      try {
        const stmt = this.db.prepare(sql);
        const result = stmt.run(params);
        // 🔍 CACHE DEBUG: Log result for entity operations
        if (isEventOperation || isPageOperation || isCampaignOperation) {
          getLogger().error({
            '🔍 CACHE SQL RESULT': debugTimestamp,
            '📊 Entity Type': isEventOperation ? 'EVENT' : isPageOperation ? 'PAGE' : 'CAMPAIGN',
            '✅ Changes': result.changes,
            '🆔 LastID': result.lastInsertRowid,
            '🎯 Success': result.changes > 0 || operationType === 'SELECT',
            '🔗 In Transaction After': this.db.inTransaction
          }, '🚨 SQLiteEngine: Entity operation completed');
        }
        // Force WAL checkpoint for critical operations to ensure persistence
        if (sql.toLowerCase().includes('insert') || sql.toLowerCase().includes('update') || sql.toLowerCase().includes('delete')) {
          try {
            // 🔍 CACHE DEBUG: Track WAL checkpoint for entity operations
            const walBefore = this.db.inTransaction;
            const walResult = this.db.pragma('wal_checkpoint(PASSIVE)');
            if (isEventOperation || isPageOperation || isCampaignOperation) {
              getLogger().error({
                '🔍 WAL CHECKPOINT': debugTimestamp,
                '📊 Entity Type': isEventOperation ? 'EVENT' : isPageOperation ? 'PAGE' : 'CAMPAIGN',
                '🔄 WAL Result': walResult,
                '🔗 Transaction Before': walBefore,
                '🔗 Transaction After': this.db.inTransaction
              }, '🚨 SQLiteEngine: WAL checkpoint completed');
            }
          } catch (walError) {
            getLogger().warn({ error: walError.message }, 'SQLiteEngine: WAL checkpoint warning (operation still succeeded)');
          }
        }
        return { lastID: result.lastInsertRowid, changes: result.changes };
      } catch (error) {
        getLogger().error({
          sql: sql.substring(0, 100) + (sql.length > 100 ? '...' : ''),
          params: params.slice(0, 5), // Log first 5 params only for safety
          error: error.message
        }, 'SQLiteEngine: SQL execution error');
        throw error;
      }
    }, 'run_operation');
  }

  /**
   * Enhanced batch operation method with progress reporting
   * Performs multi-value INSERT operations for optimal performance
   * @param baseSQL - Base SQL statement with single VALUES placeholder
   * @param batchParams - Array of parameter arrays for batch insertion
   * @param batchSize - Number of records to process per batch (default: 100)
   * @param progressCallback - Optional callback for progress reporting
   * @returns Promise that resolves when all batches are complete
   *
   * @example
   * await engine.runBatch(
   *   "INSERT OR REPLACE INTO audiences (id, name) VALUES (?)",
   *   [["1", "Audience 1"], ["2", "Audience 2"]],
   *   100,
   *   (saved, total) => console.log(`Progress: ${saved}/${total}`)
   * );
   */
  async runBatch(baseSQL, batchParams, batchSize = 100, progressCallback) {
    if (!this.db) throw new Error('Database not initialized');
    // Validate inputs
    if (!baseSQL || !Array.isArray(batchParams)) {
      throw new Error('SQLiteEngine.runBatch: Invalid parameters - baseSQL and batchParams are required');
    }
    if (batchParams.length === 0) {
      getLogger().debug('SQLiteEngine.runBatch: No parameters provided, skipping batch operation');
      return;
    }
    // Validate parameter consistency
    const firstParamLength = batchParams[0]?.length;
    if (!firstParamLength) {
      throw new Error('SQLiteEngine.runBatch: First parameter array is empty or invalid');
    }
    for (let i = 0; i < batchParams.length; i++) {
      if (!Array.isArray(batchParams[i]) || batchParams[i].length !== firstParamLength) {
        throw new Error(`SQLiteEngine.runBatch: Parameter array at index ${i} has inconsistent length (expected ${firstParamLength}, got ${batchParams[i]?.length})`);
      }
    }
    getLogger().debug({
      totalRecords: batchParams.length,
      batchSize,
      parametersPerRecord: firstParamLength
    }, 'SQLiteEngine.runBatch: Starting batch operation');
    // Process in chunks for memory management and progress reporting
    for (let i = 0; i < batchParams.length; i += batchSize) {
      const chunk = batchParams.slice(i, i + batchSize);
      try {
        // Build multi-value INSERT SQL
        // Transform "INSERT ... VALUES (?)" to "INSERT ... VALUES (?,?), (?,?), (?,?)"
        const placeholders = chunk.map(() => `(${new Array(firstParamLength).fill('?').join(', ')})`).join(', ');
        // Replace the single VALUES placeholder with multi-value placeholder
        const sql = baseSQL.replace(/VALUES\s*\([^)]*\)/, `VALUES ${placeholders}`);
        // Flatten parameter arrays for SQLite binding
        const flatParams = chunk.flat();
        // Perform SQLite binding validation (preserve existing validation logic)
        if (flatParams.leng