UNPKG

@simonecoelhosfo/optimizely-mcp-server

Version: 1.0.0

Optimizely MCP Server for AI assistants with integrated CLI tools

889 lines 40.1 kB
/**
 * Cache Manager for Optimizely MCP Server
 * @description High-performance caching layer that mirrors Optimizely data into
 * local SQLite storage. Orchestrates full and incremental synchronization of
 * projects, flags, experiments, audiences, attributes, events, and related
 * entities, with paginated fetching, retry/backoff for rate limits, transactional
 * multi-entity writes, and fast filtered querying of the cached data.
 * @author Optimizely MCP Server
 * @version 1.0.0
 */
import { OptimizelyAPIHelper } from '../api/OptimizelyAPIHelper.js';
import { SQLiteEngine } from '../storage/SQLiteEngine.js';
import { ProjectFilter } from '../config/ProjectFilter.js';
import { MCPServerConfig } from '../config/ConfigManager.js';
/**
 * Manages synchronization and caching of Optimizely data in local SQLite storage.
 *
 * Architecture:
 * - SQLiteEngine supplies persistent storage with transaction support.
 * - OptimizelyAPIHelper performs all Optimizely API communication.
 * - Errors are mapped to MCP errors; both full and targeted sync are supported.
 */
export declare class CacheManager {
    /** SQLite storage engine used for persistent data. */
    storage: SQLiteEngine;
    /** Optimizely API client used during synchronization. */
    client: OptimizelyAPIHelper;
    /** Optional project filter applying configuration-based filtering. */
    private projectFilter?;
    /** Optional configuration controlling cache-manager behavior. */
    private config?;
    /** Change-history tracker backing incremental sync. */
    private changeTracker?;
    /** Coordinator for incremental synchronization runs. */
    private incrementalSyncManager?;
    /** Per-project environment filter applied while syncing. */
    private environmentFilter;
    /** Class-level lock preventing concurrent sync operations. */
    private static syncLock;
    /**
     * Creates a cache manager from pre-configured dependencies.
     * Both the storage engine and the API client should be fully configured
     * before being passed in; call init() after construction to prepare the
     * storage layer.
     *
     * @param storageEngine - Initialized SQLite storage engine
     * @param optimizelyClient - Configured Optimizely API client
     * @param projectFilter - Optional configuration-based project filter
     * @param config - Optional cache-manager configuration
     *
     * @example
     * ```typescript
     * const storage = new SQLiteEngine({ path: './cache.db' });
     * const apiClient = new OptimizelyAPIHelper(apiToken);
     * const cache = new CacheManager(storage, apiClient);
     * await cache.init();
     * ```
     */
    constructor(storageEngine: SQLiteEngine, optimizelyClient: OptimizelyAPIHelper, projectFilter?: ProjectFilter, config?: MCPServerConfig);
    /**
     * Initializes the underlying storage: opens the SQLite engine and creates
     * the database schema. Must be called before any other cache operation.
     *
     * @param options - Initialization options; `confirmReset` authorizes a
     *   destructive database reset when one is required
     * @returns Promise that resolves once initialization completes
     * @throws {McpError} When storage initialization fails
     * @throws {Error} With code 'DATABASE_RESET_REQUIRED' when the database needs a reset
     */
    init(options?: {
        confirmReset?: boolean;
    }): Promise<void>;
    /**
     * Gracefully shuts down the cache manager, closing database connections and
     * releasing resources. After close(), init() must be called again before
     * further use. Intended for application shutdown.
     */
    close(): Promise<void>;
    /**
     * Serializes sync operations per lock key (typically the project ID) so
     * only one sync runs at a time, avoiding SQLite database-lock errors.
     * @private
     */
    private serializeSync;
    /**
     * Returns the configuration-based project filter, if one was provided.
     * @returns The project filter instance, or undefined when not configured
     */
    getProjectFilter(): ProjectFilter | undefined;
    /**
     * Writes ALL projects to the database without applying filters, ensuring
     * every project is available locally for entity operations.
     * @private
     */
    private syncAllProjectsToDatabase;
    /**
     * Fetches every page from a paginated API endpoint with retry logic.
     *
     * Supported response shapes: a bare array, or an object carrying the items
     * under `data`, `results`, or `items`.
     *
     * Retry behavior: HTTP 429 uses exponential backoff honoring Retry-After;
     * HTTP 5xx and network errors retry with backoff; attempts are configurable
     * and failures are logged.
     *
     * @param fetchFunction - Fetches one page given (page, pageSize)
     * @param maxRecords - Optional cap on total records fetched
     * @param options - Pagination configuration:
     *   maxPages (default 1000), pageSize (default 100), startPage (default 1),
     *   delay in ms between requests (default 100), retryAttempts per page
     *   (default 3), and an optional progressCallback
     * @returns Promise resolving to all items across all pages
     * @throws {Error} When retries are exhausted or the response format is unrecognized
     *
     * @example
     * ```typescript
     * const allFlags = await cache.fetchAllPages(
     *   (page, pageSize) => apiClient.getFlags({ page, per_page: pageSize }),
     *   undefined,
     *   { maxPages: 50, pageSize: 100, retryAttempts: 3 }
     * );
     * ```
     */
    fetchAllPages(fetchFunction: (page: number, pageSize: number) => Promise<any[] | {
        data?: any[];
        results?: any[];
        items?: any[];
    } | null>, maxRecords?: number, options?: {
        maxPages?: number;
        pageSize?: number;
        startPage?: number;
        delay?: number;
        retryAttempts?: number;
        progressCallback?: (current: number, total: number) => void;
    }): Promise<any[]>;
    /**
     * Variant of fetchAllPages that reports progress in real time, enabling
     * user feedback during large batch synchronizations. Pagination semantics
     * are otherwise identical to fetchAllPages.
     *
     * @param fetchFunction - Fetches one page given (page, pageSize)
     * @param progressCallback - Receives (fetched, estimated?) after each page
     * @param maxRecords - Optional cap on total records fetched
     * @param options - Pagination configuration (same defaults as fetchAllPages),
     *   plus optional entityName/projectName labels for progress reporting
     * @returns Promise resolving to all fetched items
     * @throws {Error} When retries are exhausted or the response format is unrecognized
     *
     * @example
     * ```typescript
     * const allAudiences = await cache.fetchAllPagesWithProgress(
     *   (page, pageSize) => apiClient.listAudiences(projectId, { page, per_page: pageSize }),
     *   (fetched, estimated) => this.logger.log(`Progress: ${fetched}/${estimated || '?'} audiences`),
     *   undefined,
     *   { pageSize: 100, retryAttempts: 3 }
     * );
     * ```
     */
    fetchAllPagesWithProgress(fetchFunction: (page: number, pageSize: number) => Promise<any[] | {
        data?: any[];
        results?: any[];
        items?: any[];
    } | null>, progressCallback?: (fetched: number, estimated?: number) => void, maxRecords?: number, options?: {
        maxPages?: number;
        pageSize?: number;
        startPage?: number;
        delay?: number;
        retryAttempts?: number;
        entityName?: string;
        projectName?: string;
    }): Promise<any[]>;
    /**
     * Fetches every page from a cursor-based endpoint (Feature Experimentation
     * API style), following page tokens until exhausted.
     *
     * @param fetchFunction - Fetches one page given (pageToken?, pageSize?)
     * @param options - pageSize, delay, retryAttempts, and optional progressCallback
     * @returns Promise resolving to all items across all pages
     */
    fetchAllPagesWithCursor(fetchFunction: (pageToken?: string, pageSize?: number) => Promise<any>, options?: {
        pageSize?: number;
        delay?: number;
        retryAttempts?: number;
        progressCallback?: (current: number, total?: number) => void;
    }): Promise<any[]>;
    /**
     * Performs a complete synchronization of Optimizely data into the cache,
     * wrapped in a database transaction for atomicity.
     *
     * Process: fetch the target project(s), then for each project sync its
     * metadata, environments, flags (with per-environment configurations),
     * experiments, audiences, attributes, and events; finally record the sync
     * timestamp in sync metadata. Pagination and rate-limit retries are applied
     * throughout, with detailed logging.
     *
     * @param projectIdInput - Project identifier (number or string); null/undefined syncs all projects
     * @param progressCallback - Optional callback receiving phase, counts, message,
     *   percent, and per-entity progress details
     * @returns Promise resolving to a result object:
     * ```typescript
     * {
     *   success: boolean;
     *   duration: number;        // milliseconds
     *   projectsSynced: number;
     *   timestamp: string;       // ISO timestamp
     * }
     * ```
     * @throws {McpError} When the project is not found, the sync fails, or the transaction fails
     *
     * @example
     * ```typescript
     * // Sync all projects
     * const result = await cache.fullSync();
     * this.logger.log(`Synced ${result.projectsSynced} projects in ${result.duration}ms`);
     *
     * // Sync a specific project
     * const specificResult = await cache.fullSync('12345');
     * ```
     */
    fullSync(projectIdInput?: number | string | null, progressCallback?: (progress: {
        phase: string;
        current: number;
        total: number;
        message: string;
        percent: number;
        entityData?: {
            entity: string;
            count?: number;
            countType?: 'records' | 'api_calls';
            isOngoing?: boolean;
        };
    }) => void): Promise<any>;
    /**
     * Synchronizes one project and all of its associated entities: project
     * metadata, environments, flags (with per-environment rulesets),
     * experiments (with archival filtering), audiences, attributes, and events.
     * Individual entity failures are logged as warnings and do not abort the
     * whole sync; pagination and rate-limit retries apply throughout.
     *
     * @throws {Error} When critical operations such as project storage fail
     * @throws {McpError} When the project data is invalid or malformed
     * @private
     */
    private syncProject;
    /**
     * Syncs all audience definitions for a project: ID and project association,
     * name, description, targeting conditions (JSON), archival status,
     * timestamps, and the full configuration as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncAudiences;
    /**
     * Syncs all custom attribute definitions for a project: ID and project
     * association, key, display name, condition type, archival status,
     * timestamps, and the full configuration as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncAttributes;
    /**
     * Syncs all event definitions for a project: ID and project association,
     * key, display name, description, category, event type, archival status,
     * timestamps, and the full configuration as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncEvents;
    /**
     * Syncs all experiments for a project using batch inserts: a single batch
     * INSERT replaces N individual statements (98%+ reduction in SQL
     * operations). Covers experiment status, flags, environments, and metadata
     * with validation and performance tracking.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncExperiments;
    /**
     * Syncs all flag definitions for a project via batch inserts, then
     * delegates per-environment flag configuration to syncFlagEnvironments.
     * @private
     */
    private syncFlags;
    /**
     * Syncs flag ruleset configurations for every flag/environment pair using
     * batch operations; API errors are recorded as error states rather than
     * aborting the sync. Supports an optional progress callback.
     * @private
     */
    private syncFlagEnvironments;
    /**
     * Parses flag_environments.data_json and extracts structured data into the
     * rulesets and rules tables, completing the three-layer architecture:
     * flag_environments (raw) → rulesets (metadata) → rules (granular analytics).
     * @private
     */
    private extractRulesetsAndRules;
    /**
     * Syncs experiment results for a project — statistical configuration
     * (confidence level, stats engine), reach data (baseline/treatment/total),
     * start and update timestamps, and detailed metrics as JSON. Results are
     * fetched only for active experiments to save API calls.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncExperimentResults;
    /**
     * Syncs all campaigns for a Web Experimentation project: ID and project
     * association, name, description, holdback percentage, archival status,
     * timestamps, and the full configuration as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncCampaigns;
    /**
     * Syncs all page definitions for a Web Experimentation project: ID and
     * project association, name, edit URL, activation mode and code, URL
     * targeting conditions, archival status, timestamps, and the full
     * configuration as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncPages;
    /**
     * Syncs all collaborators for a project: user ID and project association,
     * email, name, role and permissions, invitation and last-activity
     * timestamps, and the full record as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncCollaborators;
    /**
     * Syncs all groups for a project (mutual-exclusion / traffic allocation):
     * ID and project association, name, description, type, policy, traffic
     * allocation percentage, archival status, timestamps, and the full record
     * as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncGroups;
    /**
     * Syncs all extensions for a Web Experimentation project (custom JavaScript
     * for analytics integrations etc.): ID and project association, name,
     * description, type, implementation code, enabled status, timestamps, and
     * the full record as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncExtensions;
    /**
     * Syncs all webhooks for a project.
     * @private
     */
    private syncWebhooks;
    /**
     * Syncs all list attributes for a project. Distinct from regular
     * attributes, these hold value lists for targeting (e.g. zip codes, cookie
     * values): ID and project association, name, description, key field, list
     * type, list content (JSON array), archival status, timestamps, and the
     * full record as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncListAttributes;
    /**
     * Syncs Web Experimentation environments for a project — a separate API
     * endpoint from Feature Experimentation environments. Stores environment
     * ID and key, project association, name, description, primary-environment
     * flag, archival status, and the full record as JSON.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncWebEnvironments;
    /**
     * Syncs all feature definitions for a project. Features are only available
     * in Feature Experimentation projects.
     * @private
     */
    private syncFeatures;
    /**
     * Syncs change history for a project, populating the change_history table
     * that incremental sync depends on. Records entity type and ID, action
     * (created/updated/deleted/archived), user, timestamp, and change details.
     *
     * @throws {Error} When API calls or database operations fail
     * @private
     */
    private syncChangeHistory;
    /**
     * Performs an incremental synchronization driven by Optimizely's change
     * history API: only entities changed since the last sync are re-fetched,
     * greatly reducing API calls and sync time.
     *
     * Process: read the last sync timestamp per project, fetch change history
     * since then, deduplicate changes to unique entities, fetch only those
     * entities, update the cache, and record sync metadata for the next run.
     * Typical runs complete in under 5 seconds with 90%+ fewer API calls than
     * a full sync, and are resilient to partial failures.
     *
     * @param projectIdInput - Project identifier; null/undefined syncs all projects
     * @returns Promise resolving to a result with success status, duration, and statistics
     * @throws {McpError} When the sync operation fails
     *
     * @example
     * ```typescript
     * // Incremental sync of all projects
     * const result = await cache.incrementalSync();
     * this.logger.log(`Synced ${result.totalChanges} changes in ${result.duration}ms`);
     *
     * // Incremental sync of one project
     * const projectResult = await cache.incrementalSync('12345');
     * ```
     */
    incrementalSync(projectIdInput?: number | string | null): Promise<any>;
    /**
     * Resets the entire cache: creates a timestamped backup, closes
     * connections, deletes the database files (db, wal, shm), and reinitializes
     * a fresh schema. Never throws — all errors are captured in the result.
     *
     * @returns Promise resolving to `{ success, backupPath?, timestamp, error? }`
     *
     * @example
     * ```typescript
     * const result = await cache.resetCache();
     * if (result.success) {
     *     this.logger.log(`Cache reset successful. Backup: ${result.backupPath}`);
     * } else {
     *     this.logger.error(`Cache reset failed: ${result.error}`);
     * }
     * ```
     */
    resetCache(): Promise<{
        success: boolean;
        backupPath?: string;
        timestamp: string;
        error?: string;
    }>;
    /**
     * Queries cached flags with filtering, full-text search, and sorting.
     * Builds parameterized SQL (SQL-injection safe) with environment-status
     * aggregation: each flag carries `env_status` (JSON map of
     * environment_key → boolean) and `env_status_agg` (comma-separated
     * fallback form).
     *
     * @param filters - Optional criteria:
     *   project_id, id, key, archived (default false), search (matches name,
     *   description, or key), environment, enabled (within that environment),
     *   sort ("column [ASC|DESC]", default "updated_time DESC"), limit, offset
     * @returns Promise resolving to matching flag records with environment status
     * @throws {McpError} When storage is not initialized
     * @throws {Error} When SQL query execution fails
     *
     * @example
     * ```typescript
     * // Flags containing "checkout" enabled in production
     * const flags = await cache.queryFlags({
     *     project_id: '12345',
     *     search: 'checkout',
     *     environment: 'production',
     *     enabled: true,
     *     limit: 50
     * });
     *
     * // All active flags, most recently updated first
     * const recentFlags = await cache.queryFlags({
     *     archived: false,
     *     sort: 'updated_time DESC',
     *     limit: 100
     * });
     * ```
     */
    queryFlags(filters?: {
        project_id?: string;
        id?: string;
        key?: string;
        archived?: boolean;
        search?: string;
        environment?: string;
        enabled?: boolean;
        sort?: string;
        limit?: number;
        offset?: number;
    }): Promise<any[]>;
    /**
     * Deserializes JSON fields on a database row according to its entity type.
     * @private
     */
    private deserializeEntityFields;
    /**
     * Reports synchronization status and cache statistics via a single
     * aggregated query: last sync timestamp, per-entity counts (projects,
     * flags, experiments, audiences, attributes, events), enabled
     * flag-environment pairs, and currently running experiments. Counts are
     * computed at query time for accuracy.
     *
     * @returns Promise resolving to a status object:
     * ```typescript
     * {
     *   last_full_sync: string | null;
     *   project_count: number;
     *   flag_count: number;
     *   experiment_count: number;
     *   audience_count: number;
     *   attribute_count: number;
     *   event_count: number;
     *   enabled_flag_instances_count: number;
     *   running_experiments_count: number;
     * }
     * ```
     * @throws {McpError} When storage is not initialized
     *
     * @example
     * ```typescript
     * const status = await cache.getSyncStatus();
     * this.logger.log(`Last sync: ${status.last_full_sync}`);
     * this.logger.log(`Cache contains ${status.flag_count} flags and ${status.experiment_count} experiments`);
     * this.logger.log(`${status.running_experiments_count} experiments are currently running`);
     * ```
     */
    getSyncStatus(): Promise<any>;
    /**
     * Batch INSERT OR REPLACE helper built on SQLiteEngine.runBatch() for a
     * 90%+ reduction in individual SQL operations. Handles response-envelope
     * extraction (result.data || result), JSON field serialization via
     * jsonFieldsByEntity mappings, pre-calculated timestamps for
     * CURRENT_TIMESTAMP compatibility, progress callbacks, and parameter
     * validation. Default batch size is 100.
     *
     * @throws {McpError} When the batch operation fails
     * @private
     */
    private batchInsert;
    /**
     * 🧪 Validation helper comparing database state after batch vs individual
     * operations: row counts per entity type, field-value integrity, data-type
     * consistency, JSON deserialization accuracy, and timestamp ranges. Used
     * for safety validation of the batch-optimization pilot.
     *
     * @throws {Error} When data-integrity issues are detected
     * @private
     */
    private validateBatchOptimizationResults;
}
//# sourceMappingURL=CacheManager.d.ts.map