datapilot-cli
Enterprise-grade streaming multi-format data analysis with comprehensive statistical insights and intelligent relationship detection - supports CSV, JSON, Excel, TSV, Parquet - memory-efficient, cross-platform
/**
* Ultra-Advanced Result Caching System for DataPilot Sequential Execution Engine
*
* Ultra-Hard Challenges Solved:
* 1. Cache Key Generation: Unique, reliable keys for datasets/options combinations with edge case handling
* 2. Memory Efficiency: Smart LRU eviction, memory monitoring, and pressure-aware cleanup
* 3. Cache Invalidation: File changes, option changes, version changes, dependency tracking
* 4. Thread Safety: Concurrent executions without cache corruption using locks and atomic operations
* 5. Integration Complexity: Seamless integration with SequentialExecutor and DependencyResolver
*
* Performance Features:
* - Multi-level caching (file, section, result)
* - Sub-second cache lookups with memory pressure awareness
* - Support for 100MB+ datasets and 10MB+ section results
* - Automatic memory management under different system constraints
* - Optional cache persistence across CLI invocations
*/
import { EventEmitter } from 'events';
import type { CLIOptions } from './types';
import type { LogContext } from '../utils/logger';
/**
* Cache statistics for monitoring and optimization
*/
interface CacheStats {
totalEntries: number;
totalSizeBytes: number;
hitCount: number;
missCount: number;
evictionCount: number;
cleanupCount: number;
averageAccessTime: number;
memoryPressureLevel: 'low' | 'medium' | 'high' | 'critical';
oldestEntry?: Date;
newestEntry?: Date;
}
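/**
* Illustrative sketch (not part of the declared API): deriving a hit rate and a
* short summary from a CacheStats snapshot. The field names come from the
* interface above; the helper function itself is hypothetical.
*
* @example
*   function summarizeStats(stats: CacheStats): string {
*     const lookups = stats.hitCount + stats.missCount;
*     const hitRate = lookups > 0 ? stats.hitCount / lookups : 0;
*     return `${stats.totalEntries} entries, ` +
*       `${(stats.totalSizeBytes / 1024).toFixed(1)} KiB, ` +
*       `hit rate ${(hitRate * 100).toFixed(1)}%, ` +
*       `pressure ${stats.memoryPressureLevel}`;
*   }
*/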
/**
* Ultra-Advanced Result Cache with multi-level caching and memory management
*/
export declare class ResultCache extends EventEmitter {
private cache;
private lruOrder;
private checksumCache;
private integrityCache;
private accessTimeMeasurements;
private lastCleanup;
private isCleaningUp;
private persistentCacheEnabled;
private cacheDir;
private memoryLimit;
private context;
private stats;
private readonly lockMap;
private readonly operationQueue;
constructor(options?: CLIOptions, context?: LogContext, persistentCache?: boolean);
/**
* Generate unique, reliable cache key for dataset/options combination
* Handles edge cases like option ordering, nested objects, and special values
*/
private generateCacheKey;
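/**
* A minimal sketch of how such a key could be derived (the actual private
* implementation is not visible in this declaration file): options are
* normalised with recursively sorted keys so property order cannot change the
* key, then hashed together with the file path and section name.
*
* @example
*   import { createHash } from 'crypto';
*   const stableStringify = (value: unknown): string =>
*     JSON.stringify(value, (_key, val) =>
*       val && typeof val === 'object' && !Array.isArray(val)
*         ? Object.keys(val).sort().reduce((acc, k) => ({ ...acc, [k]: val[k] }), {})
*         : val);
*   const key = createHash('sha256')
*     .update(`${filePath}::${sectionName ?? 'all'}::${stableStringify(options)}`)
*     .digest('hex');
*/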
/**
* Fast file integrity checking with checksum caching
*/
private getFileIntegrity;
/**
* Compute fast checksum for large files using sampling
*/
private computeFastChecksum;
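/**
* One possible sampling strategy (assumed; the real private implementation is
* not shown here): rather than hashing a 100MB+ file end to end, hash a few
* fixed-size windows plus the file size, trading exactness for speed.
*
* @example
*   import { createHash } from 'crypto';
*   import { open, stat } from 'fs/promises';
*   async function sampledChecksum(path: string, window = 64 * 1024): Promise<string> {
*     const { size } = await stat(path);
*     const fh = await open(path, 'r');
*     try {
*       const hash = createHash('sha256').update(String(size));
*       for (const offset of [0, Math.floor(size / 2), Math.max(0, size - window)]) {
*         const buf = Buffer.alloc(Math.min(window, size));
*         const { bytesRead } = await fh.read(buf, 0, buf.length, offset);
*         hash.update(buf.subarray(0, bytesRead));
*       }
*       return hash.digest('hex');
*     } finally {
*       await fh.close();
*     }
*   }
*/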
/**
* Thread-safe cache retrieval with atomic operations
*/
get<T = any>(filePath: string, sectionName: string | null, options: CLIOptions, dependencies?: string[]): Promise<T | null>;
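/**
* @example
*   // Illustrative lookup (assumes a ResultCache instance named `cache` and a
*   // CLIOptions value named `options`; the section name is hypothetical):
*   const hit = await cache.get<Record<string, unknown>>('/data/sales.csv', 'section1', options);
*   if (hit === null) {
*     // cache miss – compute the section result, then store it with set()
*   }
*/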
/**
* Thread-safe cache storage with memory management
*/
set<T = any>(filePath: string, sectionName: string | null, options: CLIOptions, data: T, dependencies?: string[], ttl?: number): Promise<void>;
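/**
* @example
*   // Illustrative store (assumes the same `cache` and `options` as above; the
*   // dependency name is hypothetical and the TTL is taken to be in milliseconds):
*   await cache.set('/data/sales.csv', 'section2', options, section2Result, ['section1'], 60 * 60 * 1000);
*/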
/**
* Validate cache entry for freshness and integrity
*/
private validateCacheEntry;
/**
* Invalidate cache entries based on file changes or dependency changes
*/
invalidateByFile(filePath: string): Promise<void>;
/**
* Invalidate dependent cache entries when a section result changes
*/
invalidateDependents(sectionName: string): Promise<void>;
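/**
* @example
*   // Illustrative invalidation flow: drop everything cached for a file that was
*   // rewritten, then cascade to results that depended on its 'section1' output
*   // (the file path and section name are hypothetical):
*   await cache.invalidateByFile('/data/sales.csv');
*   await cache.invalidateDependents('section1');
*/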
/**
* Invalidate specific cache entry
*/
private invalidateEntry;
/**
* Memory-aware LRU eviction with smart cleanup
*/
private performMemoryPressureCleanup;
/**
* Get eviction candidates sorted by priority
*/
private getEvictionCandidates;
/**
* Calculate eviction priority score (higher = more likely to evict)
*/
private calculateEvictionScore;
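/**
* A sketch of one plausible scoring heuristic (assumed; the private
* implementation is not visible here): older, larger, rarely accessed entries
* score higher and are evicted first.
*
* @example
*   function evictionScore(ageMs: number, sizeBytes: number, accessCount: number): number {
*     const ageScore = ageMs / (60 * 60 * 1000);   // hours since creation
*     const sizeScore = sizeBytes / (1024 * 1024); // size in MiB
*     const useScore = 1 / (1 + accessCount);      // penalise cold entries
*     return ageScore + sizeScore + 10 * useScore;
*   }
*/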
/**
* Check if cache should reject entry due to memory pressure
*/
private shouldRejectDueToMemoryPressure;
/**
* Calculate approximate size of data in bytes
*/
private calculateDataSize;
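/**
* A minimal sketch of an approximate size estimate (assumed): serialise the
* value and measure its UTF-8 byte length, which is cheap but only an
* approximation of the in-memory footprint.
*
* @example
*   const approxBytes = Buffer.byteLength(JSON.stringify(data), 'utf8');
*/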
/**
* Extract options that affect cache validity
*/
private extractCacheableOptions;
/**
* Thread-safe lock acquisition
*/
private acquireLock;
/**
* Thread-safe lock release
*/
private releaseLock;
/**
* Update LRU order - most recent first
*/
private updateLRU;
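/**
* Sketch of the usual move-to-front bookkeeping (assumed), keeping the most
* recently used key at index 0 of the order array.
*
* @example
*   const idx = lruOrder.indexOf(key);
*   if (idx !== -1) lruOrder.splice(idx, 1);
*   lruOrder.unshift(key);
*/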
/**
* Remove from LRU order
*/
private removeLRU;
/**
* Record access time for performance monitoring
*/
private recordAccessTime;
/**
* Setup automatic cleanup intervals
*/
private setupAutomaticCleanup;
/**
* Check if cleanup is needed and perform it
*/
private checkAndPerformCleanup;
/**
* Persistent cache operations
*/
private persistEntry;
/**
* Remove persistent cache entry
*/
private removePersistentEntry;
/**
* Get cache statistics
*/
getStats(): CacheStats;
/**
* Clear all cache entries
*/
clear(): Promise<void>;
/**
* Get file basename for key generation
*/
private getFileBasename;
/**
* Dispose of cache resources
*/
dispose(): Promise<void>;
/**
* Get detailed cache information for debugging
*/
getCacheInfo(): {
entries: Array<{
key: string;
size: number;
age: number;
accessCount: number;
dependencies: string[];
filePath: string;
sectionName?: string;
}>;
memoryUsage: {
totalSizeBytes: number;
averageEntrySize: number;
largestEntry: number;
};
performance: {
hitRate: number;
averageAccessTime: number;
memoryPressureLevel: string;
};
};
}
/**
* Factory function to create a configured result cache
*/
export declare function createResultCache(options?: CLIOptions, context?: LogContext, persistentCache?: boolean): ResultCache;
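/**
* Illustrative end-to-end usage of the factory (assumes `options` and `context`
* come from the CLI layer; the file path, section name, and compute step are
* hypothetical):
*
* @example
*   const cache = createResultCache(options, context, true);
*   const cached = await cache.get('/data/sales.csv', 'section1', options);
*   if (cached === null) {
*     const result = await computeSection1(); // caller-supplied work
*     await cache.set('/data/sales.csv', 'section1', options, result);
*   }
*   console.log(cache.getStats().memoryPressureLevel);
*   await cache.dispose();
*/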
/**
* Cache-aware wrapper for the SequentialExecutor
* Provides transparent caching integration with the execution engine
*/
export declare class CachedSequentialExecutor {
private cache;
private originalExecutor;
constructor(executor: any, cacheOptions?: CLIOptions, context?: LogContext);
/**
* Execute with intelligent caching
*/
execute(requestedSections: string[]): Promise<any>;
/**
* Get cache statistics
*/
getCacheStats(): CacheStats;
/**
* Clear cache
*/
clearCache(): Promise<void>;
}
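/**
* Illustrative wrapper usage (the wrapped executor instance and section names
* are assumptions; this file only declares the wrapper's public surface):
*
* @example
*   const cachedExecutor = new CachedSequentialExecutor(sequentialExecutor, options, context);
*   const report = await cachedExecutor.execute(['section1', 'section2', 'section3']);
*   console.log(cachedExecutor.getCacheStats());
*   await cachedExecutor.clearCache();
*/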
export {};
//# sourceMappingURL=result-cache.d.ts.map