fortify2-js

MOST POWERFUL JavaScript Security Library! Military-grade cryptography + 19 enhanced object methods + quantum-resistant algorithms + perfect TypeScript support. More powerful than Lodash with built-in security.

smart-cache.js · 307 lines (303 loc) · 9.93 kB
'use strict';

var index = require('../memory/index.js');

/**
 * FortifyJS - Smart Cache System
 * Advanced caching with LRU, LFU, adaptive strategies and intelligent management
 */
class SmartCache {
    constructor(config = {}) {
        this.cache = new Map();
        this.accessOrder = [];
        this.frequencyMap = new Map();
        this.stats = {
            hits: 0,
            misses: 0,
            evictions: 0,
            totalSize: 0,
            compressionRatio: 0,
        };
        this.config = {
            strategy: "adaptive",
            maxSize: 1000,
            ttl: 300000, // 5 minutes
            autoCleanup: true,
            compressionEnabled: false,
            persistToDisk: false,
            ...config,
        };
        if (this.config.autoCleanup) {
            this.setupAutoCleanup();
        }
    }
    /**
     * Get value from cache with smart access tracking
     */
    get(key) {
        const entry = this.cache.get(key);
        if (!entry) {
            this.stats.misses++;
            return null;
        }
        // Check TTL expiration
        if (this.isExpired(entry)) {
            this.delete(key);
            this.stats.misses++;
            return null;
        }
        // Update access patterns
        this.updateAccessPattern(key, entry);
        this.stats.hits++;
        return entry.result;
    }
    /**
     * Set value in cache with intelligent eviction
     */
    set(key, value, ttl) {
        // Check if we need to evict entries
        if (this.cache.size >= this.config.maxSize) {
            this.evictEntries();
        }
        const entry = {
            result: value,
            timestamp: Date.now(),
            accessCount: 1,
            lastAccessed: new Date(),
            ttl: ttl || this.config.ttl,
            priority: this.calculatePriority(key, value),
            size: this.estimateSize(value),
            frequency: 1,
        };
        this.cache.set(key, entry);
        this.updateFrequency(key);
        this.updateAccessOrder(key);
        this.updateStats();
    }
    /**
     * Delete entry from cache
     */
    delete(key) {
        const deleted = this.cache.delete(key);
        if (deleted) {
            this.removeFromAccessOrder(key);
            this.frequencyMap.delete(key);
            this.updateStats();
        }
        return deleted;
    }
    /**
     * Clear all cache entries
     */
    clear() {
        this.cache.clear();
        this.accessOrder = [];
        this.frequencyMap.clear();
        this.updateStats();
    }
    /**
     * Get cache statistics
     */
    getStats() {
        const hitRate = this.stats.hits / (this.stats.hits + this.stats.misses) || 0;
        return {
            ...this.stats,
            hitRate,
            size: this.cache.size,
            maxSize: this.config.maxSize,
            strategy: this.config.strategy,
            averageAccessCount: this.getAverageAccessCount(),
            memoryUsage: this.estimateMemoryUsage(),
        };
    }
    /**
     * Intelligent cache warming based on patterns
     */
    warmCache(patterns) {
        // Sort by priority and warm most important entries first
        patterns
            .sort((a, b) => b.priority - a.priority)
            .slice(0, Math.floor(this.config.maxSize * 0.3)) // Warm up to 30% of cache
            .forEach(({ key, value }) => {
                this.set(key, value);
            });
    }
    /**
     * Predictive cache preloading
     */
    preloadPredictedEntries(predictions) {
        const highProbabilityPredictions = predictions
            .filter((p) => p.probability > 0.7)
            .slice(0, Math.floor(this.config.maxSize * 0.1)); // Preload up to 10% of cache
        // This would typically trigger async loading of predicted entries
        // For now, we mark them as high priority for future caching
        highProbabilityPredictions.forEach(({ key }) => {
            // Mark for priority caching when the value becomes available
            this.markForPriorityCache(key);
        });
    }
    /**
     * Adaptive strategy adjustment based on performance
     */
    adaptStrategy(metrics) {
        if (this.config.strategy !== "adaptive") return;
        const hitRate = this.stats.hits / (this.stats.hits + this.stats.misses) || 0;
        if (hitRate < 0.3) {
            // Low hit rate, switch to LFU to keep frequently used items
            this.config.strategy = "lfu";
        } else if (hitRate > 0.8 && metrics.memoryUsage > 0.8) {
            // High hit rate but memory pressure, switch to LRU for better memory management
            this.config.strategy = "lru";
        } else {
            // Balanced performance, use adaptive strategy
            this.config.strategy = "adaptive";
        }
    }
    /**
     * Memory pressure handling
     */
    handleMemoryPressure(pressureLevel) {
        switch (pressureLevel) {
            case "high":
                // Aggressive cleanup - remove 50% of cache
                this.evictEntries(Math.floor(this.cache.size * 0.5));
                break;
            case "medium":
                // Moderate cleanup - remove 25% of cache
                this.evictEntries(Math.floor(this.cache.size * 0.25));
                break;
            case "low":
                // Light cleanup - remove expired entries only
                this.cleanupExpiredEntries();
                break;
        }
    }
    /**
     * Private helper methods
     */
    isExpired(entry) {
        if (!entry.ttl) return false;
        return Date.now() - entry.timestamp > entry.ttl;
    }
    updateAccessPattern(key, entry) {
        entry.accessCount++;
        entry.lastAccessed = new Date();
        this.updateFrequency(key);
        this.updateAccessOrder(key);
    }
    updateFrequency(key) {
        const current = this.frequencyMap.get(key) || 0;
        this.frequencyMap.set(key, current + 1);
    }
    updateAccessOrder(key) {
        // Remove from current position and add to end (most recent)
        this.removeFromAccessOrder(key);
        this.accessOrder.push(key);
    }
    removeFromAccessOrder(key) {
        const index = this.accessOrder.indexOf(key);
        if (index > -1) {
            this.accessOrder.splice(index, 1);
        }
    }
    calculatePriority(key, value) {
        // Calculate priority based on various factors
        const frequency = this.frequencyMap.get(key) || 0;
        const size = this.estimateSize(value);
        const recency = 1; // New entries get base recency score
        // Higher frequency and recency increase priority, larger size decreases it
        return (((frequency * 0.4 + recency * 0.4) / Math.max(size / 1000, 1)) * 0.2);
    }
    estimateSize(value) {
        try {
            return JSON.stringify(value).length * 2; // Rough estimate
        } catch {
            return 1000; // Default size for non-serializable objects
        }
    }
    evictEntries(count) {
        const entriesToEvict = count || Math.max(1, Math.floor(this.config.maxSize * 0.1));
        switch (this.config.strategy) {
            case "lru":
                this.evictLRU(entriesToEvict);
                break;
            case "lfu":
                this.evictLFU(entriesToEvict);
                break;
            case "adaptive":
                this.evictAdaptive(entriesToEvict);
                break;
        }
        this.stats.evictions += entriesToEvict;
    }
    evictLRU(count) {
        // Remove least recently used entries
        const toRemove = this.accessOrder.slice(0, count);
        toRemove.forEach((key) => this.delete(key));
    }
    evictLFU(count) {
        // Remove least frequently used entries
        const entries = Array.from(this.cache.entries())
            .sort((a, b) => (a[1].frequency || 0) - (b[1].frequency || 0))
            .slice(0, count);
        entries.forEach(([key]) => this.delete(key));
    }
    evictAdaptive(count) {
        // Adaptive eviction based on priority score
        const entries = Array.from(this.cache.entries())
            .sort((a, b) => (a[1].priority || 0) - (b[1].priority || 0))
            .slice(0, count);
        entries.forEach(([key]) => this.delete(key));
    }
    cleanupExpiredEntries() {
        for (const [key, entry] of this.cache.entries()) {
            if (this.isExpired(entry)) {
                this.delete(key);
            }
        }
    }
    setupAutoCleanup() {
        this.cleanupInterval = setInterval(() => {
            this.cleanupExpiredEntries();
            // Adaptive cleanup based on memory pressure
            const memoryStats = index.memoryManager.getStats();
            const memoryPressure = memoryStats.pressure; // 0-1 scale
            if (memoryPressure > 0.8) {
                this.handleMemoryPressure("high");
            } else if (memoryPressure > 0.6) {
                this.handleMemoryPressure("medium");
            }
        }, 60000); // Check every minute
    }
    updateStats() {
        this.stats.totalSize = this.cache.size;
    }
    getAverageAccessCount() {
        if (this.cache.size === 0) return 0;
        const total = Array.from(this.cache.values()).reduce((sum, entry) => sum + entry.accessCount, 0);
        return total / this.cache.size;
    }
    estimateMemoryUsage() {
        return Array.from(this.cache.values()).reduce((total, entry) => total + (entry.size || 0), 0);
    }
    markForPriorityCache(key) {
        // Implementation for marking entries for priority caching
        // This could be used by predictive systems
    }
    /**
     * Cleanup resources
     */
    destroy() {
        if (this.cleanupInterval) {
            clearInterval(this.cleanupInterval);
        }
        this.clear();
    }
}

exports.SmartCache = SmartCache;
//# sourceMappingURL=smart-cache.js.map
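
For orientation, here is a minimal usage sketch exercising the SmartCache API defined in the file above. It is not part of the shipped file: the import path is an assumption (the package may re-export SmartCache from its root or under a different dist path), and the option values are illustrative only.

// Usage sketch (assumed import path; adjust to the package's actual export)
const { SmartCache } = require('fortify2-js');

const cache = new SmartCache({
    strategy: "adaptive",  // "lru" | "lfu" | "adaptive", as handled by evictEntries()
    maxSize: 500,
    ttl: 60000,            // default per-entry TTL: 1 minute
    autoCleanup: true,     // starts the 60-second expiry / memory-pressure sweep
});

cache.set("user:42", { name: "Ada" });            // uses the default TTL
cache.set("session:9", { token: "abc123" }, 5000); // per-entry TTL of 5 seconds

console.log(cache.get("user:42"));  // { name: "Ada" }, counted as a hit
console.log(cache.get("missing"));  // null, counted as a miss

console.log(cache.getStats());      // { hits, misses, hitRate, size, strategy, ... }

cache.handleMemoryPressure("medium"); // evicts roughly 25% of current entries on demand

cache.destroy(); // clears the cleanup interval and empties the cache

Note that warmCache() expects an array of { key, value, priority } objects and only fills up to 30% of maxSize, and preloadPredictedEntries() currently only flags high-probability keys via markForPriorityCache(), which is a stub in this build.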