@ooples/token-optimizer-mcp
Intelligent context window optimization for Claude Code: store content externally via caching and compression, freeing up your context window for what matters.
/**
* CacheCompression - Advanced Compression Strategies for Cache Optimization
*
* Implements 6 compression algorithms with adaptive selection, dictionary-based
* compression for repeated patterns, and delta compression for time-series data.
*
* Token Reduction Target: 89%+
*
* Operations:
* 1. compress - Compress cache data with algorithm selection
* 2. decompress - Decompress previously compressed data
* 3. analyze - Analyze compression effectiveness for data
* 4. optimize - Optimize compression settings for workload
* 5. benchmark - Benchmark all algorithms against test data
* 6. configure - Configure default compression strategy
*
* Algorithms:
* - gzip: Fast, general-purpose compression (Node.js built-in)
* - brotli: Better compression ratio, slower (Node.js built-in)
* - lz4: Very fast, lower ratio (requires lz4 package)
* - zstd: Good balance, adaptive (requires zstd-codec package)
* - snappy: Extremely fast, moderate ratio (requires snappy package)
* - custom: Domain-specific compression for structured data
*/
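/**
 * Quick-start sketch (illustrative, not taken from the package docs). It uses
 * the standalone `runCacheCompression` wrapper declared at the bottom of this
 * file; the import path assumes the package re-exports this module from its
 * root, and the payload is a hypothetical example.
 *
 * @example
 * ```ts
 * import { runCacheCompression } from '@ooples/token-optimizer-mcp';
 *
 * // Compress a JSON payload, letting the tool auto-select an algorithm.
 * const result = await runCacheCompression({
 *   operation: 'compress',
 *   data: { users: [{ id: 1, name: 'Ada' }] },
 *   dataType: 'json',
 * });
 * if (result.success) {
 *   console.log(result.metadata.algorithm, result.metadata.compressionRatio);
 * }
 * ```
 */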
import { CacheEngine } from '../../core/cache-engine.js';
import { TokenCounter } from '../../core/token-counter.js';
import { MetricsCollector } from '../../core/metrics.js';
/**
* Compression algorithm types
*/
export type CompressionAlgorithm = 'gzip' | 'brotli' | 'lz4' | 'zstd' | 'snappy' | 'custom';
/**
* Compression level (0-9, where 9 is maximum compression)
*/
export type CompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9;
/**
* Data type hints for adaptive compression
*/
export type DataType = 'json' | 'text' | 'binary' | 'time-series' | 'structured' | 'auto';
/**
* Compression operation types
*/
export type CompressionOperation = 'compress' | 'decompress' | 'analyze' | 'optimize' | 'benchmark' | 'configure';
/**
* Options for cache compression operations
*/
export interface CacheCompressionOptions {
operation: CompressionOperation;
data?: any;
algorithm?: CompressionAlgorithm;
level?: CompressionLevel;
dictionary?: Buffer;
dataType?: DataType;
sampleSize?: number;
includeMetrics?: boolean;
targetRatio?: number;
maxLatency?: number;
workloadType?: 'read-heavy' | 'write-heavy' | 'balanced';
algorithms?: CompressionAlgorithm[];
testData?: any;
iterations?: number;
defaultAlgorithm?: CompressionAlgorithm;
autoSelect?: boolean;
enableDelta?: boolean;
useCache?: boolean;
cacheTTL?: number;
}
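/**
 * Option-shape sketch for the two most common operations. Only `operation` is
 * required (see the MCP schema below); all other values here are illustrative.
 *
 * @example
 * ```ts
 * // Analyze compressibility before committing to an algorithm.
 * const analyzeOpts: CacheCompressionOptions = {
 *   operation: 'analyze',
 *   data: largePayload, // hypothetical variable
 *   dataType: 'auto',
 *   includeMetrics: true,
 * };
 *
 * // Explicit compression with a fixed algorithm and level.
 * const compressOpts: CacheCompressionOptions = {
 *   operation: 'compress',
 *   data: largePayload,
 *   algorithm: 'brotli',
 *   level: 6,
 * };
 * ```
 */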
/**
* Compression analysis results
*/
export interface CompressionAnalysis {
dataType: DataType;
originalSize: number;
estimatedCompressedSize: number;
estimatedRatio: number;
recommendedAlgorithm: CompressionAlgorithm;
recommendedLevel: CompressionLevel;
characteristics: {
entropy: number;
repetition: number;
compressibility: number;
patterns: string[];
};
timeSeries?: {
isDelta: boolean;
deltaSize: number;
temporalPatterns: string[];
};
}
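/**
 * Time-series sketch: series with small numeric deltas are candidates for
 * delta encoding before general compression. The record shape here is assumed
 * for illustration; the tool's own detection lives in `isTimeSeries` below.
 *
 * @example
 * ```ts
 * const series = [
 *   { t: 1700000000, v: 42.0 },
 *   { t: 1700000060, v: 42.1 },
 * ];
 * const res = await runCacheCompression({
 *   operation: 'analyze',
 *   data: series,
 *   dataType: 'time-series',
 * });
 * // res.data.analysis?.timeSeries?.isDelta signals delta applicability.
 * ```
 */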
/**
* Compression recommendation
*/
export interface CompressionRecommendation {
algorithm: CompressionAlgorithm;
level: CompressionLevel;
expectedRatio: number;
expectedLatency: number;
useDictionary: boolean;
useDelta: boolean;
reasoning: string;
}
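/**
 * Optimization sketch: request recommendations under a latency budget. The
 * target ratio and latency values are illustrative, and the ratio is read as
 * compressed/original (smaller is better), which is an assumption.
 *
 * @example
 * ```ts
 * const opt = await runCacheCompression({
 *   operation: 'optimize',
 *   workloadType: 'read-heavy',
 *   targetRatio: 0.3,
 *   maxLatency: 20,
 * });
 * for (const rec of opt.data.recommendations ?? []) {
 *   console.log(rec.algorithm, rec.level, rec.reasoning);
 * }
 * ```
 */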
/**
* Benchmark result for a single algorithm
*/
export interface BenchmarkResult {
algorithm: CompressionAlgorithm;
level: CompressionLevel;
originalSize: number;
compressedSize: number;
compressionRatio: number;
compressionTime: number;
decompressionTime: number;
throughput: {
compression: number;
decompression: number;
};
memoryUsage: {
compression: number;
decompression: number;
};
}
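/**
 * Benchmark sketch: compare algorithms against a representative payload and
 * pick the best ratio under a 50 ms compression budget. Assumes
 * `compressionRatio` is compressed/original, so smaller is better.
 *
 * @example
 * ```ts
 * const bench = await runCacheCompression({
 *   operation: 'benchmark',
 *   testData: representativePayload, // hypothetical variable
 *   algorithms: ['gzip', 'brotli'],
 *   iterations: 10,
 * });
 * const best = bench.data.benchmarkResults
 *   ?.filter((r) => r.compressionTime <= 50)
 *   .sort((a, b) => a.compressionRatio - b.compressionRatio)[0];
 * ```
 */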
/**
* Compression configuration
*/
export interface CompressionConfig {
defaultAlgorithm: CompressionAlgorithm;
defaultLevel: CompressionLevel;
autoSelect: boolean;
enableDelta: boolean;
dictionary?: Buffer;
algorithmOverrides: Map<DataType, CompressionAlgorithm>;
}
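/**
 * Configuration sketch: set a default algorithm and turn on auto-selection and
 * delta compression. These values are illustrative, not package defaults.
 *
 * @example
 * ```ts
 * await runCacheCompression({
 *   operation: 'configure',
 *   defaultAlgorithm: 'zstd',
 *   autoSelect: true,
 *   enableDelta: true,
 * });
 * ```
 */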
/**
* Compression operation result
*/
export interface CacheCompressionResult {
success: boolean;
operation: CompressionOperation;
data: {
compressed?: Buffer;
decompressed?: any;
analysis?: CompressionAnalysis;
recommendations?: CompressionRecommendation[];
benchmarkResults?: BenchmarkResult[];
configuration?: CompressionConfig;
};
metadata: {
tokensUsed: number;
tokensSaved: number;
cacheHit: boolean;
executionTime: number;
compressionRatio?: number;
algorithm?: CompressionAlgorithm;
level?: CompressionLevel;
};
}
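/**
 * Result-handling sketch: every operation returns this envelope, with the
 * operation-specific payload under `data` and cost accounting under
 * `metadata`. `doc` is a hypothetical payload.
 *
 * @example
 * ```ts
 * const res = await runCacheCompression({ operation: 'compress', data: doc });
 * if (res.success && res.data.compressed) {
 *   console.log(
 *     `saved ${res.metadata.tokensSaved} tokens in ` +
 *       `${res.metadata.executionTime} ms via ${res.metadata.algorithm}`
 *   );
 * }
 * ```
 */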
/**
* Cache Compression Tool - Advanced compression strategies
*/
export declare class CacheCompressionTool {
private cache;
private tokenCounter;
private metrics;
private config;
private lz4Module;
private zstdModule;
private snappyModule;
private packagesLoaded;
constructor(cache: CacheEngine, tokenCounter: TokenCounter, metrics: MetricsCollector);
/**
* Lazy load compression packages
*/
private loadPackages;
/**
* Main entry point for compression operations
*/
run(options: CacheCompressionOptions): Promise<CacheCompressionResult>;
/**
* Compress data using specified or auto-selected algorithm
*/
private compress;
/**
* Decompress data
*/
private decompress;
/**
* Analyze data compressibility and recommend algorithm
*/
private analyze;
/**
* Optimize compression settings for workload
*/
private optimize;
/**
* Benchmark compression algorithms
*/
private benchmark;
/**
* Configure compression settings
*/
private configure;
/**
* Compress using specific algorithm
*/
private compressWithAlgorithm;
/**
* Decompress using specific algorithm
*/
private decompressWithAlgorithm;
/**
* Custom compression for structured data
*/
private customCompress;
/**
* Custom decompression for structured data
*/
private customDecompress;
/**
* Build compression dictionary from object
*/
private buildDictionary;
/**
* Compress object using dictionary
*/
private compressWithDictionary;
/**
* Decompress object using dictionary
*/
private decompressWithDictionary;
/**
* Apply delta compression for time-series data
*/
private applyDeltaCompression;
/**
* Calculate Shannon entropy
*/
private calculateEntropy;
/**
* Calculate repetition score
*/
private calculateRepetition;
/**
* Detect common patterns in data
*/
private detectPatterns;
/**
* Detect data type from content
*/
private detectDataType;
/**
* Check if data is time-series
*/
private isTimeSeries;
/**
* Convert data to buffer
*/
private toBuffer;
/**
* Generate test data for benchmarking
*/
private generateTestData;
/**
* Benchmark a specific algorithm
*/
private benchmarkAlgorithm;
}
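/**
 * Direct-instantiation sketch. The three dependencies come from this package's
 * core modules; zero-argument constructors are assumed here and may not match
 * the actual implementations.
 *
 * @example
 * ```ts
 * const tool = new CacheCompressionTool(
 *   new CacheEngine(),
 *   new TokenCounter(),
 *   new MetricsCollector(),
 * );
 * const out = await tool.run({ operation: 'analyze', data: payload });
 * ```
 */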
/**
* MCP Tool Definition
*/
export declare const CACHE_COMPRESSION_TOOL_DEFINITION: {
readonly name: "cache_compression";
readonly description: "Advanced compression strategies for cache optimization with 89%+ token reduction. Supports 6 algorithms (gzip, brotli, lz4, zstd, snappy, custom), adaptive selection, dictionary-based compression, and delta compression for time-series data.";
readonly inputSchema: {
readonly type: "object";
readonly properties: {
readonly operation: {
readonly type: "string";
readonly enum: readonly ["compress", "decompress", "analyze", "optimize", "benchmark", "configure"];
readonly description: "Compression operation to perform";
};
readonly data: {
readonly description: "Data to compress/decompress/analyze";
};
readonly algorithm: {
readonly type: "string";
readonly enum: readonly ["gzip", "brotli", "lz4", "zstd", "snappy", "custom"];
readonly description: "Compression algorithm (auto-selected if not specified)";
};
readonly level: {
readonly type: "number";
readonly minimum: 0;
readonly maximum: 9;
readonly description: "Compression level (0-9, higher = better compression)";
};
readonly dataType: {
readonly type: "string";
readonly enum: readonly ["json", "text", "binary", "time-series", "structured", "auto"];
readonly description: "Data type hint for adaptive compression";
};
readonly targetRatio: {
readonly type: "number";
readonly minimum: 0;
readonly maximum: 1;
readonly description: "Target compression ratio for optimize operation (0-1)";
};
readonly maxLatency: {
readonly type: "number";
readonly description: "Maximum acceptable latency in milliseconds";
};
readonly workloadType: {
readonly type: "string";
readonly enum: readonly ["read-heavy", "write-heavy", "balanced"];
readonly description: "Workload type for optimization";
};
readonly algorithms: {
readonly type: "array";
readonly items: {
readonly type: "string";
readonly enum: readonly ["gzip", "brotli", "lz4", "zstd", "snappy"];
};
readonly description: "Algorithms to benchmark";
};
readonly iterations: {
readonly type: "number";
readonly description: "Number of benchmark iterations";
};
readonly defaultAlgorithm: {
readonly type: "string";
readonly enum: readonly ["gzip", "brotli", "lz4", "zstd", "snappy", "custom"];
readonly description: "Default algorithm for configure operation";
};
readonly autoSelect: {
readonly type: "boolean";
readonly description: "Enable auto-selection of algorithm based on data type";
};
readonly enableDelta: {
readonly type: "boolean";
readonly description: "Enable delta compression for time-series data";
};
readonly useCache: {
readonly type: "boolean";
readonly description: "Enable caching of analysis/benchmark results";
readonly default: true;
};
readonly cacheTTL: {
readonly type: "number";
readonly description: "Cache TTL in seconds";
readonly default: 3600;
};
};
readonly required: readonly ["operation"];
};
};
export declare function runCacheCompression(options: CacheCompressionOptions): Promise<CacheCompressionResult>;
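/**
 * Roundtrip sketch using the convenience wrapper. This assumes the compressed
 * buffer is not self-describing, so the algorithm must be passed back
 * explicitly on decompress.
 *
 * @example
 * ```ts
 * const packed = await runCacheCompression({
 *   operation: 'compress',
 *   data: '...large text...',
 *   algorithm: 'gzip',
 *   level: 6,
 * });
 * const unpacked = await runCacheCompression({
 *   operation: 'decompress',
 *   data: packed.data.compressed,
 *   algorithm: 'gzip',
 * });
 * ```
 */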
//# sourceMappingURL=cache-compression.d.ts.map