vibe-coder-mcp
Version:
Production-ready MCP server with complete agent integration, multi-transport support, and comprehensive development automation tools for AI-assisted workflows.
235 lines (234 loc) • 9.61 kB
JavaScript
// Node built-ins: zlib for gzip compression, crypto for SHA-256 checksums.
import { promisify } from 'util';
import { gzip, gunzip } from 'zlib';
import { createHash } from 'crypto';
// Project structured logger; called object-first: logger.info(ctx, message).
import logger from '../../../logger.js';
// Promise-returning wrappers around zlib's callback-style gzip/gunzip.
const gzipAsync = promisify(gzip);
const gunzipAsync = promisify(gunzip);
/**
 * Compresses, decompresses, and pre-optimizes serialized context packages.
 *
 * All members are static. A package is JSON-serialized, gzip-compressed, and
 * protected by a SHA-256 checksum of the compressed bytes; the checksum is
 * verified before any decompression is attempted.
 */
export class PackageCompressor {
    /** Default gzip level (1 = fastest … 9 = smallest output). */
    static DEFAULT_COMPRESSION_LEVEL = 6;
    /** Largest serialized package (in bytes) we will attempt to compress: 50 MB. */
    static MAX_COMPRESSION_SIZE = 50 * 1024 * 1024;
    /**
     * Compress a context package with gzip.
     *
     * @param {object} contextPackage - Package to compress; must be JSON-serializable
     *   and carry an `id` used for logging.
     * @param {number} [compressionLevel] - gzip level, defaults to DEFAULT_COMPRESSION_LEVEL.
     * @returns {Promise<{compressedData: Buffer, metadata: object}>} Compressed bytes plus
     *   size/ratio/timing metadata and a SHA-256 checksum of the compressed data.
     * @throws {Error} If the serialized package exceeds MAX_COMPRESSION_SIZE, or if
     *   serialization/gzip fails (the original error is logged and rethrown).
     */
    static async compressPackage(contextPackage, compressionLevel = this.DEFAULT_COMPRESSION_LEVEL) {
        const startTime = Date.now();
        try {
            logger.info({
                packageId: contextPackage.id,
                compressionLevel
            }, 'Starting package compression');
            const jsonString = JSON.stringify(contextPackage);
            // byteLength (not .length) so multi-byte UTF-8 counts correctly.
            const originalSize = Buffer.byteLength(jsonString, 'utf8');
            if (originalSize > this.MAX_COMPRESSION_SIZE) {
                throw new Error(`Package too large for compression: ${originalSize} bytes > ${this.MAX_COMPRESSION_SIZE} bytes`);
            }
            // memLevel 8 is zlib's default; spelled out explicitly for clarity.
            const compressedData = await gzipAsync(jsonString, {
                level: compressionLevel,
                memLevel: 8
            });
            const compressedSize = compressedData.length;
            const compressionRatio = compressedSize / originalSize;
            const compressionTimeMs = Date.now() - startTime;
            // Checksum covers the *compressed* bytes; verified in decompressPackage.
            const checksum = this.calculateChecksum(compressedData);
            const metadata = {
                originalSize,
                compressedSize,
                compressionRatio,
                algorithm: 'gzip',
                compressionLevel,
                compressionTimeMs,
                checksum
            };
            logger.info({
                packageId: contextPackage.id,
                originalSize,
                compressedSize,
                compressionRatio: (compressionRatio * 100).toFixed(1) + '%',
                compressionTimeMs
            }, 'Package compression completed');
            return {
                compressedData,
                metadata
            };
        }
        catch (error) {
            logger.error({
                packageId: contextPackage.id,
                error: error instanceof Error ? error.message : 'Unknown error',
                compressionTimeMs: Date.now() - startTime
            }, 'Package compression failed');
            throw error;
        }
    }
    /**
     * Decompress a previously compressed package, verifying integrity first.
     *
     * @param {{compressedData: Buffer, metadata: object}} compressedPackage - Output of compressPackage.
     * @returns {Promise<object>} The parsed context package.
     * @throws {Error} On checksum mismatch, unsupported algorithm, or gunzip/JSON failure.
     */
    static async decompressPackage(compressedPackage) {
        const startTime = Date.now();
        try {
            logger.info({
                algorithm: compressedPackage.metadata.algorithm,
                compressedSize: compressedPackage.metadata.compressedSize
            }, 'Starting package decompression');
            // Verify integrity before spending time on decompression.
            const calculatedChecksum = this.calculateChecksum(compressedPackage.compressedData);
            if (calculatedChecksum !== compressedPackage.metadata.checksum) {
                throw new Error('Checksum mismatch - compressed data may be corrupted');
            }
            let decompressedBuffer;
            switch (compressedPackage.metadata.algorithm) {
                case 'gzip':
                    decompressedBuffer = await gunzipAsync(compressedPackage.compressedData);
                    break;
                default:
                    throw new Error(`Unsupported compression algorithm: ${compressedPackage.metadata.algorithm}`);
            }
            const jsonString = decompressedBuffer.toString('utf8');
            const contextPackage = JSON.parse(jsonString);
            const decompressionTimeMs = Date.now() - startTime;
            logger.info({
                packageId: contextPackage.id,
                originalSize: compressedPackage.metadata.originalSize,
                decompressionTimeMs
            }, 'Package decompression completed');
            return contextPackage;
        }
        catch (error) {
            logger.error({
                error: error instanceof Error ? error.message : 'Unknown error',
                decompressionTimeMs: Date.now() - startTime
            }, 'Package decompression failed');
            throw error;
        }
    }
    /**
     * Heuristically estimate the compression ratio (compressed/original) for a package
     * without actually compressing it.
     *
     * @param {object} contextPackage - Package with a `files` array of `{file: {content}}` entries.
     * @returns {number} Estimated ratio, clamped to [0.1, 0.9]; 0.5 on any failure.
     */
    static estimateCompressionRatio(contextPackage) {
        try {
            const jsonString = JSON.stringify(contextPackage);
            // Baseline assumption: JSON compresses to ~30% of its size.
            let estimatedRatio = 0.3;
            const hasLargeTextContent = contextPackage.files.some(file => file.file.content && file.file.content.length > 10000);
            if (hasLargeTextContent) {
                // Large text bodies tend to compress better.
                estimatedRatio = 0.2;
            }
            const hasRepetitiveContent = this.hasRepetitiveContent(jsonString);
            if (hasRepetitiveContent) {
                estimatedRatio *= 0.8;
            }
            return Math.max(0.1, Math.min(0.9, estimatedRatio));
        }
        catch (error) {
            logger.warn({
                packageId: contextPackage.id,
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to estimate compression ratio');
            return 0.5;
        }
    }
    /**
     * Derive human-friendly statistics from compression metadata.
     *
     * @param {{originalSize: number, compressedSize: number, compressionRatio: number}} metadata
     * @returns {{spaceSavedBytes: number, spaceSavedPercentage: number, compressionEfficiency: string}}
     *   Efficiency buckets: <=0.3 'excellent', <=0.5 'good', <=0.7 'fair', else 'poor'.
     */
    static getCompressionStats(metadata) {
        const spaceSavedBytes = metadata.originalSize - metadata.compressedSize;
        // Guard against a zero-byte original to avoid NaN/-Infinity percentages.
        const spaceSavedPercentage = metadata.originalSize > 0
            ? (spaceSavedBytes / metadata.originalSize) * 100
            : 0;
        let compressionEfficiency;
        if (metadata.compressionRatio <= 0.3) {
            compressionEfficiency = 'excellent';
        }
        else if (metadata.compressionRatio <= 0.5) {
            compressionEfficiency = 'good';
        }
        else if (metadata.compressionRatio <= 0.7) {
            compressionEfficiency = 'fair';
        }
        else {
            compressionEfficiency = 'poor';
        }
        return {
            spaceSavedBytes,
            spaceSavedPercentage,
            compressionEfficiency
        };
    }
    /**
     * Produce a compression-friendly deep copy of a package: nulls stripped,
     * duplicate file contents deduplicated, long strings whitespace-collapsed.
     * The input package is never mutated; on any failure the original is
     * returned unchanged.
     *
     * @param {object} contextPackage - Package to optimize (must survive JSON round-trip).
     * @returns {object} Optimized deep copy, or the original on failure.
     */
    static optimizeForCompression(contextPackage) {
        try {
            logger.info({ packageId: contextPackage.id }, 'Optimizing package for compression');
            // JSON round-trip deep copy: drops undefined/functions, which is
            // acceptable here since the package is JSON-serialized anyway.
            const optimized = JSON.parse(JSON.stringify(contextPackage));
            this.removeNullValues(optimized);
            this.deduplicateContent(optimized);
            this.compressRepetitiveStrings(optimized);
            logger.info({ packageId: contextPackage.id }, 'Package optimization for compression completed');
            return optimized;
        }
        catch (error) {
            logger.warn({
                packageId: contextPackage.id,
                error: error instanceof Error ? error.message : 'Unknown error'
            }, 'Failed to optimize package for compression, using original');
            return contextPackage;
        }
    }
    /**
     * SHA-256 checksum (hex) of a buffer; used for integrity, not authentication.
     * @param {Buffer|string} data
     * @returns {string} 64-character lowercase hex digest.
     */
    static calculateChecksum(data) {
        return createHash('sha256').update(data).digest('hex');
    }
    /**
     * Detect repetition by looking for any duplicated 10-character substring
     * within the first 1000 characters of the content.
     * @param {string} content
     * @returns {boolean}
     */
    static hasRepetitiveContent(content) {
        const sampleSize = Math.min(1000, content.length);
        const sample = content.substring(0, sampleSize);
        const patterns = new Set();
        for (let i = 0; i < sample.length - 10; i++) {
            const pattern = sample.substring(i, i + 10);
            if (patterns.has(pattern)) {
                return true;
            }
            patterns.add(pattern);
        }
        return false;
    }
    /**
     * Recursively delete null/undefined properties in place (objects only;
     * array elements are recursed into but never removed, preserving indices).
     * @param {object|Array} obj - Mutated in place.
     */
    static removeNullValues(obj) {
        if (Array.isArray(obj)) {
            obj.forEach(item => {
                if (typeof item === 'object' && item !== null) {
                    this.removeNullValues(item);
                }
            });
        }
        else if (obj && typeof obj === 'object') {
            const objRecord = obj;
            Object.keys(objRecord).forEach(key => {
                if (objRecord[key] === null || objRecord[key] === undefined) {
                    delete objRecord[key];
                }
                else if (typeof objRecord[key] === 'object' && objRecord[key] !== null) {
                    this.removeNullValues(objRecord[key]);
                }
            });
        }
    }
    /**
     * Replace duplicate file contents with a `contentRef` (MD5 of the content,
     * used only as a dedup key, not for security). Mutates the package in place.
     *
     * NOTE(review): decompressPackage does not visibly resolve `contentRef` back
     * into `content` — confirm a downstream consumer re-inflates these refs.
     * @param {object} contextPackage - Package whose `files` array is deduplicated.
     */
    static deduplicateContent(contextPackage) {
        if (contextPackage.files && Array.isArray(contextPackage.files)) {
            const contentMap = new Map();
            contextPackage.files.forEach((file) => {
                const fileRecord = file;
                if (fileRecord.file && typeof fileRecord.file.content === 'string') {
                    const contentHash = createHash('md5')
                        .update(fileRecord.file.content)
                        .digest('hex');
                    if (contentMap.has(contentHash)) {
                        fileRecord.file.contentRef = contentHash;
                        delete fileRecord.file.content;
                    }
                    else {
                        contentMap.set(contentHash, fileRecord.file.content);
                    }
                }
            });
        }
    }
    /**
     * Collapse whitespace runs to single spaces in strings longer than 100
     * characters, recursing through arrays and objects. Mutates in place.
     *
     * BUG FIX: the previous implementation reassigned the local `obj` parameter
     * for the string case, which is a no-op (JS strings are immutable and passed
     * by value), so nothing was ever written back. Strings are now replaced in
     * their containing array slot / object property.
     *
     * NOTE: this is lossy for whitespace-sensitive content (e.g. source code
     * with newlines); it is only applied to the deep copy made by
     * optimizeForCompression, never to the caller's package.
     * @param {object|Array} obj - Container mutated in place (a bare string argument cannot be modified).
     */
    static compressRepetitiveStrings(obj) {
        if (Array.isArray(obj)) {
            for (let i = 0; i < obj.length; i++) {
                const item = obj[i];
                if (typeof item === 'string' && item.length > 100) {
                    obj[i] = item.replace(/\s+/g, ' ');
                }
                else if (item && typeof item === 'object') {
                    this.compressRepetitiveStrings(item);
                }
            }
        }
        else if (obj && typeof obj === 'object') {
            for (const key of Object.keys(obj)) {
                const value = obj[key];
                if (typeof value === 'string' && value.length > 100) {
                    obj[key] = value.replace(/\s+/g, ' ');
                }
                else if (value && typeof value === 'object') {
                    this.compressRepetitiveStrings(value);
                }
            }
        }
    }
}