@kadi.build/local-remote-file-manager-ability

Local & Remote File Management System with S3-compatible container registry, HTTP server provider, file streaming, and comprehensive testing suite

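The file below exports a CompressionProvider class. As a quick orientation, here is a configuration sketch: every option name is taken from the constructor in the source, the defaults noted in comments are the ones the constructor applies, and the values shown are purely illustrative.

// All keys below are read in the constructor; values are examples, not requirements.
const config = {
  localRoot: '/srv/files',           // default: process.cwd()
  enabled: true,                     // default: true
  level: 6,                          // compression level 0-9, default 6
  format: 'zip',                     // 'zip' or 'tar.gz', default 'zip'
  maxFileSize: 1073741824,           // default: 1 GB cap per file/archive
  chunkSize: 8388608,                // default: 8 MB
  enableProgressTracking: true,      // default: true
  enableChecksumVerification: true   // default: true (SHA-256 of archives)
};

The full source follows.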
import { promises as fs } from 'fs';
import fsSync from 'fs';
import path from 'path';
import crypto from 'crypto';
import archiver from 'archiver';
import unzipper from 'unzipper';
import tar from 'tar';
import EventEmitter from 'events';

class CompressionProvider extends EventEmitter {
  constructor(config) {
    super();
    this.config = config || {};
    this.localRoot = this.config.localRoot || process.cwd();
    this.enabled = this.config.enabled !== false; // Default true
    this.compressionLevel = this.config.level || 6; // 0-9 for ZIP, 0-9 for gzip
    this.defaultFormat = this.config.format || 'zip'; // 'zip' or 'tar.gz'
    this.maxFileSize = this.config.maxFileSize || 1073741824; // 1GB
    this.chunkSize = this.config.chunkSize || 8388608; // 8MB
    this.enableProgressTracking = this.config.enableProgressTracking !== false;
    this.enableChecksumVerification = this.config.enableChecksumVerification !== false;

    // Compression state tracking
    this.activeOperations = new Map();
    this.operationCount = 0;
  }

  // ============================================================================
  // CONNECTION AND VALIDATION
  // ============================================================================

  async testConnection() {
    try {
      // Test if we can access the local root
      const stats = await fs.stat(this.localRoot);
      if (!stats.isDirectory()) {
        throw new Error(`Local root '${this.localRoot}' is not a directory`);
      }

      // Test write access by creating a temporary file
      const testFile = path.join(this.localRoot, '.compression-provider-test');
      await fs.writeFile(testFile, 'test');
      await fs.unlink(testFile);

      // Test compression libraries availability
      await this.testCompressionLibraries();

      return {
        provider: 'compression',
        localRoot: this.localRoot,
        enabled: this.enabled,
        supportedFormats: ['zip', 'tar.gz'],
        defaultFormat: this.defaultFormat,
        compressionLevel: this.compressionLevel,
        activeOperations: this.activeOperations.size,
        enableProgressTracking: this.enableProgressTracking,
        enableChecksumVerification: this.enableChecksumVerification
      };
    } catch (error) {
      throw new Error(`Compression provider connection test failed: ${error.message}`);
    }
  }

  async testCompressionLibraries() {
    try {
      // Test archiver (ZIP creation)
      const testArchiver = archiver('zip', { zlib: { level: 1 } });
      if (!testArchiver) {
        throw new Error('Archiver library not available');
      }

      // Test unzipper availability
      if (!unzipper.Extract) {
        throw new Error('Unzipper library not available');
      }

      // Test tar library
      if (!tar.create) {
        throw new Error('Tar library not available');
      }

      return true;
    } catch (error) {
      throw new Error(`Compression library test failed: ${error.message}`);
    }
  }

  validateConfig() {
    const errors = [];
    const warnings = [];

    if (!this.localRoot) {
      errors.push('Local root directory is required for compression operations');
    }
    if (this.compressionLevel < 0 || this.compressionLevel > 9) {
      errors.push('Compression level must be between 0 and 9');
    }
    if (!['zip', 'tar.gz'].includes(this.defaultFormat)) {
      errors.push('Default format must be either "zip" or "tar.gz"');
    }
    if (this.maxFileSize <= 0) {
      errors.push('Max file size must be positive');
    }
    if (this.chunkSize <= 0) {
      errors.push('Chunk size must be positive');
    }
    if (this.chunkSize > this.maxFileSize) {
      warnings.push('Chunk size is larger than max file size');
    }
    if (!this.enabled) {
      warnings.push('Compression operations are disabled');
    }
    if (this.compressionLevel > 6) {
      warnings.push('High compression level (>6) may significantly impact performance');
    }

    return { isValid: errors.length === 0, errors, warnings };
  }

  // ============================================================================
  // PATH MANAGEMENT METHODS
  // ============================================================================

  normalizePath(inputPath) {
    if (!inputPath || inputPath === '/') {
      return this.localRoot;
    }

    // Handle absolute paths
    if (path.isAbsolute(inputPath)) {
      return path.normalize(inputPath);
    }

    // Handle relative paths - resolve them relative to localRoot
    const resolvedLocalRoot = path.resolve(this.localRoot);
    return path.resolve(resolvedLocalRoot, inputPath);
  }

  validatePath(inputPath) {
    if (!inputPath) {
      throw new Error('Path cannot be empty');
    }

    // Check for invalid characters
    if (/[<>:"|?*\x00-\x1f]/.test(inputPath)) {
      throw new Error(`Path contains invalid characters: ${inputPath}`);
    }

    return true;
  }

  generateOperationId() {
    this.operationCount++;
    return `compress_${Date.now()}_${this.operationCount}`;
  }

  // ============================================================================
  // COMPRESSION OPERATIONS
  // ============================================================================

  async compressFile(inputPath, outputPath, options = {}) {
    if (!this.enabled) {
      throw new Error('Compression operations are disabled in configuration');
    }

    this.validatePath(inputPath);
    this.validatePath(outputPath);

    const resolvedInputPath = this.normalizePath(inputPath);
    const resolvedOutputPath = this.normalizePath(outputPath);

    const {
      format = this.defaultFormat,
      level = this.compressionLevel,
      includeRoot = false,
      password = null
    } = options;

    // Validate compression level (clamped to 1-9 for the archiver/gzip backends)
    const validatedLevel = Math.max(1, Math.min(9, parseInt(level) || this.compressionLevel));

    console.log(`📦 Compressing ${path.basename(inputPath)} to ${format.toUpperCase()}...`);

    const operationId = this.generateOperationId();

    try {
      // Check if input exists
      const inputStats = await fs.stat(resolvedInputPath);

      // Check file size limit
      if (inputStats.isFile() && inputStats.size > this.maxFileSize) {
        throw new Error(`File size ${this.formatBytes(inputStats.size)} exceeds maximum of ${this.formatBytes(this.maxFileSize)}`);
      }

      // Ensure output directory exists
      const outputDir = path.dirname(resolvedOutputPath);
      await fs.mkdir(outputDir, { recursive: true });

      // Track operation
      this.activeOperations.set(operationId, {
        type: 'compress',
        inputPath: resolvedInputPath,
        outputPath: resolvedOutputPath,
        format,
        level: validatedLevel,
        startedAt: new Date().toISOString(),
        progress: { processed: 0, total: 0 }
      });

      let result;
      if (format === 'zip') {
        result = await this.compressToZip(resolvedInputPath, resolvedOutputPath, {
          level: validatedLevel,
          includeRoot,
          password,
          operationId
        });
      } else if (format === 'tar.gz') {
        result = await this.compressToTarGz(resolvedInputPath, resolvedOutputPath, {
          level: validatedLevel,
          includeRoot,
          operationId
        });
      } else {
        throw new Error(`Unsupported compression format: ${format}`);
      }

      // Clean up operation tracking
      this.activeOperations.delete(operationId);

      console.log(`✅ Compression completed: ${result.name} (${this.formatBytes(result.size)})`);

      return {
        operationId,
        name: path.basename(outputPath),
        inputPath,
        outputPath,
        format,
        level: validatedLevel,
        size: result.size,
        originalSize: result.originalSize,
        compressionRatio: result.compressionRatio,
        hash: result.hash,
        completedAt: new Date().toISOString()
      };
    } catch (error) {
      // Clean up on error
      this.activeOperations.delete(operationId);
      if (this.isFileNotFoundError(error)) {
        throw new Error(`Input not found: ${inputPath}`);
      }
      throw new Error(`Compression failed: ${error.message}`);
    }
  }

  async compressToZip(inputPath, outputPath, options = {}) {
    // Note: `password` is accepted for API symmetry, but plain archiver ZIP
    // streams are not encrypted, so it is currently unused here.
    const { level, includeRoot, password, operationId } = options;

    return new Promise(async (resolve, reject) => {
      try {
        const output = fsSync.createWriteStream(outputPath);
        const archive = archiver('zip', {
          zlib: { level: Math.max(1, Math.min(9, level || this.compressionLevel)) }
        });

        // Set up progress tracking
        let totalBytes = 0;
        let processedBytes = 0;

        if (this.enableProgressTracking) {
          totalBytes = await this.calculateTotalSize(inputPath);
          this.updateOperationProgress(operationId, 0, totalBytes);
        }

        // Track progress
        archive.on('progress', (progress) => {
          if (this.enableProgressTracking && operationId) {
            processedBytes = progress.entries.processed;
            this.updateOperationProgress(operationId, processedBytes, totalBytes);
            this.emit('compressionProgress', {
              operationId,
              processed: processedBytes,
              total: totalBytes,
              percentage: totalBytes > 0 ? Math.round((processedBytes / totalBytes) * 100) : 0
            });
          }
        });

        // Handle errors
        archive.on('error', reject);
        output.on('error', reject);

        // Handle completion
        output.on('close', async () => {
          try {
            const outputStats = await fs.stat(outputPath);
            const inputStats = await fs.stat(inputPath);
            const originalSize = inputStats.isDirectory() ? totalBytes : inputStats.size;

            const result = {
              name: path.basename(outputPath),
              size: outputStats.size,
              originalSize: originalSize,
              compressionRatio: originalSize > 0 ? Math.max(0, (originalSize - outputStats.size) / originalSize) : 0
            };

            // Calculate checksum if enabled
            if (this.enableChecksumVerification) {
              result.hash = await this.calculateChecksum(outputPath);
            }

            resolve(result);
          } catch (error) {
            reject(error);
          }
        });

        // Pipe archive to output
        archive.pipe(output);

        // Add files/directories to archive
        const inputStats = await fs.stat(inputPath);
        if (inputStats.isFile()) {
          // A single file is always stored under its own name
          archive.file(inputPath, { name: path.basename(inputPath) });
        } else if (inputStats.isDirectory()) {
          const baseDir = includeRoot ? path.basename(inputPath) : false;
          archive.directory(inputPath, baseDir);
        }

        // Finalize the archive
        await archive.finalize();
      } catch (error) {
        reject(error);
      }
    });
  }

  async compressToTarGz(inputPath, outputPath, options = {}) {
    const { level, includeRoot, operationId } = options;

    try {
      const inputStats = await fs.stat(inputPath);

      let totalBytes = 0;
      if (this.enableProgressTracking) {
        totalBytes = await this.calculateTotalSize(inputPath);
        this.updateOperationProgress(operationId, 0, totalBytes);
      }

      const tarOptions = {
        file: outputPath,
        gzip: { level: level || this.compressionLevel },
        cwd: inputStats.isDirectory()
          ? (includeRoot ? path.dirname(inputPath) : inputPath)
          : path.dirname(inputPath)
      };

      // Add progress tracking
      let processedBytes = 0;
      if (this.enableProgressTracking) {
        tarOptions.filter = (entryPath, stat) => {
          processedBytes += stat.size || 0;
          this.updateOperationProgress(operationId, processedBytes, totalBytes);
          this.emit('compressionProgress', {
            operationId,
            processed: processedBytes,
            total: totalBytes,
            percentage: totalBytes > 0 ? Math.round((processedBytes / totalBytes) * 100) : 0
          });
          return true;
        };
      }

      // Determine what to compress
      if (inputStats.isFile()) {
        await tar.create(tarOptions, [path.basename(inputPath)]);
      } else {
        const targetPath = includeRoot ? path.basename(inputPath) : '.';
        await tar.create(tarOptions, [targetPath]);
      }

      // Get result stats
      const outputStats = await fs.stat(outputPath);
      const originalSize = inputStats.isDirectory() ? totalBytes : inputStats.size;

      const result = {
        name: path.basename(outputPath),
        size: outputStats.size,
        originalSize: originalSize,
        compressionRatio: originalSize > 0 ? Math.max(0, (originalSize - outputStats.size) / originalSize) : 0
      };

      // Calculate checksum if enabled
      if (this.enableChecksumVerification) {
        result.hash = await this.calculateChecksum(outputPath);
      }

      return result;
    } catch (error) {
      throw new Error(`TAR.GZ compression failed: ${error.message}`);
    }
  }

  // ============================================================================
  // DECOMPRESSION OPERATIONS
  // ============================================================================

  async decompressFile(inputPath, outputDirectory, options = {}) {
    if (!this.enabled) {
      throw new Error('Compression operations are disabled in configuration');
    }

    this.validatePath(inputPath);
    this.validatePath(outputDirectory);

    const resolvedInputPath = this.normalizePath(inputPath);
    const resolvedOutputDir = this.normalizePath(outputDirectory);

    const {
      overwrite = false,
      preservePermissions = true,
      password = null
    } = options;

    // detectFormat may need to read the file header from disk, so it is
    // awaited here rather than used as a synchronous destructuring default
    const format = options.format || await this.detectFormat(resolvedInputPath);

    console.log(`📦 Decompressing ${path.basename(inputPath)} from ${format.toUpperCase()}...`);

    const operationId = this.generateOperationId();

    try {
      // Check if input file exists
      const inputStats = await fs.stat(resolvedInputPath);
      if (!inputStats.isFile()) {
        throw new Error(`Input '${inputPath}' is not a file`);
      }

      // Check file size limit
      if (inputStats.size > this.maxFileSize) {
        throw new Error(`Archive size ${this.formatBytes(inputStats.size)} exceeds maximum of ${this.formatBytes(this.maxFileSize)}`);
      }

      // Ensure output directory exists
      await fs.mkdir(resolvedOutputDir, { recursive: true });

      // Track operation
      this.activeOperations.set(operationId, {
        type: 'decompress',
        inputPath: resolvedInputPath,
        outputPath: resolvedOutputDir,
        format,
        startedAt: new Date().toISOString(),
        progress: { processed: 0, total: inputStats.size }
      });

      let result;
      if (format === 'zip') {
        result = await this.decompressFromZip(resolvedInputPath, resolvedOutputDir, {
          overwrite,
          password,
          operationId
        });
      } else if (format === 'tar.gz') {
        result = await this.decompressFromTarGz(resolvedInputPath, resolvedOutputDir, {
          overwrite,
          preservePermissions,
          operationId
        });
      } else {
        throw new Error(`Unsupported decompression format: ${format}`);
      }

      // Clean up operation tracking
      this.activeOperations.delete(operationId);

      console.log(`✅ Decompression completed: ${result.extractedFiles} file(s) extracted`);

      return {
        operationId,
        inputPath,
        outputDirectory,
        format,
        extractedFiles: result.extractedFiles,
        extractedDirectories: result.extractedDirectories,
        totalSize: result.totalSize,
        hash: result.hash,
        completedAt: new Date().toISOString()
      };
    } catch (error) {
      // Clean up on error
      this.activeOperations.delete(operationId);
      if (this.isFileNotFoundError(error)) {
        throw new Error(`Archive not found: ${inputPath}`);
      }
      throw new Error(`Decompression failed: ${error.message}`);
    }
  }

  async decompressFromZip(inputPath, outputDir, options = {}) {
    const { overwrite, password, operationId } = options;

    try {
      // Verify input file exists and get size
      const inputStats = await fs.stat(inputPath);

      // Ensure output directory exists
      await fs.mkdir(outputDir, { recursive: true });

      let extractedFiles = 0;
      let extractedDirectories = 0;
      let totalSize = 0;

      // Try Method 1: unzipper.Open (most reliable)
      try {
        const directory = await unzipper.Open.file(inputPath);

        for (const file of directory.files) {
          if (file.type === 'Directory') {
            extractedDirectories++;
            const dirPath = path.join(outputDir, file.path);
            await fs.mkdir(dirPath, { recursive: true });
          } else if (file.type === 'File') {
            const filePath = path.join(outputDir, file.path);

            // Check if file exists and handle overwrite
            const fileExists = await fs.access(filePath).then(() => true).catch(() => false);
            if (!overwrite && fileExists) {
              continue;
            }

            // Ensure parent directory exists
            const fileDir = path.dirname(filePath);
            await fs.mkdir(fileDir, { recursive: true });

            // Extract file content
            const content = await file.buffer();
            await fs.writeFile(filePath, content);

            extractedFiles++;
            totalSize += content.length;

            // Update progress if tracking enabled
            if (this.enableProgressTracking && operationId) {
              this.updateOperationProgress(operationId, totalSize, inputStats.size);
              this.emit('decompressionProgress', {
                operationId,
                processed: totalSize,
                total: inputStats.size,
                currentFile: file.path,
                percentage: Math.round((totalSize / inputStats.size) * 100)
              });
            }
          }
        }

        const result = { extractedFiles, extractedDirectories, totalSize };

        // Add checksum if enabled
        if (this.enableChecksumVerification) {
          try {
            result.hash = await this.calculateChecksum(inputPath);
          } catch (error) {
            console.warn('⚠️ Could not calculate checksum:', error.message);
          }
        }

        return result;
      } catch (openError) {
        // Method 2: Use unzipper.Extract as fallback
        return new Promise((resolve, reject) => {
          let extractedFiles = 0;
          let extractedDirectories = 0;
          let totalSize = 0;

          const extract = unzipper.Extract({ path: outputDir, overwrite: overwrite });

          // Fail the extraction if it has not finished within 30 seconds;
          // the timer is cleared on both success and error
          const timeout = setTimeout(() => {
            reject(new Error('ZIP extraction timeout after 30 seconds'));
          }, 30000);

          // Count extracted items
          extract.on('entry', (entry) => {
            if (entry.type === 'Directory') {
              extractedDirectories++;
            } else {
              extractedFiles++;
              totalSize += entry.vars.uncompressedSize || 0;
            }
          });

          extract.on('close', async () => {
            clearTimeout(timeout);
            const result = { extractedFiles, extractedDirectories, totalSize };

            // Add checksum if enabled
            if (this.enableChecksumVerification) {
              try {
                result.hash = await this.calculateChecksum(inputPath);
              } catch (error) {
                console.warn('⚠️ Could not calculate checksum:', error.message);
              }
            }

            resolve(result);
          });

          extract.on('error', (extractError) => {
            clearTimeout(timeout);
            reject(new Error(`ZIP extraction failed: ${extractError.message}`));
          });

          // Pipe file to extract
          const readStream = fsSync.createReadStream(inputPath);
          readStream.pipe(extract);
        });
      }
    } catch (error) {
      throw new Error(`ZIP decompression failed: ${error.message}`);
    }
  }

  async decompressFromTarGz(inputPath, outputDir, options = {}) {
    const { overwrite, preservePermissions, operationId } = options;

    try {
      let extractedFiles = 0;
      let extractedDirectories = 0;
      let totalSize = 0;
      let processedBytes = 0;

      const extractOptions = {
        file: inputPath,
        cwd: outputDir,
        preservePaths: true,
        unlink: overwrite,
        preserveOwner: preservePermissions
      };

      // Add progress tracking
      if (this.enableProgressTracking) {
        extractOptions.onentry = (entry) => {
          const size = entry.size || 0;
          if (entry.type === 'Directory') {
            extractedDirectories++;
          } else {
            extractedFiles++;
            totalSize += size;
          }
          processedBytes += size;
          this.updateOperationProgress(operationId, processedBytes, totalSize);
          this.emit('decompressionProgress', {
            operationId,
            processed: processedBytes,
            total: totalSize,
            currentFile: entry.path,
            percentage: totalSize > 0 ? Math.round((processedBytes / totalSize) * 100) : 0
          });
        };
      }

      await tar.extract(extractOptions);

      const result = { extractedFiles, extractedDirectories, totalSize };

      // Calculate checksum of original archive if enabled
      if (this.enableChecksumVerification) {
        result.hash = await this.calculateChecksum(inputPath);
      }

      return result;
    } catch (error) {
      throw new Error(`TAR.GZ decompression failed: ${error.message}`);
    }
  }

  // ============================================================================
  // BATCH OPERATIONS
  // ============================================================================

  async compressMultipleFiles(fileList, outputDirectory, options = {}) {
    const {
      format = this.defaultFormat,
      level = this.compressionLevel,
      namingPattern = '{name}.{format}' // {name} = original name, {format} = extension
    } = options;

    const results = [];
    const errors = [];

    console.log(`📦 Starting batch compression of ${fileList.length} file(s)...`);

    for (let i = 0; i < fileList.length; i++) {
      const inputPath = fileList[i];
      try {
        const baseName = path.parse(inputPath).name;
        const outputName = namingPattern
          .replace('{name}', baseName)
          .replace('{format}', format);
        const outputPath = path.join(outputDirectory, outputName);

        console.log(`📦 [${i + 1}/${fileList.length}] Compressing ${path.basename(inputPath)}...`);

        const result = await this.compressFile(inputPath, outputPath, { format, level });
        results.push({ inputPath, outputPath, result });
      } catch (error) {
        console.error(`❌ Failed to compress ${inputPath}: ${error.message}`);
        errors.push({ inputPath, error: error.message });
      }
    }

    console.log(`✅ Batch compression completed: ${results.length} successful, ${errors.length} failed`);

    return {
      successful: results,
      failed: errors,
      summary: {
        total: fileList.length,
        successful: results.length,
        failed: errors.length,
        successRate: (results.length / fileList.length * 100).toFixed(1) + '%'
      }
    };
  }

  async decompressMultipleFiles(fileList, outputDirectory, options = {}) {
    const results = [];
    const errors = [];

    console.log(`📦 Starting batch decompression of ${fileList.length} archive(s)...`);

    for (let i = 0; i < fileList.length; i++) {
      const inputPath = fileList[i];
      try {
        const archiveName = path.parse(inputPath).name;
        const archiveOutputDir = path.join(outputDirectory, archiveName);

        console.log(`📦 [${i + 1}/${fileList.length}] Decompressing ${path.basename(inputPath)}...`);

        const result = await this.decompressFile(inputPath, archiveOutputDir, options);
        results.push({ inputPath, outputDirectory: archiveOutputDir, result });
      } catch (error) {
        console.error(`❌ Failed to decompress ${inputPath}: ${error.message}`);
        errors.push({ inputPath, error: error.message });
      }
    }

    console.log(`✅ Batch decompression completed: ${results.length} successful, ${errors.length} failed`);

    return {
      successful: results,
      failed: errors,
      summary: {
        total: fileList.length,
        successful: results.length,
        failed: errors.length,
        successRate: (results.length / fileList.length * 100).toFixed(1) + '%'
      }
    };
  }

  // ============================================================================
  // UTILITY METHODS
  // ============================================================================

  // Async because header-based detection falls back to reading from disk
  async detectFormat(filePath) {
    const ext = path.extname(filePath).toLowerCase();
    const fileName = path.basename(filePath).toLowerCase();

    if (ext === '.zip') {
      return 'zip';
    } else if (fileName.endsWith('.tar.gz') || fileName.endsWith('.tgz')) {
      return 'tar.gz';
    } else {
      // Try to detect by reading file header
      return this.detectFormatByHeader(filePath);
    }
  }

  async detectFormatByHeader(filePath) {
    try {
      const buffer = Buffer.alloc(10);
      const fd = await fs.open(filePath, 'r');
      await fd.read(buffer, 0, 10, 0);
      await fd.close();

      // ZIP signature: PK (0x504B)
      if (buffer[0] === 0x50 && buffer[1] === 0x4B) {
        return 'zip';
      }

      // GZIP signature: 1f 8b
      if (buffer[0] === 0x1f && buffer[1] === 0x8b) {
        return 'tar.gz';
      }

      // Default to zip if detection fails
      return 'zip';
    } catch (error) {
      return 'zip';
    }
  }

  async calculateTotalSize(inputPath) {
    const stats = await fs.stat(inputPath);
    if (stats.isFile()) {
      return stats.size;
    } else if (stats.isDirectory()) {
      return await this.calculateDirectorySize(inputPath);
    }
    return 0;
  }

  async calculateDirectorySize(dirPath) {
    let totalSize = 0;
    try {
      const entries = await fs.readdir(dirPath, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(dirPath, entry.name);
        if (entry.isFile()) {
          const stats = await fs.stat(fullPath);
          totalSize += stats.size;
        } else if (entry.isDirectory()) {
          totalSize += await this.calculateDirectorySize(fullPath);
        }
      }
    } catch (error) {
      // Skip directories we can't read
    }
    return totalSize;
  }

  async calculateChecksum(filePath) {
    return new Promise((resolve, reject) => {
      const hash = crypto.createHash('sha256');
      const stream = fsSync.createReadStream(filePath);
      stream.on('error', reject);
      stream.on('data', chunk => hash.update(chunk));
      stream.on('end', () => resolve(hash.digest('hex')));
    });
  }

  updateOperationProgress(operationId, processed, total) {
    const operation = this.activeOperations.get(operationId);
    if (operation) {
      operation.progress = { processed, total };
      operation.lastUpdated = new Date().toISOString();
    }
  }

  // ============================================================================
  // INFORMATION AND STATUS METHODS
  // ============================================================================

  listActiveOperations() {
    const operations = [];
    for (const [operationId, operationInfo] of this.activeOperations) {
      operations.push({
        operationId,
        type: operationInfo.type,
        inputPath: operationInfo.inputPath,
        outputPath: operationInfo.outputPath,
        format: operationInfo.format,
        startedAt: operationInfo.startedAt,
        progress: operationInfo.progress,
        duration: Date.now() - new Date(operationInfo.startedAt).getTime()
      });
    }
    return operations;
  }

  getOperationInfo(operationId) {
    const operationInfo = this.activeOperations.get(operationId);
    if (!operationInfo) {
      throw new Error(`Operation not found: ${operationId}`);
    }
    return {
      operationId,
      type: operationInfo.type,
      inputPath: operationInfo.inputPath,
      outputPath: operationInfo.outputPath,
      format: operationInfo.format,
      level: operationInfo.level,
      startedAt: operationInfo.startedAt,
      progress: operationInfo.progress,
      duration: Date.now() - new Date(operationInfo.startedAt).getTime()
    };
  }

  getCompressionStatus() {
    return {
      enabled: this.enabled,
      activeOperations: this.activeOperations.size,
      supportedFormats: ['zip', 'tar.gz'],
      defaultFormat: this.defaultFormat,
      compressionLevel: this.compressionLevel,
      enableProgressTracking: this.enableProgressTracking,
      enableChecksumVerification: this.enableChecksumVerification,
      config: {
        maxFileSize: this.maxFileSize,
        chunkSize: this.chunkSize
      }
    };
  }

  // ============================================================================
  // FORMATTING HELPERS
  // ============================================================================

  formatBytes(bytes) {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
  }

  formatDuration(milliseconds) {
    const seconds = Math.floor(milliseconds / 1000);
    const minutes = Math.floor(seconds / 60);
    const hours = Math.floor(minutes / 60);
    if (hours > 0) {
      return `${hours}h ${minutes % 60}m ${seconds % 60}s`;
    } else if (minutes > 0) {
      return `${minutes}m ${seconds % 60}s`;
    } else {
      return `${seconds}s`;
    }
  }

  // ============================================================================
  // ERROR HANDLING HELPERS
  // ============================================================================

  isFileNotFoundError(error) {
    return error.code === 'ENOENT' ||
      error.message.includes('no such file') ||
      error.message.includes('not found');
  }

  isPermissionError(error) {
    return error.code === 'EACCES' ||
      error.code === 'EPERM' ||
      error.message.includes('permission denied');
  }

  isSpaceError(error) {
    return error.code === 'ENOSPC' ||
      error.message.includes('no space left') ||
      error.message.includes('disk full');
  }

  isCorruptedArchiveError(error) {
    return error.message.includes('invalid zip file') ||
      error.message.includes('corrupted') ||
      error.message.includes('bad archive') ||
      error.message.includes('unexpected end of archive');
  }

  // ============================================================================
  // CLEANUP AND SHUTDOWN
  // ============================================================================

  async shutdown() {
    console.log('🔄 Shutting down compression provider...');
    try {
      // Wait for active operations to complete or cancel them
      const activeOps = Array.from(this.activeOperations.keys());
      if (activeOps.length > 0) {
        console.log(`⏳ Waiting for ${activeOps.length} active operation(s) to complete...`);

        // Give operations 30 seconds to complete gracefully
        const timeout = setTimeout(() => {
          console.log('⚠️ Force-stopping remaining operations...');
          this.activeOperations.clear();
        }, 30000);

        // Wait for operations to complete
        while (this.activeOperations.size > 0) {
          await new Promise(resolve => setTimeout(resolve, 1000));
        }
        clearTimeout(timeout);
      }

      // Remove all event listeners
      this.removeAllListeners();

      console.log('✅ Compression provider shutdown complete');

      return {
        operationsCompleted: activeOps.length,
        shutdownAt: new Date().toISOString()
      };
    } catch (error) {
      console.error('❌ Error during compression provider shutdown:', error.message);
      throw error;
    }
  }
}

export { CompressionProvider };
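
The class is self-contained, so a short usage sketch follows. The import specifier is hypothetical (this page does not show the file's path inside the package); the method names, option names, return fields, and the 'compressionProgress' event are taken directly from the source above. Relative paths are resolved against localRoot by normalizePath().

// usage-sketch.mjs — illustrative only
import { CompressionProvider } from './CompressionProvider.js'; // hypothetical path

const provider = new CompressionProvider({ localRoot: '/tmp/work', level: 9 });

// Optional: verify the local root and compression libraries before real work
await provider.testConnection();

// Observe progress events emitted while archives are being written
provider.on('compressionProgress', ({ operationId, percentage }) => {
  console.log(`${operationId}: ${percentage}%`);
});

// Compress a directory (relative to localRoot) into a tarball, keeping the
// top-level folder name inside the archive, then unpack it elsewhere
const archive = await provider.compressFile('data', 'backups/data.tar.gz', {
  format: 'tar.gz',
  includeRoot: true
});
console.log(`saved ${(archive.compressionRatio * 100).toFixed(1)}% of ${archive.originalSize} bytes`);

await provider.decompressFile('backups/data.tar.gz', 'restore', { overwrite: true });

// Batch helpers take a list of paths and return per-item results plus a summary
const batch = await provider.compressMultipleFiles(['a.txt', 'b.txt'], 'backups');
console.log(batch.summary.successRate);

await provider.shutdown();

Note that compressFile and decompressFile reject when `enabled: false` is configured, and both enforce the maxFileSize cap, so callers should be prepared to catch errors even for inputs that exist.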