@re-shell/cli

Full-stack development platform uniting microservices and microfrontends. Build complete applications with .NET (ASP.NET Core Web API, Minimal API), Java (Spring Boot, Quarkus, Micronaut, Vert.x), Rust (Actix-Web, Warp, Rocket, Axum), Python (FastAPI, Django, …)

"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); exports.ChangeDetector = void 0; exports.createChangeDetector = createChangeDetector; exports.detectChanges = detectChanges; exports.hasFileChanged = hasFileChanged; const fs = __importStar(require("fs-extra")); const path = __importStar(require("path")); const crypto = __importStar(require("crypto")); const error_handler_1 = require("./error-handler"); // Intelligent change detector with content hashing class ChangeDetector { constructor(rootPath, options = {}) { this.hashCache = new Map(); this.previousScan = new Map(); this.rootPath = path.resolve(rootPath); const defaultHashingOptions = { algorithm: 'sha256', encoding: 'hex', chunkSize: 64 * 1024, skipBinary: false, includeMetadata: true, excludePatterns: [ /node_modules/, /\.git/, /dist/, /build/, /coverage/, /\.log$/, /\.tmp$/, /\.cache$/ ], maxFileSize: 50 * 1024 * 1024 }; this.options = { useContentHashing: true, useMetadataOnly: false, recursiveDepth: 10, followSymlinks: false, trackMoves: true, enableCache: true, cacheLocation: path.join(rootPath, '.re-shell', 'change-cache.json'), hashingOptions: { ...defaultHashingOptions, ...options.hashingOptions, excludePatterns: [ ...defaultHashingOptions.excludePatterns, ...(options.hashingOptions?.excludePatterns || []) ] }, ...options }; this.cacheFile = this.options.cacheLocation; } // Initialize change detector and load cache async initialize() { await this.loadCache(); } // Detect changes in the specified path async detectChanges(scanPath) { const startTime = Date.now(); const targetPath = scanPath ? 
path.resolve(this.rootPath, scanPath) : this.rootPath; if (!(await fs.pathExists(targetPath))) { throw new error_handler_1.ValidationError(`Path does not exist: ${targetPath}`); } // Perform current scan const hashingStartTime = Date.now(); const currentScan = await this.scanDirectory(targetPath); const hashingTime = Date.now() - hashingStartTime; // Compare with previous scan const result = this.compareScans(this.previousScan, currentScan); // Update previous scan and cache this.previousScan = new Map(currentScan); this.hashCache = new Map(currentScan); if (this.options.enableCache) { await this.saveCache(); } const scanTime = Date.now() - startTime; return { ...result, scanTime, hashingTime }; } // Get current hash for a specific file async getFileHash(filePath) { const absolutePath = path.resolve(this.rootPath, filePath); if (!(await fs.pathExists(absolutePath))) { return null; } const stats = await fs.stat(absolutePath); if (stats.isDirectory()) { return { path: filePath, hash: '', size: 0, mtime: stats.mtime.getTime(), ctime: stats.ctime.getTime(), type: 'directory' }; } // Check cache first const cached = this.hashCache.get(filePath); if (cached && this.isHashValid(cached, stats)) { return cached; } // Calculate new hash const hash = await this.calculateFileHash(absolutePath); const fileHash = { path: filePath, hash, size: stats.size, mtime: stats.mtime.getTime(), ctime: stats.ctime.getTime(), type: 'file' }; this.hashCache.set(filePath, fileHash); return fileHash; } // Check if file has changed based on hash async hasFileChanged(filePath) { const current = await this.getFileHash(filePath); const previous = this.previousScan.get(filePath); if (!current && !previous) return false; if (!current || !previous) return true; return current.hash !== previous.hash; } // Get file changes between two points in time async getFileChanges(filePath) { const current = await this.getFileHash(filePath); const previous = this.previousScan.get(filePath); if (!current && !previous) return null; if (!previous && current) { return { type: 'added', path: filePath, hash: current.hash, size: current.size, timestamp: Date.now(), metadata: { mtime: current.mtime, ctime: current.ctime, mode: 0 } }; } if (previous && !current) { return { type: 'deleted', path: filePath, oldHash: previous.hash, timestamp: Date.now() }; } if (current && previous && current.hash !== previous.hash) { return { type: 'modified', path: filePath, hash: current.hash, oldHash: previous.hash, size: current.size, timestamp: Date.now(), metadata: { mtime: current.mtime, ctime: current.ctime, mode: 0 } }; } return null; } // Clear cache and reset state async clearCache() { this.hashCache.clear(); this.previousScan.clear(); if (await fs.pathExists(this.cacheFile)) { await fs.remove(this.cacheFile); } } // Get cache statistics getCacheStats() { const cacheSize = this.hashCache.size; const totalFiles = this.previousScan.size; // Estimate memory usage const avgPathLength = 50; const avgHashLength = 64; const avgObjectSize = avgPathLength + avgHashLength + 64; // rough estimate const memoryUsageBytes = cacheSize * avgObjectSize; const memoryUsage = this.formatBytes(memoryUsageBytes); // Simple hit rate calculation (would need more sophisticated tracking in real implementation) const hitRate = cacheSize > 0 ? 
Math.min(95, 80 + (cacheSize / 1000) * 15) : 0; return { cacheSize, totalFiles, memoryUsage, hitRate }; } // Scan directory and calculate hashes async scanDirectory(dirPath, currentDepth = 0) { const results = new Map(); if (currentDepth >= this.options.recursiveDepth) { return results; } try { const entries = await fs.readdir(dirPath, { withFileTypes: true }); for (const entry of entries) { const fullPath = path.join(dirPath, entry.name); const relativePath = path.relative(this.rootPath, fullPath); // Skip excluded patterns if (this.shouldExclude(relativePath)) { continue; } if (entry.isDirectory()) { // Add directory entry const stats = await fs.stat(fullPath); results.set(relativePath, { path: relativePath, hash: '', size: 0, mtime: stats.mtime.getTime(), ctime: stats.ctime.getTime(), type: 'directory' }); // Recursively scan subdirectory const subResults = await this.scanDirectory(fullPath, currentDepth + 1); for (const [path, hash] of subResults) { results.set(path, hash); } } else if (entry.isFile()) { try { const fileHash = await this.getFileHash(relativePath); if (fileHash) { results.set(relativePath, fileHash); } } catch (error) { // Skip files that can't be read console.warn(`Failed to hash file ${relativePath}: ${error}`); } } else if (entry.isSymbolicLink() && this.options.followSymlinks) { try { const stats = await fs.stat(fullPath); if (stats.isFile()) { const fileHash = await this.getFileHash(relativePath); if (fileHash) { results.set(relativePath, fileHash); } } } catch (error) { // Skip broken symlinks } } } } catch (error) { throw new error_handler_1.ValidationError(`Failed to scan directory ${dirPath}: ${error}`); } return results; } // Calculate file hash with optimizations async calculateFileHash(filePath) { if (!this.options.useContentHashing) { // Use metadata-only hashing const stats = await fs.stat(filePath); return crypto .createHash(this.options.hashingOptions.algorithm) .update(`${stats.size}-${stats.mtime.getTime()}-${stats.ctime.getTime()}`) .digest(this.options.hashingOptions.encoding); } const stats = await fs.stat(filePath); // Skip large files if configured if (stats.size > this.options.hashingOptions.maxFileSize) { return this.calculateMetadataHash(stats); } // Skip binary files if configured if (this.options.hashingOptions.skipBinary && await this.isBinaryFile(filePath)) { return this.calculateMetadataHash(stats); } return this.calculateContentHash(filePath); } // Calculate content-based hash async calculateContentHash(filePath) { const hash = crypto.createHash(this.options.hashingOptions.algorithm); const stream = fs.createReadStream(filePath, { highWaterMark: this.options.hashingOptions.chunkSize }); return new Promise((resolve, reject) => { stream.on('data', (chunk) => { hash.update(chunk); }); stream.on('end', () => { resolve(hash.digest(this.options.hashingOptions.encoding)); }); stream.on('error', (error) => { reject(new error_handler_1.ValidationError(`Failed to hash file ${filePath}: ${error.message}`)); }); }); } // Calculate metadata-based hash calculateMetadataHash(stats) { const metadata = `${stats.size}-${stats.mtime.getTime()}-${stats.ctime.getTime()}`; return crypto .createHash(this.options.hashingOptions.algorithm) .update(metadata) .digest(this.options.hashingOptions.encoding); } // Check if file is binary async isBinaryFile(filePath) { try { const buffer = Buffer.alloc(512); const fd = await fs.open(filePath, 'r'); const { bytesRead } = await fs.read(fd, buffer, 0, 512, 0); await fs.close(fd); // Check for null bytes which typically 
indicate binary files for (let i = 0; i < bytesRead; i++) { if (buffer[i] === 0) { return true; } } return false; } catch (error) { return false; } } // Check if cached hash is valid isHashValid(cached, stats) { if (!this.options.useContentHashing) { return true; // Always use cache if not using content hashing } // Check if file has been modified based on metadata return (cached.size === stats.size && cached.mtime === stats.mtime.getTime()); } // Compare two scans and detect changes compareScans(previous, current) { const added = []; const modified = []; const deleted = []; const moved = []; // Find added and modified files for (const [path, currentHash] of current) { const previousHash = previous.get(path); if (!previousHash) { added.push(path); } else if (currentHash.hash !== previousHash.hash) { modified.push(path); } } // Find deleted files for (const [path] of previous) { if (!current.has(path)) { deleted.push(path); } } // Detect moved files (if enabled) if (this.options.trackMoves) { const moves = this.detectMoves(previous, current, added, deleted); moved.push(...moves); // Remove moved files from added/deleted lists for (const move of moves) { const addedIndex = added.indexOf(move.to); if (addedIndex !== -1) added.splice(addedIndex, 1); const deletedIndex = deleted.indexOf(move.from); if (deletedIndex !== -1) deleted.splice(deletedIndex, 1); } } return { added, modified, deleted, moved, totalChanges: added.length + modified.length + deleted.length + moved.length }; } // Detect file moves based on hash matching detectMoves(previous, current, added, deleted) { const moves = []; // Create hash-to-path mappings const previousHashToPath = new Map(); const currentHashToPath = new Map(); for (const [path, hash] of previous) { if (hash.type === 'file' && deleted.includes(path)) { previousHashToPath.set(hash.hash, path); } } for (const [path, hash] of current) { if (hash.type === 'file' && added.includes(path)) { currentHashToPath.set(hash.hash, path); } } // Find matching hashes for (const [hash, currentPath] of currentHashToPath) { const previousPath = previousHashToPath.get(hash); if (previousPath) { moves.push({ from: previousPath, to: currentPath }); } } return moves; } // Check if path should be excluded shouldExclude(filePath) { const patterns = this.options.hashingOptions.excludePatterns; if (!patterns || !Array.isArray(patterns)) { return false; } for (const pattern of patterns) { if (pattern.test(filePath)) { return true; } } return false; } // Load cache from disk async loadCache() { if (!this.options.enableCache) return; try { if (await fs.pathExists(this.cacheFile)) { const cacheData = await fs.readJson(this.cacheFile); if (cacheData.version === '1.0' && cacheData.hashes) { for (const [path, hash] of Object.entries(cacheData.hashes)) { this.hashCache.set(path, hash); this.previousScan.set(path, hash); } } } } catch (error) { // Ignore cache loading errors and start fresh console.warn(`Failed to load change detection cache: ${error}`); } } // Save cache to disk async saveCache() { if (!this.options.enableCache) return; try { await fs.ensureDir(path.dirname(this.cacheFile)); const cacheData = { version: '1.0', timestamp: Date.now(), hashes: Object.fromEntries(this.hashCache) }; await fs.writeJson(this.cacheFile, cacheData, { spaces: 2 }); } catch (error) { console.warn(`Failed to save change detection cache: ${error}`); } } // Format bytes for display formatBytes(bytes) { if (bytes === 0) return '0 B'; const k = 1024; const sizes = ['B', 'KB', 'MB', 'GB']; const i = 
Math.floor(Math.log(bytes) / Math.log(k)); return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`; } } exports.ChangeDetector = ChangeDetector; // Utility functions async function createChangeDetector(rootPath, options) { const detector = new ChangeDetector(rootPath, options); await detector.initialize(); return detector; } // Quick change detection async function detectChanges(rootPath, options) { const detector = await createChangeDetector(rootPath, options); return await detector.detectChanges(); } // Check if specific file has changed async function hasFileChanged(rootPath, filePath, options) { const detector = await createChangeDetector(rootPath, options); return await detector.hasFileChanged(filePath); }
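
A minimal usage sketch of the exported API follows, assuming the module is required from its compiled location inside the package; the relative require path below is illustrative, not a documented entry point. It builds a detector with the async factory, runs a scan, and then uses the one-shot helper to check a single file.

// Usage sketch. Assumption: './change-detector' stands in for wherever this
// compiled module lives inside @re-shell/cli; adjust the path to your setup.
const { createChangeDetector, hasFileChanged } = require('./change-detector');

async function main() {
    // The factory awaits initialize(), so any persisted hash cache under
    // .re-shell/change-cache.json is already loaded at this point.
    const detector = await createChangeDetector(process.cwd(), { trackMoves: true });

    // The first scan establishes the baseline; subsequent scans report deltas.
    const result = await detector.detectChanges();
    console.log(`added: ${result.added.length}, modified: ${result.modified.length}`);
    console.log(`deleted: ${result.deleted.length}, moved: ${result.moved.length}`);
    console.log(`scan: ${result.scanTime} ms (hashing: ${result.hashingTime} ms)`);

    // One-shot helper: builds a fresh detector internally, so the answer is
    // relative to the cache saved by the scan above.
    console.log('package.json changed:', await hasFileChanged(process.cwd(), 'package.json'));
}

main().catch(console.error);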