
claude-code-automation

🚀 Generic project automation system with anti-compaction protection and recovery capabilities. Automatically detects project type (React, Node.js, Python, Rust, Go, Java) and provides intelligent analysis. Claude Code optimized - run 'welcome' after install.

1,256 lines (1,096 loc) • 63.1 kB
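The file below defines a single ContextPreservationEngine class. As a rough orientation before the listing, here is a minimal usage sketch; the require path and the export shape are assumptions for illustration only, since the module's export statement is not part of this excerpt:

    // Hypothetical usage sketch -- filename and export are assumed, not confirmed by this excerpt.
    const ContextPreservationEngine = require('./context-preservation-engine');

    (async () => {
      // The engine treats process.cwd() as the project root and writes state
      // under docs/state/, docs/automation/, and docs/recovery/.
      const engine = new ContextPreservationEngine();
      const state = await engine.preserveCurrentState();
      console.log('Preserved project state at', state.timestamp);
    })();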
/**
 * Context Preservation Engine
 * Anti-compaction system that maintains project state and context
 * Ensures project can be fully reconstructed after prompt compaction
 */
const fs = require('fs').promises;
const path = require('path');
const { promisify } = require('util');
const { setTimeout: setTimeoutAsync } = require('timers/promises');

class ContextPreservationEngine {
  constructor() {
    // Use current working directory as project root, not relative to this module
    this.projectRoot = process.cwd();
    this.stateDir = path.join(this.projectRoot, 'docs/state');
    this.decisionsDir = path.join(this.projectRoot, 'docs/decisions');
    this.automationDir = path.join(this.projectRoot, 'docs/automation');
  }

  /**
   * Preserve complete project state for compaction resistance
   * Creates redundant, comprehensive project documentation
   */
  async preserveCurrentState() {
    const timestamp = new Date().toISOString();
    const preservationResults = { timestamp, success: false, errors: [], warnings: [] };

    console.log(`🔄 Preserving project state at ${timestamp}`);

    try {
      // Create timeout promise for long-running operations
      const preservationPromise = this.executePreservationWithTimeout(timestamp, preservationResults);
      const timeoutPromise = this.createTimeoutPromise(300000); // 5 minutes timeout

      const projectState = await Promise.race([preservationPromise, timeoutPromise]);

      console.log(`✅ Project state preserved successfully`);
      preservationResults.success = true;
      return projectState;
    } catch (error) {
      preservationResults.errors.push({
        operation: 'main-preservation',
        error: error.message,
        stack: error.stack
      });

      // During installation, return minimal state and continue
      if (process.argv.includes('install')) {
        console.log(`⚠️ Skipping preservation during install: ${error.message}`);
        return { timestamp, error: 'skipped-during-install', preservationResults };
      }

      // Try to preserve at least basic state even if main preservation fails
      try {
        const basicState = await this.preserveBasicState(timestamp);
        console.log(`⚠️ Partial preservation completed despite errors`);
        preservationResults.warnings.push('Main preservation failed, basic state saved');
        return { ...basicState, preservationResults };
      } catch (basicError) {
        preservationResults.errors.push({
          operation: 'basic-preservation-fallback',
          error: basicError.message
        });
        console.error(`❌ Complete preservation failure:`, error.message);
        console.error(`❌ Basic preservation also failed:`, basicError.message);
        return { timestamp, error: error.message, preservationResults };
      }
    }
  }

  /**
   * Execute preservation with comprehensive error handling
   */
  async executePreservationWithTimeout(timestamp, preservationResults) {
    const projectState = { timestamp, errors: [], warnings: [] };

    // Capture each component with individual error handling
    const operations = [
      { name: 'metadata', method: 'captureProjectMetadata' },
      { name: 'phase', method: 'getCurrentPhase' },
      { name: 'progress', method: 'captureProgress' },
      { name: 'architecture', method: 'captureArchitecture' },
      { name: 'codebase', method: 'captureCodebaseSnapshot' },
      { name: 'tests', method: 'captureTestState' },
      { name: 'dependencies', method: 'captureDependencies' },
      { name: 'decisions', method: 'captureDecisionHistory' },
      { name: 'automationState', method: 'captureAutomationState' }
    ];

    // Execute operations with individual timeouts and error handling
    for (const operation of operations) {
      try {
        console.log(` 📊 Capturing ${operation.name}...`);
        const operationTimeout = 30000; // 30 seconds per operation
        const result = await this.executeWithTimeout(
          this[operation.method].bind(this),
          operationTimeout,
          operation.name
        );
        projectState[operation.name] = result;
      } catch (error) {
        const errorInfo = {
          operation: operation.name,
          error: error.message,
          timestamp: new Date().toISOString()
        };
        projectState.errors.push(errorInfo);
        preservationResults.errors.push(errorInfo);

        // Provide fallback data for critical operations
        projectState[operation.name] = this.getFallbackData(operation.name, error);
        console.log(` ⚠️ ${operation.name} capture failed, using fallback: ${error.message}`);
      }
    }

    // Store state with error handling for each storage operation
    await this.storeStateWithErrorHandling(projectState, timestamp, preservationResults);

    // Update living documentation
    await this.updateLivingDocumentation(projectState);

    // Generate recovery instructions
    await this.generateRecoveryInstructions(projectState);

    return projectState;
  }

  /**
   * Create timeout promise that rejects after specified time
   */
  async createTimeoutPromise(timeoutMs) {
    await setTimeoutAsync(timeoutMs);
    throw new Error(`Preservation timeout after ${timeoutMs}ms`);
  }

  /**
   * Execute operation with timeout
   */
  async executeWithTimeout(operation, timeoutMs, operationName) {
    const timeoutPromise = this.createTimeoutPromise(timeoutMs);
    const operationPromise = operation();

    try {
      return await Promise.race([operationPromise, timeoutPromise]);
    } catch (error) {
      if (error.message.includes('timeout')) {
        throw new Error(`${operationName} operation timed out after ${timeoutMs}ms`);
      }
      throw error;
    }
  }

  /**
   * Preserve basic state as fallback
   */
  async preserveBasicState(timestamp) {
    const basicState = {
      timestamp,
      mode: 'basic-fallback',
      projectRoot: this.projectRoot
    };

    try {
      // Try to read package.json at minimum
      const packagePath = path.join(this.projectRoot, 'package.json');
      const packageContent = await fs.readFile(packagePath, 'utf8');
      basicState.package = JSON.parse(packageContent);
    } catch (error) {
      basicState.packageError = error.message;
    }

    try {
      // Ensure basic directories exist
      await this.ensureDirectoryExists(this.stateDir);

      // Save basic state
      const stateFile = path.join(this.stateDir, 'basic-state.json');
      await fs.writeFile(stateFile, JSON.stringify(basicState, null, 2));
    } catch (error) {
      console.error(`Failed to save basic state: ${error.message}`);
    }

    return basicState;
  }

  /**
   * Get fallback data for failed operations
   */
  getFallbackData(operationName, error) {
    const fallbacks = {
      metadata: { error: error.message, name: 'unknown', version: 'unknown' },
      phase: { currentSprint: 'unknown', error: error.message },
      progress: { status: 'unknown', error: error.message },
      architecture: { patterns: [], error: error.message },
      codebase: { files: {}, statistics: { totalLines: 0 }, error: error.message },
      tests: { status: 'unknown', testFiles: [], error: error.message },
      dependencies: { npm: [], dev: [], error: error.message },
      decisions: { decisions: [], error: error.message },
      automationState: { level: 'unknown', error: error.message }
    };
    return fallbacks[operationName] || { error: error.message, status: 'failed' };
  }

  /**
   * Capture essential project metadata with error handling
   */
  async captureProjectMetadata() {
    const metadata = {
      timestamp: new Date().toISOString(),
      projectRoot: this.projectRoot
    };

    try {
      const packagePath = path.join(this.projectRoot, 'package.json');
      const packageContent = await fs.readFile(packagePath, 'utf8');
      const packageJson = JSON.parse(packageContent);

      metadata.name = packageJson.name || 'unknown';
      metadata.version = packageJson.version || 'unknown';
      metadata.description = packageJson.description || '';
      metadata.scripts = packageJson.scripts || {};
      metadata.dependencies = packageJson.dependencies || {};
      metadata.devDependencies = packageJson.devDependencies || {};
      metadata.packageJsonExists = true;
    } catch (error) {
      metadata.packageJsonExists = false;
      metadata.packageJsonError = error.message;
      metadata.name = path.basename(this.projectRoot);
      metadata.version = 'unknown';
      metadata.description = 'Package.json not found or invalid';
      metadata.scripts = {};
      metadata.dependencies = {};
      metadata.devDependencies = {};
      console.log(`⚠️ Failed to read package.json: ${error.message}`);
    }

    try {
      metadata.projectStructure = await this.captureDirectoryStructureSafely();
    } catch (error) {
      metadata.projectStructure = { error: error.message };
      metadata.projectStructureError = error.message;
      console.log(`⚠️ Failed to capture directory structure: ${error.message}`);
    }

    return metadata;
  }

  /**
   * Safely capture directory structure with error handling
   */
  async captureDirectoryStructureSafely() {
    const structure = {};
    const maxDepth = 3;
    const maxFiles = 100;
    let fileCount = 0;

    const scanDirectory = async (dirPath, depth = 0) => {
      if (depth > maxDepth || fileCount > maxFiles) {
        return { truncated: true, reason: 'max_depth_or_files_reached' };
      }

      try {
        const entries = await fs.readdir(dirPath, { withFileTypes: true });
        const dirInfo = { files: [], directories: [] };

        for (const entry of entries) {
          if (fileCount > maxFiles) break;

          // Skip hidden files and node_modules
          if (entry.name.startsWith('.') || entry.name === 'node_modules') {
            continue;
          }

          try {
            if (entry.isDirectory()) {
              dirInfo.directories.push(entry.name);
              if (depth < maxDepth) {
                const subDirPath = path.join(dirPath, entry.name);
                structure[entry.name] = await scanDirectory(subDirPath, depth + 1);
              }
            } else if (entry.isFile()) {
              dirInfo.files.push(entry.name);
              fileCount++;
            }
          } catch (entryError) {
            console.log(`⚠️ Skipping entry ${entry.name}: ${entryError.message}`);
          }
        }

        return dirInfo;
      } catch (error) {
        return { error: error.message, accessible: false };
      }
    };

    try {
      structure.root = await scanDirectory(this.projectRoot);
      structure.fileCount = fileCount;
      structure.capturedAt = new Date().toISOString();
    } catch (error) {
      structure.error = error.message;
      structure.fallback = true;
    }

    return structure;
  }

  /**
   * Determine current development phase
   */
  async getCurrentPhase() {
    try {
      // Analyze completed tasks and current state to determine phase
      const completedFeatures = await this.analyzeCompletedFeatures();
      const testCoverage = await this.getTestCoverage();
      const codeComplexity = await this.analyzeCodeComplexitySafely();

      return {
        currentSprint: this.determineCurrentSprint(completedFeatures),
        completedEpics: completedFeatures.epics,
        completedStories: completedFeatures.stories,
        testCoverage,
        codeComplexity,
        nextPriorities: this.determineNextPriorities(completedFeatures)
      };
    } catch (error) {
      return {
        currentSprint: 'Unknown',
        completedEpics: [],
        completedStories: [],
        testCoverage: 'unknown',
        codeComplexity: { error: error.message },
        nextPriorities: ['analyze-state'],
        error: error.message
      };
    }
  }

  /**
   * Capture current project progress
   */
  async captureProgress() {
    return {
      applicationModes: await this.analyzeApplicationModes(),
      testingInfrastructure: await this.analyzeTestingState(),
      cicdPipeline: await this.analyzeCICDState(),
      automationLevel: await this.analyzeAutomationLevel(),
      qualityMetrics: await this.captureQualityMetrics()
    };
  }

  /**
   * Capture architectural decisions and patterns
   */
  async captureArchitecture() {
    return {
      patterns: await this.identifyArchitecturalPatterns(),
      modules: await this.analyzeModuleStructure(),
      interfaces: await this.documentInterfaces(),
      dataFlow: await this.mapDataFlow(),
      designDecisions: await this.captureDesignDecisions()
    };
  }

  /**
   * Create snapshot of codebase with comprehensive error handling
   */
  async captureCodebaseSnapshot() {
    const snapshot = {
      timestamp: new Date().toISOString(),
      files: {},
      errors: [],
      warnings: [],
      statistics: { totalFiles: 0, totalLines: 0, totalSize: 0 }
    };

    try {
      // Capture statistics with timeout and error handling
      try {
        snapshot.statistics = await this.executeWithTimeout(
          () => this.calculateCodeStatisticsSafely(),
          10000,
          'statistics'
        );
      } catch (error) {
        snapshot.errors.push({ operation: 'statistics', error: error.message });
        snapshot.statistics = { error: error.message, totalFiles: 0, totalLines: 0 };
      }

      // Capture complexity with error handling
      try {
        snapshot.complexity = await this.executeWithTimeout(
          () => this.analyzeCodeComplexitySafely(),
          10000,
          'complexity'
        );
      } catch (error) {
        snapshot.errors.push({ operation: 'complexity', error: error.message });
        snapshot.complexity = { error: error.message, average: 'unknown' };
      }

      // Capture dependencies with error handling
      try {
        snapshot.dependencies = await this.executeWithTimeout(
          () => this.analyzeDependencyGraphSafely(),
          10000,
          'dependencies'
        );
      } catch (error) {
        snapshot.errors.push({ operation: 'dependencies', error: error.message });
        snapshot.dependencies = { error: error.message };
      }

      // Capture key files with individual error handling
      try {
        const keyFiles = await this.identifyKeyFilesSafely();
        console.log(` 📄 Processing ${keyFiles.length} key files...`);

        for (const file of keyFiles.slice(0, 20)) { // Limit to 20 files for performance
          try {
            const fileInfo = await this.captureFileInfoSafely(file);
            if (fileInfo) {
              snapshot.files[file] = fileInfo;
              snapshot.statistics.totalFiles++;
              snapshot.statistics.totalLines += fileInfo.lines || 0;
              snapshot.statistics.totalSize += fileInfo.size || 0;
            }
          } catch (fileError) {
            const errorInfo = { file, error: fileError.message };
            snapshot.errors.push(errorInfo);
            console.log(` ⚠️ Failed to process ${file}: ${fileError.message}`);
          }
        }
      } catch (error) {
        snapshot.errors.push({ operation: 'key-files', error: error.message });
        console.log(`⚠️ Failed to identify key files: ${error.message}`);
      }
    } catch (error) {
      snapshot.errors.push({ operation: 'main-snapshot', error: error.message });
      console.log(`⚠️ Codebase snapshot failed: ${error.message}`);
    }

    snapshot.success = snapshot.errors.length === 0;
    snapshot.fileCount = Object.keys(snapshot.files).length;
    return snapshot;
  }

  /**
   * Safely capture individual file information
   */
  async captureFileInfoSafely(filePath) {
    const fileInfo = { path: filePath, capturedAt: new Date().toISOString() };

    try {
      // Check if file exists and is accessible
      const stats = await fs.stat(filePath);

      // Skip very large files (> 1MB)
      if (stats.size > 1024 * 1024) {
        fileInfo.skipped = true;
        fileInfo.reason = 'file_too_large';
        fileInfo.size = stats.size;
        return fileInfo;
      }

      // Read file content with timeout
      const content = await this.executeWithTimeout(
        () => fs.readFile(filePath, 'utf8'),
        5000,
        `read-${path.basename(filePath)}`
      );

      fileInfo.size = content.length;
      fileInfo.lines = content.split('\n').length;
      fileInfo.lastModified = stats.mtime;

      // Store content only for small files
      if (content.length < 50000) { // 50KB limit
        fileInfo.content = content;
      } else {
        fileInfo.contentTruncated = true;
        fileInfo.preview = content.substring(0, 1000) + '...';
      }

      // Analyze file with error handling
      try {
        fileInfo.analysis = await this.analyzeFileSafely(content, filePath);
      } catch (analysisError) {
        fileInfo.analysisError = analysisError.message;
      }

      return fileInfo;
    } catch (error) {
      fileInfo.error = error.message;
      fileInfo.accessible = false;
      return fileInfo;
    }
  }

  /**
   * Safely analyze file content
   */
  async analyzeFileSafely(content, filePath) {
    try {
      const ext = path.extname(filePath).toLowerCase();
      const analysis = {
        extension: ext,
        type: this.getFileType(ext),
        encoding: 'utf8'
      };

      // Basic analysis for different file types
      if (['.js', '.ts', '.jsx', '.tsx'].includes(ext)) {
        analysis.exports = (content.match(/export\s+/g) || []).length;
        analysis.imports = (content.match(/import\s+/g) || []).length;
        analysis.functions = (content.match(/function\s+\w+/g) || []).length;
        analysis.classes = (content.match(/class\s+\w+/g) || []).length;
      } else if (['.json'].includes(ext)) {
        try {
          JSON.parse(content);
          analysis.validJson = true;
        } catch {
          analysis.validJson = false;
        }
      }

      return analysis;
    } catch (error) {
      return { error: error.message, type: 'unknown' };
    }
  }

  /**
   * Get file type based on extension
   */
  getFileType(extension) {
    const types = {
      '.js': 'javascript',
      '.ts': 'typescript',
      '.jsx': 'react',
      '.tsx': 'react-typescript',
      '.json': 'json',
      '.md': 'markdown',
      '.html': 'html',
      '.css': 'css',
      '.scss': 'scss',
      '.py': 'python',
      '.java': 'java',
      '.php': 'php'
    };
    return types[extension] || 'unknown';
  }

  /**
   * Store project state with comprehensive error handling and redundancy
   */
  async storeStateWithErrorHandling(projectState, timestamp, preservationResults) {
    const stateFile = `project-state-${timestamp.replace(/[:.]/g, '-')}.json`;
    const stateContent = JSON.stringify(projectState, null, 2);
    const storageResults = { attempts: [], successful: 0, failed: 0 };

    // Primary storage attempt
    try {
      await this.ensureDirectoryExists(this.stateDir);
      const primaryPath = path.join(this.stateDir, stateFile);
      await this.writeFileWithRetry(primaryPath, stateContent);
      storageResults.attempts.push({ location: 'primary', path: primaryPath, success: true });
      storageResults.successful++;
      console.log(` 💾 Primary state saved: ${primaryPath}`);
    } catch (error) {
      const errorInfo = { location: 'primary', error: error.message, success: false };
      storageResults.attempts.push(errorInfo);
      storageResults.failed++;
      preservationResults.errors.push(errorInfo);
      console.log(` ❌ Primary storage failed: ${error.message}`);
    }

    // Backup storage attempts
    const backupLocations = [
      { name: 'backup', path: path.join(this.projectRoot, '.backup', stateFile) },
      { name: 'automation', path: path.join(this.automationDir, 'latest-state.json') },
      { name: 'temp', path: path.join(require('os').tmpdir(), 'claude-automation-backup.json') }
    ];

    for (const backup of backupLocations) {
      try {
        await this.ensureDirectoryExists(path.dirname(backup.path));
        await this.writeFileWithRetry(backup.path, stateContent);
        storageResults.attempts.push({ location: backup.name, path: backup.path, success: true });
        storageResults.successful++;
        console.log(` 💾 Backup saved: ${backup.name}`);
      } catch (error) {
        const errorInfo = { location: backup.name, path: backup.path, error: error.message, success: false };
        storageResults.attempts.push(errorInfo);
        storageResults.failed++;
        preservationResults.warnings.push(`Backup storage failed: ${backup.name}`);
        console.log(` ⚠️ Backup storage failed (${backup.name}): ${error.message}`);
      }
    }

    // Create and store summary
    try {
      const summary = this.createStateSummarySafely(projectState);
      const summaryPath = path.join(this.stateDir, 'current-state-summary.json');
      await this.writeFileWithRetry(summaryPath, JSON.stringify(summary, null, 2));
      console.log(` 📋 Summary saved: ${summaryPath}`);
    } catch (error) {
      preservationResults.warnings.push(`Failed to save summary: ${error.message}`);
      console.log(` ⚠️ Summary creation failed: ${error.message}`);
    }

    // Store recovery information
    await this.storeRecoveryInformation(projectState, timestamp, preservationResults);

    // Log storage results
    console.log(` 📊 Storage results: ${storageResults.successful} successful, ${storageResults.failed} failed`);

    if (storageResults.successful === 0) {
      throw new Error('All storage attempts failed - no state preserved');
    }

    return storageResults;
  }

  /**
   * Ensure directory exists with error handling
   */
  async ensureDirectoryExists(dirPath) {
    try {
      await fs.mkdir(dirPath, { recursive: true });
    } catch (error) {
      // Check if directory already exists
      try {
        const stats = await fs.stat(dirPath);
        if (!stats.isDirectory()) {
          throw new Error(`Path exists but is not a directory: ${dirPath}`);
        }
      } catch (statError) {
        throw new Error(`Failed to create directory ${dirPath}: ${error.message}`);
      }
    }
  }

  /**
   * Write file with retry logic
   */
  async writeFileWithRetry(filePath, content, maxRetries = 3) {
    let lastError;

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        await fs.writeFile(filePath, content, 'utf8');
        return; // Success
      } catch (error) {
        lastError = error;
        if (attempt < maxRetries) {
          console.log(` 🔄 Write attempt ${attempt} failed, retrying: ${error.message}`);
          await setTimeoutAsync(1000 * attempt); // Progressive delay
        } else {
          throw new Error(`Failed to write file after ${maxRetries} attempts: ${error.message}`);
        }
      }
    }
  }

  /**
   * Store recovery information with error handling
   */
  async storeRecoveryInformation(projectState, timestamp, preservationResults) {
    try {
      const recoveryInfo = {
        timestamp,
        preservationResults,
        quickRecovery: {
          commands: ['npm install', 'npm start'],
          verificationSteps: ['Check application launches', 'Verify core functionality']
        },
        troubleshooting: {
          commonIssues: [
            'Dependencies not installed: run npm install',
            'Port conflicts: check if port is already in use',
            'File permissions: check read/write access to project directory'
          ],
          logLocations: [
            path.join(this.stateDir, 'current-state-summary.json'),
            path.join(this.automationDir, 'latest-state.json')
          ]
        }
      };

      const recoveryPath = path.join(this.projectRoot, 'docs/recovery');
      await this.ensureDirectoryExists(recoveryPath);
      const recoveryFile = path.join(recoveryPath, 'recovery-info.json');
      await this.writeFileWithRetry(recoveryFile, JSON.stringify(recoveryInfo, null, 2));
    } catch (error) {
      console.log(`⚠️ Failed to store recovery information: ${error.message}`);
    }
  }

  /**
   * Create state summary with error handling
   */
  createStateSummarySafely(projectState) {
    try {
      return {
        timestamp: projectState.timestamp,
        phase: projectState.phase?.currentSprint || 'unknown',
        applicationModes: `${projectState.progress?.applicationModes?.implemented?.length || 0}/12`,
        testCoverage: projectState.phase?.testCoverage || 'unknown',
        automationLevel: projectState.progress?.automationLevel || 'basic',
        nextPriority: projectState.phase?.nextPriorities?.[0] || 'analyze-state',
        keyMetrics: {
          filesAnalyzed: Object.keys(projectState.codebase?.files || {}).length,
          totalLines: projectState.codebase?.statistics?.totalLines || 0,
          testFiles: projectState.tests?.testFiles?.length || 0,
          errors: projectState.errors?.length || 0,
          warnings: projectState.warnings?.length || 0
        },
        health: {
          hasErrors: (projectState.errors?.length || 0) > 0,
          hasWarnings: (projectState.warnings?.length || 0) > 0,
          preservationSuccessful: !projectState.error
        }
      };
    } catch (error) {
      return {
        timestamp: new Date().toISOString(),
        error: `Summary creation failed: ${error.message}`,
        phase: 'unknown',
        health: { hasErrors: true, preservationSuccessful: false }
      };
    }
  }

  /**
   * Update living documentation with error handling
   */
  async updateLivingDocumentation(projectState) {
    const documentationTasks = [
      {
        name: 'technical-overview',
        generator: () => this.generateTechnicalDocumentation(projectState),
        path: path.join(this.automationDir, 'technical-overview.md')
      },
      {
        name: 'project-summary',
        generator: () => this.generateStateSummary(projectState),
        path: path.join(this.automationDir, 'state', 'project-summary.md')
      },
      {
        name: 'progress-documentation',
        generator: () => this.generateProgressDocumentation(projectState),
        path: path.join(this.automationDir, 'current-progress.md')
      }
    ];

    for (const task of documentationTasks) {
      try {
        console.log(` 📝 Generating ${task.name}...`);
        await this.ensureDirectoryExists(path.dirname(task.path));
        const content = await task.generator();
        await this.writeFileWithRetry(task.path, content);
      } catch (error) {
        console.log(` ⚠️ Failed to generate ${task.name}: ${error.message}`);
        // Continue with other documentation tasks
      }
    }
  }

  /**
   * Generate recovery instructions with error handling
   */
  async generateRecoveryInstructions(projectState) {
    try {
      const errors = projectState.errors || [];
      const warnings = projectState.warnings || [];
      const hasIssues = errors.length > 0 || warnings.length > 0;

      const instructions = `# Project Recovery Instructions
Generated: ${projectState.timestamp}
${hasIssues ? '\n⚠️ **WARNING**: Preservation had issues. Some data may be incomplete.' : '✅ **SUCCESS**: Complete preservation achieved.'}

## Quick Recovery (5 minutes)
1. \`npm install\` - Install dependencies
2. \`npm run test\` - Verify basic functionality
3. \`npm start\` - Launch application
4. Check docs/state/current-state-summary.json for latest status

## Full Context Recovery (15 minutes)
1. Review docs/automation/technical-overview.md for architecture
2. Check docs/automation/current-progress.md for completion status
3. Examine src/ directory structure for implementation details
4. Run scripts/automation/context-analysis.js for full analysis

## Current Phase: ${projectState.phase?.currentSprint || 'Unknown'}
## Next Priority: ${projectState.phase?.nextPriorities?.[0] || 'Analyze current state'}

## Key Files to Examine:
${Object.keys(projectState.codebase?.files || {}).map(file => `- ${file}`).join('\n') || '- No files captured (check errors below)'}

## Automation Status:
- Testing Infrastructure: ${projectState.progress?.testingInfrastructure?.status || 'unknown'}
- CI/CD Pipeline: ${projectState.progress?.cicdPipeline?.status || 'unknown'}
- Application Modes: ${projectState.progress?.applicationModes?.implemented?.length || 0}/12 implemented
${hasIssues ? `
## ⚠️ Issues During Preservation

### Errors (${errors.length}):
${errors.map(err => `- **${err.operation}**: ${err.error}`).join('\n') || 'None'}

### Warnings (${warnings.length}):
${warnings.map(warning => `- ${warning}`).join('\n') || 'None'}

### Troubleshooting:
- If dependencies failed: Run \`npm install\` and retry
- If filesystem errors: Check permissions on project directory
- If timeout errors: Some operations took too long but core functionality should work
- If JSON errors: Check for corrupted configuration files
` : ''}
## Recovery Verification:
- [ ] Application launches without errors
- [ ] All tests pass: \`npm test\`
- [ ] All 12 application modes are accessible
- [ ] Context preservation system is functional

## Emergency Recovery:
If this recovery fails:
1. Check docs/recovery/recovery-info.json for detailed error information
2. Look for backup state files in .backup/ directory
3. Use basic fallback: \`npm install && npm start\`

For detailed recovery, see docs/recovery/ directory.
`;

      const recoveryDir = path.join(this.projectRoot, 'docs/recovery');
      await this.ensureDirectoryExists(recoveryDir);
      const recoveryPath = path.join(recoveryDir, 'RECOVERY.md');
      await this.writeFileWithRetry(recoveryPath, instructions);
    } catch (error) {
      console.log(`⚠️ Failed to generate recovery instructions: ${error.message}`);

      // Create minimal recovery instructions as fallback
      try {
        const minimalInstructions = `# Basic Recovery Instructions
Generated: ${new Date().toISOString()}

⚠️ Full recovery instructions failed to generate.

## Basic Recovery:
1. \`npm install\`
2. \`npm start\`
3. Check console for errors

Error generating full instructions: ${error.message}
`;
        const recoveryDir = path.join(this.projectRoot, 'docs/recovery');
        await this.ensureDirectoryExists(recoveryDir);
        await fs.writeFile(path.join(recoveryDir, 'RECOVERY.md'), minimalInstructions);
      } catch (fallbackError) {
        console.log(`❌ Failed to create even basic recovery instructions: ${fallbackError.message}`);
      }
    }
  }

  /**
   * Helper methods for analysis and data capture
   */
  async analyzeApplicationModes() {
    // Analyze project features and components
    try {
      // Look for HTML files with feature attributes
      const htmlFiles = await this.findFilesWithExtension('.html');
      let features = [];

      for (const htmlFile of htmlFiles) {
        try {
          const content = await fs.readFile(htmlFile, 'utf8');
          // Look for common feature patterns
          const dataAttributes = content.match(/data-\w+="[^"]+"/g) || [];
          const classNames = content.match(/class="[^"]*"/g) || [];
          features.push(...dataAttributes, ...classNames);
        } catch (error) {
          // Skip files that can't be read
        }
      }

      // Also check JavaScript/TypeScript files for feature exports
      const jsFiles = await this.findFilesWithExtension('.js', '.ts', '.jsx', '.tsx');
      for (const jsFile of jsFiles.slice(0, 10)) { // Limit to first 10 files
        try {
          const content = await fs.readFile(jsFile, 'utf8');
          const exports = content.match(/export\s+(?:const|function|class)\s+(\w+)/g) || [];
          features.push(...exports);
        } catch (error) {
          // Skip files that can't be read
        }
      }

      return {
        total: features.length,
        implemented: features.slice(0, Math.min(20, features.length)), // Limit output
        pending: [],
        implementationStatus: `${features.length} features detected`
      };
    } catch (error) {
      return { error: error.message, total: 0, implemented: [], pending: [] };
    }
  }

  async analyzeTestingState() {
    // Analyze current testing infrastructure
    try {
      const vitestConfig = await fs.readFile(
        path.join(this.projectRoot, 'vitest.config.js'),
        'utf8'
      );
      return {
        status: 'configured',
        framework: 'vitest',
        hasConfig: true,
        coverageEnabled: vitestConfig.includes('coverage')
      };
    } catch (error) {
      return { status: 'not-configured', error: error.message };
    }
  }

  /**
   * Identify key files with comprehensive error handling
   */
  async identifyKeyFilesSafely() {
    const keyFiles = [];
    const errors = [];

    try {
      // Always include package.json and project documentation
      const essentialFiles = ['package.json', 'README.md', 'CLAUDE.md', '.gitignore', 'LICENSE'];
      for (const file of essentialFiles) {
        try {
          const fullPath = path.join(this.projectRoot, file);
          await fs.access(fullPath);
          keyFiles.push(fullPath);
        } catch (error) {
          // File doesn't exist, skip silently
        }
      }

      // Find main application files (entry points)
      const commonEntryPoints = [
        'index.js', 'index.ts', 'main.js', 'main.ts', 'app.js', 'app.ts',
        'src/index.js', 'src/index.ts', 'src/main.js', 'src/main.ts',
        'src/app.js', 'src/app.ts', 'public/index.html', 'index.html',
        'dist/index.js', 'build/index.js'
      ];

      for (const entryPoint of commonEntryPoints) {
        try {
          const fullPath = path.join(this.projectRoot, entryPoint);
          await fs.access(fullPath);
          keyFiles.push(fullPath);
        } catch (error) {
          // File doesn't exist, skip silently
        }
      }

      // Find configuration files with timeout protection
      try {
        const configFiles = await this.executeWithTimeout(
          () => this.findFilesWithExtensionSafely(['.json', '.config.js', '.config.ts']),
          10000,
          'config-files'
        );

        const relevantConfigs = configFiles.filter(f => {
          const filename = path.basename(f).toLowerCase();
          return filename.includes('config') ||
                 filename.includes('package.json') ||
                 filename.includes('tsconfig') ||
                 filename.includes('webpack') ||
                 filename.includes('babel') ||
                 filename.includes('eslint');
        }).slice(0, 8); // Limit to 8 config files

        keyFiles.push(...relevantConfigs);
      } catch (error) {
        errors.push({ operation: 'config-files', error: error.message });
      }

      // Find important source files by size (with error handling)
      try {
        const sourceFiles = await this.executeWithTimeout(
          () => this.findFilesWithExtensionSafely(['.js', '.ts', '.jsx', '.tsx']),
          15000,
          'source-files'
        );

        const sourceFilesWithSize = [];

        // Process files in batches to avoid overwhelming the system
        const batchSize = 10;
        for (let i = 0; i < Math.min(sourceFiles.length, 50); i += batchSize) {
          const batch = sourceFiles.slice(i, i + batchSize);
          await Promise.allSettled(batch.map(async (file) => {
            try {
              const stats = await fs.stat(file);
              if (stats.size > 0 && stats.size < 1024 * 1024) { // Between 0 and 1MB
                sourceFilesWithSize.push({ path: file, size: stats.size });
              }
            } catch (error) {
              // Skip files we can't stat
            }
          }));
        }

        // Sort by size and take the largest ones
        sourceFilesWithSize.sort((a, b) => b.size - a.size);
        keyFiles.push(...sourceFilesWithSize.slice(0, 12).map(f => f.path));
      } catch (error) {
        errors.push({ operation: 'source-files', error: error.message });
      }

      // Remove duplicates and return
      const uniqueFiles = [...new Set(keyFiles)];

      // Log results
      console.log(` 📁 Identified ${uniqueFiles.length} key files`);
      if (errors.length > 0) {
        console.log(` ⚠️ ${errors.length} errors during file identification`);
      }

      return uniqueFiles;
    } catch (error) {
      console.log(`⚠️ Key file identification failed: ${error.message}`);
      // Return at least the essential files we managed to find
      return keyFiles.length > 0
        ? [...new Set(keyFiles)]
        : [path.join(this.projectRoot, 'package.json')];
    }
  }

  async generateStateSummary(projectState) {
    // Safely access codebase files with defensive programming
    const codebaseFiles = projectState.codebase?.files || {};
    const fileCount = Object.keys(codebaseFiles).length;
    const keyFiles = Object.keys(codebaseFiles).slice(0, 5);

    return `# Project State Summary
Auto-generated: ${new Date().toISOString()}

## Current Status: ${projectState.progress?.phase || 'unknown'}
Components: ${fileCount} files tracked

## Project Health
- Phase: ${projectState.progress?.phase || 'unknown'}
- Files: ${fileCount} tracked
- Test Coverage: ${projectState.progress?.testCoverage || 'unknown'}
- Automation Level: ${projectState.progress?.automationLevel || 'basic'}

## Key Components
${keyFiles.map(filePath => `- ${filePath}`).join('\n')}

*This is an automated summary stored in .automation/ directory*
`;
  }

  createStateSummary(projectState) {
    return {
      timestamp: projectState.timestamp,
      phase: projectState.phase?.currentSprint || 'unknown',
      applicationModes: `${projectState.progress?.applicationModes?.implemented?.length || 0}/12`,
      testCoverage: projectState.phase?.testCoverage || 'unknown',
      automationLevel: projectState.progress?.automationLevel || 'basic',
      nextPriority: projectState.phase?.nextPriorities?.[0] || 'analyze-state',
      keyMetrics: {
        filesAnalyzed: Object.keys(projectState.codebase?.files || {}).length,
        totalLines: projectState.codebase?.statistics?.totalLines || 0,
        testFiles: projectState.tests?.testFiles?.length || 0
      }
    };
  }

  /**
   * Find files with extensions using safe directory traversal
   */
  async findFilesWithExtensionSafely(extensions) {
    const files = [];
    const maxFiles = 200;
    const maxDepth = 5;
    const excludeDirs = new Set(['node_modules', '.git', '.vscode', 'dist', 'build', 'coverage', '.next', '.nuxt']);

    const walkDir = async (dir, depth = 0) => {
      if (depth > maxDepth || files.length >= maxFiles) {
        return;
      }

      try {
        const entries = await fs.readdir(dir, { withFileTypes: true });

        // Process files first, then directories
        const fileEntries = entries.filter(e => e.isFile());
        const dirEntries = entries.filter(e => e.isDirectory());

        // Process files
        for (const entry of fileEntries) {
          if (files.length >= maxFiles) break;
          try {
            const ext = path.extname(entry.name).toLowerCase();
            if (extensions.includes(ext)) {
              const fullPath = path.join(dir, entry.name);
              files.push(fullPath);
            }
          } catch (error) {
            // Skip problematic files
          }
        }

        // Process directories
        for (const entry of dirEntries) {
          if (files.length >= maxFiles) break;
          try {
            if (!entry.name.startsWith('.') && !excludeDirs.has(entry.name)) {
              const fullPath = path.join(dir, entry.name);
              await walkDir(fullPath, depth + 1);
            }
          } catch (error) {
            // Skip problematic directories
          }
        }
      } catch (error) {
        // Skip directories we can't read
        console.log(` ⚠️ Skipping directory ${dir}: ${error.message}`);
      }
    };

    try {
      await walkDir(this.projectRoot);
    } catch (error) {
      console.log(`⚠️ File traversal error: ${error.message}`);
    }

    return files;
  }

  /**
   * Safely calculate code statistics
   */
  async calculateCodeStatisticsSafely() {
    const stats = { totalFiles: 0, totalLines: 0, totalSize: 0, fileTypes: {}, errors: [] };

    try {
      const sourceFiles = await this.findFilesWithExtensionSafely(['.js', '.ts', '.jsx', '.tsx', '.json', '.md']);

      for (const file of sourceFiles.slice(0, 100)) { // Limit to 100 files
        try {
          const stat = await fs.stat(file);
          const ext = path.extname(file).toLowerCase();

          stats.totalFiles++;
          stats.totalSize += stat.size;
          stats.fileTypes[ext] = (stats.fileTypes[ext] || 0) + 1;

          // Count lines for text files under 100KB
          if (stat.size < 100000) {
            try {
              const content = await fs.readFile(file, 'utf8');
              stats.totalLines += content.split('\n').length;
            } catch (readError) {
              // Skip files we can't read
            }
          }
        } catch (error) {
          stats.errors.push({ file, error: error.message });
        }
      }
    } catch (error) {
      stats.errors.push({ operation: 'file-discovery', error: error.message });
    }

    return stats;
  }

  /**
   * Safely analyze code complexity
   */
  async analyzeCodeComplexitySafely() {
    try {
      return {
        average: 'medium',
        analysis: 'Basic complexity analysis',
        timestamp: new Date().toISOString()
      };
    } catch (error) {
      return { error: error.message, average: 'unknown' };
    }
  }

  /**
   * Safely analyze dependency graph
   */
  async analyzeDependencyGraphSafely() {
    try {
      const packagePath = path.join(this.projectRoot, 'package.json');
      const content = await fs.readFile(packagePath, 'utf8');
      const packageJson = JSON.parse(content);

      return {
        dependencies: Object.keys(packageJson.dependencies || {}),
        devDependencies: Object.keys(packageJson.devDependencies || {}),
        totalDependencies: Object.keys(packageJson.dependencies || {}).length +
                           Object.keys(packageJson.devDependencies || {}).length
      };
    } catch (error) {
      return { error: error.message, dependencies: [], devDependencies: [] };
    }
  }

  // Enhanced methods with error handling a