abyss-ai

Autonomous AI coding agent: enhanced OpenCode with autonomous capabilities

251 lines (208 loc) 8.12 kB
import * as prompts from "@clack/prompts"
import path from "path"
import { glob } from "glob"

export interface BatchProcessingOptions {
  directory: string
  filePatterns: string[]
  excludePatterns: string[]
  maxFiles: number
  concurrent: number
  dryRun: boolean
  backupEnabled: boolean
}

export interface BatchResult {
  file: string
  success: boolean
  changes: number
  issues: number
  processingTime: number
  error?: string
}

export class BatchProcessor {
  private options: BatchProcessingOptions
  private results: BatchResult[] = []
  private startTime: number = 0

  constructor(options: Partial<BatchProcessingOptions> = {}) {
    this.options = {
      directory: process.cwd(),
      filePatterns: ["**/*.{js,ts,jsx,tsx,py,java,cpp,c,go,rs}"],
      excludePatterns: ["**/node_modules/**", "**/dist/**", "**/build/**", "**/.git/**"],
      maxFiles: 50,
      concurrent: 3,
      dryRun: false,
      backupEnabled: true,
      ...options
    }
  }

  async findFiles(): Promise<string[]> {
    const files: string[] = []
    for (const pattern of this.options.filePatterns) {
      const matches = await glob(pattern, {
        cwd: this.options.directory,
        ignore: this.options.excludePatterns,
        absolute: true
      })
      files.push(...matches)
    }

    // Remove duplicates and limit
    const uniqueFiles = [...new Set(files)]
    return uniqueFiles.slice(0, this.options.maxFiles)
  }

  async processBatch(processorFn: (filePath: string) => Promise<BatchResult>): Promise<BatchResult[]> {
    this.startTime = Date.now()
    this.results = []

    const files = await this.findFiles()
    if (files.length === 0) {
      throw new Error("No files found matching the specified patterns")
    }

    prompts.log.info(`Found ${files.length} files to process`)

    // Prioritize files by type and size for optimal processing
    const prioritizedFiles = await this.prioritizeFiles(files)

    if (this.options.dryRun) {
      prompts.log.info("DRY RUN MODE - No files will be modified")
      return prioritizedFiles.map(file => ({
        file: path.relative(this.options.directory, file.path),
        success: true,
        changes: 0,
        issues: 0,
        processingTime: 0
      }))
    }

    // Create backup if enabled
    if (this.options.backupEnabled) {
      await this.createBackup(prioritizedFiles.map(f => f.path))
    }

    // Process files in batches with adaptive concurrency
    const adaptiveConcurrency = this.calculateOptimalConcurrency(prioritizedFiles)
    const batches = this.chunkArray(prioritizedFiles.map(f => f.path), adaptiveConcurrency)

    for (const batch of batches) {
      const batchPromises = batch.map(async (file) => {
        try {
          const result = await processorFn(file)
          this.results.push(result)
          return result
        } catch (error) {
          const errorResult: BatchResult = {
            file: path.relative(this.options.directory, file),
            success: false,
            changes: 0,
            issues: 0,
            processingTime: 0,
            error: error instanceof Error ? error.message : String(error)
          }
          this.results.push(errorResult)
          return errorResult
        }
      })
      await Promise.all(batchPromises)
    }

    return this.results
  }
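
  // Rank files for processing order: smaller files and common, fast-to-parse
  // extensions score higher, so quick results surface early in the batch.
  // Files whose stats cannot be read fall back to the lowest priority.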
  private async prioritizeFiles(files: string[]): Promise<{ path: string, priority: number, size: number }[]> {
    const prioritizedFiles = await Promise.all(files.map(async (file) => {
      try {
        const stat = await Bun.file(file).stat()
        const ext = path.extname(file).toLowerCase()

        // Priority scoring: smaller files and common types first
        let priority = 0

        // File type priority (common types process faster)
        if (['.js', '.ts', '.json'].includes(ext)) priority += 3
        else if (['.jsx', '.tsx', '.py'].includes(ext)) priority += 2
        else if (['.java', '.cpp', '.c', '.go'].includes(ext)) priority += 1

        // Size priority (smaller files first)
        if (stat.size < 5000) priority += 3
        else if (stat.size < 20000) priority += 2
        else if (stat.size < 50000) priority += 1

        return { path: file, priority, size: stat.size }
      } catch (error) {
        return { path: file, priority: 0, size: 0 }
      }
    }))

    return prioritizedFiles.sort((a, b) => b.priority - a.priority)
  }

  private calculateOptimalConcurrency(files: { path: string, priority: number, size: number }[]): number {
    const avgSize = files.reduce((sum, f) => sum + f.size, 0) / files.length

    // Adjust concurrency based on average file size
    if (avgSize < 5000) return Math.min(this.options.concurrent * 2, 8) // Small files: more concurrency
    else if (avgSize > 50000) return Math.max(Math.floor(this.options.concurrent / 2), 1) // Large files: less concurrency
    else return this.options.concurrent // Default concurrency
  }

  async createBackup(files: string[]): Promise<string> {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-')
    const backupDir = path.join(this.options.directory, `.abyss-backup-${timestamp}`)

    prompts.log.info(`Creating backup in: ${backupDir}`)

    // Create backup directory
    await Bun.write(path.join(backupDir, '.backup-info.json'), JSON.stringify({
      timestamp,
      originalDirectory: this.options.directory,
      files: files.map(f => path.relative(this.options.directory, f)),
      options: this.options
    }, null, 2))

    // Copy files to backup
    for (const file of files) {
      try {
        const relativePath = path.relative(this.options.directory, file)
        const backupPath = path.join(backupDir, relativePath)
        const backupDirPath = path.dirname(backupPath)

        // Ensure backup directory exists
        await Bun.write(path.join(backupDirPath, '.keep'), '')

        // Copy file
        const content = await Bun.file(file).text()
        await Bun.write(backupPath, content)
      } catch (error) {
        prompts.log.warn(`Failed to backup ${file}: ${error}`)
      }
    }

    return backupDir
  }
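
  // Reverse of createBackup(): reads the .backup-info.json manifest in the
  // backup directory and writes each listed file back to its original
  // location. Returns false if the manifest cannot be read or the restore fails.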
  async restoreFromBackup(backupDir: string): Promise<boolean> {
    try {
      const backupInfoPath = path.join(backupDir, '.backup-info.json')
      const backupInfo = JSON.parse(await Bun.file(backupInfoPath).text())

      prompts.log.info(`Restoring from backup: ${backupDir}`)

      for (const relativePath of backupInfo.files) {
        const backupFilePath = path.join(backupDir, relativePath)
        const originalFilePath = path.join(backupInfo.originalDirectory, relativePath)

        if (await Bun.file(backupFilePath).exists()) {
          const content = await Bun.file(backupFilePath).text()
          await Bun.write(originalFilePath, content)
        }
      }

      prompts.log.success(`Successfully restored ${backupInfo.files.length} files`)
      return true
    } catch (error) {
      prompts.log.error(`Failed to restore backup: ${error}`)
      return false
    }
  }

  getResults(): BatchResult[] {
    return this.results
  }

  getSummary() {
    const total = this.results.length
    const successful = this.results.filter(r => r.success).length
    const failed = total - successful
    const totalChanges = this.results.reduce((sum, r) => sum + r.changes, 0)
    const totalIssues = this.results.reduce((sum, r) => sum + r.issues, 0)
    const totalTime = this.results.reduce((sum, r) => sum + r.processingTime, 0)
    const elapsedTime = Date.now() - this.startTime

    return {
      total,
      successful,
      failed,
      totalChanges,
      totalIssues,
      totalTime,
      elapsedTime,
      successRate: total > 0 ? successful / total : 0,
      averageTime: total > 0 ? totalTime / total : 0
    }
  }

  private chunkArray<T>(array: T[], chunkSize: number): T[][] {
    const chunks: T[][] = []
    for (let i = 0; i < array.length; i += chunkSize) {
      chunks.push(array.slice(i, i + chunkSize))
    }
    return chunks
  }
}
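
A minimal usage sketch, not part of the package source: it assumes the module above is importable as "./batch-processor" (the actual path inside the package may differ), and the read-only processor function is a hypothetical stand-in for real per-file analysis. Backups are disabled so the run makes no changes on disk.

import { BatchProcessor, type BatchResult } from "./batch-processor" // assumed import path
import path from "path"

const batch = new BatchProcessor({
  filePatterns: ["**/*.ts"],
  maxFiles: 10,
  backupEnabled: false, // skip backup creation for this read-only run
})

// Caller-supplied processor: reads each file and reports timing only,
// with zero changes/issues (placeholder for real analysis logic).
const results: BatchResult[] = await batch.processBatch(async (filePath) => {
  const start = Date.now()
  await Bun.file(filePath).text() // stand-in for actual per-file work
  return {
    file: path.relative(process.cwd(), filePath),
    success: true,
    changes: 0,
    issues: 0,
    processingTime: Date.now() - start,
  }
})

console.log(`Processed ${results.length} files`, batch.getSummary())

Note that when dryRun is enabled, processBatch returns placeholder results without ever invoking the processor function, so a dry run cannot be used to exercise caller-side logic.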