
codecrucible-synth

Production-Ready AI Development Platform with Multi-Voice Synthesis, Smithery MCP Integration, Enterprise Security, and Zero-Timeout Reliability

/**
 * CLI Display Module
 * Handles all display and output formatting for the CLI
 */

import chalk from 'chalk';
// import ora from 'ora';
import { SynthesisResult } from '../types.js';

export class CLIDisplay {
  /**
   * Display synthesis results with proper formatting
   */
  static displayResults(synthesis: SynthesisResult, responses: any[]): void {
    console.log(chalk.green('\nšŸŽÆ Synthesis Complete!'));
    console.log(chalk.gray(`   Quality Score: ${(synthesis.qualityScore * 100).toFixed(0)}/100`));
    console.log(chalk.gray(`   Voices Used: ${synthesis.voicesUsed?.join(', ') || 'N/A'}`));

    // Show combined result
    console.log(chalk.bold('\nšŸ“„ Final Synthesis:'));
    console.log(synthesis.content || synthesis.combinedCode || 'No content available');

    // Show individual responses
    if (responses && responses.length > 0) {
      console.log(chalk.bold('\nšŸ‘„ Individual Voice Responses:'));
      responses.forEach((response, index) => {
        console.log(chalk.cyan(`\n  ${index + 1}. ${response.voice || 'Unknown Voice'}:`));
        console.log(
          response.content?.substring(0, 200) + (response.content?.length > 200 ? '...' : '')
        );
      });
    }
  }

  // Note: Streaming display methods were consolidated into CLI class
  // displayStreamingResponse method removed as it was unused

  /**
   * Show CLI help information
   */
  static showHelp(): void {
    console.log(chalk.bold('\nšŸ”Ø CodeCrucible Synth - AI-Powered Development Tool\n'));

    console.log(chalk.cyan('šŸ“– USAGE:'));
    console.log('  crucible [options] [command]');
    console.log('  crucible <prompt>');
    console.log('  crucible analyze [files...]');
    console.log('  crucible generate <prompt>');

    console.log(chalk.cyan('\n⚔ COMMANDS:'));
    console.log('  analyze [files...]   Analyze files or directories');
    console.log('  generate <prompt>    Generate code from prompt');
    console.log('  status               Show system status');
    console.log('  models               List available models');
    console.log('  configure            Configure the system');

    console.log(chalk.cyan('\nšŸŽ›ļø OPTIONS:'));
    console.log('  --voices <names>     Specific voices to use');
    console.log('  --mode <mode>        Synthesis mode (competitive, collaborative, consensus)');
    console.log('  --interactive        Enter interactive mode');
    console.log('  --spiral             Use spiral methodology');
    console.log('  --autonomous         Enable autonomous mode');
    console.log('  --no-stream          Disable streaming responses (streaming is default)');
    console.log('  --fast               Use fast mode');
    console.log('  --verbose            Verbose output');
    console.log('  --quiet              Quiet mode');
    console.log('  --help               Show this help');

    console.log(chalk.cyan('\nšŸ“š EXAMPLES:'));
    console.log('  crucible "Create a REST API for user management"');
    console.log('  crucible analyze src/');
    console.log('  crucible --voices Explorer,Architect "Design a microservice"');
    console.log('  crucible --spiral --autonomous "Refactor this codebase"');

    console.log(
      chalk.gray(
        '\nšŸ’” For more information, visit: https://github.com/rhinos0608/codecrucible-synth'
      )
    );
  }

  /**
   * Display model recommendations
   */
  static async showModelRecommendations(): Promise<void> {
    console.log(chalk.bold('\nšŸ¤– Model Recommendations\n'));

    const models = [
      { name: 'qwen2.5-coder:7b', type: 'Coding', performance: 'High', size: '4.4GB' },
      { name: 'deepseek-coder:8b', type: 'Coding', performance: 'High', size: '4.9GB' },
      { name: 'llama3.1:8b', type: 'General', performance: 'Medium', size: '4.7GB' },
      { name: 'codestral:22b', type: 'Coding', performance: 'Very High', size: '13GB' },
    ];

    console.log(chalk.cyan('šŸ“‹ Recommended Models:'));
    models.forEach(model => {
      const perfColor =
        model.performance === 'Very High'
          ? 'green'
          : model.performance === 'High'
            ? 'yellow'
            : 'gray';
      console.log(
        `  ${chalk.bold(model.name)} - ${model.type} (${chalk[perfColor](model.performance)} - ${model.size})`
      );
    });

    console.log(chalk.gray('\nšŸ’” Install with: ollama pull <model-name>'));
  }

  /**
   * Get performance indicator for a model
   */
  static getModelPerformance(model: string): string {
    const highPerf = ['qwen2.5-coder', 'deepseek-coder', 'codestral'];
    const mediumPerf = ['llama3.1', 'mistral'];

    if (highPerf.some(h => model.includes(h))) return chalk.green('ā—');
    if (mediumPerf.some(m => model.includes(m))) return chalk.yellow('ā—');
    return chalk.gray('ā—');
  }
}
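
For orientation, a minimal usage sketch follows. The import path is an assumption (the real path inside the package may differ), and the `synthesis` and `responses` literals are illustrative: only the field names that `displayResults` actually reads above (`qualityScore`, `voicesUsed`, `content`, `voice`) are taken from the code, so the synthesis object is typed loosely rather than as a full `SynthesisResult`.

// Minimal usage sketch, assuming an ESM context with top-level await.
import { CLIDisplay } from './cli-display.js'; // hypothetical path

const synthesis: any = {
  qualityScore: 0.92,                     // rendered as "Quality Score: 92/100"
  voicesUsed: ['Explorer', 'Architect'],  // joined with ", "
  content: 'export const add = (a: number, b: number): number => a + b;',
};

const responses = [
  { voice: 'Explorer', content: 'A functional one-liner keeps the API surface small.' },
  { voice: 'Architect', content: 'Consider grouping arithmetic helpers into a math module.' },
];

// All members are static, so no instance is constructed.
CLIDisplay.displayResults(synthesis, responses);
CLIDisplay.showHelp();
await CLIDisplay.showModelRecommendations();

Because every method writes directly to the console via chalk, the class is a thin presentation layer; callers pass in already-computed results rather than configuring the display.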