codecrucible-synth
Production-Ready AI Development Platform with Multi-Voice Synthesis, Smithery MCP Integration, Enterprise Security, and Zero-Timeout Reliability
97 lines • 5 kB
JavaScript
/**
 * CLI Display Module
 * Handles all display and output formatting for the CLI
 */
import chalk from 'chalk';
export class CLIDisplay {
  /**
   * Display synthesis results with proper formatting
   * @param {{ qualityScore: number, voicesUsed?: string[], content?: string, combinedCode?: string }} synthesis - Combined synthesis result
   * @param {Array<{ voice?: string, content?: string }>} responses - Individual voice responses
   */
  static displayResults(synthesis, responses) {
    console.log(chalk.green('\nSynthesis Complete!'));
    console.log(chalk.gray(` Quality Score: ${(synthesis.qualityScore * 100).toFixed(0)}/100`));
    console.log(chalk.gray(` Voices Used: ${synthesis.voicesUsed?.join(', ') || 'N/A'}`));
    // Show combined result
    console.log(chalk.bold('\nFinal Synthesis:'));
    console.log(synthesis.content || synthesis.combinedCode || 'No content available');
    // Show individual responses
    if (responses && responses.length > 0) {
      console.log(chalk.bold('\nIndividual Voice Responses:'));
      responses.forEach((response, index) => {
        console.log(chalk.cyan(`\n ${index + 1}. ${response.voice || 'Unknown Voice'}:`));
        // Guard against missing content so the preview never prints the string "undefined"
        const content = response.content ?? '';
        console.log(content.substring(0, 200) + (content.length > 200 ? '...' : ''));
      });
    }
  }
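  // Illustrative call (a minimal sketch; the object shapes are assumptions
  // inferred from the fields read above, not a documented contract):
  //
  //   CLIDisplay.displayResults(
  //     { qualityScore: 0.92, voicesUsed: ['Explorer', 'Architect'], content: '...final code...' },
  //     [
  //       { voice: 'Explorer', content: '...exploratory draft...' },
  //       { voice: 'Architect', content: '...structured design...' },
  //     ]
  //   );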
  // Note: Streaming display methods were consolidated into the CLI class.
  // displayStreamingResponse was removed as it was unused.
  /**
   * Show CLI help information
   */
  static showHelp() {
    console.log(chalk.bold('\nCodeCrucible Synth - AI-Powered Development Tool\n'));
    console.log(chalk.cyan('USAGE:'));
    console.log('  crucible [options] [command]');
    console.log('  crucible <prompt>');
    console.log('  crucible analyze [files...]');
    console.log('  crucible generate <prompt>');
    console.log(chalk.cyan('\nCOMMANDS:'));
    console.log('  analyze [files...]    Analyze files or directories');
    console.log('  generate <prompt>     Generate code from prompt');
    console.log('  status                Show system status');
    console.log('  models                List available models');
    console.log('  configure             Configure the system');
    console.log(chalk.cyan('\nOPTIONS:'));
    console.log('  --voices <names>      Specific voices to use');
    console.log('  --mode <mode>         Synthesis mode (competitive, collaborative, consensus)');
    console.log('  --interactive         Enter interactive mode');
    console.log('  --spiral              Use spiral methodology');
    console.log('  --autonomous          Enable autonomous mode');
    console.log('  --no-stream           Disable streaming responses (streaming is the default)');
    console.log('  --fast                Use fast mode');
    console.log('  --verbose             Verbose output');
    console.log('  --quiet               Quiet mode');
    console.log('  --help                Show this help');
    console.log(chalk.cyan('\nEXAMPLES:'));
    console.log('  crucible "Create a REST API for user management"');
    console.log('  crucible analyze src/');
    console.log('  crucible --voices Explorer,Architect "Design a microservice"');
    console.log('  crucible --spiral --autonomous "Refactor this codebase"');
    console.log(chalk.gray('\nFor more information, visit: https://github.com/rhinos0608/codecrucible-synth'));
  }
  /**
   * Display model recommendations
   */
  static async showModelRecommendations() {
    console.log(chalk.bold('\nModel Recommendations\n'));
    const models = [
      { name: 'qwen2.5-coder:7b', type: 'Coding', performance: 'High', size: '4.4GB' },
      { name: 'deepseek-coder:8b', type: 'Coding', performance: 'High', size: '4.9GB' },
      { name: 'llama3.1:8b', type: 'General', performance: 'Medium', size: '4.7GB' },
      { name: 'codestral:22b', type: 'Coding', performance: 'Very High', size: '13GB' },
    ];
    console.log(chalk.cyan('Recommended Models:'));
    models.forEach(model => {
      const perfColor = model.performance === 'Very High'
        ? 'green'
        : model.performance === 'High'
          ? 'yellow'
          : 'gray';
      console.log(`  ${chalk.bold(model.name)} - ${model.type} (${chalk[perfColor](model.performance)} - ${model.size})`);
    });
    console.log(chalk.gray('\nInstall with: ollama pull <model-name>'));
  }
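  // Example rendered line from the loop above (ANSI colors omitted):
  //
  //   qwen2.5-coder:7b - Coding (High - 4.4GB)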
  /**
   * Get performance indicator for a model
   */
  static getModelPerformance(model) {
    const highPerf = ['qwen2.5-coder', 'deepseek-coder', 'codestral'];
    const mediumPerf = ['llama3.1', 'mistral'];
    if (highPerf.some(h => model.includes(h)))
      return chalk.green('●');
    if (mediumPerf.some(m => model.includes(m)))
      return chalk.yellow('●');
    return chalk.gray('●');
  }
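  // Illustrative usage (a sketch; the model names are examples chosen to match
  // the substring checks above, not a fixed list):
  //
  //   CLIDisplay.getModelPerformance('qwen2.5-coder:7b'); // green indicator
  //   CLIDisplay.getModelPerformance('llama3.1:8b');      // yellow indicator
  //   CLIDisplay.getModelPerformance('gemma2:9b');        // gray (not ranked)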
}
//# sourceMappingURL=cli-display.js.map