codedetective

AI-powered tool to analyze codebases, reconstruct development timelines, and suggest where to resume work

#!/usr/bin/env node
import { Command } from 'commander';
import dotenv from 'dotenv';
import path from 'path';
import fs from 'fs';
import { fileURLToPath } from 'url';
import { analyzeRepository, FileCategory } from './analyzer.js';
import { generateReport } from './generator.js';
import ora from 'ora';

// Load environment variables
dotenv.config();

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const packageJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../package.json'), 'utf8'));

const program = new Command();

program
  .name('codedetective')
  .description('AI-powered tool to analyze codebases, reconstruct development timelines, and suggest where to resume work')
  .version(packageJson.version);

program
  .command('analyze')
  .description('Analyze a repository and generate a report')
  .argument('[directory]', 'Directory to analyze', '.')
  .option('-o, --output <file>', 'Output file for the report', 'codedetective-report.docx')
  .option('-f, --format <format>', 'Output format (docx, markdown, json)', 'docx')
  .option('-d, --depth <depth>', 'Max depth of file analysis', '3')
  .option('-m, --model <model>', 'OpenAI model to use', 'gpt-4-turbo')
  .option('-t, --token <token>', 'OpenAI API token (or set OPENAI_API_KEY env var)')
  .option('--skip-gitignore', 'Skip respecting .gitignore rules', false)
  .option('--include <pattern>', 'Include file pattern (glob)')
  .option('--exclude <pattern>', 'Exclude file pattern (glob)')
  .option('--category <category>', 'Filter files by category (code, figure, data_table)')
  .option('--code-only', 'Only include code files (shorthand for --category=code)')
  .option('--figure-only', 'Only include figure files (shorthand for --category=figure)')
  .option('--data-only', 'Only include data table files (shorthand for --category=data_table)')
  .action(async (directory, options) => {
    const spinner = ora('Analyzing repository...').start();
    try {
      // Ensure API key is available
      const apiKey = options.token || process.env.OPENAI_API_KEY;
      if (!apiKey) {
        spinner.fail('API key is required. Set OPENAI_API_KEY env var or use --token option.');
        process.exit(1);
      }

      // Determine category filter
      let category = undefined;
      if (options.category) {
        category = options.category;
      } else if (options.codeOnly) {
        category = FileCategory.CODE;
      } else if (options.figureOnly) {
        category = FileCategory.FIGURE;
      } else if (options.dataOnly) {
        category = FileCategory.DATA_TABLE;
      }

      // Analyze repository
      spinner.text = 'Analyzing repository files...';
      const repoData = await analyzeRepository({
        directory: path.resolve(directory),
        maxDepth: parseInt(options.depth),
        includePattern: options.include,
        excludePattern: options.exclude,
        respectGitignore: !options.skipGitignore,
        category
      });

      // Generate report
      spinner.text = 'Generating AI analysis report...';

      // Ensure the output file extension matches the chosen format
      let outputPath = options.output;
      if (options.format === 'docx' && !outputPath.toLowerCase().endsWith('.docx')) {
        outputPath += '.docx';
      } else if (options.format === 'markdown' && !outputPath.toLowerCase().endsWith('.md')) {
        outputPath += '.md';
      } else if (options.format === 'json' && !outputPath.toLowerCase().endsWith('.json')) {
        outputPath += '.json';
      }

      const report = await generateReport({
        repoData,
        apiKey,
        model: options.model,
        format: options.format,
        outputPath
      });

      // For docx format, the file is written by the generator
      if (options.format !== 'docx') {
        fs.writeFileSync(outputPath, report);
      }

      spinner.succeed(`Report generated successfully: ${outputPath}`);
    } catch (error) {
      spinner.fail(`Error: ${error.message}`);
      process.exit(1);
    }
  });

program.parse(process.argv);
//# sourceMappingURL=cli.js.map
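For programmatic use, the same two functions the action handler calls, analyzeRepository and generateReport, can be invoked directly with the option shapes shown above. The sketch below is a hypothetical example, not a documented API: the package-level import specifiers are assumptions (cli.js imports these modules via relative paths), and only option keys that appear in cli.js are used.

// hypothetical-usage.mjs — a minimal sketch; import paths are assumed, not documented
import path from 'path';
import fs from 'fs';
// cli.js imports these from './analyzer.js' and './generator.js'; the specifiers below
// assume the package exposes the same modules and may need adjusting.
import { analyzeRepository, FileCategory } from 'codedetective/dist/analyzer.js';
import { generateReport } from 'codedetective/dist/generator.js';

const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) throw new Error('Set OPENAI_API_KEY (the CLI also accepts --token).');

// Same option object the CLI builds from its flags (--depth, --code-only, etc.)
const repoData = await analyzeRepository({
  directory: path.resolve('.'),
  maxDepth: 3,
  respectGitignore: true,
  category: FileCategory.CODE // equivalent of --code-only
});

// Same option object the CLI passes for a markdown report
const outputPath = 'codedetective-report.md';
const report = await generateReport({
  repoData,
  apiKey,
  model: 'gpt-4-turbo',
  format: 'markdown',
  outputPath
});

// As in cli.js, non-docx formats are written by the caller
fs.writeFileSync(outputPath, report);

The CLI equivalent of this sketch would be: codedetective analyze . --format markdown --output codedetective-report.md --code-only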