#!/usr/bin/env node
/**
* @file Command-line interface for WET (Write Everything Twice) code analysis
* @description Single responsibility: provide the CLI for duplicate code detection and deduplication
*
* This CLI tool is the user-facing interface for WET code analysis, detecting exact duplicates,
* similar logic patterns, and repetitive boilerplate code. It offers filtering options,
* configurable similarity thresholds, and multiple output formats to support AI-driven
* deduplication workflows and code quality improvement.
*
* Design rationale:
* - AI-focused CLI design enables automated deduplication workflows
* - Configurable similarity thresholds adapt to different coding patterns and styles
* - Multiple opportunity levels enable prioritized deduplication based on impact
* - Category-based filtering focuses efforts on specific types of duplication
* - Quantified savings estimates enable ROI-based deduplication decisions
*/
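// Example invocation for an automated (AI-driven) workflow, using the flags parsed in main():
// emit machine-readable JSON, keep only high-impact findings, and let the non-zero exit code
// below signal that deduplication work remains (the output file name is illustrative only):
//
//   node analyze-wet-code.js --format json --opportunity HIGH . > wet-report.json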
const fs = require('fs');
const path = require('path');
const { getProcessArgs } = require('../lib/utils/processHelpers');
const { shouldShowHelp } = require('../lib/utils/cliHelpers');
const { analyzeProjectWetCode, analyzeFileWetCode } = require('../lib/wet-code');
const { formatResults } = require('./lib/commonFormatters');
const { createSummaryFormatter, createRecommendationsSection, createTopIssuesSection } = require('./lib/summaryFormatter');
const { createDetailedFormatter, formatIssueWithIcons } = require('./lib/detailedFormatter');
const { createHelpFunction } = require('./lib/helpFormatter');
const { parseArgs: sharedParseArgs } = require('./lib/argumentParser');
const { handleAnalysisError } = require('./lib/errorHandler');
/**
* Display help information using shared formatter
*/
const showHelp = createHelpFunction({
command: 'analyze-wet-code.js',
description: 'Analyzes code for WET (Write Everything Twice) patterns including duplicate logic blocks,\nsimilar code patterns, and repetitive boilerplate that can be deduplicated by AI agents.',
options: [
{ flag: '-e, --extensions <exts>', description: 'Comma-separated list of file extensions (default: .js,.ts,.jsx,.tsx)' },
{ flag: '-f, --format <fmt>', description: 'Output format: json, summary, detailed (default: summary)' },
{ flag: '--min-lines <num>', description: 'Minimum lines to consider for duplication (default: 5)' },
{ flag: '--similarity <threshold>', description: 'Similarity threshold for fuzzy matching 0.0-1.0 (default: 0.8)' },
{ flag: '--opportunity <level>', description: 'Filter by deduplication opportunity: HIGH, MEDIUM, LOW (default: all)' },
{ flag: '--category <cat>', description: 'Filter by category: "Exact Match", "Similar Logic", "Pattern", "Copy-Paste" (default: all)' },
{ flag: '--help', description: 'Show this help message' }
],
modes: ' file Analyze a single file\n project Analyze entire project (default)',
examples: [
'node analyze-wet-code.js .',
'node analyze-wet-code.js --min-lines 3 --similarity 0.7 src/',
'node analyze-wet-code.js --format json --opportunity HIGH .',
'node analyze-wet-code.js --category "Exact Match" --extensions .js,.ts .'
],
output: ' The tool identifies WET code patterns by checking:\n - Exact duplicate code blocks (identical logic)\n - Similar logic patterns (near-duplicates with minor variations)\n - Repeated patterns (common structures with different data)\n - Copy-paste detection (blocks copied across files)\n - Boilerplate code (repetitive initialization/configuration)',
sections: {
'OPPORTUNITY LEVELS': ' 🔥 HIGH: Major deduplication potential with significant code reduction\n ⚡ MEDIUM: Moderate deduplication opportunities with good savings\n 💡 LOW: Minor patterns worth considering for cleanup',
'CATEGORIES': ' 🎯 Exact Match: Identical code blocks that can be extracted immediately\n 🔄 Similar Logic: Near-duplicate patterns that can be parameterized\n 📋 Pattern: Repeated structures that can be abstracted\n 📄 Copy-Paste: Copied code sections that need consolidation\n 🏗️ Boilerplate: Repetitive setup code that can be templated',
'DEDUPLICATION IMPACT': ' - Lines reduced: Estimated code reduction after deduplication\n - Files affected: Number of files that would benefit\n - Complexity reduction: Maintenance burden reduction\n - Effort estimate: Implementation complexity (1=easy, 3=complex)'
}
});
// Old showHelp and formatResults implementations removed - now provided by the shared formatter modules (helpFormatter, commonFormatters)
// Create custom summary formatter using shared utility
const formatSummary = createSummaryFormatter({
title: '✨ DRY Code Analysis Results',
scoreField: 'projectDryScore',
gradeField: 'dryGrade',
filesField: 'totalFiles',
issuesField: 'totalDuplicateGroups',
scorePrefix: 'DRY Score: ',
scoreSuffix: '/100',
customMetrics: [
{ field: 'filesWithDuplicates', icon: '⚠️', label: 'Files with Duplicates' },
{ field: 'estimatedSavings.linesReduced', icon: '💾', label: 'Potential Reduction', suffix: ' lines' },
{ field: 'estimatedSavings.effort', icon: '⏱️', label: 'Deduplication Effort', suffix: ' points' }
],
breakdowns: [
{
field: 'opportunityBreakdown',
type: 'severity',
icon: '📈',
title: 'Deduplication Opportunities'
},
{
field: 'categoryBreakdown',
type: 'category',
icon: '🏷️',
title: 'Pattern Categories',
icons: {
'Exact Match': '🎯',
'Similar Logic': '🔄',
'Pattern': '📋',
'Copy-Paste': '📄',
'Boilerplate': '🏗️'
}
}
],
customSections: [
createRecommendationsSection('Top Deduplication Priorities', '💡'),
createTopIssuesSection({
field: 'topDuplicates',
title: 'Most Duplicated Code Blocks',
limit: 10,
formatItem: (dup, i) => {
  const opportunityIcons = { HIGH: '🔥', MEDIUM: '⚡', LOW: '💡' };
  const categoryIcons = {
    'Exact Match': '🎯',
    'Similar Logic': '🔄',
    'Pattern': '📋',
    'Copy-Paste': '📄'
  };
  const oppIcon = opportunityIcons[dup.opportunity] || '💡';
  const catIcon = categoryIcons[dup.category] || '🏗️';
  return ` ${i + 1}. ${oppIcon} ${catIcon} ${dup.description}\n 📍 ${dup.locations.join(', ')}\n 💾 Save ${dup.linesReduced} lines | ⏱️ Effort: ${dup.effort}/3`;
}
})
]
});
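// Illustrative shape of the summary data the formatter above reads, inferred from the field
// names in its configuration; the values are made up and the real analyzer output may carry
// additional properties:
//
//   {
//     projectDryScore: 82, dryGrade: 'B',
//     totalFiles: 120, totalDuplicateGroups: 14, filesWithDuplicates: 9,
//     estimatedSavings: { linesReduced: 340, effort: 12 },
//     opportunityBreakdown: { HIGH: 2, MEDIUM: 5, LOW: 7 },
//     categoryBreakdown: { 'Exact Match': 4, 'Similar Logic': 6, 'Pattern': 4 },
//     topDuplicates: [{ description: '...', locations: ['a.js:10-24', 'b.js:3-17'],
//       opportunity: 'HIGH', category: 'Exact Match', linesReduced: 15, effort: 2 }]
//   }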
// Old formatSummary and formatDetailed implementations removed - replaced by the shared formatters
const formatDetailedNew = createDetailedFormatter({
title: '🔍 Detailed Duplication Analysis',
formatSummary: formatSummary,
filesField: 'duplicateGroups',
issuesField: 'blocks',
formatFile: (group, index) => {
const header = `--- Duplicate Group #${index + 1} ---\n`;
const info = [
`Type: ${group.type}`,
`Category: ${group.pattern.category}`,
`Similarity: ${(group.similarity * 100).toFixed(1)}%`,
`Complexity: ${group.complexity}`,
`Deduplication Opportunity: ${group.deduplicationOpportunity || 'N/A'}`
];
if (group.estimatedSavings) {
info.push(`Estimated Savings:`);
info.push(` - Lines Reduced: ${group.estimatedSavings.linesReduced}`);
info.push(` - Files Affected: ${group.estimatedSavings.filesAffected}`);
info.push(` - Effort: ${group.estimatedSavings.estimatedEffort}/3`);
info.push(` - Impact: ${group.estimatedSavings.impact}`);
}
return header + info.join('\n') + '\n';
},
formatIssue: (block, index) => {
const lines = [];
if (index === 0) lines.push('Occurrences:');
const relativePath = path.relative('.', block.file);
lines.push(` ${index + 1}. ${relativePath}:${block.startLine}-${block.endLine}`);
// Show code preview for first occurrence
if (index === 0 && block.originalContent) {
lines.push('Code Preview:');
const codeLines = block.originalContent.split('\n');
codeLines.forEach((line, idx) => {
lines.push(`${String(block.startLine + idx).padStart(4)}: ${line}`);
});
}
return lines;
}
});
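// Illustrative duplicate group as the detailed formatter above expects it, inferred from the
// properties accessed in formatFile/formatIssue; field values are for illustration only:
//
//   {
//     type: 'exact',
//     pattern: { category: 'Exact Match' },
//     similarity: 1.0,
//     complexity: 4,
//     deduplicationOpportunity: 'HIGH',
//     estimatedSavings: { linesReduced: 22, filesAffected: 3, estimatedEffort: 1, impact: 'HIGH' },
//     blocks: [
//       { file: 'src/a.js', startLine: 10, endLine: 24, originalContent: '...' },
//       { file: 'src/b.js', startLine: 40, endLine: 54 }
//     ]
//   }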
/**
* Main function
*/
async function main() {
const args = getProcessArgs();
if (shouldShowHelp(args)) {
showHelp();
return;
}
// Parse arguments using shared parser
const toolOptions = {
extensions: {
flags: ['-e', '--extensions'],
default: ['.js', '.ts', '.jsx', '.tsx'],
parser: (value) => value.split(',').map(ext => ext.trim()),
description: 'File extensions to analyze (comma-separated)'
},
format: {
flags: ['-f', '--format'],
default: 'summary',
values: ['json', 'detailed', 'summary'],
description: 'Output format'
},
minLines: {
flags: ['--min-lines'],
default: 5,
parser: (value) => parseInt(value, 10),
description: 'Minimum lines for duplicate detection'
},
similarity: {
flags: ['--similarity'],
default: 0.8,
parser: parseFloat,
description: 'Similarity threshold (0-1)'
},
opportunity: {
flags: ['--opportunity'],
parser: (value) => value.toUpperCase(),
description: 'Filter by opportunity level (HIGH, MEDIUM, LOW)'
},
category: {
flags: ['--category'],
description: 'Filter by category'
}
};
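// Assuming sharedParseArgs maps positional arguments to targetPath/mode and flag values into
// options (with the defaults above filled in), a call such as
//
//   node analyze-wet-code.js --format json --similarity 0.7 src/
//
// would be expected to yield roughly:
//
//   { targetPath: 'src/', mode: 'project', options: { format: 'json', similarity: 0.7,
//     extensions: ['.js', '.ts', '.jsx', '.tsx'], minLines: 5 } }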
const { targetPath, mode, options } = sharedParseArgs(args, toolOptions);
const { extensions, format: outputFormat, minLines, similarity, opportunity, category } = options;
try {
// Check if target is a file or directory
const stat = await fs.promises.stat(targetPath);
let results;
if (stat.isFile()) {
// Analyze single file
results = await analyzeFileWetCode(targetPath, { minDuplicateLines: minLines });
} else {
// Analyze project
results = await analyzeProjectWetCode(targetPath, {
extensions,
excludePatterns: ['node_modules', '.git', 'dist', 'build', 'coverage'],
minDuplicateLines: minLines
});
}
// Output results
const formatted = formatResults(results, outputFormat, { opportunity, category }, formatSummary, formatDetailedNew);
console.log(formatted);
// Exit with error code if high-impact duplicates found
const hasHighImpact = results.summary ?
(results.summary.opportunityBreakdown?.HIGH || 0) > 0 :
(results.duplicateGroups || []).some(g => g.deduplicationOpportunity === 'HIGH');
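// Because the exit code below is non-zero whenever high-impact duplicates remain, a CI step
// that simply runs this tool (e.g. `node analyze-wet-code.js src/`) fails until they are resolved.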
if (hasHighImpact) {
process.exit(1);
}
} catch (error) {
handleAnalysisError(error, 'WET code');
}
}
if (require.main === module) {
main().catch(error => {
console.error('Fatal error:', error);
process.exit(1);
});
}