/*
 * agentsqripts
 * Comprehensive static code analysis toolkit for identifying technical debt,
 * security vulnerabilities, performance issues, and code quality problems.
 * (228 lines, 192 LOC, 7.81 kB — JavaScript)
 */
/**
* @file WET code analysis implementation
* @description Main analysis logic for WET code detection
*/
const fs = require('fs');
const { promises: fsPromises } = require('fs');
const qerrors = require('qerrors');
// Import specialized modules
const { extractLogicalBlocks } = require('./blockExtractor');
const { groupDuplicateBlocks } = require('./duplicateGrouper');
const { groupDuplicateBlocksOptimized } = require('./optimizedDuplicateGrouper');
const { getAllFiles } = require('./fileSystemUtils');
const { generateWetRecommendations, generateProjectWetRecommendations } = require('./recommendationGenerator');
const { getDryGrade, getWetGrade } = require('./gradeCalculator');
const { calculateDryScore } = require('./wetScoreCalculator');
const { VALID_EXTENSIONS, WET_CODE_CONFIG } = require('./wetCodeConfig');
/**
* Analyzes a single file for WET code patterns
* @param {string} filePath - Path to the file to analyze
* @param {Object} options - Analysis options
* @returns {Promise<Object>} WET code analysis results
*/
/**
 * Analyzes a single file for WET (Write Everything Twice) code patterns.
 *
 * Extracts logical blocks from the file, groups internal duplicates, and
 * derives a normalized WET score (0-100, lower is better) plus its DRY
 * inverse, along with per-file metrics and refactoring recommendations.
 *
 * @param {string} filePath - Path to the file to analyze
 * @param {Object} [options] - Analysis options (options.minDuplicateLines
 *   is forwarded to block extraction)
 * @returns {Promise<Object>} WET code analysis results for the file
 * @throws Rethrows any read/analysis error after reporting it via qerrors
 */
async function analyzeFileWetCode(filePath, options = {}) {
  try {
    const content = await fsPromises.readFile(filePath, 'utf8');

    // Tag every extracted block with its originating file path.
    const fileBlocks = extractLogicalBlocks(content, options.minDuplicateLines)
      .map((block) => ({ ...block, file: filePath }));

    // Find internal duplicates within this single file.
    const duplicateGroups = groupDuplicateBlocks(fileBlocks);

    // Single pass: accumulate the raw duplication penalty and total complexity.
    // Each duplicate beyond the first in a group contributes its complexity.
    let rawPenalty = 0;
    let totalComplexity = 0;
    for (const group of duplicateGroups) {
      if (group.blocks.length > 1) {
        rawPenalty += (group.blocks.length - 1) * group.complexity;
      }
      totalComplexity += group.complexity;
    }

    // Normalize the penalty against total complexity; cap at 100.
    const normalizedWetScore = totalComplexity > 0
      ? Math.min(100, Math.round((rawPenalty / totalComplexity) * 100))
      : 0;
    // DRY score is simply the inverse of the WET score.
    const dryScore = 100 - normalizedWetScore;

    const estimatedSavings = duplicateGroups.reduce(
      (sum, group) => sum + (group.estimatedSavings?.linesReduced || 0),
      0
    );

    return {
      file: filePath,
      wetScore: normalizedWetScore,
      dryScore,
      wetGrade: getWetGrade(normalizedWetScore),
      dryGrade: getDryGrade(dryScore),
      duplicateGroups,
      metrics: {
        totalBlocks: fileBlocks.length,
        duplicateGroups: duplicateGroups.length,
        totalComplexity,
        linesOfCode: content.split('\n').length,
        estimatedSavings
      },
      recommendations: generateWetRecommendations(duplicateGroups)
    };
  } catch (error) {
    qerrors.default(error, 'analyzeFileWetCode failed', { filePath, options });
    throw error;
  }
}
/**
* Analyzes an entire project for WET code patterns
* @param {string} projectPath - Path to the project
* @param {Object} options - Analysis options
* @returns {Object} Project WET code analysis results
*/
/**
 * Analyzes an entire project for WET code patterns.
 *
 * Walks the project tree, extracts logical blocks from every matching file
 * (in parallel batches), groups duplicates project-wide, and returns summary
 * metrics, grades, and refactoring recommendations.
 *
 * Fix: all console output is now gated behind the `showProgress` option —
 * previously the headline log/warn lines printed even with
 * `showProgress: false`. Also guards against NaN/throws when a group has an
 * unknown deduplication opportunity or no pattern.
 *
 * @param {string} projectPath - Path to the project root
 * @param {Object} [options] - Analysis options
 * @param {string[]} [options.extensions] - File extensions to include
 * @param {string[]} [options.excludePatterns] - Directory names to skip
 * @param {number} [options.minDuplicateLines] - Minimum block size to consider
 * @param {number} [options.maxFiles=Infinity] - Cap on files analyzed
 * @param {boolean} [options.useOptimized=true] - Use the optimized grouper
 * @param {boolean} [options.showProgress=true] - Emit console progress output
 * @param {boolean} [options.skipSimilarity] - Force-skip similarity analysis
 * @returns {Promise<Object>} Project-level WET code analysis results
 */
async function analyzeProjectWetCode(projectPath, options = {}) {
  const {
    extensions = VALID_EXTENSIONS,
    excludePatterns = ['node_modules', '.git', 'dist', 'build', 'coverage'],
    minDuplicateLines = WET_CODE_CONFIG.minDuplicateLines,
    maxFiles = Infinity, // No file limit by default
    useOptimized = true,
    showProgress = true
  } = options;

  // Route ALL console output through these so showProgress is honored
  // consistently (previously only some messages were gated).
  const log = (...args) => { if (showProgress) console.log(...args); };
  const warn = (...args) => { if (showProgress) console.warn(...args); };

  const startTime = Date.now();
  log('🔍 Starting WET code analysis...');

  const files = await getAllFiles(projectPath, extensions, excludePatterns);

  // Limit files for extremely large projects
  const filesToAnalyze = files.slice(0, maxFiles);
  if (files.length > maxFiles) {
    warn(`⚠️ Project has ${files.length} files. Analyzing first ${maxFiles} files for performance.`);
  }
  log(`📁 Analyzing ${filesToAnalyze.length} files...`);

  const allBlocks = [];
  const batchSize = 100;

  // Process files in batches; each batch reads and parses in parallel.
  for (let i = 0; i < filesToAnalyze.length; i += batchSize) {
    const batch = filesToAnalyze.slice(i, Math.min(i + batchSize, filesToAnalyze.length));

    const batchPromises = batch.map(async (file) => {
      try {
        const content = await fsPromises.readFile(file, 'utf8');
        const blocks = extractLogicalBlocks(content, minDuplicateLines);
        return blocks.map(block => ({ ...block, file }));
      } catch (error) {
        // Unreadable files are skipped, not fatal (best-effort analysis).
        warn(`  ⚠️ Could not analyze ${file}: ${error.message}`);
        return [];
      }
    });

    const batchResults = await Promise.all(batchPromises);
    batchResults.forEach(blocks => allBlocks.push(...blocks));

    // Progress line every 5 batches.
    if (i > 0 && i % (batchSize * 5) === 0) {
      const progress = Math.round((i / filesToAnalyze.length) * 100);
      log(`  📊 Progress: ${progress}% (${i}/${filesToAnalyze.length} files)`);
    }
  }

  log(`📊 Extracted ${allBlocks.length} code blocks`);
  log('🔍 Finding duplicate patterns...');

  // For large projects, skip the expensive similarity analysis by default;
  // an explicit options.skipSimilarity always wins.
  const skipSimilarity = allBlocks.length > 20000;
  const duplicateGroups = useOptimized
    ? groupDuplicateBlocksOptimized(allBlocks, {
        showProgress,
        skipSimilarity: options.skipSimilarity !== undefined ? options.skipSimilarity : skipSimilarity
      })
    : groupDuplicateBlocks(allBlocks);

  // Calculate project metrics from the groups that actually contain duplicates.
  const totalFiles = files.length;
  const filesWithDuplicates = new Set();
  let totalLinesReduced = 0;
  let totalComplexityReduction = 0;
  let totalEffort = 0;
  duplicateGroups.forEach(group => {
    if (group.blocks.length > 1) {
      group.blocks.forEach(block => filesWithDuplicates.add(block.file));
      if (group.estimatedSavings) {
        totalLinesReduced += group.estimatedSavings.linesReduced;
        totalComplexityReduction += group.estimatedSavings.complexityReduction;
        totalEffort += group.estimatedSavings.estimatedEffort;
      }
    }
  });

  // Project WET score: fraction of blocks that are redundant copies.
  const totalBlocks = allBlocks.length;
  const duplicateBlocks = duplicateGroups.reduce((sum, group) =>
    sum + Math.max(0, group.blocks.length - 1), 0);
  const projectWetScore = totalBlocks > 0 ?
    Math.round((duplicateBlocks / totalBlocks) * 100) : 0;
  // DRY score is the inverse of the WET score.
  const projectDryScore = 100 - projectWetScore;

  // Categorize duplicates by pattern category and deduplication opportunity.
  const categoryBreakdown = {};
  const opportunityBreakdown = { HIGH: 0, MEDIUM: 0, LOW: 0 };
  duplicateGroups.forEach(group => {
    // Guard: a group without a pattern no longer throws.
    const category = group.pattern?.category ?? 'UNKNOWN';
    categoryBreakdown[category] = (categoryBreakdown[category] || 0) + 1;
    // Guard: only known opportunity levels are counted (avoids NaN from ++
    // on an undefined key).
    const opportunity = group.deduplicationOpportunity;
    if (opportunity && opportunity in opportunityBreakdown) {
      opportunityBreakdown[opportunity]++;
    }
  });

  const analysisTime = Date.now() - startTime;

  return {
    timestamp: new Date().toISOString(),
    summary: {
      totalFiles,
      filesWithDuplicates: filesWithDuplicates.size,
      totalDuplicateGroups: duplicateGroups.length,
      projectWetScore,
      projectDryScore,
      wetGrade: getWetGrade(projectWetScore),
      dryGrade: getDryGrade(projectDryScore),
      estimatedSavings: {
        linesReduced: totalLinesReduced,
        complexityReduction: totalComplexityReduction,
        effort: totalEffort
      },
      categoryBreakdown,
      opportunityBreakdown
    },
    duplicateGroups,
    recommendations: generateProjectWetRecommendations(duplicateGroups, totalLinesReduced, totalEffort),
    analysisTime
  };
}
// Public API: single-file and whole-project WET code analysis entry points.
module.exports = {
analyzeFileWetCode,
analyzeProjectWetCode
};