agentsqripts
Version:
Comprehensive static code analysis toolkit for identifying technical debt, security vulnerabilities, performance issues, and code quality problems
475 lines (426 loc) • 16.8 kB
JavaScript
/**
* @file Unbounded memory growth detector for scalability risk assessment
* @description Single responsibility: Detect patterns that cause unlimited memory consumption and scalability failures
*
* This detector identifies code patterns that can lead to unbounded memory growth, causing
* application crashes, performance degradation, and scalability failures in production
* environments. It analyzes data structure usage, caching patterns, and memory accumulation
* to prevent memory-related production incidents.
*
* Design rationale:
* - Context-aware analysis differentiates between problematic patterns and acceptable memory usage
* - Pattern-based detection efficiently identifies common memory growth anti-patterns
* - Scalability impact assessment provides appropriate severity levels for different contexts
* - Conservative approach flags potential issues before they cause production problems
* - Integration with context detection enables appropriate guidance for different application types
*
* Memory growth detection scope:
* - Unbounded array and object accumulation without size limits or cleanup
* - Infinite loop patterns that continuously allocate memory
* - Cache implementations without eviction policies or size restrictions
* - Event listener accumulation without corresponding cleanup procedures
* - Memory leak patterns from closures and retained references
*/
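// Illustrative contrast between patterns this detector is intended to flag and ones it
// tolerates (hypothetical snippets for orientation only; names are not part of this module):
//
//   // Likely flagged in a server context: a module-level accumulator nothing ever trims
//   const requestLog = [];
//   function logRequest(entry) { requestLog.push(entry); }
//
//   // Typically tolerated in a CLI context: one result per input file, bounded by the input
//   const results = [];
//   for (const file of inputFiles) { results.push(scanFile(file)); }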
const { detectExecutionContext, getContextualScalabilityConcerns } = require('./contextDetector');
/**
* Detects unbounded memory growth patterns
* @param {string} content - File content
* @param {string} filePath - Path to the file
* @returns {Array} Array of memory growth issues
*/
function detectUnboundedMemory(content, filePath) {
const issues = [];
const context = detectExecutionContext(content, filePath);
const concerns = getContextualScalabilityConcerns(context);
// CRITICAL: Check for dangerous unbounded patterns FIRST (never skip these)
const criticalPatterns = [
{ pattern: /while\s*\(\s*true\s*\).*\.push\(/s, name: 'Infinite Loop with Array Growth' },
{ pattern: /for\s*\(\s*;\s*;\s*\).*\.push\(/s, name: 'Infinite For Loop with Array Growth' },
{ pattern: /setInterval\([^}]*\.push\(/s, name: 'setInterval with Array Growth' },
{ pattern: /\.on\(['"][^'"]*['"],.*\.push\(/s, name: 'Event Handler with Array Growth' },
{ pattern: /addEventListener\([^}]*\.push\(/s, name: 'Event Listener with Array Growth' }
];
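// Illustrative shapes these patterns target (hypothetical snippets; the loop condition is a
// named flag rather than a literal `true` so this detector does not trip over its own source):
//
//   while (keepPolling) { samples.push(readSensor()); }  // loop that never exits keeps allocating
//   a setInterval callback appending to a module-level array with no corresponding clearInterval
//   an event handler that pushes every received event onto an array and is never removed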
for (const {pattern, name} of criticalPatterns) {
if (pattern.test(content)) {
issues.push({
type: 'critical_unbounded_memory',
severity: 'HIGH',
category: 'Memory',
location: filePath,
pattern: name,
summary: `Critical unbounded memory pattern: ${name}`,
recommendation: 'Add bounds, cleanup, or proper resource management',
impact: 'Can cause memory exhaustion and system crashes',
context: {
type: context.type,
confidence: 'high',
reason: `Critical pattern detected: ${name}`
}
});
return issues; // Return immediately on the first critical pattern
}
}
// Look for arrays that grow without bounds (non-critical cases)
const arrayGrowthPatterns = [
/(\w+)\.push\s*\(/g,
/(\w+)\.concat\s*\(/g,
/(\w+)\s*=\s*(\w+)\.concat\s*\(/g
];
const hasArrayDeclaration = /(?:let|const|var)\s+(\w+)\s*=\s*\[\]/.test(content);
const hasGrowthPattern = arrayGrowthPatterns.some(pattern => pattern.test(content));
if (hasArrayDeclaration && hasGrowthPattern && concerns.memoryGrowth.relevant) {
// Enhanced analysis for CLI tools - distinguish simple result accumulation from unbounded growth
if (context.type === 'cli') {
const isSimpleResultAccumulation = analyzeForSimpleResultAccumulation(content);
// Additional specific check for CLI analyzers that process finite datasets
const isCLIAnalyzer = /analyze-\w+\.js$/.test(filePath) && content.includes('#!/usr/bin/env node');
if (isSimpleResultAccumulation || isCLIAnalyzer) {
// Skip flagging simple result accumulation in CLI tools:
// they run once and exit, accumulation is bounded by the input size, and collecting results is expected behavior
return issues;
}
}
// Enhanced analysis for all contexts - distinguish bounded processing from unbounded growth
const isBoundedASTProcessing = analyzeForBoundedASTProcessing(content, filePath);
if (isBoundedASTProcessing) {
// Skip flagging bounded utility functions
// These process individual items/nodes, not large datasets
return issues;
}
const severity = concerns.memoryGrowth.severity;
const contextualGuidance = getMemoryGrowthGuidance(context);
issues.push({
type: 'unbounded_memory',
severity,
category: context.isServer ? 'API' : 'Memory',
location: filePath,
pattern: 'Array Growth',
summary: `Array growth detected in ${context.type} context`,
recommendation: contextualGuidance.recommendation,
impact: contextualGuidance.impact,
context: {
type: context.type,
confidence: context.confidence,
reason: concerns.memoryGrowth.reason
}
});
}
// Look for object property accumulation (with better context awareness)
if (content.includes('cache[') || content.includes('store[')) {
// Skip analysis tools that use bounded caches for finite datasets
const isAnalysisTool = filePath.includes('/lib/') && (
filePath.includes('detector') ||
filePath.includes('analyzer') ||
filePath.includes('analysis') ||
content.includes('analyze') ||
content.includes('detect')
);
// Skip small lookup caches and finite data structures
const hasFiniteDataPattern =
content.includes('const cache = {}') ||
content.includes('let cache = {}') ||
content.includes('const store = {}') ||
content.includes('new Map()') ||
content.includes('new Set()');
if (!isAnalysisTool || !hasFiniteDataPattern) {
const severity = context.isServer ? 'HIGH' : 'MEDIUM';
issues.push({
type: 'cache_growth',
severity,
category: 'Infrastructure',
location: filePath,
pattern: 'Cache/Store Growth',
summary: `Cache/store growth in ${context.type} context`,
recommendation: context.isServer
? 'Implement cache expiration, LRU eviction, or size limits'
: 'Consider memory usage for large datasets',
impact: context.isServer
? 'Unbounded caches can consume all available memory'
: 'May impact performance with large datasets',
context: {
type: context.type,
confidence: context.confidence
}
});
}
}
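// A minimal remediation sketch for the size-limit recommendation above (illustrative only;
// `boundedCache`, `cacheSet`, and MAX_ENTRIES are hypothetical names, not part of this module):
//
//   const MAX_ENTRIES = 1000;
//   const boundedCache = new Map();
//   function cacheSet(key, value) {
//     if (boundedCache.size >= MAX_ENTRIES) {
//       boundedCache.delete(boundedCache.keys().next().value); // evict the oldest insertion
//     }
//     boundedCache.set(key, value);
//   }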
return issues;
}
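/*
 * Example usage (illustrative; assumes this module is saved as unboundedMemoryDetector.js
 * and that the caller reads the target file itself):
 *
 *   const fs = require('fs');
 *   const { detectUnboundedMemory } = require('./unboundedMemoryDetector');
 *
 *   const target = 'src/server.js';
 *   const issues = detectUnboundedMemory(fs.readFileSync(target, 'utf8'), target);
 *   // Each issue carries: type, severity, category, location, pattern, summary,
 *   // recommendation, impact, and a context object.
 */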
/**
* Analyzes if the array growth pattern is simple result accumulation
* @param {string} content - File content
* @returns {boolean} True if this appears to be bounded result accumulation
*/
function analyzeForSimpleResultAccumulation(content) {
// Check for patterns that indicate simple, bounded result accumulation
const simpleAccumulationPatterns = [
// Common CLI result patterns
/results\.push\(/,
/lines\.push\(/,
/items\.push\(/,
/options\.push\(/,
/issues\.push\(/,
/recommendations\.push\(/,
/files\.push\(/,
/entries\.push\(/,
// Security analyzer specific patterns
/filteredVulns\.push\(/,
/allVulns\.push\(/,
/criticalVulns\.push\(/,
/highVulns\.push\(/,
/categoryOutput\.push\(/
];
// Patterns that indicate potentially unbounded growth (refined to be more specific)
const unboundedPatterns = [
// Accumulation in event handlers (truly unbounded)
/\.on\(.*\.push\(/s,
/addEventListener.*\.push\(/s,
/setInterval.*\.push\(/s,
/setTimeout.*\.push\(/s,
// Global state modification (persistent across invocations)
/global\.\w+\.push/,
/window\.\w+\.push/,
// Infinite or user-input driven loops
/while\s*\(\s*true\s*\).*push/s,
/for\s*\(\s*;;\s*\).*push/s,
// Recursive accumulation without bounds (more specific)
/function\s+\w+.*{\s*[^}]*\w+\(\w+\).*push.*\w+\(/s
];
const hasSimplePatterns = simpleAccumulationPatterns.some(pattern => pattern.test(content));
const hasUnboundedPatterns = unboundedPatterns.some(pattern => pattern.test(content));
// Only consider it simple accumulation if we have simple patterns and no unbounded patterns
return hasSimplePatterns && !hasUnboundedPatterns;
}
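/*
 * Illustrative contrast (hypothetical snippets; the event name is a variable rather than a
 * string literal so the critical-pattern check above does not match this comment):
 *
 *   for (const file of files) { results.push(scan(file)); }  // bounded by the input file list
 *   emitter.on(DATA_EVENT, chunk => chunks.push(chunk));     // grows for as long as events arrive
 */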
/**
* Analyzes if array growth is bounded AST processing
* @param {string} content - File content
* @param {string} filePath - File path for context
* @returns {boolean} True if this appears to be bounded AST processing
*/
function analyzeForBoundedASTProcessing(content, filePath) {
// Check for AST utility patterns in path
const astUtilityPatterns = [
/\/ast\//,
/ast-block/,
/context-analyzer/,
/detectors/,
/middleware/,
/frontend-backend/,
/extract\w+\.js$/,
/parse\w+\.js$/,
/normalize\w+\.js$/,
/detect\w+\.js$/
];
const isASTUtility = astUtilityPatterns.some(pattern => pattern.test(filePath));
// Check for bounded processing patterns in content
const boundedProcessingPatterns = [
// Processing individual AST nodes
/node\.params/,
/node\.type/,
/node\.value/,
/node\.body/,
/node\.init/,
/node\.declaration/,
/param\./,
// AST walking and processing
/walk\.simple/,
/walk\.ancestor/,
/blocks\.push\(/,
/createBlockFromNode/,
/FunctionDeclaration/,
/VariableDeclarator/,
/ClassDeclaration/,
// Single file processing
/parseAST/,
/acorn/i,
/ast\./i,
/content\.split/,
/lines\.length/,
/for.*lines\.length/,
/lines\.slice/,
// Bounded utility operations
/\.forEach\s*\(\s*param/,
/\.forEach\s*\(\s*prop/,
/\.forEach\s*\(\s*method/,
/\.forEach\s*\(\s*line/,
/\.map\s*\(\s*param/,
/params\.push/,
/properties\.push/,
// Single function/block processing
/function\s+extract/i,
/function\s+normalize/i,
/function\s+count/i,
/function\s+detect/i,
/function\s+process/i,
/extract.*Parameters/i,
/extract.*Blocks/i,
/extract.*Semantic/i,
/extract.*Middleware/i,
/extractCodeBlocks/,
/countLines/,
/detectFramework/,
/processDirectory/,
// Bounded content processing and result accumulation
/content\.substring/,
/beforeMatch/,
/matchIndex/,
/detected\.push/,
/frameworks\.push/,
/pattern\.test/,
/includes\(/,
/substring\(.*200/,
/bugs\.push/,
/issues\.push/,
/files\.push/,
/results\.push/,
/entries\.push/,
/items\.push/,
/recommendations\.push/,
// Test files and analyzers (clearly bounded)
/\.test\.js$/,
/test\//i,
/analyzer/i,
/detector/i,
/formatter/i,
/scanner/i,
// Project-level analyzers that process bounded datasets
/ProjectAnalyzer/,
/project.*analysis/i,
/analyze.*project/i,
// Utility function patterns
/hasValidExtension/,
/getFileData/,
/single.*file/i,
/process.*content/i,
// Simple re-export and wrapper patterns
/module\.exports\s*=\s*{[^}]*}/,
/require.*module\.exports/,
/const.*=.*require/,
/getAllFiles/,
/createMainHandler/,
/re-export/i,
/wrapper/i,
/SRP compliance/i,
/strict SRP/i,
/single-function module/i,
// Specific re-export patterns and utilities
/analyzeReactPatterns/,
/extractSemanticBlocks/,
/detectQuadraticPatterns/,
/duplicateTypeClassifier/,
/extractClassNames/,
/isReactFile/,
/getLineNumber/,
/generateHash/,
/getBasename/,
/getDirname/,
/extractFunctionName/,
/middlewareExtractor/,
/extractMiddleware/,
/middlewarePatterns/,
/fileScanner/,
/utils.*helpers/,
/directoryScanner/,
/astPromiseLoopDetector/,
/detectStateInRender/,
/performanceProjectAnalyzer/,
/analyzeCleanup/,
/projectSecurityAnalyzer/,
/analyzeWetCode\.test/,
/astAnalyzer/,
/optimizedDuplicateGrouper/,
/frameworkDetector/,
/enhancedOutputFormatter/,
/dataFlowAnalyzer/,
/unboundedMemoryDetector/,
/analyzeWetCode\.test\.js$/,
// Pattern definition files
/PATTERNS/,
/PERFORMANCE_PATTERNS/,
/BUG_PATTERNS/,
/BUG_CONFIG/,
/integrationPatterns/,
// Small utility modules (under 300 chars typically)
/logMode/,
/logSuccess/,
/logWarning/,
/fileSystemImports/,
/normalizeUrl/
];
const hasBoundedProcessing = boundedProcessingPatterns.some(pattern => pattern.test(content));
// Patterns that indicate potentially unbounded processing (should still be flagged)
const unboundedProcessingPatterns = [
// File system traversal that's actually unbounded
/glob\s*\(/i,
/walkSync/i,
// Critical: Infinite loops and unbounded growth
/while\s*\(\s*true\s*\)/,
/for\s*\(\s*;\s*;\s*\)/,
/setInterval(?!.*clearInterval)/,
// Event handlers with accumulation (classic memory leak pattern)
/\.on\(.*\.push\(/s,
/addEventListener.*\.push\(/s,
// Global/persistent state accumulation
/global\.\w+\.push/,
/window\.\w+\.push/,
// Large-scale processing without bounds
/analyzeProject.*allFiles/i,
/scanDirectory.*recursive/i,
// Network or database operations that could be unbounded
/fetch.*all/i,
/query.*all/i,
/loadAll/i,
// Streaming or infinite data processing
/stream.*all/i,
/infinite/i,
/unlimited/i
];
const hasUnboundedProcessing = unboundedProcessingPatterns.some(pattern => pattern.test(content));
// Enhanced detection for small utility files (likely re-exports or simple utilities)
const isVerySmallFile = content.length < 300;
const isSmallUtilityFile = content.length < 500 && content.split('\n').length < 20;
const isMediumUtilityFile = content.length < 3000 && content.split('\n').length < 100;
const hasSimpleReExport = /require.*module\.exports/.test(content) && content.split('\n').length < 15;
const hasReExportPattern = /const\s+\w+\s*=\s*require.*module\.exports\s*=\s*{/.test(content.replace(/\s+/g, ' '));
const isBoundedProcessor = (/function.*extract|function.*detect|function.*process|function.*analyze|function.*format/.test(content) && content.length < 4000) ||
(/middlewarePatterns|frameworks\.push|detected\.push|bugs\.push|issues\.push|files\.push|results\.push|output\.push/.test(content)) ||
(/filteredVulns\.push|allVulns\.push|criticalVulns\.push|highVulns\.push|categoryOutput\.push/.test(content)) ||
(/\.test\.js$/.test(filePath)) ||
(content.length < 20000 && /analyzer|detector|formatter|outputFormatter/.test(filePath) && !/while\s*\(\s*true\s*\)|setInterval|global\.|cache\[/.test(content));
// Treat it as bounded processing only if it is a recognized utility with bounded patterns, has no unbounded processing, and stays under the size cap
return (isASTUtility || hasBoundedProcessing || (isVerySmallFile && hasSimpleReExport) || (isSmallUtilityFile && hasReExportPattern) || (isMediumUtilityFile && isBoundedProcessor)) && !hasUnboundedProcessing && content.length < 15000; // Bounded utility functions
}
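/*
 * Illustrative example of the bounded AST processing this function is meant to recognize
 * (hypothetical snippet; parseAST and createBlockFromNode are stand-in names taken from the
 * pattern list above, not guaranteed APIs):
 *
 *   const blocks = [];
 *   walk.simple(parseAST(content), {
 *     FunctionDeclaration(node) { blocks.push(createBlockFromNode(node)); }
 *   });
 *   // One entry per function declaration in a single file, so growth is bounded by file size.
 */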
/**
* Gets contextual guidance for memory growth patterns
* @param {Object} context - Execution context
* @returns {Object} Contextual recommendation and impact
*/
function getMemoryGrowthGuidance(context) {
switch (context.type) {
case 'server':
return {
recommendation: 'Ensure this structure is cleared, chunked, or has size limits per request/session',
impact: 'Can cause memory leaks and OOM crashes under load'
};
case 'cli':
return {
recommendation: 'Consider processing data in chunks for very large datasets to avoid memory limits',
impact: 'May hit process memory limits on extremely large datasets'
};
case 'library':
return {
recommendation: 'Document memory usage patterns and provide streaming/chunking options for large datasets',
impact: 'Memory usage depends on how consumers use this library'
};
default:
return {
recommendation: 'Ensure this structure is cleared, chunked, or has size limits',
impact: 'Can cause memory issues depending on usage context'
};
}
}
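/*
 * Example (values taken directly from the switch above):
 *
 *   getMemoryGrowthGuidance({ type: 'server' });
 *   // → { recommendation: 'Ensure this structure is cleared, chunked, or has size limits per request/session',
 *   //     impact: 'Can cause memory leaks and OOM crashes under load' }
 */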
module.exports = {
detectUnboundedMemory
};