/*
 * nullvoid — Detect malicious code
 * Version: (not specified)
 * 1,382 lines (1,218 loc) • 120 kB
 * JavaScript
 */
const fs = require('fs');
const path = require('path');
const https = require('https');
const { execSync } = require('child_process');
const parser = require('@babel/parser');
const traverse = require('@babel/traverse').default;
const t = require('@babel/types');
const acorn = require('acorn');
const walk = require('acorn-walk');
const tar = require('tar');
const { isNullVoidCode } = require('./lib/nullvoidDetection');
const { VALIDATION_CONFIG } = require('./lib/config');
// Parallel scanning will be imported when needed to avoid circular dependencies
const glob = require('glob');
const fse = require('fs-extra');
const os = require('os');
// Import new secure utilities
const { logger, createLogger } = require('./lib/logger');
const { validateScanOptions, ValidationError } = require('./lib/validation');
const {
CACHE_CONFIG,
NETWORK_CONFIG,
PARALLEL_CONFIG,
FILE_CONFIG,
ENTROPY_THRESHOLDS,
SCAN_CONFIG,
SECURITY_CONFIG,
PERFORMANCE_CONFIG
} = require('./lib/config');
const { PackageCache } = require('./lib/cache');
const { rateLimitedRequest, getNpmRegistryStatus } = require('./lib/rateLimiter');
const {
globalErrorHandler,
NetworkError,
FileSystemError,
TimeoutError,
CacheError
} = require('./lib/errorHandler');
// Import new secure components
const { analyzeFileSafely, analyzeWalletThreats } = require('./lib/sandbox');
const { safeReadFile, safeReadDir, validatePath, PathValidationError } = require('./lib/pathSecurity');
const {
InputValidator,
SecurityError,
safeExecute
} = require('./lib/secureErrorHandler');
const { isTestFile } = require('./lib/nullvoidDetection');
const {
analyzeCodeStructure: analyzeCodeStructureUtil,
detectWalletHijacking: detectWalletHijackingUtil,
detectObfuscatedIoCs: detectObfuscatedIoCsUtil,
detectDynamicRequires: detectDynamicRequiresUtil,
calculateShannonEntropy: calculateShannonEntropyUtil
} = require('./lib/detection');
const { analyzeDependencyConfusion } = require('./lib/dependencyConfusion');
// Create package cache instance
// Shared, module-level cache for per-package scan results; capacity and
// entry lifetime come from CACHE_CONFIG (see ./lib/config).
const packageCache = new PackageCache({
maxSize: CACHE_CONFIG.MAX_SIZE,
defaultTTL: CACHE_CONFIG.TTL
});
/**
 * Utility function to get npm global prefix.
 *
 * Shells out to `npm config get prefix`; on any failure the problem is
 * logged as a warning and a conventional default is returned instead.
 *
 * @returns {string} npm global prefix path ('/usr/local' on failure)
 */
function getNpmGlobalPrefix() {
try {
const output = execSync('npm config get prefix', { encoding: 'utf8' });
return output.trim();
} catch (error) {
logger.warn('Could not get npm global prefix', { error: error.message });
return '/usr/local'; // fallback
}
}
// Simple performance monitoring
// Module-level counters; reset at the start of each scan() run and reported
// back in the scan result's `performance` field.
const performanceMetrics = {
startTime: null, // epoch ms when the current scan started
packagesScanned: 0,
cacheHits: 0,
cacheMisses: 0,
networkRequests: 0,
errors: 0
};
// Suspicious package.json patterns
// Regex fragments grouped by the package.json field they target:
// - scripts: shell commands typical of download-and-execute or destructive
//   lifecycle scripts
// - dependencies: insecure / non-registry dependency URL schemes
// - keywords: overtly malicious keywords
// NOTE(review): consumers of this table are not visible in this chunk —
// presumably the package.json heuristics elsewhere in the file; confirm.
const SUSPICIOUS_PACKAGE_PATTERNS = {
scripts: [
'curl.*http',
'wget.*http',
'rm -rf /',
'rm -rf ~',
'chmod.*777',
'eval\\(.*\\)', // More specific eval pattern
'node -e.*http', // Only flag node -e with http requests
'bash -c.*curl',
'bash -c.*wget',
'bash -c.*rm',
'bash -c.*chmod'
],
dependencies: [
'http://.*',
'git://.*',
'file://.*'
],
keywords: [
'malware',
'virus',
'trojan',
'backdoor'
]
};
/**
 * Cache management functions
 */
/**
 * Look up a previously computed scan result in the package cache.
 * Updates performanceMetrics.cacheHits / cacheMisses as a side effect.
 *
 * @param {string} key - Cache key (e.g. "name@version")
 * @returns {*} The cached value, or null on a miss
 */
function getCachedResult(key) {
  const result = packageCache.get(key);
  // Use a nullish check rather than truthiness so a legitimately falsy
  // cached value would still count as a hit instead of forcing a re-scan.
  if (result !== null && result !== undefined) {
    performanceMetrics.cacheHits++;
    return result;
  }
  performanceMetrics.cacheMisses++;
  return null;
}
/**
 * Store a scan result in the shared package cache (uses the cache's
 * default TTL configured at construction).
 * @param {string} key - Cache key (e.g. "name@version")
 * @param {*} data - Result to cache (typically an array of threat objects)
 */
function setCachedResult(key, data) {
packageCache.set(key, data);
}
/**
 * Main scan function that performs heuristic checks on npm packages.
 *
 * Behavior depends on what `packageName` is:
 *  - omitted: scans the current working directory (including subdirectories)
 *    plus the dependencies declared in its package.json
 *  - an existing directory: scans that directory plus its package.json deps
 *  - an existing file: securely analyzes that single file
 *  - anything else: treated as an npm package name and its dependency tree
 *    is scanned via registry metadata
 *
 * @param {string} [packageName] - Optional package name, directory, or file path
 * @param {object} [options] - Scan options (maxDepth, parallel, verbose, ...)
 * @param {Function} [progressCallback] - Forwarded to directory scans for progress reporting
 * @returns {Promise<object>} Scan results: threats, counters, dependency tree,
 *   directory structure, and performance data
 * @throws {ValidationError} When packageName or options fail validation
 * @throws {Error} When the scan itself fails
 */
async function scan(packageName, options = {}, progressCallback = null) {
  const startTime = Date.now();
  // Validate inputs up front so bad arguments fail fast
  try {
    if (packageName) {
      // Use secure validation that allows both package names and local paths
      InputValidator.validatePackageName(packageName);
    }
    validateScanOptions(options);
  } catch (error) {
    if (error instanceof ValidationError) {
      logger.error(`Validation error: ${error.message}`, { field: error.field, value: error.value });
      throw error;
    }
    throw error;
  }
  const threats = [];
  let packagesScanned = 0;
  let filesScanned = 0;
  let directoryStructure = null;
  let dependencyTree = null;
  let performanceData = null;
  // Reset the module-level performance metrics for this run
  performanceMetrics.startTime = startTime;
  performanceMetrics.packagesScanned = 0;
  performanceMetrics.cacheHits = 0;
  performanceMetrics.cacheMisses = 0;
  performanceMetrics.networkRequests = 0;
  performanceMetrics.errors = 0;
  try {
    if (!packageName) {
      // No target specified: scan current directory (including subdirectories)
      const directoryResult = await scanDirectory(process.cwd(), options, progressCallback);
      threats.push(...directoryResult.threats);
      filesScanned = directoryResult.filesScanned;
      packagesScanned = directoryResult.packagesScanned || 0;
      directoryStructure = directoryResult.directoryStructure;
      // Also scan dependencies declared in the directory's package.json
      const depResult = await scanDeclaredDependencies(process.cwd(), options);
      threats.push(...depResult.threats);
      packagesScanned += depResult.packagesScanned;
      if (depResult.tree) {
        dependencyTree = depResult.tree;
      }
    } else if (fs.existsSync(packageName) && fs.statSync(packageName).isDirectory()) {
      // Target is a directory: scan it for suspicious files and patterns
      const directoryResult = await scanDirectory(packageName, options, progressCallback);
      threats.push(...directoryResult.threats);
      filesScanned = directoryResult.filesScanned;
      packagesScanned = directoryResult.packagesScanned || 0;
      directoryStructure = directoryResult.directoryStructure;
      // Also scan dependencies declared in the directory's package.json
      const depResult = await scanDeclaredDependencies(packageName, options);
      threats.push(...depResult.threats);
      packagesScanned += depResult.packagesScanned;
      if (depResult.tree) {
        dependencyTree = depResult.tree;
      }
    } else if (fs.existsSync(packageName) && fs.statSync(packageName).isFile()) {
      // Target is a single file: analyze it for malware patterns
      const filePath = validatePath(packageName);
      const fileName = path.basename(filePath);
      if (fileName.endsWith('.js') || fileName.endsWith('.mjs') || fileName.endsWith('.ts')) {
        try {
          const content = safeReadFile(filePath);
          // Sandboxed static analysis of the file itself
          const analysisResult = analyzeFileSafely(filePath);
          if (!analysisResult.safe) {
            threats.push(...analysisResult.threats);
          }
          // Wallet-targeting threat analysis
          const walletThreats = analyzeWalletThreats(content, fileName);
          threats.push(...walletThreats);
          // Malicious code structure analysis (skips NullVoid's own code and test files)
          const codeAnalysis = analyzeCodeStructure(content, filePath);
          if (codeAnalysis.isMalicious && !isNullVoidCode(filePath) && !isTestFile(filePath)) {
            threats.push({
              type: 'MALICIOUS_CODE_STRUCTURE',
              message: 'Code structure indicates malicious obfuscated content',
              package: filePath,
              severity: 'CRITICAL',
              details: codeAnalysis.reason,
              lineNumber: codeAnalysis.lineNumber,
              sampleCode: codeAnalysis.sampleCode
            });
          }
          // AST-level analysis for comprehensive detection
          const astThreats = analyzeJavaScriptAST(content, filePath);
          threats.push(...astThreats);
          // Obfuscated indicators of compromise
          const iocThreats = checkObfuscatedIoCs(content, filePath);
          threats.push(...iocThreats);
          // Dynamic require() patterns
          const requireThreats = detectDynamicRequires(content, filePath);
          threats.push(...requireThreats);
          filesScanned = 1;
          packagesScanned = 0;
          if (options.verbose) {
            console.log(`Scanned individual file securely: ${filePath}`);
          }
        } catch (error) {
          if (error instanceof PathValidationError) {
            threats.push({
              type: 'PATH_TRAVERSAL_ATTEMPT',
              message: 'Path traversal attempt detected',
              package: fileName,
              severity: 'CRITICAL',
              details: error.message
            });
          } else {
            threats.push({
              type: 'FILE_ANALYSIS_ERROR',
              message: 'Failed to analyze file safely',
              package: fileName,
              severity: 'MEDIUM',
              details: error.message
            });
          }
          if (options.verbose) {
            console.warn(`Warning: Could not analyze file ${filePath}: ${error.message}`);
          }
        }
      } else {
        // Non-JavaScript file: still check for obfuscated patterns, safely
        try {
          const fileContent = safeReadFile(filePath);
          const iocThreats = checkObfuscatedIoCs(fileContent, fileName);
          threats.push(...iocThreats);
          filesScanned = 1;
          packagesScanned = 0;
        } catch (error) {
          if (error instanceof PathValidationError) {
            threats.push({
              type: 'PATH_TRAVERSAL_ATTEMPT',
              message: 'Path traversal attempt detected',
              package: fileName,
              severity: 'CRITICAL',
              details: error.message
            });
          } else {
            threats.push({
              type: 'FILE_READ_ERROR',
              message: 'Failed to read file safely',
              package: fileName,
              severity: 'MEDIUM',
              details: error.message
            });
          }
          if (options.verbose) {
            console.warn(`Warning: Could not analyze file ${filePath}: ${error.message}`);
          }
        }
      }
    } else {
      // Treat the argument as an npm package name and scan its dependency tree
      const maxDepth = options.maxDepth || 3;
      const treeResult = await buildAndScanDependencyTree({ [packageName]: 'latest' }, maxDepth, options, null);
      threats.push(...treeResult.threats);
      packagesScanned = treeResult.packagesScanned;
      dependencyTree = treeResult.tree;
    }
    // Dependency Confusion Detection over the collected tree
    if (dependencyTree && Object.keys(dependencyTree).length > 0) {
      try {
        if (options.verbose) {
          console.log('š Analyzing dependency confusion patterns...');
        }
        const packagesToAnalyze = [];
        // Add root package when scanning a named package (not a local path)
        if (packageName && !packageName.startsWith('/') && !packageName.startsWith('./')) {
          packagesToAnalyze.push({
            name: packageName,
            path: process.cwd() // Use current directory as fallback
          });
        }
        // Add every tree package that has a resolved path
        for (const [pkgName, pkgInfo] of Object.entries(dependencyTree)) {
          if (pkgInfo.path) {
            packagesToAnalyze.push({
              name: pkgName,
              path: pkgInfo.path
            });
          }
        }
        const dependencyConfusionThreats = await analyzeDependencyConfusion(packagesToAnalyze);
        threats.push(...dependencyConfusionThreats);
        if (options.verbose && dependencyConfusionThreats.length > 0) {
          console.log(`ā ļø Found ${dependencyConfusionThreats.length} dependency confusion threat(s)`);
        }
      } catch (error) {
        if (options.verbose) {
          console.warn(`Warning: Dependency confusion analysis failed: ${error.message}`);
        }
        // Surface the failure as a low-priority threat for debugging
        threats.push({
          type: 'DEPENDENCY_CONFUSION_ERROR',
          message: `Dependency confusion analysis failed: ${error.message}`,
          severity: 'LOW',
          package: packageName || 'unknown',
          details: error.stack,
          confidence: 10
        });
      }
    }
    // Assemble performance metrics for this run
    const endTime = Date.now();
    const duration = endTime - startTime;
    performanceData = {
      packagesScanned: performanceMetrics.packagesScanned,
      cacheHits: performanceMetrics.cacheHits,
      cacheMisses: performanceMetrics.cacheMisses,
      cacheHitRate: performanceMetrics.cacheHits / (performanceMetrics.cacheHits + performanceMetrics.cacheMisses) || 0,
      networkRequests: performanceMetrics.networkRequests,
      errors: performanceMetrics.errors,
      packagesPerSecond: performanceMetrics.packagesScanned / (duration / 1000) || 0,
      duration: duration
    };
    return {
      threats,
      packagesScanned,
      filesScanned,
      directoryStructure,
      dependencyTree,
      performance: performanceData,
      duration: duration,
      timestamp: new Date().toISOString()
    };
  } catch (error) {
    throw new Error(`Scan failed: ${error.message}`);
  }
}
/**
 * Scan the dependencies declared in a directory's package.json.
 *
 * Reads `<baseDir>/package.json`, merges `dependencies` and `devDependencies`,
 * and scans them up to `options.maxDepth` levels deep. Uses the parallel
 * scanner when enabled and more than one dependency is declared, falling back
 * to the sequential scanner if the parallel path fails.
 *
 * @param {string} baseDir - Directory expected to contain a package.json
 * @param {object} options - Scan options (maxDepth, parallel, verbose, ...)
 * @returns {Promise<{threats: Array, packagesScanned: number, tree: (object|null)}>}
 *   Aggregated threats, count of packages scanned, and the dependency tree
 *   (tree is null when there is no package.json or no dependencies)
 */
async function scanDeclaredDependencies(baseDir, options) {
  const result = { threats: [], packagesScanned: 0, tree: null };
  const packageJsonPath = path.join(baseDir, 'package.json');
  if (!fs.existsSync(packageJsonPath)) {
    return result;
  }
  try {
    const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
    const dependencies = {
      ...packageJson.dependencies,
      ...packageJson.devDependencies
    };
    if (Object.keys(dependencies).length === 0) {
      return result;
    }
    const maxDepth = options.maxDepth || 3;
    // Parallel scanning only pays off with more than one dependency
    const useParallel = options.parallel !== false && Object.keys(dependencies).length > 1;
    let treeResult;
    if (useParallel) {
      try {
        // Imported lazily to avoid circular dependencies
        const { getParallelConfig } = require('./lib/parallel');
        const parallelConfig = getParallelConfig();
        treeResult = await buildAndScanDependencyTreeParallel(dependencies, maxDepth, options, 'root', parallelConfig);
      } catch (error) {
        if (options.verbose) {
          console.warn(`Warning: Parallel processing failed, falling back to sequential: ${error.message}`);
        }
        treeResult = await buildAndScanDependencyTree(dependencies, maxDepth, options, 'root');
      }
    } else {
      treeResult = await buildAndScanDependencyTree(dependencies, maxDepth, options, 'root');
    }
    result.threats.push(...treeResult.threats);
    result.packagesScanned = treeResult.packagesScanned;
    result.tree = treeResult.tree;
  } catch (error) {
    // Mirrors the original behavior: a malformed package.json (or a failure
    // inside the tree walk) is reported only in verbose mode
    if (options.verbose) {
      console.warn(`Warning: Could not parse package.json: ${error.message}`);
    }
  }
  return result;
}
/**
 * Analyze dependency tree for hidden threats and suspicious patterns.
 *
 * Structural checks performed on the assembled tree:
 *  - suspicious / randomly generated package names
 *  - threats found deep in the dependency chain (depth >= 2)
 *  - unusually high direct-dependency counts
 *  - circular dependency chains
 *
 * @param {object} tree - Dependency tree keyed by package name; entries carry
 *   { version, depth, threats, dependencies }
 * @param {object} options - Scan options (currently unused by this analysis)
 * @returns {Array} Array of additional threats found
 */
function analyzeDependencyTree(tree, options) {
  const threats = [];
  const suspiciousPackages = [];
  const deepDependencies = [];
  for (const [packageName, packageInfo] of Object.entries(tree)) {
    // Suspicious names: long random-looking strings or overtly malicious words
    if (packageName.match(/^[a-z0-9]{32,}$/) || // Random-looking names
        packageName.includes('malware') ||
        packageName.includes('virus') ||
        packageName.includes('trojan') ||
        packageName.includes('backdoor')) {
      suspiciousPackages.push(packageName);
    }
    // Deep dependency chains are potential hiding spots for malicious code
    if (packageInfo.depth >= 2) {
      deepDependencies.push({
        name: packageName,
        depth: packageInfo.depth,
        threatCount: packageInfo.threats.length
      });
    }
    // Packages with very many dependencies are potential attack vectors
    const depCount = packageInfo.dependencies ? Object.keys(packageInfo.dependencies).length : 0;
    // Popular frameworks legitimately pull in many dependencies, so they get
    // a higher threshold to reduce false positives
    const isPopularFramework = VALIDATION_CONFIG.POPULAR_FRAMEWORKS.some(framework => packageName.toLowerCase().includes(framework));
    const threshold = isPopularFramework ? 60 : 40; // Even higher thresholds to reduce false positives
    if (depCount > threshold) {
      threats.push({
        type: 'HIGH_DEPENDENCY_COUNT',
        message: `Package has unusually high number of dependencies (${depCount})`,
        package: packageName,
        severity: 'MEDIUM',
        details: `Package "${packageName}" has ${depCount} dependencies, which could be used to hide malicious code`
      });
    }
  }
  // Report suspicious package names
  for (const packageName of suspiciousPackages) {
    threats.push({
      type: 'SUSPICIOUS_PACKAGE_NAME',
      message: `Suspicious package name detected: ${packageName}`,
      package: packageName,
      severity: 'HIGH',
      details: `Package name "${packageName}" appears suspicious or randomly generated`
    });
  }
  // Report deep dependencies that themselves carry threats
  const deepThreats = deepDependencies.filter(dep => dep.threatCount > 0);
  if (deepThreats.length > 0) {
    // Most deep-dependency threats are signature/integrity issues, so this
    // aggregate finding is reported as MEDIUM rather than HIGH
    const severity = 'MEDIUM';
    threats.push({
      type: 'DEEP_DEPENDENCY_THREATS',
      message: `Threats found in deep dependency chain`,
      package: `š¦ npm-registry://${deepThreats[0].name}@latest\nš https://www.npmjs.com/package/${deepThreats[0].name}`,
      severity: severity,
      details: `Found ${deepThreats.length} packages with threats at depth 2+: ${deepThreats.map(d => `${d.name} (depth ${d.depth})`).join(', ')}`
    });
  }
  // Circular dependencies can be used as attack vectors
  const circularDeps = detectCircularDependencies(tree);
  if (circularDeps.length > 0) {
    threats.push({
      type: 'CIRCULAR_DEPENDENCIES',
      message: `Circular dependencies detected`,
      package: 'dependency-tree',
      severity: 'MEDIUM',
      details: `Found circular dependencies: ${circularDeps.join(', ')}`
    });
  }
  return threats;
}
/**
 * Detect circular dependencies in the tree.
 *
 * Performs a depth-first walk over the tree while tracking the active path;
 * whenever a package already on the path is reached again, the closed loop is
 * recorded as an "a -> b -> a" style chain.
 *
 * @param {object} tree - Dependency tree keyed by package name
 * @returns {Array} Array of circular dependency chains (human-readable strings)
 */
function detectCircularDependencies(tree) {
  const cycles = [];
  const finished = new Set(); // packages whose subtree is fully explored
  const onPath = new Set();   // packages on the current DFS path
  const visit = (pkg, trail) => {
    if (onPath.has(pkg)) {
      // Reached a package already on the active path: close the loop
      const loopStart = trail.indexOf(pkg);
      cycles.push(trail.slice(loopStart).concat(pkg).join(' -> '));
      return;
    }
    if (finished.has(pkg)) {
      return;
    }
    finished.add(pkg);
    onPath.add(pkg);
    const info = tree[pkg];
    if (info && info.dependencies) {
      for (const dep of Object.keys(info.dependencies)) {
        visit(dep, trail.concat(pkg));
      }
    }
    onPath.delete(pkg);
  };
  for (const pkg of Object.keys(tree)) {
    if (!finished.has(pkg)) {
      visit(pkg, []);
    }
  }
  return cycles;
}
/**
 * Resolve the on-disk install location of a package for the parallel scanner.
 *
 * Search order:
 *  1. Node's own resolver (require.resolve)
 *  2. The project's local node_modules
 *  3. The npm global prefix (or ~/.npm-global when the prefix is unavailable)
 *  4. The ~/.npm cache
 *
 * @param {string} name - Package name to locate
 * @returns {string|null} Path to the installed package (or its package.json),
 *   or null when the package cannot be found locally
 */
function resolvePackagePathForParallelScan(name) {
  try {
    return require.resolve(name);
  } catch (error) {
    // require.resolve failed; probe common install locations instead
    const possiblePaths = [];
    // 1. Local project node_modules (highest priority)
    possiblePaths.push(
      path.join(process.cwd(), 'node_modules', name),
      path.join(process.cwd(), 'node_modules', name, 'package.json')
    );
    // 2. Get npm global prefix and use it
    try {
      const npmGlobalPrefix = getNpmGlobalPrefix();
      if (npmGlobalPrefix && npmGlobalPrefix !== 'undefined') {
        possiblePaths.push(
          path.join(npmGlobalPrefix, 'lib', 'node_modules', name),
          path.join(npmGlobalPrefix, 'lib', 'node_modules', name, 'package.json')
        );
      }
    } catch (prefixError) {
      // Fallback to a conventional per-user global location
      if (process.env.HOME) {
        possiblePaths.push(
          path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', name),
          path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', name, 'package.json')
        );
      }
    }
    // 3. Check npm cache locations
    if (process.env.HOME) {
      possiblePaths.push(
        path.join(process.env.HOME, '.npm', 'packages', name),
        path.join(process.env.HOME, '.npm', 'packages', name, 'package.json')
      );
    }
    for (const possiblePath of possiblePaths) {
      if (fs.existsSync(possiblePath)) {
        return possiblePath;
      }
    }
    return null;
  }
}
/**
 * Build and scan dependency tree in parallel.
 *
 * The first level of dependencies is scanned in parallel (when there is more
 * than one); sub-dependencies are then scanned sequentially to preserve the
 * tree structure.
 *
 * @param {object} dependencies - Direct dependencies (name -> version range)
 * @param {number} maxDepth - Maximum depth to scan
 * @param {object} options - Scan options
 * @param {string} rootPackage - Root package name (recorded as each item's parent)
 * @param {object} parallelConfig - Parallel processing configuration
 *   (accepted for interface compatibility; not read by this implementation)
 * @returns {Promise<object>} { threats, tree, packagesScanned }
 */
async function buildAndScanDependencyTreeParallel(dependencies, maxDepth, options, rootPackage = 'root', parallelConfig) {
  const threats = [];
  const tree = {};
  let packagesScanned = 0;
  const scannedPackages = new Set();
  // Convert the declared dependencies into work items for the parallel scanner
  const dependencyArray = Object.entries(dependencies).map(([name, version]) => {
    let packagePath = resolvePackagePathForParallelScan(name);
    if (packagePath === null) {
      // Not installed locally: fall back to an npm registry reference.
      // Strip semver operators (^, ~, etc.) before building the npm link.
      const cleanVersion = version.replace(/^[\^~>=<]/, '');
      const npmLink = `https://www.npmjs.com/package/${name}${cleanVersion !== 'latest' ? `/v/${cleanVersion}` : ''}`;
      packagePath = `š¦ npm-registry://${name}@${version}\nš ${npmLink}`;
    }
    return {
      name,
      version,
      depth: 0,
      parent: rootPackage,
      path: packagePath
    };
  });
  if (dependencyArray.length > 1) {
    // Use parallel scanning for the first level
    const { scanPackagesInParallel } = require('./lib/parallel');
    const parallelResults = await scanPackagesInParallel(dependencyArray, options);
    threats.push(...parallelResults.threats);
    packagesScanned += parallelResults.metrics.scannedPackages;
    // Build tree structure from parallel results
    parallelResults.packages.forEach(pkg => {
      if (!tree[pkg.name]) {
        tree[pkg.name] = {
          version: pkg.version,
          threats: pkg.threatCount,
          dependencies: {}
        };
      }
    });
    // Process sub-dependencies sequentially to maintain tree structure
    for (const pkg of parallelResults.packages) {
      if (maxDepth > 0) {
        try {
          // NOTE(review): the sequential scanner uses getPackageMetadata();
          // confirm getPackageData() exists and has the same contract
          const packageData = await getPackageData(pkg.name, pkg.version);
          if (packageData && packageData.dependencies) {
            const subTreeResult = await buildAndScanDependencyTree(
              packageData.dependencies,
              maxDepth - 1,
              options,
              pkg.name
            );
            threats.push(...subTreeResult.threats);
            packagesScanned += subTreeResult.packagesScanned;
            tree[pkg.name].dependencies = subTreeResult.tree;
          }
        } catch (error) {
          if (options.verbose) {
            console.warn(`Warning: Could not scan sub-dependencies for ${pkg.name}: ${error.message}`);
          }
        }
      }
    }
  } else {
    // Fall back to sequential for a single dependency
    for (const [name, version] of Object.entries(dependencies)) {
      if (scannedPackages.has(name)) continue;
      scannedPackages.add(name);
      try {
        const packageThreats = await scanPackage(name, version, options);
        threats.push(...packageThreats);
        packagesScanned++;
        tree[name] = {
          version,
          threats: packageThreats.length,
          dependencies: {}
        };
        // Recursively scan dependencies if within depth limit
        if (maxDepth > 0) {
          const packageData = await getPackageData(name, version);
          if (packageData && packageData.dependencies) {
            const subTreeResult = await buildAndScanDependencyTree(
              packageData.dependencies,
              maxDepth - 1,
              options,
              name
            );
            threats.push(...subTreeResult.threats);
            packagesScanned += subTreeResult.packagesScanned;
            tree[name].dependencies = subTreeResult.tree;
          }
        }
      } catch (error) {
        if (options.verbose) {
          console.warn(`Warning: Could not scan dependency ${name}: ${error.message}`);
        }
      }
    }
  }
  return {
    threats,
    tree,
    packagesScanned
  };
}
/**
 * Resolve the on-disk install location of a package for the sequential
 * tree scanner.
 *
 * Search order:
 *  1. Node's own resolver (require.resolve)
 *  2. The project's local node_modules
 *  3. The npm global prefix (or ~/.npm-global when the prefix is unavailable)
 *  4. The ~/.npm cache
 *
 * @param {string} name - Package name to locate
 * @returns {string|null} Path to the installed package (or its package.json),
 *   or null when the package cannot be found locally
 */
function resolveInstalledPackagePath(name) {
  try {
    return require.resolve(name);
  } catch (error) {
    // require.resolve failed; probe common install locations instead
    const possiblePaths = [];
    // 1. Local project node_modules (highest priority)
    possiblePaths.push(
      path.join(process.cwd(), 'node_modules', name),
      path.join(process.cwd(), 'node_modules', name, 'package.json')
    );
    // 2. Get npm global prefix and use it
    try {
      const npmGlobalPrefix = getNpmGlobalPrefix();
      if (npmGlobalPrefix && npmGlobalPrefix !== 'undefined') {
        possiblePaths.push(
          path.join(npmGlobalPrefix, 'lib', 'node_modules', name),
          path.join(npmGlobalPrefix, 'lib', 'node_modules', name, 'package.json')
        );
      }
    } catch (prefixError) {
      // Fallback to a conventional per-user global location
      if (process.env.HOME) {
        possiblePaths.push(
          path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', name),
          path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', name, 'package.json')
        );
      }
    }
    // 3. Check npm cache locations
    if (process.env.HOME) {
      possiblePaths.push(
        path.join(process.env.HOME, '.npm', 'packages', name),
        path.join(process.env.HOME, '.npm', 'packages', name, 'package.json')
      );
    }
    for (const possiblePath of possiblePaths) {
      if (fs.existsSync(possiblePath)) {
        return possiblePath;
      }
    }
    return null;
  }
}
/**
 * Build and scan dependency tree for transitive dependencies.
 *
 * Walks the dependency graph breadth-first, level by level, scanning each
 * unique name@version once and recording per-package threats in the tree.
 * Finally, the assembled tree is analyzed for structural threats.
 *
 * @param {object} dependencies - Direct dependencies from package.json
 * @param {number} maxDepth - Maximum depth to traverse
 * @param {object} options - Scan options
 * @param {string} [rootPackage] - Root package name (informational)
 * @returns {Promise<object>} { threats, packagesScanned, tree }
 */
async function buildAndScanDependencyTree(dependencies, maxDepth, options, rootPackage = null) {
  const threats = [];
  const tree = {};
  const scannedPackages = new Set();
  let packagesScanned = 0;
  // Seed the first level from the declared dependencies
  let currentLevel = Object.entries(dependencies).map(([name, version]) => {
    let packagePath = resolveInstalledPackagePath(name);
    if (packagePath === null) {
      // Not installed locally: fall back to an npm registry reference.
      // Strip semver operators (^, ~, etc.) before building the npm link.
      const cleanVersion = version.replace(/^[\^~>=<]/, '');
      const npmLink = `https://www.npmjs.com/package/${name}${cleanVersion !== 'latest' ? `/v/${cleanVersion}` : ''}`;
      packagePath = `š¦ npm-registry://${name}@${version}\nš ${npmLink}`;
    }
    return {
      name,
      version,
      path: packagePath
    };
  });
  let depth = 0;
  while (currentLevel.length > 0 && depth < maxDepth) {
    const nextLevel = [];
    for (const packageInfo of currentLevel) {
      const { name: packageName, version, path: packagePath } = packageInfo;
      // Skip if already scanned (avoids re-work and circular dependencies)
      const packageKey = `${packageName}@${version}`;
      if (scannedPackages.has(packageKey)) {
        continue;
      }
      scannedPackages.add(packageKey);
      // Initialize tree structure
      if (!tree[packageName]) {
        tree[packageName] = {
          version: version || 'unknown',
          depth,
          threats: [],
          dependencies: {}
        };
      }
      // Scan the package itself
      const packageThreats = await scanPackage(packageName, version, options, packagePath);
      threats.push(...packageThreats);
      tree[packageName].threats = packageThreats;
      packagesScanned++;
      // Queue this package's own dependencies for the next level
      try {
        const packageData = await getPackageMetadata(packageName, version);
        if (packageData && packageData.dependencies) {
          const packageDeps = Object.entries(packageData.dependencies);
          tree[packageName].dependencies = Object.fromEntries(packageDeps);
          for (const [depName, depVersion] of packageDeps) {
            const depKey = `${depName}@${depVersion || 'unknown'}`;
            if (!scannedPackages.has(depKey)) {
              let depPath = resolveInstalledPackagePath(depName);
              if (depPath === null) {
                // Not installed locally: identify by name@version instead
                depPath = `${depName}@${depVersion || 'latest'}`;
              }
              nextLevel.push({
                name: depName,
                version: depVersion || 'latest',
                path: depPath
              });
            }
          }
        }
      } catch (error) {
        if (options.verbose) {
          console.warn(`Warning: Could not get dependencies for ${packageName}: ${error.message}`);
        }
      }
    }
    currentLevel = nextLevel;
    depth++;
  }
  // Analyze the complete dependency tree for additional, structural threats
  const treeThreats = analyzeDependencyTree(tree, options);
  threats.push(...treeThreats);
  return {
    threats,
    packagesScanned,
    tree
  };
}
/**
* Scan a specific package for threats
* @param {string} packageName - Package name
* @param {string} version - Package version
* @param {object} options - Scan options
* @returns {Promise<Array>} Array of threats found
*/
async function scanPackage(packageName, version, options, packagePath = null) {
const threats = [];
try {
// Check cache first
const cacheKey = `${packageName}@${version}`;
const cachedThreats = getCachedResult(cacheKey);
if (cachedThreats) {
return cachedThreats;
}
// Get package metadata from npm registry
const packageData = await getPackageMetadata(packageName, version);
if (!packageData) {
performanceMetrics.errors++;
if (options.verbose) {
console.warn(`Warning: Could not fetch metadata for ${packageName}`);
}
return threats;
}
// Update performance metrics
performanceMetrics.packagesScanned++;
// Heuristic 1: Check for postinstall scripts
const postinstallThreats = await checkPostinstallScripts(packageData);
threats.push(...postinstallThreats);
// Heuristic 2: Entropy analysis for obfuscated code
const entropyThreats = await checkCodeEntropy(packageData);
threats.push(...entropyThreats);
// Heuristic 3: Check for suspicious file patterns
const filePatternThreats = await checkSuspiciousFilePatterns(packageData);
threats.push(...filePatternThreats);
// Heuristic 4: Check for known malicious patterns
const patternThreats = await checkMaliciousPatterns(packageData);
threats.push(...patternThreats);
// Heuristic 5: Check for wallet hijacking attempts
const walletThreats = await checkWalletHijacking(packageData);
threats.push(...walletThreats);
// Heuristic 6: Check for network response manipulation
const networkThreats = await checkNetworkManipulation(packageData);
threats.push(...networkThreats);
// Heuristic 7: Check for multi-chain targeting
const multiChainThreats = await checkMultiChainTargeting(packageData);
threats.push(...multiChainThreats);
// Heuristic 8: Check for stealth controls and obfuscation
const stealthThreats = await checkStealthControls(packageData);
threats.push(...stealthThreats);
// Heuristic 9: Advanced AST analysis (disabled for performance)
const astThreats = []; // Disabled tarball analysis
threats.push(...astThreats);
// Heuristic 10: Check for specific obfuscated IoCs
const packageContent = JSON.stringify(packageData);
const iocThreats = checkObfuscatedIoCs(packageContent, packageName);
threats.push(...iocThreats);
// Heuristic 11: Enhanced package.json static analysis
const packageJsonThreats = analyzePackageJson(packageData, packageName);
threats.push(...packageJsonThreats);
// Heuristic 12: Dynamic require() detection
// Analyze package content for dynamic require patterns
const packageContentForRequire = JSON.stringify(packageData);
const dynamicRequireThreats = detectDynamicRequires(packageContentForRequire, packageName);
threats.push(...dynamicRequireThreats);
// Heuristic 13: Enhanced entropy analysis
const enhancedEntropyThreats = analyzeContentEntropy(packageContent, 'JSON', packageName);
threats.push(...enhancedEntropyThreats);
// Heuristic 14: Signature verification and tampering detection
const signatureThreats = await checkPackageSignatures(packageData, packageName, options);
threats.push(...signatureThreats);
// Cache the results
setCachedResult(cacheKey, threats);
} catch (error) {
if (options.verbose) {
console.warn(`Warning: Could not scan ${packageName}: ${error.message}`);
}
}
// Add package path to threats if provided
if (packagePath) {
threats.forEach(threat => {
// Use the package path directly for dependency tree context, but enhance it with npm link
if (packagePath.includes('š¦') || packagePath.includes('š')) {
// Already enhanced format
threat.package = packagePath;
} else {
// Add npm link to existing path
// Clean version number by removing semver operators (^, ~, etc.)
const cleanVersion = version.replace(/^[\^~>=<]/, '');
const npmLink = `https://www.npmjs.com/package/${packageName}${cleanVersion !== 'latest' ? `/v/${cleanVersion}` : ''}`;
threat.package = `š ${packagePath}\nš ${npmLink}`;
}
});
} else {
// If no package path provided, try to find the actual file system path for the package
const fs = require('fs');
const path = require('path');
// Dynamically detect package locations using Node.js built-in methods
const possiblePaths = [];
// 1. Local project node_modules (highest priority)
possiblePaths.push(
path.join(process.cwd(), 'node_modules', packageName),
path.join(process.cwd(), 'node_modules', packageName, 'package.json')
);
// 2. Get npm global prefix and use it
try {
const { execSync } = require('child_process');
const npmGlobalPrefix = getNpmGlobalPrefix();
if (npmGlobalPrefix && npmGlobalPrefix !== 'undefined') {
possiblePaths.push(
path.join(npmGlobalPrefix, 'lib', 'node_modules', packageName),
path.join(npmGlobalPrefix, 'lib', 'node_modules', packageName, 'package.json')
);
}
} catch (error) {
// Fallback to common locations if npm config fails
if (process.env.HOME) {
possiblePaths.push(
path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', packageName),
path.join(process.env.HOME, '.npm-global', 'lib', 'node_modules', packageName, 'package.json')
);
}
}
// 3. Check npm cache locations
if (process.env.HOME) {
possiblePaths.push(
path.join(process.env.HOME, '.npm', 'packages', packageName),
path.join(process.env.HOME, '.npm', 'packages', packageName, 'package.json'),
path.join(process.env.HOME, '.npm', '_cacache', 'content-v2', 'sha512'),
path.join(process.env.HOME, '.npm', '_cacache', 'content-v2', 'sha1')
);
}
// 4. Check if package is available via require.resolve (most reliable)
try {
const resolvedPath = require.resolve(packageName);
possiblePaths.push(resolvedPath);
} catch (error) {
// Package not found via require.resolve, continue with other methods
}
let actualPath = null;
for (const possiblePath of possiblePaths) {
if (fs.existsSync(possiblePath)) {
actualPath = possiblePath;
break;
}
}
// Use actual file system path if found, otherwise use package name and version
let packageDisplay;
if (actualPath) {
// Show the full absolute file system path with clickable npm link
// Clean version number by removing semver operators (^, ~, etc.)
const cleanVersion = version.replace(/^[\^~>=<]+/, '');
const npmLink = `https://www.npmjs.com/package/${packageName}${cleanVersion !== 'latest' ? `/v/${cleanVersion}` : ''}`;
packageDisplay = `š ${actualPath}\nš ${npmLink}`;
} else {
// For packages scanned from npm registry (not installed locally), show registry path with npm link
// Clean version number by removing semver operators (^, ~, etc.)
const cleanVersion = version.replace(/^[\^~>=<]+/, '');
const npmLink = `https://www.npmjs.com/package/${packageName}${cleanVersion !== 'latest' ? `/v/${cleanVersion}` : ''}`;
packageDisplay = `š¦ npm-registry://${packageName}@${version}\nš ${npmLink}`;
}
threats.forEach(threat => {
threat.package = packageDisplay;
});
}
return threats;
}
/**
 * Get package metadata from the npm registry.
 *
 * Resolves the requested version against the registry document:
 * - 'latest' resolves through the dist-tags.latest pointer
 * - range specifiers (^, ~, >, >=, <, <=) resolve to the highest available
 *   version, preferring stable releases over prereleases
 * - anything else is treated as an exact version lookup
 *
 * @param {string} packageName - Package name
 * @param {string} version - Package version or range specifier
 * @returns {Promise<object>} Version-specific package metadata
 * @throws {Error} On 404, non-200 responses, timeouts, network errors,
 *   unparseable registry JSON, or when the version cannot be resolved
 */
async function getPackageMetadata(packageName, version) {
return new Promise((resolve, reject) => {
const url = `https://registry.npmjs.org/${packageName}`;
const timeout = NETWORK_CONFIG.TIMEOUT;
performanceMetrics.networkRequests++;
// Descending version comparator. The previous implementation ran Number()
// on raw dot-segments, which produced NaN for prerelease segments such as
// '0-beta' (from '1.0.0-beta'.split('.')) and made the sort order unstable.
// Here the numeric core is compared first; on a tie, stable releases sort
// ahead of prereleases.
const compareVersionsDesc = (a, b) => {
const aCore = a.split('-')[0].split('.');
const bCore = b.split('-')[0].split('.');
for (let i = 0; i < Math.max(aCore.length, bCore.length); i++) {
const aNum = Number.parseInt(aCore[i] ?? '0', 10) || 0;
const bNum = Number.parseInt(bCore[i] ?? '0', 10) || 0;
if (aNum !== bNum) return bNum - aNum;
}
const aPre = a.includes('-');
const bPre = b.includes('-');
if (aPre !== bPre) return aPre ? 1 : -1;
return 0;
};
const request = https.get(url, {
timeout,
agent: false, // Disable connection pooling
keepAlive: false // Disable keep-alive
}, (res) => {
let data = '';
// Handle different status codes
if (res.statusCode === 404) {
reject(new Error(`Package ${packageName} not found`));
return;
}
if (res.statusCode !== 200) {
reject(new Error(`HTTP ${res.statusCode}: ${res.statusMessage}`));
return;
}
res.on('data', (chunk) => {
data += chunk;
});
res.on('end', () => {
try {
const packageData = JSON.parse(data);
let versionData;
if (version === 'latest') {
const latestTag = packageData['dist-tags']?.latest;
if (latestTag) {
versionData = packageData.versions?.[latestTag];
}
} else if (/^[\^~><]/.test(version)) {
// Range specifier (covers ^, ~, >, >=, <, <= — the original only
// matched ^, ~ and >=): pick the highest available version,
// preferring a stable release over any prerelease.
const availableVersions = Object.keys(packageData.versions || {});
if (availableVersions.length > 0) {
const sortedVersions = availableVersions.sort(compareVersionsDesc);
const best = sortedVersions.find((v) => !v.includes('-')) ?? sortedVersions[0];
versionData = packageData.versions[best];
}
} else {
// Exact version lookup
versionData = packageData.versions?.[version];
}
if (!versionData) {
reject(new Error(`Version ${version} not found for package ${packageName}`));
return;
}
resolve(versionData);
} catch (error) {
reject(new Error(`Failed to parse package data for ${packageName}: ${error.message}`));
}
});
res.on('error', (error) => {
reject(new Error(`Network error for ${packageName}: ${error.message}`));
});
});
request.on('timeout', () => {
request.destroy();
reject(new Error(`Request timeout for ${packageName}`));
});
request.on('error', (error) => {
request.destroy();
reject(new Error(`Request error for ${packageName}: ${error.message}`));
});
// Ensure proper cleanup and prevent hanging
request.setTimeout(timeout);
// Force cleanup after a reasonable time
const cleanupTimer = setTimeout(() => {
if (!request.destroyed) {
request.destroy();
}
}, timeout + 1000);
cleanupTimer.unref(); // Don't keep process alive
// Ensure request is properly destroyed on completion
request.on('close', () => {
clearTimeout(cleanupTimer);
});
});
}
/**
 * Build a text blob representing a package's analyzable content.
 *
 * Simplified stand-in for real tarball extraction: serializes the embedded
 * package.json (when present) and appends the list of known file names,
 * one per line.
 *
 * @param {object} packageData - Package metadata
 * @returns {Promise<string>} Concatenated package content for analysis
 */
async function downloadPackageFiles(packageData) {
// This is a simplified version - in production you'd download the actual tarball
let content = '';
if (packageData.packageJson) {
content = JSON.stringify(packageData.packageJson, null, 2);
}
if (packageData.files) {
const fileNames = Object.keys(packageData.files);
content += `\n${fileNames.join('\n')}`;
}
return content;
}
/**
* Check for postinstall scripts that could execute malicious code
*/
async function checkPostinstallScripts(packageData) {
const threats = [];
if (!packageData || !packageData.scripts) {
return threats;
}
const suspiciousScripts = VALIDATION_CONFIG.SUSPICIOUS_SCRIPTS;
// Check all scripts for suspicious commands
for (const [scriptName, scriptContent] of Object.entries(packageData.scripts)) {
if (scriptName === 'postinstall' || scriptName === 'preinstall') {
const lowerScript = scriptContent.toLowerCase();
for (const suspicious of suspiciousScripts) {
if (lowerScript.includes(suspicious)) {
threats.push({
type: 'POSTINSTALL_SCRIPT',
message: `Package contains ${scriptName} script with suspicious commands`,
package: packageData.name || 'unknown',
severity: 'HIGH',
detail