@dreamhorizonorg/sentinel
Open-source, zero-dependency tool that blocks compromised packages BEFORE download. Built to counter supply chain and credential theft attacks like Shai-Hulud.
/**
* Package Vulnerability Scanner Library
*
* Core scanning logic for checking packages against security blacklists
* Supports JSON files, HTTP endpoints, and local folders
*/
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { COMPROMISED_PACKAGES_FILENAME, DEFAULT_IGNORE_DIRS, DEFAULT_VERSION_SPEC, NPM_AUDIT_TIMEOUT_MS, TEMP_AUDIT_DIR, TEMP_PACKAGE_NAME, TEMP_PACKAGE_VERSION } from './constants/app.constants.mjs';
import { LOCKFILE_NAMES, PACKAGE_JSON_FILENAME } from './constants/cli.constants.mjs';
import { BLOCKED_MESSAGES, ERROR_MESSAGES, INFO_MESSAGES, SUCCESS_MESSAGES, WARNING_MESSAGES } from './constants/validation.constants.mjs';
import { checkWithProviders } from './providers/index.mjs';
import { colors } from './utils/color.utils.mjs';
import { getDataSourcePath, jsonToMap, loadJsonFromFile } from './utils/data-source.utils.mjs';
import { isDirectory, isHttpUrl, pathExists } from './utils/file.utils.mjs';
import { fetchJsonFromUrl, resolveLatestVersion } from './utils/http.utils.mjs';
import { logError, logInfo, logSuccess, logVerbose, logWarning, shouldSuppressLog } from './utils/log.utils.mjs';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
/**
* Load compromised packages from various sources
*/
export async function loadCompromisedPackages(config = {}) {
const dataSource = getDataSourcePath(config);
let packages = [];
// Check if it's an HTTP endpoint
if (isHttpUrl(dataSource)) {
try {
const json = await fetchJsonFromUrl(dataSource);
packages = Array.isArray(json) ? json : [json];
} catch (error) {
throw new Error(ERROR_MESSAGES.FAILED_TO_LOAD_FROM_ENDPOINT(dataSource, error.message));
}
} else {
// File-based source
let filePath = dataSource;
// If it's a directory, look for JSON file
if (isDirectory(dataSource)) {
filePath = path.join(dataSource, COMPROMISED_PACKAGES_FILENAME);
}
if (!pathExists(filePath)) {
throw new Error(ERROR_MESSAGES.COMPROMISED_PACKAGES_NOT_FOUND(filePath));
}
packages = loadJsonFromFile(filePath);
if (!Array.isArray(packages)) {
packages = [packages];
}
}
return jsonToMap(packages);
}
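/**
 * Usage sketch (illustrative, not part of the module): load the blacklist and
 * inspect one entry. With the default config the data source is resolved by
 * getDataSourcePath(); the package name below is only an example.
 *
 *   const compromised = await loadCompromisedPackages();
 *   // Map<string, Set<string>> - an empty Set means "all versions compromised"
 *   const versions = compromised.get('some-package');
 *   console.log(versions ? Array.from(versions) : 'not blacklisted');
 */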
/**
* Get all compromised packages as array
*/
export async function listCompromisedPackages(config = {}) {
const compromisedMap = await loadCompromisedPackages(config);
const packages = [];
for (const [packageName, versions] of compromisedMap.entries()) {
packages.push({
name: packageName,
versions: Array.from(versions),
allVersions: versions.size === 0
});
}
return packages.sort((a, b) => a.name.localeCompare(b.name));
}
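/**
 * Usage sketch (illustrative): print the blacklist, already sorted by name.
 *
 *   const entries = await listCompromisedPackages();
 *   for (const { name, versions, allVersions } of entries) {
 *     console.log(`${name}: ${allVersions ? 'ALL versions' : versions.join(', ')}`);
 *   }
 */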
/**
* Check if a package version is compromised
*/
function isCompromised(packageName, version, compromisedMap) {
const compromisedVersions = compromisedMap.get(packageName);
if (!compromisedVersions) {
return false;
}
if (compromisedVersions.size === 0) {
return true; // All versions compromised
}
return compromisedVersions.has(version);
}
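/**
 * Shape consumed by isCompromised (illustrative): the map produced by
 * loadCompromisedPackages(), keyed by package name with a Set of bad versions.
 * The package names below are hypothetical.
 *
 *   const map = new Map([
 *     ['left-pad-example', new Set(['1.0.0'])],  // only this version is blocked
 *     ['fully-blocked-example', new Set()]       // empty Set = every version blocked
 *   ]);
 *   isCompromised('left-pad-example', '1.0.0', map);      // true
 *   isCompromised('left-pad-example', '1.0.1', map);      // false
 *   isCompromised('fully-blocked-example', '9.9.9', map); // true
 */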
/**
* Check npm audit for package vulnerabilities
*/
function checkNpmAudit(packageName, version, config = {}) {
if (config.skipNpmAudit) {
return { found: false };
}
try {
const tempDir = path.join(__dirname, '..', TEMP_AUDIT_DIR);
if (!pathExists(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
const tempPackageJson = path.join(tempDir, PACKAGE_JSON_FILENAME);
fs.writeFileSync(tempPackageJson, JSON.stringify({
name: TEMP_PACKAGE_NAME,
version: TEMP_PACKAGE_VERSION,
dependencies: {
[packageName]: version ?? DEFAULT_VERSION_SPEC
}
}, null, 2));
try {
execSync('npm install --package-lock-only', {
cwd: tempDir,
stdio: 'pipe',
timeout: NPM_AUDIT_TIMEOUT_MS
});
let auditOutput;
try {
auditOutput = execSync('npm audit --json', {
cwd: tempDir,
stdio: 'pipe',
encoding: 'utf-8',
timeout: NPM_AUDIT_TIMEOUT_MS
});
} catch (auditError) {
// npm audit exits with a non-zero code when it finds vulnerabilities; the JSON report is still written to stdout
if (!auditError.stdout) throw auditError;
auditOutput = auditError.stdout.toString();
}
const audit = JSON.parse(auditOutput);
if (pathExists(tempDir)) {
fs.rmSync(tempDir, { recursive: true, force: true });
}
if (audit.vulnerabilities && Object.keys(audit.vulnerabilities).length > 0) {
for (const vulnPackage of Object.keys(audit.vulnerabilities)) {
if (vulnPackage === packageName || vulnPackage.startsWith(`${packageName}/`)) {
return {
found: true,
severity: audit.vulnerabilities[vulnPackage].severity,
title: audit.vulnerabilities[vulnPackage].title ?? 'Security vulnerability'
};
}
}
}
return { found: false };
} catch (error) {
if (pathExists(tempDir)) {
fs.rmSync(tempDir, { recursive: true, force: true });
}
return { found: false, error: error.message };
}
} catch (error) {
return { found: false, error: error.message };
}
}
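/**
 * Result shape (illustrative): checkNpmAudit() never throws to the caller; it
 * either reports a finding or folds failures into an `error` field.
 *
 *   checkNpmAudit('example-pkg', '1.2.3');
 *   // => { found: true, severity: 'high', title: '...' }  when npm audit flags the package
 *   // => { found: false }                                  when clean or when skipNpmAudit is set
 *   // => { found: false, error: '...' }                    when the audit itself failed
 */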
/**
* Add package to compromised list (saves to local JSON file)
*/
function addToCompromisedList(packageName, version, config = {}) {
try {
const dataSource = getDataSourcePath(config);
// Only update if it's a local file
if (isHttpUrl(dataSource)) {
logWarning(colors.yellow(WARNING_MESSAGES.CANNOT_UPDATE_REMOTE), config.logMode);
return false;
}
let filePath = dataSource;
// If it's a directory, use the JSON file
if (isDirectory(dataSource)) {
filePath = path.join(dataSource, COMPROMISED_PACKAGES_FILENAME);
} else if (!filePath.endsWith('.json')) {
// If path doesn't end with .json, assume it's a directory
filePath = path.join(filePath, COMPROMISED_PACKAGES_FILENAME);
}
// Load existing packages
let packages = [];
if (pathExists(filePath)) {
packages = loadJsonFromFile(filePath);
if (!Array.isArray(packages)) {
packages = [packages];
}
}
// Find or create package entry
// IMPORTANT: Only add specific versions, never block all versions automatically
if (!version) {
logWarning(colors.yellow('⚠️ Cannot add package without specific version - would block ALL versions'), config.logMode);
return false;
}
const existingIndex = packages.findIndex(p => p.name === packageName);
if (existingIndex >= 0) {
const existing = packages[existingIndex];
const versionsSet = new Set(existing.compromisedVersions ?? []);
versionsSet.add(version);
packages[existingIndex].compromisedVersions = Array.from(versionsSet);
} else {
packages.push({
name: packageName,
compromisedVersions: [version],
notes: `Added automatically on ${new Date().toISOString().split('T')[0]}`
});
}
// Ensure directory exists
const dir = path.dirname(filePath);
if (!pathExists(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
// Save updated packages
fs.writeFileSync(filePath, JSON.stringify(packages, null, 2));
return true;
} catch (error) {
logError(colors.red(ERROR_MESSAGES.UPDATE_COMPROMISED_LIST_ERROR(error.message)));
return false;
}
}
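/**
 * On-disk record written by addToCompromisedList (illustrative): entries are
 * appended to the local JSON file, one object per package. Values shown here
 * are examples only.
 *
 *   [
 *     {
 *       "name": "example-pkg",
 *       "compromisedVersions": ["1.2.3"],
 *       "notes": "Added automatically on 2025-01-01"
 *     }
 *   ]
 */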
/**
* Validate a single package
*/
export async function validatePackage(packageName, version = null, config = {}) {
// If no version provided, try to resolve the latest version from npm registry
// This ensures we check and blacklist specific versions, not entire packages
let resolvedVersion = version;
if (!version) {
logVerbose(colors.dim(`[Registry] Resolving latest version for ${packageName}...`), config.logMode);
resolvedVersion = await resolveLatestVersion(packageName);
if (resolvedVersion) {
logVerbose(colors.dim(`[Registry] Latest version: ${packageName}@${resolvedVersion}`), config.logMode);
}
}
const compromisedMap = await loadCompromisedPackages(config);
// Check blacklist first
const isInBlacklist = compromisedMap.has(packageName);
const isVersionCompromised = resolvedVersion ? isCompromised(packageName, resolvedVersion, compromisedMap) : isInBlacklist;
if (isVersionCompromised) {
// Use resolvedVersion for display if available, otherwise show generic message
const displayVersion = resolvedVersion ?? version;
if (!displayVersion) {
logError(colors.red(colors.bold(BLOCKED_MESSAGES.PACKAGE_COMPROMISED(packageName))));
logError(colors.yellow(BLOCKED_MESSAGES.PACKAGE_VULNERABLE));
} else {
logError(colors.red(colors.bold(BLOCKED_MESSAGES.PACKAGE_VERSION_COMPROMISED(packageName, displayVersion))));
logError(colors.yellow(BLOCKED_MESSAGES.SPECIFIC_VERSION_VULNERABLE));
}
logError(colors.yellow(BLOCKED_MESSAGES.USE_ALTERNATIVE));
logError(colors.yellow(BLOCKED_MESSAGES.CONTACT_SECURITY));
return false;
}
// Check vulnerability providers (even if package is in blacklist with safe version)
// Use resolvedVersion for actual checks to get accurate vulnerability data
const versionToCheck = resolvedVersion ?? version;
const packageDisplayName = `${packageName}${versionToCheck ? '@' + versionToCheck : ''}`;
// Check established vulnerability providers (OSV, GitHub, Snyk)
const providerResult = await checkWithProviders(packageName, versionToCheck, config);
if (providerResult.found) {
logError(colors.red(colors.bold(BLOCKED_MESSAGES.PACKAGE_HAS_VULNERABILITIES(packageName, versionToCheck, providerResult.title, providerResult.severity))));
logError(colors.dim(` Source: ${providerResult.source}${providerResult.url ? ` - ${providerResult.url}` : ''}`));
logInfo(colors.blue(INFO_MESSAGES.ADDING_TO_LIST), config.logMode);
// Only add specific version to blacklist (never block all versions)
if (addToCompromisedList(packageName, versionToCheck, config)) {
logSuccess(colors.green(SUCCESS_MESSAGES.PACKAGE_ADDED_TO_LIST), config.logMode);
}
logError(colors.yellow(BLOCKED_MESSAGES.CONTACT_SECURITY));
return false;
}
// Fallback to npm audit if providers didn't find anything
logInfo(colors.blue(INFO_MESSAGES.CHECKING_NPM_AUDIT(packageDisplayName)), config.logMode);
const auditResult = checkNpmAudit(packageName, versionToCheck, config);
if (auditResult.found) {
logError(colors.red(colors.bold(BLOCKED_MESSAGES.PACKAGE_HAS_VULNERABILITIES(packageName, versionToCheck, auditResult.title, auditResult.severity))));
logInfo(colors.blue(INFO_MESSAGES.ADDING_TO_LIST), config.logMode);
// Only add specific version to blacklist (never block all versions)
if (addToCompromisedList(packageName, versionToCheck, config)) {
logSuccess(colors.green(SUCCESS_MESSAGES.PACKAGE_ADDED_TO_LIST), config.logMode);
}
logError(colors.yellow(BLOCKED_MESSAGES.CONTACT_SECURITY));
return false;
}
// Warn if package is in blacklist but version is safe
if (isInBlacklist && versionToCheck) {
const compromisedVersions = Array.from(compromisedMap.get(packageName));
logWarning(colors.yellow(WARNING_MESSAGES.PACKAGE_HAS_COMPROMISED_VERSIONS(packageName, versionToCheck, compromisedVersions)), config.logMode);
}
return true;
}
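/**
 * Usage sketch (illustrative): gate an install or CI step on a single package.
 * The module path in the import is hypothetical.
 *
 *   import { validatePackage } from './scanner.lib.mjs';
 *
 *   const ok = await validatePackage('example-pkg', '1.2.3');
 *   if (!ok) {
 *     process.exit(1); // blocked: blacklisted or flagged by a vulnerability provider
 *   }
 */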
/**
* Check lockfile for compromised packages
*/
export async function checkLockfile(lockfilePath, config = {}) {
if (!pathExists(lockfilePath)) {
logError(colors.red(ERROR_MESSAGES.LOCKFILE_NOT_FOUND(lockfilePath)));
return false;
}
const compromisedMap = await loadCompromisedPackages(config);
const lockfileContent = fs.readFileSync(lockfilePath, 'utf-8');
const issues = [];
if (lockfilePath.endsWith('yarn.lock')) {
// Yarn lockfile format (supports scoped packages like @scope/package):
// Classic (v1): "package@version": with version "X.X.X"
// Berry (v4): "package@npm:version": with version: X.X.X
// Regex handles both Classic and Berry formats:
// - Classic: version "4.17.21"
// - Berry: version: 4.17.21 or version: "4.17.21"
const entryRegex = /^"?(@?[^"@\s]+)@[^\n]+\n\s+version[:\s]+["']?([^"'\s\n]+)["']?/gm;
let match;
while ((match = entryRegex.exec(lockfileContent)) !== null) {
const packageName = match[1];
const version = match[2].trim();
if (isCompromised(packageName, version, compromisedMap)) {
issues.push({ package: packageName, version, file: lockfilePath });
}
}
} else if (lockfilePath.endsWith('package-lock.json')) {
const lockfile = JSON.parse(lockfileContent);
const packages = lockfile.packages ?? {};
for (const [packagePath, packageData] of Object.entries(packages)) {
if (packagePath === '') continue;
const packageName = packagePath.replace(/^node_modules\//, '');
const version = packageData.version;
if (version && isCompromised(packageName, version, compromisedMap)) {
issues.push({ package: packageName, version, file: lockfilePath });
}
}
} else if (lockfilePath.endsWith('pnpm-lock.yaml')) {
// pnpm-lock.yaml format varies by version:
// v6+: /@scope/package@version: or /package@version:
// v5: /@scope/package/version: or /package/version:
// Handle v6+ format with @ separator (supports scoped packages)
const v6Regex = /^\s+\/(@?[^@\s]+(?:\/[^@\s]+)?)@([^\s:]+):/gm;
let match;
while ((match = v6Regex.exec(lockfileContent)) !== null) {
const packageName = match[1];
const version = match[2];
if (isCompromised(packageName, version, compromisedMap)) {
issues.push({ package: packageName, version, file: lockfilePath });
}
}
// Handle v5 format with / separator: /package/version: or /@scope/package/version:
const v5Regex = /^\s+\/((?:@[^/]+\/)?[^/]+)\/([^/:]+):/gm;
let v5Match;
while ((v5Match = v5Regex.exec(lockfileContent)) !== null) {
const packageName = v5Match[1];
const version = v5Match[2];
if (isCompromised(packageName, version, compromisedMap)) {
// Avoid duplicates if already found by v6 regex
const exists = issues.some(i => i.package === packageName && i.version === version);
if (!exists) {
issues.push({ package: packageName, version, file: lockfilePath });
}
}
}
} else if (lockfilePath.endsWith('bun.lock')) {
// bun.lock is JSONC format (JSON with comments)
// Format: { "packages": { "lodash": ["lodash@4.17.21", "", {}, "sha512-..."] } }
// For scoped: { "packages": { "@types/node": ["@types/node@20.12.14", "", {}, "sha512-..."] } }
try {
// Remove comments for JSON parsing (JSONC -> JSON)
const jsonContent = lockfileContent
.replace(/\/\/.*$/gm, '') // Remove single-line comments
.replace(/\/\*[\s\S]*?\*\//g, ''); // Remove multi-line comments
const lockfile = JSON.parse(jsonContent);
const packages = lockfile.packages ?? {};
for (const [packageKey, packageData] of Object.entries(packages)) {
// packageData is an array: [fullSpec, registry, deps, integrity]
// fullSpec format: "lodash@4.17.21" or "@types/node@20.12.14"
if (Array.isArray(packageData) && packageData.length > 0) {
const fullSpec = packageData[0];
// Extract package name and version from fullSpec
// Handle scoped packages: @scope/name@version
const lastAtIndex = fullSpec.lastIndexOf('@');
if (lastAtIndex > 0) {
const packageName = fullSpec.substring(0, lastAtIndex);
const version = fullSpec.substring(lastAtIndex + 1);
if (isCompromised(packageName, version, compromisedMap)) {
issues.push({ package: packageName, version, file: lockfilePath });
}
}
}
}
} catch (parseError) {
logWarning(colors.yellow(`Warning: Could not parse bun.lock: ${parseError.message}`), config.logMode);
}
}
if (issues.length > 0) {
logError(colors.red(colors.bold(`❌ Found ${issues.length} compromised package(s) in lockfile:`)));
issues.forEach(issue => {
logError(colors.red(` - ${issue.package}@${issue.version}`));
});
return false;
}
logSuccess(colors.green(SUCCESS_MESSAGES.NO_COMPROMISED_PACKAGES_IN_LOCKFILE), config.logMode);
return true;
}
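/**
 * Usage sketch (illustrative): verify a project's lockfile before CI proceeds.
 *
 *   const clean = await checkLockfile('./package-lock.json');
 *   if (!clean) process.exit(1);
 */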
/**
* Check package.json for compromised packages
*/
export async function checkPackageJson(packageJsonPath, config = {}) {
if (!pathExists(packageJsonPath)) {
logError(colors.red(ERROR_MESSAGES.PACKAGE_JSON_NOT_FOUND(packageJsonPath)));
return false;
}
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
const compromisedMap = await loadCompromisedPackages(config);
const issues = [];
const allDeps = {
...packageJson.dependencies,
...packageJson.devDependencies,
...packageJson.peerDependencies,
...packageJson.optionalDependencies
};
for (const [packageName, versionSpec] of Object.entries(allDeps)) {
// Extract version from spec (handles ^, ~, >=, <=, ranges, etc.)
// For validation, we check if the base version matches compromised versions
// This is a best-effort check since ranges can't be fully validated without resolving
const versionMatch = versionSpec.match(/^[\^~>=<]*(.+?)(?:-|$)/);
const version = versionMatch ? versionMatch[1].split('-')[0] : versionSpec.split('-')[0];
if (isCompromised(packageName, version, compromisedMap)) {
issues.push({ package: packageName, version: versionSpec, file: packageJsonPath });
} else if (compromisedMap.has(packageName)) {
logWarning(colors.yellow(WARNING_MESSAGES.PACKAGE_VERSION_WARNING(packageName, versionSpec)), config.logMode);
}
}
if (issues.length > 0) {
logError(colors.red(colors.bold(`❌ Found ${issues.length} compromised package(s) in package.json:`)));
issues.forEach(issue => {
logError(colors.red(` - ${issue.package}@${issue.version}`));
});
return false;
}
logSuccess(colors.green(SUCCESS_MESSAGES.NO_COMPROMISED_PACKAGES_IN_PACKAGE_JSON), config.logMode);
return true;
}
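/**
 * Usage sketch (illustrative): validate declared dependencies. Version ranges
 * are checked best-effort against their base version, as noted above.
 *
 *   const clean = await checkPackageJson('./package.json');
 *   if (!clean) process.exit(1);
 */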
/**
* Recursively find all package.json files in a directory
*/
function findPackageJsonFiles(dir, ignoreDirs = DEFAULT_IGNORE_DIRS) {
const packageJsonFiles = [];
if (!pathExists(dir)) {
return packageJsonFiles;
}
function walk(currentPath) {
const entries = fs.readdirSync(currentPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(currentPath, entry.name);
// Skip ignored directories
if (entry.isDirectory() && ignoreDirs.includes(entry.name)) {
continue;
}
if (entry.isDirectory()) {
walk(fullPath);
} else if (entry.name === PACKAGE_JSON_FILENAME) {
packageJsonFiles.push(fullPath);
}
}
}
walk(dir);
return packageJsonFiles;
}
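/**
 * Usage sketch (illustrative): collect manifests while skipping extra folders
 * on top of DEFAULT_IGNORE_DIRS (the directory names below are examples).
 *
 *   const manifests = findPackageJsonFiles('.', [...DEFAULT_IGNORE_DIRS, 'fixtures', 'tmp']);
 */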
/**
* Find all lockfiles in a directory (non-recursive, only top-level)
*/
function findLockfiles(dir) {
const lockfiles = [];
if (!pathExists(dir)) {
return lockfiles;
}
for (const lockfileName of LOCKFILE_NAMES) {
const lockfilePath = path.join(dir, lockfileName);
if (pathExists(lockfilePath)) {
lockfiles.push(lockfilePath);
}
}
return lockfiles;
}
/**
* Scan a repository for compromised packages
* Checks both package.json files AND lockfiles
*/
export async function scanRepository(repoPath, config = {}) {
const resolvedPath = path.resolve(repoPath);
if (!pathExists(resolvedPath)) {
logError(colors.red(`❌ Repository path not found: ${resolvedPath}`));
return { success: false, totalFiles: 0, issues: [] };
}
logInfo(colors.blue(INFO_MESSAGES.SCANNING_REPOSITORY(colors.bold(resolvedPath))), config.logMode);
const packageJsonFiles = findPackageJsonFiles(resolvedPath);
const lockfiles = findLockfiles(resolvedPath);
const totalFiles = packageJsonFiles.length + lockfiles.length;
if (packageJsonFiles.length === 0 && lockfiles.length === 0) {
logError(colors.yellow(ERROR_MESSAGES.NO_PACKAGE_JSON_FILES(resolvedPath)));
return { success: true, totalFiles: 0, issues: [] };
}
logInfo(colors.blue(INFO_MESSAGES.FOUND_PACKAGE_JSON_FILES(packageJsonFiles.length)), config.logMode);
if (lockfiles.length > 0) {
logInfo(colors.blue(` Found ${lockfiles.length} lockfile(s)`), config.logMode);
}
const compromisedMap = await loadCompromisedPackages(config);
const allIssues = [];
let filesWithIssues = 0;
// Scan package.json files
for (const packageJsonPath of packageJsonFiles) {
const relativePath = path.relative(resolvedPath, packageJsonPath);
try {
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
const allDeps = {
...packageJson.dependencies,
...packageJson.devDependencies,
...packageJson.peerDependencies,
...packageJson.optionalDependencies
};
const fileIssues = [];
for (const [packageName, versionSpec] of Object.entries(allDeps)) {
// Extract version from spec (handles ^, ~, >=, <=, ranges, etc.)
const versionMatch = versionSpec.match(/^[\^~>=<]*(.+?)(?:-|$)/);
const version = versionMatch ? versionMatch[1].split('-')[0] : versionSpec.split('-')[0];
if (isCompromised(packageName, version, compromisedMap)) {
fileIssues.push({ package: packageName, version: versionSpec });
}
}
if (fileIssues.length > 0) {
filesWithIssues++;
allIssues.push({
file: relativePath,
issues: fileIssues
});
if (!shouldSuppressLog(config.logMode)) {
logError(colors.red(`❌ ${relativePath}`));
fileIssues.forEach(issue => {
logError(colors.red(` - ${issue.package}@${issue.version}`));
});
logInfo('', config.logMode);
}
} else {
logVerbose(colors.green(`✅ ${relativePath}`), config.logMode);
}
} catch (error) {
logWarning(colors.yellow(ERROR_MESSAGES.ERROR_READING_FILE(relativePath, error.message)), config.logMode);
}
}
// Scan lockfiles for compromised packages
for (const lockfilePath of lockfiles) {
const relativePath = path.relative(resolvedPath, lockfilePath);
try {
const lockfileContent = fs.readFileSync(lockfilePath, 'utf-8');
const lockfileIssues = [];
if (lockfilePath.endsWith('yarn.lock')) {
// Yarn lockfile format (supports scoped packages like @scope/package):
// Classic (v1): "package@version": with version "X.X.X"
// Berry (v4): "package@npm:version": with version: X.X.X
// Regex handles both Classic and Berry formats
const entryRegex = /^"?(@?[^"@\s]+)@[^\n]+\n\s+version[:\s]+["']?([^"'\s\n]+)["']?/gm;
let match;
while ((match = entryRegex.exec(lockfileContent)) !== null) {
const packageName = match[1];
const version = match[2].trim();
if (isCompromised(packageName, version, compromisedMap)) {
lockfileIssues.push({ package: packageName, version });
}
}
} else if (lockfilePath.endsWith('package-lock.json')) {
const lockfile = JSON.parse(lockfileContent);
const packages = lockfile.packages ?? {};
for (const [packagePath, packageData] of Object.entries(packages)) {
if (packagePath === '') continue;
const packageName = packagePath.replace(/^node_modules\//, '');
const version = packageData.version;
if (version && isCompromised(packageName, version, compromisedMap)) {
lockfileIssues.push({ package: packageName, version });
}
}
} else if (lockfilePath.endsWith('pnpm-lock.yaml')) {
// pnpm-lock.yaml format varies by version:
// v6+: /@scope/package@version: or /package@version:
// v5: /@scope/package/version: or /package/version:
// Handle v6+ format with @ separator (supports scoped packages)
const v6Regex = /^\s+\/(@?[^@\s]+(?:\/[^@\s]+)?)@([^\s:]+):/gm;
let match;
while ((match = v6Regex.exec(lockfileContent)) !== null) {
const packageName = match[1];
const version = match[2];
if (isCompromised(packageName, version, compromisedMap)) {
lockfileIssues.push({ package: packageName, version });
}
}
// Handle v5 format with / separator
const v5Regex = /^\s+\/((?:@[^/]+\/)?[^/]+)\/([^/:]+):/gm;
let v5Match;
while ((v5Match = v5Regex.exec(lockfileContent)) !== null) {
const packageName = v5Match[1];
const version = v5Match[2];
if (isCompromised(packageName, version, compromisedMap)) {
const exists = lockfileIssues.some(i => i.package === packageName && i.version === version);
if (!exists) {
lockfileIssues.push({ package: packageName, version });
}
}
}
} else if (lockfilePath.endsWith('bun.lock')) {
// bun.lock is JSONC format (JSON with comments)
// Format: { "packages": { "lodash": ["lodash@4.17.21", "", {}, "sha512-..."] } }
try {
// Remove comments for JSON parsing (JSONC -> JSON)
const jsonContent = lockfileContent
.replace(/\/\/.*$/gm, '') // Remove single-line comments
.replace(/\/\*[\s\S]*?\*\//g, ''); // Remove multi-line comments
const lockfile = JSON.parse(jsonContent);
const packages = lockfile.packages ?? {};
for (const [packageKey, packageData] of Object.entries(packages)) {
// packageData is an array: [fullSpec, registry, deps, integrity]
// fullSpec format: "lodash@4.17.21" or "@types/node@20.12.14"
if (Array.isArray(packageData) && packageData.length > 0) {
const fullSpec = packageData[0];
// Extract package name and version from fullSpec
// Handle scoped packages: @scope/name@version
const lastAtIndex = fullSpec.lastIndexOf('@');
if (lastAtIndex > 0) {
const packageName = fullSpec.substring(0, lastAtIndex);
const version = fullSpec.substring(lastAtIndex + 1);
if (isCompromised(packageName, version, compromisedMap)) {
lockfileIssues.push({ package: packageName, version });
}
}
}
}
} catch (parseError) {
logWarning(colors.yellow(`Warning: Could not parse bun.lock: ${parseError.message}`), config.logMode);
}
}
if (lockfileIssues.length > 0) {
filesWithIssues++;
allIssues.push({
file: relativePath,
issues: lockfileIssues
});
if (!shouldSuppressLog(config.logMode)) {
logError(colors.red(`❌ ${relativePath} (lockfile)`));
lockfileIssues.forEach(issue => {
logError(colors.red(` - ${issue.package}@${issue.version}`));
});
logInfo('', config.logMode);
}
} else {
logVerbose(colors.green(`✅ ${relativePath} (lockfile)`), config.logMode);
}
} catch (error) {
logWarning(colors.yellow(ERROR_MESSAGES.ERROR_READING_FILE(relativePath, error.message)), config.logMode);
}
}
// Summary
const totalIssues = allIssues.reduce((sum, file) => sum + file.issues.length, 0);
if (!shouldSuppressLog(config.logMode)) {
logInfo(colors.bold(INFO_MESSAGES.SCAN_SUMMARY), config.logMode);
logInfo(` Files scanned: ${totalFiles}`, config.logMode);
logInfo(` Files with issues: ${filesWithIssues}`, config.logMode);
logInfo(` Total compromised packages: ${totalIssues}`, config.logMode);
}
if (totalIssues > 0) {
logError(colors.red(colors.bold(`\n❌ Repository scan found ${totalIssues} compromised package(s) in ${filesWithIssues} file(s)`)));
return { success: false, totalFiles, issues: allIssues };
}
logSuccess(colors.green(colors.bold(`\n✅ Repository scan complete - No compromised packages found`)), config.logMode);
return { success: true, totalFiles, issues: [] };
}
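/**
 * Usage sketch (illustrative): run a full repository scan from a CI step and
 * act on the structured result.
 *
 *   const result = await scanRepository(process.cwd());
 *   // result: { success: boolean, totalFiles: number, issues: [{ file, issues: [{ package, version }] }] }
 *   if (!result.success) process.exit(1);
 */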