// @nodesecure/scanner
// Version: (not captured)
// A package API to run a static analysis of your module's dependencies.
// 301 lines • 13.1 kB
// JavaScript
// TypeScript-emitted helper: registers `value` on `env.stack` so that
// __disposeResources can later run its [Symbol.dispose]/[Symbol.asyncDispose]
// method. Returns `value` unchanged so it can be used inline.
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
    // null/undefined: nothing to dispose, but `await using x = null` must
    // still record an await point for the disposal phase.
    if (value === null || value === void 0) {
        if (async) env.stack.push({ async: true });
        return value;
    }
    if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
    var dispose;
    var inner;
    if (async) {
        if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
        dispose = value[Symbol.asyncDispose];
    }
    // Fall back to the synchronous disposer when no async one exists.
    if (dispose === void 0) {
        if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
        dispose = value[Symbol.dispose];
        // A sync disposer used in async position must surface throws as rejections.
        if (async) inner = dispose;
    }
    if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
    if (inner) {
        dispose = function () {
            try { inner.call(this); }
            catch (e) { return Promise.reject(e); }
        };
    }
    env.stack.push({ value: value, dispose: dispose, async: async });
    return value;
};
// TypeScript-emitted helper: pops and runs every disposer registered on
// `env.stack` in LIFO order. Errors thrown during disposal are chained with
// SuppressedError so no failure is lost; returns a Promise when any step is
// asynchronous, otherwise completes (or throws) synchronously.
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
    return function (env) {
        // Record a disposal failure, wrapping any previously recorded one.
        function fail(e) {
            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
            env.hasError = true;
        }
        // `s` is a small state bit set: bit 1 = an `await using x = null`
        // marker was seen (an await point must still be honoured);
        // bit 2 = disposal has already gone asynchronous.
        var r, s = 0;
        function next() {
            while (r = env.stack.pop()) {
                try {
                    // A sync resource right after a pending async-null marker is
                    // deferred to a microtask so ordering matches native `await using`.
                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
                    if (r.dispose) {
                        var result = r.dispose.call(r.value);
                        // Async disposer: resume the loop once it settles, capturing rejections.
                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
                    }
                    else s |= 1;
                }
                catch (e) {
                    fail(e);
                }
            }
            // Only async-null markers were seen: still return a Promise.
            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
            if (env.hasError) throw env.error;
        }
        return next();
    };
// Use the runtime's native SuppressedError when available, otherwise a shim.
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
});
// Import Node.js Dependencies
import path from "node:path";
import { readFileSync } from "node:fs";
// Import Third-party Dependencies
import { Mutex, MutexRelease } from "@openally/mutex";
import { extractAndResolve, scanDirOrArchive } from "@nodesecure/tarball";
import * as Vulnera from "@nodesecure/vulnera";
import { npm } from "@nodesecure/tree-walker";
import { parseAuthor } from "@nodesecure/utils";
import { ManifestManager } from "@nodesecure/mama";
// Import Internal Dependencies
import { getDependenciesWarnings, addMissingVersionFlags, getUsedDeps, getManifestLinks } from "./utils/index.js";
import { NpmRegistryProvider } from "./registry/NpmRegistryProvider.js";
import { TempDirectory } from "./class/TempDirectory.class.js";
import { Logger, ScannerLoggerEvents } from "./class/logger.class.js";
// CONSTANTS
// Blank per-version skeleton. A fresh deep copy (via structuredClone) is
// merged into every dependency version discovered by depWalker before the
// tarball/registry analyses fill in the real values.
const kDefaultDependencyVersionFields = {
    description: "",
    size: 0,
    author: null,
    engines: {},
    scripts: {},
    licenses: [],
    uniqueLicenseIds: [],
    // File-level breakdown produced by the tarball scan.
    composition: {
        extensions: [],
        files: [],
        minified: [],
        unused: [],
        missing: [],
        required_files: [],
        required_nodejs: [],
        required_thirdparty: [],
        required_subpath: []
    }
};
// Default per-package metadata, deep-copied (structuredClone) for each new
// dependency and later overwritten by the npm registry enrichment.
// NOTE(review): `new Date()` is evaluated once at module load, so every clone
// starts with that same process-start timestamp until the registry provider
// replaces it — presumably intentional as a placeholder; verify.
const kDefaultDependencyMetadata = {
    publishedCount: 0,
    lastUpdateAt: new Date(),
    lastVersion: "N/A",
    hasChangedAuthor: false,
    hasManyPublishers: false,
    hasReceivedUpdateInOneYear: true,
    homepage: null,
    author: null,
    publishers: [],
    maintainers: [],
    integrity: {}
};
// Scanner version read from the package manifest one directory above this file.
// Fix: use a plain relative URL segment instead of path.join() — joining with
// the platform separator yields "..\package.json" on Windows, which is not a
// portable URL path.
const { version: packageVersion } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url), "utf-8"));
/**
 * Walk the full dependency tree of `manifest`, scan every package tarball,
 * enrich each package with npm registry metadata and vulnerabilities, and
 * build the scanner payload.
 *
 * @param {*} manifest - root package manifest (its `name` seeds the payload).
 * @param {*} options - { scanRootNode, includeDevDeps, packageLock, maxDepth,
 *                        location, vulnerabilityStrategy, registry, highlight }.
 * @param {Logger} [logger] - progress event emitter; a fresh one by default.
 * @returns {Promise<*>} the scanner payload (undefined if an error is
 *   swallowed/rethrown by the disposal scaffolding below).
 */
export async function depWalker(manifest, options, logger = new Logger()) {
    // Emitted `using` scaffolding: disposable resources (the temp directory)
    // are registered on env_1 and released in the outer finally block.
    const env_1 = { stack: [], error: void 0, hasError: false };
    try {
        const { scanRootNode = false, includeDevDeps = false, packageLock, maxDepth, location, vulnerabilityStrategy = Vulnera.strategies.NONE, registry } = options;
        // Temp directory for tarball extraction; disposed asynchronously on exit.
        const tempDir = __addDisposableResource(env_1, await TempDirectory.create(), true);
        const payload = {
            id: tempDir.id,
            rootDependencyName: manifest.name ?? "workspace",
            scannerVersion: packageVersion,
            vulnerabilityStrategy,
            warnings: []
        };
        // Map<packageName, dependency descriptor> accumulated during the walk.
        const dependencies = new Map();
        const npmTreeWalker = new npm.TreeWalker({
            registry
        });
        {
            logger
                .start(ScannerLoggerEvents.analysis.tree)
                .start(ScannerLoggerEvents.analysis.tarball)
                .start(ScannerLoggerEvents.analysis.registry);
            // Package names whose registry metadata has already been requested.
            const fetchedMetadataPackages = new Set();
            // Pending registry/tarball promises, settled together after the walk.
            const operationsQueue = [];
            // At most 5 concurrent tarball scans; each release ticks the logger.
            const locker = new Mutex({ concurrency: 5 });
            locker.on(MutexRelease, () => logger.tick(ScannerLoggerEvents.analysis.tarball));
            const rootDepsOptions = {
                maxDepth,
                includeDevDeps,
                packageLock
            };
            for await (const current of npmTreeWalker.walk(manifest, rootDepsOptions)) {
                const { name, version, ...currentVersion } = current;
                // Fresh descriptor: walker fields + cloned defaults per version/metadata.
                const dependency = {
                    versions: {
                        [version]: {
                            ...currentVersion,
                            ...structuredClone(kDefaultDependencyVersionFields)
                        }
                    },
                    vulnerabilities: [],
                    metadata: structuredClone(kDefaultDependencyMetadata)
                };
                let proceedDependencyScan = true;
                if (dependencies.has(name)) {
                    const dep = dependencies.get(name);
                    operationsQueue.push(new NpmRegistryProvider(name, version).enrichDependencyVersion(dep));
                    if (version in dep.versions) {
                        // The dependency has already entered the analysis
                        // This happens if the package is used by multiple packages in the tree
                        proceedDependencyScan = false;
                    }
                    else {
                        dep.versions[version] = dependency.versions[version];
                    }
                }
                else {
                    dependencies.set(name, dependency);
                }
                // If the dependency is a DevDependencies we ignore it.
                if (current.isDevDependency || !proceedDependencyScan) {
                    continue;
                }
                logger.tick(ScannerLoggerEvents.analysis.tree);
                // There is no need to fetch 'N' times the npm metadata for the same package.
                if (fetchedMetadataPackages.has(name) || !current.existOnRemoteRegistry) {
                    logger.tick(ScannerLoggerEvents.analysis.registry);
                }
                else {
                    fetchedMetadataPackages.add(name);
                    const provider = new NpmRegistryProvider(name, version);
                    operationsQueue.push(provider.enrichDependency(logger, dependency));
                }
                const scanDirOptions = {
                    ref: dependency.versions[version],
                    location,
                    isRootNode: scanRootNode && name === manifest.name,
                    registry
                };
                operationsQueue.push(scanDirOrArchiveEx(name, version, locker, tempDir, scanDirOptions));
            }
            logger.end(ScannerLoggerEvents.analysis.tree);
            // Best-effort: allSettled so one failed enrichment/scan never aborts the run.
            await Promise.allSettled(operationsQueue);
            logger
                .end(ScannerLoggerEvents.analysis.tarball)
                .end(ScannerLoggerEvents.analysis.registry);
        }
        const { hydratePayloadDependencies, strategy } = Vulnera.setStrategy(vulnerabilityStrategy);
        // NOTE(review): despite its name, `isVulnHydratable` flags the one
        // combination that can NOT be hydrated — github-advisory/snyk with no
        // on-disk location — so hydration runs in every other case.
        const isVulnHydratable = (strategy === "github-advisory" || strategy === "snyk")
            && typeof location === "undefined";
        if (!isVulnHydratable) {
            await hydratePayloadDependencies(dependencies, {
                useStandardFormat: true,
                path: location
            });
        }
        payload.vulnerabilityStrategy = strategy;
        // We do this because it "seem" impossible to link all dependencies in the first walk.
        // Because we are dealing with package only one time it may happen sometimes.
        const globalWarnings = [];
        for (const [packageName, dependency] of dependencies) {
            const metadataIntegrities = dependency.metadata?.integrity ?? {};
            // Cross-check manifest integrity (from the registry) against the tarball's.
            for (const [version, integrity] of Object.entries(metadataIntegrities)) {
                const dependencyVer = dependency.versions[version];
                const isEmptyPackage = dependencyVer.warnings.some((warning) => warning.kind === "empty-package");
                if (isEmptyPackage) {
                    globalWarnings.push({
                        type: "empty-package",
                        message: `${packageName}@${version} only contain a package.json file!`
                    });
                }
                // Git dependencies have no registry tarball to compare against.
                if (!("integrity" in dependencyVer) || dependencyVer.flags.includes("isGit")) {
                    continue;
                }
                if (dependencyVer.integrity !== integrity) {
                    globalWarnings.push({
                        type: "integrity-mismatch",
                        message: `${packageName}@${version} manifest & tarball integrity doesn't match!`
                    });
                }
            }
            for (const version of Object.entries(dependency.versions)) {
                const [verStr, verDescriptor] = version;
                verDescriptor.flags.push(...addMissingVersionFlags(new Set(verDescriptor.flags), dependency));
                // Packages absent from the remote registry inherit author/links
                // from the local root manifest.
                if (isLocalManifest(verDescriptor, manifest, packageName)) {
                    Object.assign(dependency.metadata, {
                        author: parseAuthor(manifest.author),
                        homepage: manifest.homepage
                    });
                    Object.assign(verDescriptor, {
                        author: parseAuthor(manifest.author),
                        links: getManifestLinks(manifest),
                        repository: manifest.repository
                    });
                }
                // Reverse edges: which packages depend on packageName@verStr.
                const usedDeps = npmTreeWalker.relationsMap.get(`${packageName}@${verStr}`) || new Set();
                if (usedDeps.size === 0) {
                    continue;
                }
                const usedBy = Object.create(null);
                // NB: this `version` intentionally shadows the loop variable above.
                for (const [name, version] of getUsedDeps(usedDeps)) {
                    usedBy[name] = version;
                }
                Object.assign(verDescriptor.usedBy, usedBy);
            }
        }
        try {
            const { warnings, illuminated } = await getDependenciesWarnings(dependencies, options.highlight?.contacts);
            payload.warnings = globalWarnings.concat(warnings);
            payload.highlighted = {
                contacts: illuminated
            };
            payload.dependencies = Object.fromEntries(dependencies);
            return payload;
        }
        finally {
            // Always signal completion, even if warning aggregation throws.
            logger.emit(ScannerLoggerEvents.done);
        }
    }
    catch (e_1) {
        // Emitted `using` scaffolding: record the error so disposal can rethrow it.
        env_1.error = e_1;
        env_1.hasError = true;
    }
    finally {
        // Dispose the temp directory (async); rethrows the recorded error, if any.
        const result_1 = __disposeResources(env_1);
        if (result_1)
            await result_1;
    }
}
/**
 * Scan a single name@version: the local root package is read straight from
 * `options.location`; anything else is fetched from the registry and extracted
 * into `tempDir` first. Concurrency is bounded by `locker`; any scan failure
 * is deliberately swallowed (best-effort).
 */
// eslint-disable-next-line max-params
async function scanDirOrArchiveEx(name, version, locker, tempDir, options) {
    // Emitted `using` scaffolding: the acquired mutex slot is registered on
    // env_2 and released synchronously by __disposeResources in the finally.
    const env_2 = { stack: [], error: void 0, hasError: false };
    try {
        const _ = __addDisposableResource(env_2, await locker.acquire(), false);
        try {
            const { registry, location = process.cwd(), isRootNode, ref } = options;
            const mama = await (isRootNode ?
                ManifestManager.fromPackageJSON(location) :
                extractAndResolve(tempDir.location, {
                    spec: `${name}@${version}`,
                    registry
                }));
            await scanDirOrArchive(mama, ref);
        }
        catch {
            // ignore: a failed extract/scan must not abort the whole walk
        }
    }
    catch (e_2) {
        env_2.error = e_2;
        env_2.hasError = true;
    }
    finally {
        // Release the mutex slot (and rethrow any recorded acquire error).
        __disposeResources(env_2);
    }
}
// A version descriptor is treated as the local (workspace) manifest when the
// package does not exist on the remote registry AND it is either the root
// manifest itself or the root manifest is unnamed.
function isLocalManifest(verDescriptor, manifest, packageName) {
    if (verDescriptor.existOnRemoteRegistry !== false) {
        return false;
    }
    return manifest.name === undefined || manifest.name === packageName;
}
//# sourceMappingURL=depWalker.js.map