// snyk-nodejs-lockfile-parser
// Generate a dep tree given a lockfile (npm lockfile v2/v3).
Object.defineProperty(exports, "__esModule", { value: true });
exports.matchOverrideKey = exports.getChildNodeKey = exports.buildDepGraphNpmLockV2 = exports.parseNpmLockV2Project = exports.extractPkgsFromNpmLockV2 = void 0;
const extract_npm_lock_v2_pkgs_1 = require("./extract-npm-lock-v2-pkgs");
Object.defineProperty(exports, "extractPkgsFromNpmLockV2", { enumerable: true, get: function () { return extract_npm_lock_v2_pkgs_1.extractPkgsFromNpmLockV2; } });
const dep_graph_1 = require("@snyk/dep-graph");
const util_1 = require("../util");
const errors_1 = require("../../errors");
const parsers_1 = require("../../parsers");
const semver = require("semver");
const micromatch = require("micromatch");
const pathUtil = require("path");
const event_loop_spinner_1 = require("event-loop-spinner");
/**
 * Parse an npm v2/v3 project (package.json + package-lock.json) into a
 * dep-graph.
 *
 * @param pkgJsonContent raw text of package.json
 * @param pkgLockContent raw text of package-lock.json
 * @param options flags: includeDevDeps, includeOptionalDeps, strictOutOfSync
 * @returns the built dep-graph
 */
const parseNpmLockV2Project = async (pkgJsonContent, pkgLockContent, options) => {
    const { includeDevDeps, strictOutOfSync, includeOptionalDeps } = options;
    // Parse the manifest and extract the lockfile's `packages` map, then
    // hand both to the graph builder.
    const parsedPkgJson = (0, util_1.parsePkgJson)(pkgJsonContent);
    const lockfilePkgs = (0, extract_npm_lock_v2_pkgs_1.extractPkgsFromNpmLockV2)(pkgLockContent);
    return (0, exports.buildDepGraphNpmLockV2)(lockfilePkgs, parsedPkgJson, {
        includeDevDeps,
        includeOptionalDeps,
        strictOutOfSync,
    });
};
exports.parseNpmLockV2Project = parseNpmLockV2Project;
/**
 * Build a dep-graph from the `packages` section of an npm v2/v3 lockfile.
 *
 * @param npmLockPkgs the lockfile `packages` map (keys like "node_modules/a")
 * @param pkgJson parsed package.json of the root project
 * @param options flags: includeDevDeps, includeOptionalDeps, strictOutOfSync
 * @returns the built dep-graph
 */
const buildDepGraphNpmLockV2 = async (npmLockPkgs, pkgJson, options) => {
    const { includeDevDeps, strictOutOfSync, includeOptionalDeps } = options;
    const depGraphBuilder = new dep_graph_1.DepGraphBuilder({ name: 'npm' }, { name: pkgJson.name, version: pkgJson.version });
    // Direct dependencies from the manifest; peer deps are always included.
    const topLevelDeps = (0, util_1.getTopLevelDeps)(pkgJson, {
        includeDevDeps,
        includeOptionalDeps,
        includePeerDeps: true,
    });
    const rootNode = {
        id: 'root-node',
        name: pkgJson.name,
        version: pkgJson.version,
        dependencies: topLevelDeps,
        isDev: false,
        inBundle: false,
        key: '',
    };
    // Index lockfile keys by bare package name (everything after the last
    // "node_modules/") so candidate keys can be looked up in O(1) later.
    const pkgKeysByName = new Map();
    for (const lockKey of Object.keys(npmLockPkgs)) {
        const bareName = lockKey.replace(/.*node_modules\//, '');
        if (!bareName) {
            // The root entry ("") carries no package name.
            continue;
        }
        const existing = pkgKeysByName.get(bareName);
        if (existing) {
            existing.push(lockKey);
        }
        else {
            pkgKeysByName.set(bareName, [lockKey]);
        }
    }
    const visitedMap = new Set();
    await dfsVisit(depGraphBuilder, rootNode, visitedMap, npmLockPkgs, strictOutOfSync, includeDevDeps, includeOptionalDeps, [], pkgKeysByName, pkgJson.overrides);
    return depGraphBuilder.build();
};
exports.buildDepGraphNpmLockV2 = buildDepGraphNpmLockV2;
/**
 * Depth-first walk of the dependency tree: for every dependency of `node`,
 * resolve it against the lockfile via getChildNode, add it to the graph
 * builder (first visit only), recurse into it, and connect parent -> child.
 *
 * `visitedMap` holds node ids already added, so shared dependencies are
 * connected but not re-expanded. `ancestry` is the chain of ancestor
 * descriptors used for key disambiguation and overrides matching.
 */
const dfsVisit = async (depGraphBuilder, node, visitedMap, npmLockPkgs, strictOutOfSync, includeDevDeps, includeOptionalDeps, ancestry, pkgKeysByName, overrides) => {
    visitedMap.add(node.id);
    for (const [name, depInfo] of Object.entries(node.dependencies || {})) {
        // Yield to the event loop periodically so huge graphs don't block it.
        if (event_loop_spinner_1.eventLoopSpinner.isStarving()) {
            await event_loop_spinner_1.eventLoopSpinner.spin();
        }
        // Resolve this dependency's lockfile entry; the current node is
        // appended as the newest ancestry element.
        const childNode = getChildNode(name, depInfo, npmLockPkgs, strictOutOfSync, includeDevDeps, includeOptionalDeps, [
            ...ancestry,
            {
                name: node.name,
                version: node.version,
                key: node.key || '',
                inBundle: node.inBundle || false,
            },
        ], pkgKeysByName, overrides);
        // Only recurse into nodes not seen before; already-visited children
        // are just connected to this parent below.
        if (!visitedMap.has(childNode.id)) {
            (0, util_1.addPkgNodeToGraph)(depGraphBuilder, childNode, {});
            await dfsVisit(depGraphBuilder, childNode, visitedMap, npmLockPkgs, strictOutOfSync, includeDevDeps, includeOptionalDeps, [
                ...ancestry,
                {
                    name: node.name,
                    version: node.version,
                    // NOTE(review): this ancestry entry uses `node.key` while
                    // the one above uses `node.key || ''` — presumably
                    // equivalent because every node object carries `key`;
                    // confirm before unifying.
                    key: node.key,
                    inBundle: node.inBundle || false,
                },
            ], pkgKeysByName, overrides);
        }
        depGraphBuilder.connectDep(node.id, childNode.id);
    }
};
/**
 * Resolve one dependency (`name` requested as `depInfo.version`) to a node
 * descriptor using the lockfile `pkgs` map.
 *
 * Applies npm `overrides` (which may replace the requested version),
 * unwraps "npm:<alias>@<version>" aliases, locates the lockfile key, and
 * follows workspace link entries to the workspace package's own entry.
 * If the lockfile has no matching entry, either throws OutOfSyncError
 * (strictOutOfSync) or returns a placeholder node flagged with
 * `missingLockFileEntry`.
 */
const getChildNode = (name, depInfo, pkgs, strictOutOfSync, includeDevDeps, includeOptionalDeps, ancestry, pkgKeysByName, overrides) => {
    let version = depInfo.version;
    // An override, when matched against the full ancestry chain, replaces
    // the version requested by the parent.
    const override = overrides &&
        checkOverrides([...ancestry, { name, version }], overrides);
    if (override) {
        version = override;
    }
    // "npm:pkg@1.2.3" aliases: keep only the part after the last "@".
    if (version.startsWith('npm:')) {
        version = version.split('@').pop() || version;
    }
    let childNodeKey = (0, exports.getChildNodeKey)(name, version, ancestry, pkgs, pkgKeysByName);
    if (!childNodeKey) {
        if (strictOutOfSync) {
            throw new errors_1.OutOfSyncError(`${name}@${depInfo.version}`, parsers_1.LockfileType.npm);
        }
        else {
            // Lenient mode: emit a leaf node marked as missing from the
            // lockfile instead of failing the whole parse.
            return {
                id: `${name}@${depInfo.version}`,
                name: name,
                version: depInfo.version,
                dependencies: {},
                isDev: depInfo.isDev,
                missingLockFileEntry: true,
                key: '',
            };
        }
    }
    let depData = pkgs[childNodeKey];
    const resolvedToWorkspace = () => {
        // Workspaces can be set as an array, or as an object
        // { packages: [] }, this can be checked in
        // https://github.com/npm/map-workspaces/blob/ff82968a3dbb78659fb7febfce4841bf58c514de/lib/index.js#L27-L41
        if (pkgs['']['workspaces'] === undefined) {
            return false;
        }
        const workspacesDeclaration = Array.isArray(pkgs['']['workspaces']['packages'])
            ? pkgs['']['workspaces']['packages']
            : pkgs['']['workspaces'] || [];
        const resolvedPath = depData.resolved || '';
        // Normalize Windows separators so glob matching behaves uniformly.
        const fixedResolvedPath = resolvedPath.replace(/\\/g, '/');
        const normalizedWorkspacesDefn = workspacesDeclaration.map((p) => {
            return pathUtil.normalize(p).replace(/\\/g, '/');
        });
        return micromatch.isMatch(fixedResolvedPath, normalizedWorkspacesDefn);
    };
    // Check for workspaces
    if (depData['link'] && resolvedToWorkspace()) {
        // Follow the link: the workspace package's own lockfile entry holds
        // the real version and dependency lists.
        childNodeKey = depData.resolved;
        depData = pkgs[depData.resolved];
    }
    const dependencies = (0, util_1.getGraphDependencies)(depData.dependencies || {}, depInfo.isDev);
    const devDependencies = includeDevDeps
        ? (0, util_1.getGraphDependencies)(depData.devDependencies || {}, depInfo.isDev)
        : {};
    const optionalDependencies = includeOptionalDeps
        ? (0, util_1.getGraphDependencies)(depData.optionalDependencies || {}, depInfo.isDev)
        : {};
    return {
        id: `${name}@${depData.version}`,
        name: name,
        version: depData.version,
        dependencies: Object.assign(Object.assign(Object.assign({}, dependencies), devDependencies), optionalDependencies),
        isDev: depInfo.isDev,
        inBundle: depData.inBundle,
        key: childNodeKey,
    };
};
/**
 * Resolve which lockfile `packages` key corresponds to a dependency,
 * disambiguating between multiple installed copies using the dependency's
 * ancestry, bundling information, and semver matching.
 *
 * @param name dependency name as written in the parent's deps
 * @param version version or semver range requested for the dependency
 * @param ancestry chain of ancestor descriptors ({ name, version, key,
 *                 inBundle }) from the root down to the direct parent
 * @param pkgs the lockfile `packages` map
 * @param pkgKeysByName lockfile keys indexed by bare package name
 * @returns the matching lockfile key, or undefined when no unambiguous
 *          match exists
 */
const getChildNodeKey = (name, version, ancestry, pkgs, pkgKeysByName) => {
    // This is a list of all our possible options for the childKey
    const candidateKeys = pkgKeysByName.get(name);
    // Lockfile missing entry
    if (!candidateKeys) {
        return undefined;
    }
    // If we only have one candidate then we just take it
    if (candidateKeys.length === 1) {
        return candidateKeys[0];
    }
    // If we are bundled we assume we are scoped by the bundle root at least
    // otherwise the ancestry root is the root ignoring the true root
    const isBundled = ancestry[ancestry.length - 1].inBundle;
    const rootOperatingIdx = isBundled
        ? ancestry.findIndex((el) => el.inBundle === true) - 1
        : 1;
    const ancestryFromRootOperatingIdx = [
        ...ancestry.slice(rootOperatingIdx).map((el) => el.name),
        name,
    ];
    // We filter on a number of cases
    let filteredCandidates = candidateKeys.filter((candidate) => {
        // This is splitting the candidate that looks like
        // `node_modules/a/node_modules/b` into ["a", "b"]
        // To do this we remove the first node_modules substring
        // and then split on the rest
        const candidateAncestry = (candidate.startsWith('node_modules/')
            ? candidate.replace('node_modules/', '').split('/node_modules/')
            : candidate.split('/node_modules/')).map((el) => {
            if (pkgs[el]) {
                return pkgs[el].name || el;
            }
            return el;
        });
        // Check the ancestry of the candidate is a subset of
        // the current pkg. If it is not then it can't be a
        // valid key.
        const isCandidateAncestryIsSubsetOfPkgAncestry = candidateAncestry.every((pkg) => {
            return ancestryFromRootOperatingIdx.includes(pkg);
        });
        if (isCandidateAncestryIsSubsetOfPkgAncestry === false) {
            return false;
        }
        // If we are bundled we assume the bundle root is the first value
        // in the candidates scoping
        if (isBundled) {
            const doesBundledPkgShareBundleRoot = candidateAncestry[0] === ancestryFromRootOperatingIdx[0];
            if (doesBundledPkgShareBundleRoot === false) {
                return false;
            }
        }
        // So now we can check semver to filter out some values
        // if our version is valid semver
        if (semver.validRange(version)) {
            const candidatePkgVersion = pkgs[candidate].version;
            const doesVersionSatisfySemver = semver.satisfies(candidatePkgVersion, version);
            return doesVersionSatisfySemver;
        }
        return true;
    });
    if (filteredCandidates.length === 1) {
        return filteredCandidates[0];
    }
    // Try progressively shorter nesting paths built from the ancestry, e.g.
    // node_modules/a/node_modules/b before node_modules/b.
    const ancestryNames = ancestry.map((el) => el.name).concat(name);
    while (ancestryNames.length > 0) {
        const possibleKey = `node_modules/${ancestryNames.join('/node_modules/')}`;
        if (filteredCandidates.includes(possibleKey)) {
            return possibleKey;
        }
        ancestryNames.shift();
    }
    // Here we go through the ancestry backwards to find the nearest
    // ancestor package.
    // Copy before reversing: Array.prototype.reverse mutates in place and
    // `ancestry` is owned by the caller.
    const reversedAncestry = [...ancestry].reverse();
    for (let parentIndex = 0; parentIndex < reversedAncestry.length; parentIndex++) {
        const parentName = reversedAncestry[parentIndex].name;
        const possibleFilteredKeys = filteredCandidates.filter((key) => key.includes(parentName));
        if (possibleFilteredKeys.length === 1) {
            return possibleFilteredKeys[0];
        }
        if (possibleFilteredKeys.length === 0) {
            continue;
        }
        filteredCandidates = possibleFilteredKeys;
    }
    // No unambiguous match could be made.
    return undefined;
};
exports.getChildNodeKey = getChildNodeKey;
/**
 * Walk an ancestry chain (root first) against an npm `overrides` object and
 * return the override version string that applies to the final ancestry
 * element, or undefined when no override matches.
 */
const checkOverrides = (ancestry, overrides) => {
    // The root project itself is never the target of an override.
    const chain = ancestry.slice(1);
    // First traverse into overrides from root down
    for (let i = 0; i < chain.length; i++) {
        const pkg = chain[i];
        // Is this package mentioned at the current overrides level?
        const override = (0, exports.matchOverrideKey)(overrides, pkg);
        if (!override) {
            // Not mentioned here; move down the ancestry.
            continue;
        }
        const isFinalElement = i + 1 === chain.length;
        // A plain string override applies directly, but only once we have
        // reached the package the ancestry ends on.
        if (isFinalElement && typeof override === 'string') {
            return override;
        }
        // An object override may carry a "." entry meaning "this package
        // itself"; again only relevant for the final ancestry element.
        if (isFinalElement && override['.']) {
            return override['.'];
        }
        // Otherwise recurse into the nested override object looking for a
        // deeper match along the same chain.
        const nested = checkOverrides(chain, override);
        if (nested) {
            return nested;
        }
    }
    return;
};
// Here we have to match our pkg to
// possible keys in the overrides object
const matchOverrideKey = (overrides, pkg) => {
if (overrides[pkg.name]) {
return overrides[pkg.name];
}
const overrideKeysNameToVersions = Object.keys(overrides).reduce((acc, key) => {
// Split the key to separate the package name from the version spec
const atIndex = key.lastIndexOf('@');
const name = key.substring(0, atIndex);
const versionSpec = key.substring(atIndex + 1);
// Check if the package name already exists in the accumulator
if (!acc[name]) {
acc[name] = [];
}
// Add the version spec to the list of versions for this package name
acc[name].push(versionSpec);
return acc;
}, {});
const computedOverrides = overrideKeysNameToVersions[pkg.name];
if (computedOverrides) {
for (const versionSpec of computedOverrides) {
const isPkgVersionSubsetOfOverrideSpec = semver.subset(pkg.version, semver.validRange(versionSpec));
if (isPkgVersionSubsetOfOverrideSpec) {
return overrides[`${pkg.name}@${versionSpec}`];
}
}
}
return null;
};
exports.matchOverrideKey = matchOverrideKey;
//# sourceMappingURL=index.js.map
;