/**
 * inspectpack
 * An inspection tool for Webpack frontend JavaScript bundles.
 * (Compiled output: 396 lines / 395 loc, ~16 kB, JavaScript.)
 */
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.mapDepsToPackageName = exports.dependencies = exports._findPackage = exports._resolvePackageMap = exports.readPackages = exports.readPackage = exports._files = void 0;
const path_1 = require("path");
const files_1 = require("./files");
// **Testing**: Stubbable accessor for readJson.
exports._files = { readJson: files_1.readJson };
/**
 * Read a `package.json`.
 *
 * Reads are memoized into `cache` (keyed by full path) when provided, since
 * this is the only real I/O involved.
 *
 * @param {String} path full path to `package.json`
 * @param {INpmPackageMapPromise?} cache cache object
 * @returns {Promise<INpmPackage | null>} object w/ package.json info
 */
const readPackage = (path, cache) => {
    const memo = cache || {};
    // Start a read only if one isn't already cached for this path.
    if (typeof memo[path] === "undefined") {
        memo[path] = Promise.resolve()
            .then(() => exports._files.readJson(path))
            .catch((err) => {
                // A missing package.json is tolerated and maps to `null`.
                if (err.code === "ENOENT") {
                    return null;
                }
                // Prefix JSON parse failures with the offending path.
                if (err instanceof SyntaxError && (err.message || "").indexOf("Unexpected token") > -1) {
                    err.message = `JSON parsing error for ${path} - ${err.message}`;
                }
                // Anything else is a real file error.
                throw err;
            });
    }
    return memo[path];
};
exports.readPackage = readPackage;
// Helpers
// True when a package name is scoped (e.g. `@scope/pkg`).
const _isScoped = (n) => n.charAt(0) === "@";
// Build a predicate limiting to `pkgsFilter` names (no filter = include all).
const _isIncludedPkg = (pkgsFilter) => (name) => {
    if (!pkgsFilter) {
        return true;
    }
    // Package names are posix paths.
    return pkgsFilter.indexOf(files_1.toPosixPath(name)) > -1;
};
/**
 * Recursively traverse a directory and inflate `package.json` information.
 *
 * All reads are memoized into `cache`, which is also the return value.
 *
 * @param {String} path starting path
 * @param {String[]?} pkgsFilter limit to only these package names
 * @param {INpmPackageMapPromise?} cache package map
 * @returns {Promise<INpmPackageMapPromise>} map of paths to package info
 */
const readPackages = (path, pkgsFilter, cache) => {
    const memo = cache || {};
    const isIncludedPkg = _isIncludedPkg(pkgsFilter);
    const modsPath = path_1.join(path, "node_modules");
    return Promise.resolve()
        // Read the package at this root (memoized).
        .then(() => exports.readPackage(path_1.join(path, "package.json"), memo))
        // List candidate package directories one level down.
        .then(() => files_1.readDir(modsPath))
        // Expand scope directories (`@scope`) into their contained packages.
        .then((dirs) => {
            const scopes = dirs.filter(_isScoped);
            const plain = dirs.filter((d) => !_isScoped(d));
            return Promise
                .all(scopes.map((scope) => files_1.readDir(path_1.join(modsPath, scope))
                    .then((scoped) => scoped.map((f) => path_1.join(scope, f)))))
                .then((extras) => plain.concat(...extras));
        })
        // Recurse into each (filtered) package directory.
        .then((dirs) => Promise.all(dirs
            .filter(isIncludedPkg)
            .map((dir) => exports.readPackages(path_1.join(modsPath, dir), pkgsFilter, memo))))
        // The cache **is** our return value.
        .then(() => memo);
};
exports.readPackages = readPackages;
// Resolve the entire package map: await every pending `package.json` read and
// keep only the paths that actually resolved to a (non-null) package object.
const _resolvePackageMap = (pkgMap) => {
    const paths = Object.keys(pkgMap);
    return Promise
        // Resolve all paths to package objects.
        .all(paths.map((p) => pkgMap[p]))
        // Fold non-null package objects into the final object.
        .then((pkgs) => pkgs.reduce((map, pkg, i) => {
            if (pkg !== null) {
                map[paths[i]] = pkg;
            }
            return map;
        }, {}));
};
exports._resolvePackageMap = _resolvePackageMap;
const _findPackage = ({ filePath, name, pkgMap, }) => {
    // Candidate roots: the current file path plus any already-seen roots
    // strictly above it. By Node.js `require` resolution rules a package may
    // be satisfied from any ancestor's `node_modules`, and iterating in
    // directory order means higher-up roots are already present in `pkgMap`.
    //
    // Fixes https://github.com/FormidableLabs/inspectpack/issues/10
    const ancestorRoots = Object.keys(pkgMap)
        // Get directories.
        .map((k) => path_1.dirname(k))
        // Keep only strict prefixes of `filePath` — the current root itself
        // already failed the direct lookup.
        .filter((p) => p !== filePath && filePath.indexOf(p) === 0);
    const roots = [filePath].concat(ancestorRoots);
    // Walk each root. A hit on the _very first_ probe means the package was
    // not flattened; any hit after a decrement means it was.
    let isFlattened = false;
    for (const curRoot of roots) {
        // Restart from the full path each time. This repeats some probes but
        // is cheap.
        let probePath = filePath;
        while (curRoot.length <= probePath.length) {
            // Probe for `<probePath>/node_modules/<name>/package.json`.
            const pkgPath = path_1.join(probePath, "node_modules", name);
            const pkgObj = pkgMap[path_1.join(pkgPath, "package.json")];
            if (pkgObj) {
                // Sanity check: real npm packages always carry name + version,
                // so these should never fire. But, can't hurt to check.
                if (!pkgObj.name) {
                    throw new Error(`Found package without name: ${JSON.stringify(pkgObj)}`);
                }
                else if (!pkgObj.version) {
                    throw new Error(`Found package without version: ${JSON.stringify(pkgObj)}`);
                }
                return { isFlattened, pkgPath, pkgObj };
            }
            // Step up one directory; any hit from here on is flattened.
            probePath = path_1.dirname(probePath);
            isFlattened = true;
        }
    }
    return { isFlattened: false, pkgPath: null, pkgObj: null };
};
exports._findPackage = _findPackage;
// Traversal state for a `_foundMap` entry. Compiled-TS enum shape: the object
// maps name -> number and number -> name in both directions.
var IFoundMapEntryState;
(function (S) {
    ["started", "recursing", "complete"].forEach((key, value) => {
        S[S[key] = value] = key;
    });
})(IFoundMapEntryState || (IFoundMapEntryState = {}));
// - Populates `pkgMap` with installed `package.json`s
// - Creates a recursive `IDependencies[]` object that later needs to be
// flattened and ranges fixed.
//
// `foundMap` memoizes inflated packages by resolved path so each unique
// install is fully traversed only once (see issue #128 link below). Note the
// result may contain shared (and circular) object pointers — callers resolve
// those afterwards (`_resolveRefs`).
const _recurseDependencies = ({ filePath, foundMap, names, pkgMap, pkgsFilter, }) => {
    // Build up cache.
    const _foundMap = foundMap || {};
    const isIncludedPkg = _isIncludedPkg(pkgsFilter);
    return names
        .filter(isIncludedPkg)
        // Inflated current level.
        .map((name) => {
        // Find actual location (per Node.js resolution, incl. flattening).
        const { isFlattened, pkgPath, pkgObj } = exports._findPackage({ filePath, name, pkgMap });
        // Short-circuit on not founds.
        if (pkgPath === null || pkgObj === null) {
            return null;
        }
        // Build and check cache.
        const found = _foundMap[pkgPath] = _foundMap[pkgPath] || {
            dependencies: {},
            state: IFoundMapEntryState.started,
        };
        if (found.dependencies[name]) {
            // Cache hit: reuse the shared object. The empty `pkgNames` list
            // guarantees the recursion step below is skipped for this entry.
            return { pkg: found.dependencies[name], pkgNames: [], found };
        }
        // Start building object.
        const pkg = {
            dependencies: [],
            filePath: pkgPath,
            name: pkgObj.name,
            range: pkgObj.version || "*",
            version: pkgObj.version,
        };
        // Add reference to cache. Flattened hits are deliberately not cached:
        // the same name may resolve differently from other roots.
        if (!isFlattened) {
            found.dependencies[name] = pkg;
        }
        // Get list of package names to recurse.
        // We **don't** traverse devDeps here because shouldn't have with
        // real, installed packages.
        //
        // TODO(129): Traverse optionalDependencies too.
        // https://github.com/FormidableLabs/inspectpack/issues/129
        const pkgNames = Object.keys(pkgObj.dependencies || {});
        return { pkg, pkgNames, found };
    })
        // Remove empties
        .filter(Boolean)
        // Lazy recurse after all caches have been filled for current level.
        .map((obj) => {
        // TS: Have to cast because boolean filter isn't inferred correctly.
        const { pkg, pkgNames, found } = obj;
        // Only recurse when have dependencies.
        //
        // **Note**: This also serves as a way for found / cached dependency
        // hits to have this mutation step avoided since we manually return
        // `[]` on a cache hit.
        if (found.state === IFoundMapEntryState.started && pkgNames.length) {
            // Mark state before recursion so that we do only **one** traversal
            // per unique file path.
            // https://github.com/FormidableLabs/inspectpack/issues/128
            found.state = IFoundMapEntryState.recursing;
            pkg.dependencies = _recurseDependencies({
                filePath: pkg.filePath,
                foundMap: _foundMap,
                names: pkgNames,
                pkgMap,
                pkgsFilter,
            });
            found.state = IFoundMapEntryState.complete;
        }
        return pkg;
    });
};
// Walk the dependency graph and mark, for every node, whether this path
// revisits an ancestor (a circular reference). Returns a nested
// `{ isCircular, refs }` structure keyed by dependency position.
const _identifyCircularRefs = (pkg, refPath) => {
    const ancestors = refPath || [];
    // An ancestor identical (by reference) to this node closes a cycle.
    if (ancestors.some((ref) => ref === pkg)) {
        return { isCircular: true, refs: {} };
    }
    // Recurse into dependencies, recording each child's result by index.
    const childPath = ancestors.concat([pkg]);
    const refs = {};
    pkg.dependencies.forEach((dep, i) => {
        refs[i] = _identifyCircularRefs(dep, childPath);
    });
    return { isCircular: false, refs };
};
// Follow a path of dependency indexes into the circular-references map.
// Throws if the path does not exist.
const _getRef = (circRefs, refPath) => {
    let node = circRefs;
    for (const idx of refPath) {
        node = node.refs[idx];
        if (!node) {
            throw new Error(`Could not find ref path: ${refPath}`);
        }
    }
    return node;
};
// TS: null-allowing-intermediate function.
//
// Rebuild the dependency tree without pointer sharing, dropping any branch
// that closes a circular reference (those resolve to `null` and are filtered).
const _resolveRefsOrNull = (pkg, circRefs, refPath) => {
    // Compute the circular-references map once at the top-level call.
    const allRefs = circRefs || _identifyCircularRefs(pkg);
    const curPath = refPath || []; // LUT into circRefs object.
    // A circular node is elided entirely.
    if (_getRef(allRefs, curPath).isCircular) {
        return null;
    }
    return {
        dependencies: pkg.dependencies
            .map((dep, i) => _resolveRefsOrNull(dep, allRefs, curPath.concat([i])))
            .filter(Boolean),
        filePath: pkg.filePath,
        name: pkg.name,
        range: pkg.range,
        version: pkg.version,
    };
};
// Create a new object with circular / flattened references resolved.
//
// TS: We create a casted wrapper function here which is safe because the
// _incoming_ `pkg` object is non-null, which means result is non-null.
const _resolveRefs = (pkg) => _resolveRefsOrNull(pkg);
// TS: null-allowing-intermediate function.
//
// Walk the tree and patch each package's `range` with the semver range its
// parent actually declared (looked up via the parent's package.json in
// `pkgMap`); falls back to the existing range.
const _resolveRangesOrNull = (pkg, pkgMap, lastFilePath) => {
    // Look up the parent package.json (if any) to read the declared range.
    let range;
    if (lastFilePath) {
        const parentPkg = pkgMap[path_1.join(lastFilePath, "package.json")];
        if (parentPkg && parentPkg.dependencies) {
            range = parentPkg.dependencies[pkg.name];
        }
        if (!range && parentPkg && parentPkg.devDependencies) {
            range = parentPkg.devDependencies[pkg.name];
        }
    }
    // Produce a patched copy; children recurse with this node as parent.
    return Object.assign({}, pkg, {
        dependencies: pkg.dependencies
            .map((dep) => _resolveRangesOrNull(dep, pkgMap, pkg.filePath))
            .filter(Boolean),
        range: range || pkg.range,
    });
};
// Correct ranges from package map.
const _resolveRanges = (pkg, pkgMap) => _resolveRangesOrNull(pkg, pkgMap);
/**
 * Create a dependency graph as **depended**, irrespective of tree flattening.
 *
 * The basic scheme is as follows:
 * - Take in a pre-existing list of all possible package names to limit the
 *   I/O and recursion we're going to do
 * - Read in **all** potential packages from the starting file path (limited
 *   to _potential_ packages we need) to an object of file paths : package data.
 * - Recursively traverse up paths like real node resolution to find things
 *   while assembling our logical dependencies structure.
 *
 * @param {String} filePath full path to dir w/ `package.json`
 * @param {String[]?} pkgsFilter limit to only these package names
 * @param {INpmPackageMapPromise?} cache cache object
 * @returns {Promise<IDependencies | null>} dependencies graph object
 */
const dependencies = (filePath, pkgsFilter, cache) => {
    const pkgCache = cache || {};
    return Promise.resolve()
        // Read all candidate packages into the cache, then await them all.
        .then(() => exports.readPackages(filePath, pkgsFilter, pkgCache))
        .then(exports._resolvePackageMap)
        // Start processing stuff.
        .then((pkgMap) => {
            // No root package.json means no graph at all.
            const rootPkg = pkgMap[path_1.join(filePath, "package.json")];
            if (rootPkg === null || rootPkg === undefined) {
                return null;
            }
            // Have a real package, start inflating. The root traversal
            // includes devDependencies because those _could_ end up in the
            // real final bundle.
            const names = [].concat(Object.keys(rootPkg.dependencies || {}), Object.keys(rootPkg.devDependencies || {}));
            const rawPkg = {
                dependencies: _recurseDependencies({
                    filePath,
                    names,
                    pkgMap,
                    pkgsFilter,
                }),
                filePath,
                name: rootPkg.name || "ROOT",
                range: rootPkg.version || "*",
                version: rootPkg.version || "*",
            };
            // `rawPkg` is a potentially circular object with shared pointers.
            // Post-process it:
            // 1. Unwind and "flatten" the pointers.
            // 2. Resolve any circular pointers.
            // 3. Fix any ranges from last path before flattened package.
            return _resolveRanges(_resolveRefs(rawPkg), pkgMap);
        });
};
exports.dependencies = dependencies;
// Internal implementation: depth-first walk that records, for every
// (name, version, filePath) triple, each chain of packages ("skew") that
// leads to it. Mutates and returns the shared `depsMap` accumulator.
const _mapDepsToPackageName = (deps, depsMap, pkgsPath) => {
    // Extend the current chain with this package.
    const chain = (pkgsPath || []).concat({
        name: deps.name,
        range: deps.range,
        version: deps.version,
    });
    // Ensure name -> version -> filePath buckets exist, then record the chain.
    const byVersion = depsMap[deps.name] = depsMap[deps.name] || {};
    const byFilePath = byVersion[deps.version] = byVersion[deps.version] || {};
    const entry = byFilePath[deps.filePath] = byFilePath[deps.filePath] || { skews: [] };
    entry.skews.push(chain);
    // Recurse into children, sharing the accumulator.
    deps.dependencies.forEach((dep) => {
        _mapDepsToPackageName(dep, depsMap, chain);
    });
    return depsMap;
};
/**
 * Create a lookup table by package name + version.
 *
 * @param {IDependencies} deps dependencies graph
 * @returns {IDependenciesByPackageName} lookup table
 */
const mapDepsToPackageName = (deps) => _mapDepsToPackageName(deps, {});
exports.mapDepsToPackageName = mapDepsToPackageName;