// bob-the-bundler — compiled output of the `check` command.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.checkCommand = void 0;
const tslib_1 = require("tslib");
const globby_1 = tslib_1.__importDefault(require("globby"));
const zod_1 = tslib_1.__importDefault(require("zod"));
const fse = tslib_1.__importStar(require("fs-extra"));
const resolve_exports_1 = tslib_1.__importDefault(require("resolve.exports"));
const command_1 = require("../command");
const bootstrap_1 = require("./bootstrap");
const path_1 = tslib_1.__importDefault(require("path"));
const p_limit_1 = tslib_1.__importDefault(require("p-limit"));
const execa_1 = tslib_1.__importDefault(require("execa"));
const get_root_package_json_1 = require("../utils/get-root-package-json");
const get_workspaces_1 = require("../utils/get-workspaces");
const get_workspace_package_paths_1 = require("../utils/get-workspace-package-paths");
const config_1 = require("../config");
// Local alias for the zod namespace to keep the schemas readable.
const z = zod_1.default;
// A single conditional-export entry: the runtime file plus its typings file.
const ExportsMapEntry = z.object({
    default: z.string(),
    types: z.string(),
});
// The package.json "exports" map: each key maps either to a plain file path
// or to an object with `import`/`default` entries and an optional `require`
// entry (absent for ESM-only builds).
const ExportsMapModel = z.record(z.union([
    z.string(),
    z.object({
        require: z.optional(ExportsMapEntry),
        import: ExportsMapEntry,
        default: ExportsMapEntry,
    }),
]));
// The package.json "bin" field: binary name -> relative file path.
const BinModel = z.record(z.string());
exports.checkCommand = (0, command_1.createCommand)((api) => {
return {
command: "check",
describe: "Check whether all files in the exports map within the built package can be imported.",
builder(yargs) {
return yargs.options({});
},
async handler() {
const cwd = process.cwd();
const rootPackageJSON = await (0, get_root_package_json_1.getRootPackageJSON)(cwd);
const workspaces = (0, get_workspaces_1.getWorkspaces)(rootPackageJSON);
const isSinglePackage = workspaces === null;
let checkConfigs = [];
if (isSinglePackage) {
checkConfigs.push({
cwd,
packageJSON: rootPackageJSON,
});
}
else {
const workspacesPaths = await (0, get_workspace_package_paths_1.getWorkspacePackagePaths)(cwd, workspaces);
const limit = (0, p_limit_1.default)(20);
await Promise.all(workspacesPaths.map((workspacePath) => limit(async () => {
const packageJSONPath = path_1.default.join(workspacePath, "package.json");
const packageJSON = await fse.readJSON(packageJSONPath);
checkConfigs.push({
cwd: workspacePath,
packageJSON,
});
})));
}
const limit = (0, p_limit_1.default)(20);
let didFail = false;
await Promise.allSettled(checkConfigs.map(({ cwd, packageJSON }) => limit(async () => {
var _a, _b, _c;
const config = (0, config_1.getBobConfig)(packageJSON);
if (config === false || (config === null || config === void 0 ? void 0 : config.check) === false) {
api.reporter.warn(`Skip check for '${packageJSON.name}'.`);
return;
}
const distPackageJSONPath = path_1.default.join(cwd, "dist", "package.json");
const distPackageJSON = await fse.readJSON(distPackageJSONPath);
// a tell for a types-only build is the lack of main import and presence of typings
if (distPackageJSON.main === "" &&
(distPackageJSON.typings || "").endsWith("d.ts")) {
api.reporter.warn(`Skip check for '${packageJSON.name}' because it's a types-only package.`);
return;
}
try {
await checkExportsMapIntegrity({
cwd: path_1.default.join(cwd, "dist"),
packageJSON: distPackageJSON,
skipExports: new Set((_b = (_a = config === null || config === void 0 ? void 0 : config.check) === null || _a === void 0 ? void 0 : _a.skip) !== null && _b !== void 0 ? _b : []),
includesCommonJS: (_c = config === null || config === void 0 ? void 0 : config.commonjs) !== null && _c !== void 0 ? _c : true,
});
}
catch (err) {
api.reporter.error(`Integrity check of '${packageJSON.name}' failed.`);
api.reporter.log(err);
didFail = true;
return;
}
api.reporter.success(`Checked integrity of '${packageJSON.name}'.`);
})));
if (didFail) {
throw new Error("One ore more integrity checks failed.");
}
},
};
});
/**
 * Verifies that every file reachable through the built package's `exports`
 * map, its legacy `main`/ESM entrypoints, and its `bin` entries exists and
 * can be loaded via `require()` (when CommonJS is included) and `import()`
 * without errors or lingering side effects.
 *
 * @param {object} args
 * @param {string} args.cwd Directory of the built package (the `dist` folder).
 * @param {object} args.packageJSON Parsed `package.json` of the built package.
 * @param {Set<string>} args.skipExports Export paths excluded from the runtime load check.
 * @param {boolean} args.includesCommonJS Whether CommonJS artifacts should be verified.
 * @throws {Error} When an export cannot be resolved, a referenced file is
 *   missing/not executable, or loading a file fails.
 */
async function checkExportsMapIntegrity(args) {
    const exportsMapResult = ExportsMapModel.safeParse(args.packageJSON["exports"]);
    if (exportsMapResult.success === false) {
        throw new Error("Missing exports map within the 'package.json'.\n" +
            exportsMapResult.error.message +
            "\nCorrect Example:\n" +
            JSON.stringify(bootstrap_1.presetFields.exports, null, 2));
    }
    const exportsMap = exportsMapResult["data"];
    // Resolve the user-provided skip list into concrete file paths so the
    // per-file checks below can match against them.
    const cjsSkipExports = new Set();
    const esmSkipExports = new Set();
    for (const definedExport of args.skipExports) {
        if (args.includesCommonJS) {
            const cjsResult = resolve_exports_1.default.resolve(args.packageJSON, definedExport, {
                require: true,
            });
            if (typeof cjsResult === "string") {
                cjsSkipExports.add(cjsResult);
            }
        }
        const esmResult = resolve_exports_1.default.resolve(args.packageJSON, definedExport);
        if (typeof esmResult === "string") {
            esmSkipExports.add(esmResult);
        }
    }
    for (const key of Object.keys(exportsMap)) {
        if (args.includesCommonJS) {
            const cjsResult = resolve_exports_1.default.resolve(args.packageJSON, key, {
                require: true,
            });
            if (!cjsResult) {
                throw new Error(`Could not resolve CommonJS import '${key}' for '${args.packageJSON.name}'.`);
            }
            // Fixed regex: the dot must be escaped so only real .js/.cjs
            // extensions match (previously any character before "js" matched).
            if (cjsResult.match(/\.(js|cjs)$/)) {
                // The export may be a glob pattern (e.g. "./*"); expand it to
                // the actual files on disk.
                const cjsFilePaths = await (0, globby_1.default)(cjsResult, {
                    cwd: args.cwd,
                });
                const limit = (0, p_limit_1.default)(20);
                await Promise.all(cjsFilePaths.map((file) => limit(async () => {
                    if (cjsSkipExports.has(file)) {
                        return;
                    }
                    const result = await runRequireJSFileCommand({
                        path: file,
                        cwd: args.cwd,
                    });
                    if (result.exitCode !== 0) {
                        throw new Error(`Require of file '${file}' failed.\n` +
                            `In case this file is expected to raise an error please add an export to the 'bob.check.skip' field in your 'package.json' file.\n` +
                            `Error:\n` +
                            result.stderr);
                    }
                })));
            }
            else {
                // package.json or other files
                // for now we just make sure they exists
                await fse.stat(path_1.default.join(args.cwd, cjsResult));
            }
        }
        const esmResult = resolve_exports_1.default.resolve({ exports: exportsMap }, key);
        if (!esmResult) {
            // Fixed message: this branch is the ESM resolution failure
            // (previously it incorrectly said "CommonJS").
            throw new Error(`Could not resolve ESM import '${key}' for '${args.packageJSON.name}'.`);
        }
        // Fixed regex: escape the dot (see the CommonJS branch above).
        if (esmResult.match(/\.(js|mjs)$/)) {
            const esmFilePaths = await (0, globby_1.default)(esmResult, {
                cwd: args.cwd,
            });
            const limit = (0, p_limit_1.default)(20);
            await Promise.all(esmFilePaths.map((file) => limit(async () => {
                if (esmSkipExports.has(file)) {
                    return;
                }
                const result = await runImportJSFileCommand({
                    path: file,
                    cwd: args.cwd,
                });
                if (result.exitCode !== 0) {
                    throw new Error(`Import of file '${file}' failed with error:\n` + result.stderr);
                }
            })));
        }
        else {
            // package.json or other files
            // for now we just make sure they exists
            await fse.stat(path_1.default.join(args.cwd, esmResult));
        }
    }
    // Legacy (non-"exports") CommonJS entrypoint from the "main" field.
    const legacyRequire = resolve_exports_1.default.legacy(args.packageJSON, {
        fields: ["main"],
    });
    if (!legacyRequire || typeof legacyRequire !== "string") {
        throw new Error(`Could not resolve legacy CommonJS entrypoint.`);
    }
    if (args.includesCommonJS) {
        const legacyRequireResult = await runRequireJSFileCommand({
            path: legacyRequire,
            cwd: args.cwd,
        });
        if (legacyRequireResult.exitCode !== 0) {
            throw new Error(`Require of file '${legacyRequire}' failed with error:\n` +
                legacyRequireResult.stderr);
        }
    }
    else {
        // ESM-only package: "main" must still be loadable, but via import().
        const legacyRequireResult = await runImportJSFileCommand({
            path: legacyRequire,
            cwd: args.cwd,
        });
        if (legacyRequireResult.exitCode !== 0) {
            // Fixed message: this branch imports, it does not require.
            throw new Error(`Import of file '${legacyRequire}' failed with error:\n` +
                legacyRequireResult.stderr);
        }
    }
    // Legacy ESM entrypoint (e.g. the "module" field).
    const legacyImport = resolve_exports_1.default.legacy(args.packageJSON);
    if (!legacyImport || typeof legacyImport !== "string") {
        throw new Error(`Could not resolve legacy ESM entrypoint.`);
    }
    const legacyImportResult = await runImportJSFileCommand({
        path: legacyImport,
        cwd: args.cwd,
    });
    if (legacyImportResult.exitCode !== 0) {
        // Fixed message: report the file that actually failed (`legacyImport`,
        // not `legacyRequire`) and say "Import" rather than "Require".
        throw new Error(`Import of file '${legacyImport}' failed with error:\n` +
            legacyImportResult.stderr);
    }
    if (args.packageJSON.bin) {
        const result = BinModel.safeParse(args.packageJSON.bin);
        if (result.success === false) {
            throw new Error("Invalid format of bin field in package.json.\n" + result.error.message);
        }
        // Several bin names may point at the same file; check each file once.
        const cache = new Set();
        for (const filePath of Object.values(result.data)) {
            if (cache.has(filePath)) {
                continue;
            }
            cache.add(filePath);
            const absoluteFilePath = path_1.default.join(args.cwd, filePath);
            await fse.stat(absoluteFilePath).catch(() => {
                throw new Error("Could not find binary file '" + absoluteFilePath + "'.");
            });
            await fse
                .access(path_1.default.join(args.cwd, filePath), fse.constants.X_OK)
                .catch(() => {
                throw new Error("Binary file '" +
                    absoluteFilePath +
                    "' is not executable.\n" +
                    `Please set the executable bit e.g. by running 'chmod +x "${absoluteFilePath}"'.`);
            });
            const contents = await fse.readFile(absoluteFilePath, "utf-8");
            if (contents.startsWith("#!/usr/bin/env node\n") === false) {
                throw new Error("Binary file '" +
                    absoluteFilePath +
                    "' does not have a shebang.\n Please add '#!/usr/bin/env node' to the beginning of the file.");
            }
        }
    }
}
// Snippet appended to each spawned `node -e` script: if the process is still
// alive 500ms after loading the module, something keeps the event loop busy
// (an open handle or timer created as a side effect of importing), so we fail
// loudly. `.unref()` lets a side-effect-free process exit before the timer fires.
const timeout = `;setTimeout(() => { throw new Error("The Node.js process hangs. There is probably some side-effects. All exports should be free of side effects.") }, 500).unref()`;
/**
 * Spawns a Node.js child process that `require()`s the given file and fails
 * if the process hangs afterwards (see `timeout` above).
 *
 * @param {{ path: string, cwd: string }} args File to load and working directory.
 * @returns {Promise<import('execa').ExecaReturnValue>} Settled result
 *   (`reject: false`, so a non-zero exit code resolves instead of throwing).
 */
function runRequireJSFileCommand(args) {
    // JSON.stringify quotes and escapes the path so quotes or Windows
    // backslashes cannot break out of (or corrupt) the inline -e script.
    return (0, execa_1.default)("node", ["-e", `require(${JSON.stringify(args.path)})${timeout}`], {
        cwd: args.cwd,
        reject: false,
    });
}
/**
 * Spawns a Node.js child process that dynamically `import()`s the given file
 * and, once the import resolves, fails if the process hangs afterwards
 * (see `timeout` above).
 *
 * @param {{ path: string, cwd: string }} args File to load and working directory.
 * @returns {Promise<import('execa').ExecaReturnValue>} Settled result
 *   (`reject: false`, so a non-zero exit code resolves instead of throwing).
 */
function runImportJSFileCommand(args) {
    // JSON.stringify quotes and escapes the path so quotes or Windows
    // backslashes cannot break out of (or corrupt) the inline -e script.
    return (0, execa_1.default)("node", ["-e", `import(${JSON.stringify(args.path)}).then(() => {${timeout}})`], {
        cwd: args.cwd,
        reject: false,
    });
}