/**
 * bob-the-bundler — `check` command.
 *
 * Verifies that every file referenced by the built package's exports map
 * can actually be required/imported.
 */
import path from 'path';
import { execa } from 'execa';
import fse from 'fs-extra';
import { globby } from 'globby';
import pLimit from 'p-limit';
import * as resolve from 'resolve.exports';
import zod from 'zod';
import { createCommand } from '../command.js';
import { getBobConfig } from '../config.js';
import { getRootPackageJSON } from '../utils/get-root-package-json.js';
import { getWorkspacePackagePaths } from '../utils/get-workspace-package-paths.js';
import { getWorkspaces } from '../utils/get-workspaces.js';
import { presetFields } from './bootstrap.js';
// A single exports-map target: the runtime file plus its type definitions.
const ExportsMapEntry = zod.object({
    default: zod.string(),
    types: zod.string(),
});
// A conditional-export object: `import` and `default` are mandatory,
// `require` is only present for packages that also ship CommonJS.
const ConditionalExportEntry = zod.object({
    require: zod.optional(ExportsMapEntry),
    import: ExportsMapEntry,
    default: ExportsMapEntry,
});
// The whole "exports" map: each key maps to either a plain file path or a
// conditional-export object.
const ExportsMapModel = zod.record(zod.union([zod.string(), ConditionalExportEntry]));
// "engines" field: engine name -> semver range.
const EnginesModel = zod.record(zod.string(), zod.string());
// "bin" field: binary name -> file path.
const BinModel = zod.record(zod.string());
/**
 * `bob check` CLI command.
 *
 * Collects every package in the workspace (or just the root package for a
 * single-package repo), then verifies the integrity of each package's built
 * `dist` output: exports map resolvability/importability and the presence of
 * a node engines declaration. Throws if any package fails.
 */
export const checkCommand = createCommand(api => {
    return {
        command: 'check',
        describe: 'Check whether all files in the exports map within the built package can be imported.',
        builder(yargs) {
            return yargs.options({});
        },
        async handler() {
            const cwd = process.cwd();
            const rootPackageJSON = await getRootPackageJSON();
            const workspaces = await getWorkspaces(rootPackageJSON);
            const isSinglePackage = workspaces === null;
            const checkConfigs = [];
            if (isSinglePackage) {
                checkConfigs.push({
                    cwd,
                    packageJSON: rootPackageJSON,
                });
            }
            else {
                // Read every workspace package.json with bounded concurrency.
                const workspacesPaths = await getWorkspacePackagePaths(workspaces);
                const limit = pLimit(20);
                await Promise.all(workspacesPaths.map(workspacePath => limit(async () => {
                    const packageJSONPath = path.join(workspacePath, 'package.json');
                    const packageJSON = await fse.readJSON(packageJSONPath);
                    checkConfigs.push({
                        cwd: workspacePath,
                        packageJSON,
                    });
                })));
            }
            const limit = pLimit(20);
            let didFail = false;
            // allSettled: one package's failure must not abort the others;
            // failures are recorded via `didFail` inside each task instead.
            await Promise.allSettled(checkConfigs.map(({ cwd, packageJSON }) => limit(async () => {
                const config = getBobConfig(packageJSON);
                if (config === false || config?.check === false) {
                    api.reporter.warn(`Skip check for '${packageJSON.name}'.`);
                    return;
                }
                const distPackageJSONPath = path.join(cwd, 'dist', 'package.json');
                try {
                    // Read inside the try: a missing/corrupt dist/package.json
                    // previously rejected outside any catch and was silently
                    // swallowed by Promise.allSettled, making the check "pass".
                    const distPackageJSON = await fse.readJSON(distPackageJSONPath);
                    await checkExportsMapIntegrity({
                        cwd: path.join(cwd, 'dist'),
                        packageJSON: distPackageJSON,
                        skipExports: new Set(config?.check?.skip ?? []),
                        includesCommonJS: config?.commonjs ?? true,
                    });
                    await checkEngines({
                        packageJSON: distPackageJSON,
                    });
                }
                catch (err) {
                    api.reporter.error(`Integrity check of '${packageJSON.name}' failed.`);
                    api.reporter.log(err);
                    didFail = true;
                    return;
                }
                api.reporter.success(`Checked integrity of '${packageJSON.name}'.`);
            })));
            if (didFail) {
                // Fixed typo: "One ore more" -> "One or more".
                throw new Error('One or more integrity checks failed.');
            }
        },
    };
});
/**
 * Verify that every entry in the built package's exports map resolves to a
 * real file and can be loaded without error or lingering side effects.
 *
 * @param {object} args
 * @param {string} args.packageJSON - The dist package.json contents.
 * @param {string} args.cwd - The dist directory the files live in.
 * @param {Set<string>} args.skipExports - Export specifiers the user opted out
 *   of load-testing (they must still resolve/exist).
 * @param {boolean} args.includesCommonJS - Whether the package ships CJS too.
 * @throws {Error} on any missing, unresolvable, or failing file.
 */
async function checkExportsMapIntegrity(args) {
    const exportsMapResult = ExportsMapModel.safeParse(args.packageJSON['exports']);
    if (exportsMapResult.success === false) {
        throw new Error("Missing exports map within the 'package.json'.\n" +
            exportsMapResult.error.message +
            '\nCorrect Example:\n' +
            JSON.stringify(presetFields.exports, null, 2));
    }
    const exportsMap = exportsMapResult.data;
    // Resolve the user-provided skip specifiers into concrete file paths so
    // they can be matched against globbed files below.
    const cjsSkipExports = new Set();
    const esmSkipExports = new Set();
    for (const definedExport of args.skipExports) {
        if (args.includesCommonJS) {
            const cjsResult = resolve.resolve(args.packageJSON, definedExport, {
                require: true,
            })?.[0];
            if (typeof cjsResult === 'string') {
                cjsSkipExports.add(cjsResult);
            }
        }
        const esmResult = resolve.resolve(args.packageJSON, definedExport)?.[0];
        if (typeof esmResult === 'string') {
            esmSkipExports.add(esmResult);
        }
    }
    for (const key of Object.keys(exportsMap)) {
        if (args.includesCommonJS) {
            const cjsResult = resolve.resolve(args.packageJSON, key, {
                require: true,
            })?.[0];
            if (!cjsResult) {
                throw new Error(`Could not resolve CommonJS import '${key}' for '${args.packageJSON.name}'.`);
            }
            // Fixed regex: the dot was unescaped (/.(js|cjs)$/) and matched
            // any character before the extension.
            if (cjsResult.match(/\.(js|cjs)$/)) {
                // The resolved value may be a glob pattern (e.g. "./*.js").
                const cjsFilePaths = await globby(cjsResult, {
                    cwd: args.cwd,
                });
                const limit = pLimit(20);
                await Promise.all(cjsFilePaths.map(file => limit(async () => {
                    if (cjsSkipExports.has(file)) {
                        return;
                    }
                    const result = await runRequireJSFileCommand({
                        path: file,
                        cwd: args.cwd,
                    });
                    if (result.exitCode !== 0) {
                        throw new Error(`Require of file '${file}' failed.\n` +
                            `In case this file is expected to raise an error please add an export to the 'bob.check.skip' field in your 'package.json' file.\n` +
                            `Error:\n` +
                            result.stderr);
                    }
                })));
            }
            else {
                // package.json or other files
                // for now we just make sure they exists
                await fse.stat(path.join(args.cwd, cjsResult));
            }
        }
        const esmResult = resolve.resolve({ exports: exportsMap }, key)?.[0];
        if (!esmResult) {
            // Fixed message: this is the ESM resolution path, not CommonJS.
            throw new Error(`Could not resolve ESM import '${key}' for '${args.packageJSON.name}'.`);
        }
        // Fixed regex: dot escaped (was /.(js|mjs)$/).
        if (esmResult.match(/\.(js|mjs)$/)) {
            const esmFilePaths = await globby(esmResult, {
                cwd: args.cwd,
            });
            const limit = pLimit(20);
            await Promise.all(esmFilePaths.map(file => limit(async () => {
                if (esmSkipExports.has(file)) {
                    return;
                }
                const result = await runImportJSFileCommand({
                    path: file,
                    cwd: args.cwd,
                });
                if (result.exitCode !== 0) {
                    throw new Error(`Import of file '${file}' failed with error:\n` + result.stderr);
                }
            })));
        }
        else {
            // package.json or other files
            // for now we just make sure they exists
            await fse.stat(path.join(args.cwd, esmResult));
        }
    }
    // Legacy "main" entrypoint: require it for CJS packages, import otherwise.
    const legacyRequire = resolve.legacy(args.packageJSON, {
        fields: ['main'],
    });
    if (!legacyRequire || typeof legacyRequire !== 'string') {
        throw new Error(`Could not resolve legacy CommonJS entrypoint.`);
    }
    if (args.includesCommonJS) {
        const legacyRequireResult = await runRequireJSFileCommand({
            path: legacyRequire,
            cwd: args.cwd,
        });
        if (legacyRequireResult.exitCode !== 0) {
            throw new Error(`Require of file '${legacyRequire}' failed with error:\n` + legacyRequireResult.stderr);
        }
    }
    else {
        const legacyRequireResult = await runImportJSFileCommand({
            path: legacyRequire,
            cwd: args.cwd,
        });
        if (legacyRequireResult.exitCode !== 0) {
            // Fixed message: this branch imports, it does not require.
            throw new Error(`Import of file '${legacyRequire}' failed with error:\n` + legacyRequireResult.stderr);
        }
    }
    // Legacy ESM entrypoint (module/main fallback via resolve.legacy defaults).
    const legacyImport = resolve.legacy(args.packageJSON);
    if (!legacyImport || typeof legacyImport !== 'string') {
        throw new Error(`Could not resolve legacy ESM entrypoint.`);
    }
    const legacyImportResult = await runImportJSFileCommand({
        path: legacyImport,
        cwd: args.cwd,
    });
    if (legacyImportResult.exitCode !== 0) {
        // Fixed message: referenced the wrong variable (legacyRequire) and
        // said "Require" although this is an import of legacyImport.
        throw new Error(`Import of file '${legacyImport}' failed with error:\n` + legacyImportResult.stderr);
    }
    // Validate "bin" entries: each must exist, be executable, and start with
    // a node shebang.
    if (args.packageJSON.bin) {
        const result = BinModel.safeParse(args.packageJSON.bin);
        if (result.success === false) {
            throw new Error('Invalid format of bin field in package.json.\n' + result.error.message);
        }
        // Multiple bin names may point at the same file; check each file once.
        const cache = new Set();
        for (const filePath of Object.values(result.data)) {
            if (cache.has(filePath)) {
                continue;
            }
            cache.add(filePath);
            const absoluteFilePath = path.join(args.cwd, filePath);
            await fse.stat(absoluteFilePath).catch(() => {
                throw new Error("Could not find binary file '" + absoluteFilePath + "'.");
            });
            await fse.access(absoluteFilePath, fse.constants.X_OK).catch(() => {
                throw new Error("Binary file '" +
                    absoluteFilePath +
                    "' is not executable.\n" +
                    `Please set the executable bit e.g. by running 'chmod +x "${absoluteFilePath}"'.`);
            });
            const contents = await fse.readFile(absoluteFilePath, 'utf-8');
            if (!contents.startsWith('#!/usr/bin/env node\n')) {
                throw new Error("Binary file '" +
                    absoluteFilePath +
                    "' does not have a shebang.\n Please add '#!/usr/bin/env node' to the beginning of the file.");
            }
        }
    }
}
/**
 * Ensure the package.json declares a node engines range.
 * Throws when the "engines" field is missing/invalid or lacks a "node" entry.
 */
async function checkEngines(args) {
    const parsed = EnginesModel.safeParse(args.packageJSON.engines);
    const hasNodeEngine = parsed.success && parsed.data['node'] !== undefined;
    if (!hasNodeEngine) {
        throw new Error('Please specify the node engine version in your package.json.');
    }
}
// Script fragment appended to each probe command below: schedules an unref'd
// 500ms timer, so it never keeps a clean process alive — but if some other
// handle (a module side effect) does, the timer fires and throws.
const timeout = `;setTimeout(() => { throw new Error("The Node.js process hangs. There is probably some side-effects. All exports should be free of side effects.") }, 500).unref()`;
/**
 * Probe a built CommonJS file by require()-ing it in a child node process.
 * Resolves with the execa result (reject: false — caller inspects exitCode).
 */
function runRequireJSFileCommand(args) {
    const probeScript = `require('${args.path}')${timeout}`;
    return execa('node', ['-e', probeScript], {
        cwd: args.cwd,
        reject: false,
    });
}
/**
 * Probe a built ESM file by dynamically import()-ing it in a child node
 * process. Resolves with the execa result (reject: false — caller inspects
 * exitCode).
 */
function runImportJSFileCommand(args) {
    const probeScript = `import('${args.path}').then(() => {${timeout}})`;
    return execa('node', ['-e', probeScript], {
        cwd: args.cwd,
        reject: false,
    });
}