mono-pub

Simple tool for publishing your npm packages that live in a monorepo

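For orientation before reading the bundled source: the sketch below shows how the default export might be called. It is a hypothetical example, not taken from the package documentation; the publish() signature and the plugin steps (getLastRelease, extractCommits, getReleaseType, plus the optional setup, prepareAll/prepareSingle, publish and postPublish hooks) are inferred from the code that follows, and "stub-plugin" is a placeholder for real release plugins.

const publish = require("mono-pub");

// Hypothetical inline plugin: implements the three steps CombinedPlugin.setup()
// requires (getLastRelease, extractCommits, getReleaseType) plus an optional
// publish step. Real setups would use dedicated plugins instead of this stub.
const stubPlugin = {
  name: "stub-plugin",
  // Map every package name to its latest released version ({ major, minor, patch }) or null.
  async getLastRelease(packages, ctx) {
    return Object.fromEntries(packages.map((pkg) => [pkg.name, null]));
  },
  // Return the commits affecting this package since its latest release.
  async extractCommits(pkgInfo, ctx) {
    return [];
  },
  // Decide the bump for a package: "major", "minor", "patch" or "none".
  async getReleaseType(commits, isDepsChanged, ctx) {
    return commits.length > 0 || isDepsChanged ? "patch" : "none";
  },
  // Optional: actually publish the package (e.g. run `npm publish` in packageInfo.location).
  async publish(packageInfo, ctx) {
    ctx.logger.log(`Would publish ${packageInfo.name} from ${packageInfo.location}`);
  }
};

// Paths are globby patterns resolved against process.cwd(); matched directories
// are scanned for a package.json, and private packages are skipped.
publish(["packages/*"], [stubPlugin], {
  // Optional: dependency edges to ignore when computing the release order
  // (package names here are placeholders).
  ignoreDependencies: { "@my-scope/pkg-a": ["@my-scope/pkg-b"] }
}).catch((err) => {
  console.error(err);
  process.exit(1);
});
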
"use strict"; var __create = Object.create; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __getProtoOf = Object.getPrototypeOf; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( // If the importer is in node compatibility mode or this is not an ESM // file that has been converted to a CommonJS file using a Babel- // compatible transform (i.e. "__esModule" has not been set), then set // "default" to the CommonJS "module.exports" for node compatibility. isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, mod )); var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // src/index.ts var src_exports = {}; __export(src_exports, { default: () => publish }); module.exports = __toCommonJS(src_exports); var import_get3 = __toESM(require("lodash/get")); var import_isEqual2 = __toESM(require("lodash/isEqual")); // src/utils/path.ts var import_path = __toESM(require("path")); var import_fs = __toESM(require("fs")); var import_globby = __toESM(require("globby")); var import_uniq = __toESM(require("lodash/uniq")); var import_get = __toESM(require("lodash/get")); async function _scanPackage(filePath) { const content = await import_fs.promises.readFile(filePath); const pkg = JSON.parse(content.toString()); const name = (0, import_get.default)(pkg, "name", null); const privateFieldValue = (0, import_get.default)(pkg, "private", false); const isPrivate = privateFieldValue === "true" || privateFieldValue === true; return { private: isPrivate, name }; } async function getAllPackages(paths, cwd) { const matches = await (0, import_globby.default)(paths, { cwd, expandDirectories: false, onlyFiles: false, absolute: true, objectMode: true }); const fileNames = []; for (const match of matches) { if (match.dirent.isFile() && import_path.default.basename(match.name) === "package.json") { fileNames.push(match.path); } else if (match.dirent.isDirectory()) { const fullPath = match.path; const pkgPath = import_path.default.join(fullPath, "package.json"); if (import_fs.default.existsSync(pkgPath)) { fileNames.push(pkgPath); } } } const uniqPkgFileNames = (0, import_uniq.default)(fileNames); const packagesInfo = await Promise.all(uniqPkgFileNames.map(_scanPackage)); const result = []; uniqPkgFileNames.forEach((filename, idx) => { const info = packagesInfo[idx]; if (!info.private && info.name) { result.push({ name: info.name, location: filename }); } }); return result; } // src/logger.ts var import_signale = require("signale"); function getLogger({ stdout, stderr }) { return new import_signale.Signale({ config: { displayTimestamp: true, displayLabel: false }, types: { error: { color: "red", label: "", stream: [stderr], badge: "\u2716" }, log: { color: "magenta", label: "", stream: [stdout], badge: "\u2022" }, success: { color: "green", label: "", stream: 
[stdout], badge: "\u2714" }, info: { color: "blue", label: "", stream: [stdout], badge: "\u2139" } } }); } // src/utils/deps.ts var import_fs2 = require("fs"); var import_get2 = __toESM(require("lodash/get")); var import_set = __toESM(require("lodash/set")); // src/utils/versions.ts var import_isEqual = __toESM(require("lodash/isEqual")); var PATCH_REGEX = /\d+.\d+.x/i; var MINOR_REGEX = /(?<!\d+.)\d+.x/i; function versionToString(version) { return `${version.major}.${version.minor}.${version.patch}`; } function getNewVersion(latestRelease, releaseType) { if (releaseType === "none") { return latestRelease; } else if (!latestRelease) { return { major: 1, minor: 0, patch: 0 }; } else if (releaseType === "major") { return { major: latestRelease.major + 1, minor: 0, patch: 0 }; } else if (releaseType === "minor") { return { major: latestRelease.major, minor: latestRelease.minor + 1, patch: 0 }; } else { return { ...latestRelease, patch: latestRelease.patch + 1 }; } } function getVersionCriteria(currentVersion, newVersion) { if (currentVersion.includes("~") || PATCH_REGEX.test(currentVersion)) { return `~${newVersion}`; } if (currentVersion.includes("^") || MINOR_REGEX.test(currentVersion)) { return `^${newVersion}`; } return newVersion; } function isPackageChanged(newVersion, oldVersion, releaseType) { return !(releaseType === "none" || !newVersion || (0, import_isEqual.default)(newVersion, oldVersion)); } // src/utils/deps.ts async function getDependencies(packages) { const packagesNames = packages.map((pkg) => pkg.name); const result = Object.assign( {}, ...packages.map((pkg) => ({ [pkg.name]: { ...pkg, dependsOn: [] } })) ); for (const pkg of Object.values(result)) { const content = await import_fs2.promises.readFile(pkg.location); const json = JSON.parse(content.toString()); const deps = (0, import_get2.default)(json, "dependencies", {}); const depsInfo = Object.keys(deps).filter((dep) => packagesNames.includes(dep)).map((dep) => ({ name: dep, type: "dep", value: deps[dep] })); pkg.dependsOn.push(...depsInfo); const devDeps = (0, import_get2.default)(json, "devDependencies", {}); const devDepsInfo = Object.keys(devDeps).filter((dep) => packagesNames.includes(dep)).map((dep) => ({ name: dep, type: "devDep", value: devDeps[dep] })); pkg.dependsOn.push(...devDepsInfo); } return result; } function getExecutionOrder(packages, options) { const batches = []; const pkgMap = Object.fromEntries(packages.map((pkg) => [pkg.name, pkg])); const ignoreDependencies = (options == null ? void 0 : options.ignoreDependencies) || {}; const dependencies = /* @__PURE__ */ new Map(); for (const pkg of packages) { const packageIgnoreList = ignoreDependencies[pkg.name] || []; dependencies.set( pkg.name, pkg.dependsOn.map((dep) => dep.name).filter((name) => !packageIgnoreList.includes(name)) ); } while (dependencies.size > 0) { const batch = []; for (const [pkgName, pkgDeps] of dependencies) { if (pkgDeps.length === 0) { batch.push({ name: pkgName, location: pkgMap[pkgName].location }); dependencies.delete(pkgName); } } if (batch.length === 0) { throw new Error("The release cannot be done because of cyclic dependencies"); } batches.push(batch); const includedPackages = batch.map((pkg) => pkg.name); for (const [pkgName, pkgDeps] of dependencies) { dependencies.set( pkgName, pkgDeps.filter((depName) => !includedPackages.includes(depName)) ); } } if (options == null ? 
void 0 : options.batching) { return batches; } return batches.flat(); } async function patchPackageDeps(pkg, newVersions, latestReleases) { const file = await import_fs2.promises.readFile(pkg.location); const packageJson = JSON.parse(file.toString()); const version = newVersions[pkg.name] || latestReleases[pkg.name]; if (!version) { throw new TypeError( `Unable to patch package version ("${pkg.name}"), since it wasn't released before and no relevant changes were introduced` ); } (0, import_set.default)(packageJson, "version", versionToString(version)); for (const dep of pkg.dependsOn) { const depsGroup = dep.type === "dep" ? "dependencies" : "devDependencies"; const depVersion = newVersions[dep.name] ?? latestReleases[dep.name]; if (!depVersion) { throw new TypeError( `Unable to patch package dependency ("${dep.name}"), since it has no previous versions and relevant changes` ); } (0, import_set.default)(packageJson, [depsGroup, dep.name], getVersionCriteria(dep.value, versionToString(depVersion))); } await import_fs2.promises.writeFile(pkg.location, JSON.stringify(packageJson, null, 2)); } // src/utils/plugins.ts var CombinedPlugin = class { name = "CombinedPlugin"; allPlugins; versionGetter; extractor; analyzer; neededSetup = []; preparers = []; publishers = []; postPublishers = []; constructor(plugins) { this.allPlugins = plugins; } _getStepMessage(step, plugin, prev) { if (prev) { return `Found "${step}" step of "${plugin.name}" plugin. Overriding previous one from "${prev.name}"`; } return `Found "${step}" step of "${plugin.name}" plugin.`; } async setup(ctx) { const logger = ctx.logger; logger.log("Scanning received plugins"); for (const plugin of this.allPlugins) { logger.log(`Scanning ${plugin.name} plugin`); if (plugin.setup) { logger.log(this._getStepMessage("setup", plugin)); this.neededSetup.push(plugin); } if (plugin.getLastRelease) { logger.log(this._getStepMessage("getLastRelease", plugin, this.versionGetter)); this.versionGetter = plugin; } if (plugin.extractCommits) { logger.log(this._getStepMessage("extractCommits", plugin, this.extractor)); this.extractor = plugin; } if (plugin.getReleaseType) { logger.log(this._getStepMessage("getReleaseType", plugin, this.analyzer)); this.analyzer = plugin; } if (plugin.prepareAll || plugin.prepareSingle) { if (plugin.prepareAll && plugin.prepareSingle) { logger.warn( `Plugin "${plugin.name}" implements both "prepareAll" and "prepareSingle" methods, so only "prepareAll" be executed` ); } else if (plugin.prepareAll) { logger.info(this._getStepMessage("prepareAll", plugin)); } else { logger.info(this._getStepMessage("prepareSingle", plugin)); } this.preparers.push(plugin); } if (plugin.publish) { logger.log(this._getStepMessage("publish", plugin)); this.publishers.push(plugin); } if (plugin.postPublish) { logger.log(this._getStepMessage("postPublish", plugin)); this.postPublishers.push(plugin); } } if (!this.versionGetter) { logger.error('No plugins with "getLastRelease" step found'); return false; } if (!this.extractor) { logger.error('No plugins with "extractCommits" step found'); return false; } if (!this.analyzer) { logger.error('No plugins with "getReleaseType" step found'); return false; } for (const plugin of this.neededSetup) { logger.log(`Running "setup" step of "${plugin.name}" plugin`); const success = await plugin.setup(ctx); if (!success) { logger.error(`Conditions for setting up plugin ${plugin.name} have not been met. 
Aborting`); return false; } } logger.success("All plugins are set up and ready to use"); return true; } async getLastRelease(packages, ctx) { if (!this.versionGetter) { throw new Error("No versionGetter found. You should run setup step before this"); } ctx.logger.info(`Running "getLastRelease" of "${this.versionGetter.name}" plugin`); return this.versionGetter.getLastRelease(packages, ctx); } async extractCommits(pkgInfo, ctx) { if (!this.extractor) { throw new Error("No extractor found. You should run setup step before this"); } ctx.logger.log(`Running "extractCommits" of "${this.extractor.name}" plugin`); return this.extractor.extractCommits(pkgInfo, ctx); } async getReleaseType(commits, isDepsChanged, ctx) { if (!this.analyzer) { throw new Error("No analyzer found. You should run setup step before this"); } ctx.logger.log(`Running "getReleaseType" step of "${this.analyzer.name}" plugin`); return this.analyzer.getReleaseType(commits, isDepsChanged, ctx); } async prepareAll(info, ctx) { const executionOrder = getExecutionOrder(info.foundPackages, { ignoreDependencies: ctx.ignoreDependencies }); for (const plugin of this.preparers) { if (plugin.prepareAll) { ctx.logger.log(`Running "prepareAll" step of "${plugin.name}" plugin`); await plugin.prepareAll(info, ctx); } else if (plugin.prepareSingle) { for (const pkg of executionOrder) { const scopedLogger = ctx.logger.scope(pkg.name); const scopedContext = { ...ctx, logger: scopedLogger }; scopedLogger.log(`Running "prepareSingle" step of "${plugin.name}" plugin`); await plugin.prepareSingle({ ...info, targetPackage: pkg }, scopedContext); } } } } async publish(packageInfo, ctx) { ctx.logger.log("Starting to publish a package"); for (const plugin of this.publishers) { ctx.logger.log(`Running "publish" step of "${plugin.name}" plugin`); await plugin.publish(packageInfo, ctx); } } async postPublish(packageInfo, ctx) { ctx.logger.log("Running postPublish side effects"); for (const plugin of this.postPublishers) { ctx.logger.log(`Running "postPublish" step of "${plugin.name}" plugin`); await plugin.postPublish(packageInfo, ctx); } } }; // src/index.ts async function publish(paths, plugins, options = {}) { const { stdout = process.stdout, stderr = process.stderr, ignoreDependencies, ...restOptions } = options; const logger = getLogger({ stdout, stderr }); const context = { cwd: process.cwd(), env: process.env, ignoreDependencies: ignoreDependencies || {}, ...restOptions, logger }; logger.info("Starting releasing process..."); const packages = await getAllPackages(paths, context.cwd); const packagesInfo = Object.assign( {}, ...packages.map((pkg) => ({ [pkg.name]: pkg })) ); if (!packages.length) { logger.success("No matching packages found. 
Exiting..."); return; } const scopedContexts = Object.assign( {}, ...packages.map((pkg) => ({ [pkg.name]: { ...context, logger: logger.scope(pkg.name) } })) ); logger.success( `Found ${packages.length} packages to release: [${packages.map((pkg) => `"${pkg.name}"`).join(", ")}]` ); logger.log("Calculating release order based on packages dependencies and devDependencies..."); let packagesWithDeps = {}; let releaseOrder = []; try { packagesWithDeps = await getDependencies(packages); releaseOrder = getExecutionOrder(Object.values(packagesWithDeps), { ignoreDependencies }); } catch (err) { if (err instanceof Error) { logger.error(err.message); } throw err; } logger.success(`Packages release order: [${releaseOrder.map((pkg) => `"${pkg.name}"`).join(", ")}]`); logger.success( `Found ${plugins.length} plugins to form release chain: [${plugins.map((plugin) => `"${plugin.name}"`).join(", ")}]` ); logger.log("Starting the process of assembling the release chain"); const releaseChain = new CombinedPlugin(plugins); const success = await releaseChain.setup(context); if (!success) { throw new Error("Setup was not successful"); } logger.log("Searching for the latest releases..."); const latestReleases = await releaseChain.getLastRelease(packages, context); for (const [packageName, release] of Object.entries(latestReleases)) { if (!release) { scopedContexts[packageName].logger.log("No previous releases found..."); } else { scopedContexts[packageName].logger.log( `Found latest release version: ${release.major}.${release.minor}.${release.patch}` ); } } const newCommits = {}; const releaseTypes = {}; const newVersions = {}; for (const { name: pkgName } of releaseOrder) { const scopedLogger = scopedContexts[pkgName].logger; const latestRelease = (0, import_get3.default)(latestReleases, pkgName, null); const commits = await releaseChain.extractCommits( { ...packagesInfo[pkgName], latestRelease }, scopedContexts[pkgName] ); scopedLogger.info(`Found ${commits.length} commits since last release`); newCommits[pkgName] = commits; const isDepsChanged = packagesWithDeps[pkgName].dependsOn.some((dep) => releaseTypes[dep.name] !== "none"); const releaseType = await releaseChain.getReleaseType(commits, isDepsChanged, scopedContexts[pkgName]); const newVersion = getNewVersion(latestRelease, releaseType); releaseTypes[pkgName] = releaseType; newVersions[pkgName] = newVersion; if (!newVersion || releaseType === "none") { scopedLogger.info("There are no relevant changes found, so no new version won't be released"); } else if (latestRelease) { scopedLogger.info( `Found "${releaseType}" relevant changes since latest released version ("${versionToString( latestRelease )}"). 
So the next version of the package is "${versionToString(newVersion)}"` ); } else { scopedLogger.info( `Package has no previous releases, but "${releaseType}" relevant changes found, that's why package will be released under "${versionToString( newVersion )} version"` ); } } for (const pkg of Object.values(packagesWithDeps)) { if (releaseTypes[pkg.name] === "none") { continue; } scopedContexts[pkg.name].logger.log("Patching package.json with a new version criteria"); await patchPackageDeps(pkg, newVersions, latestReleases); } const foundPackages = Object.values(packagesWithDeps); const changedPackages = foundPackages.filter(({ name }) => { const newVersion = newVersions[name]; const releaseType = releaseTypes[name]; const oldVersion = latestReleases[name]; return isPackageChanged(newVersion, oldVersion, releaseType); }); await releaseChain.prepareAll( { foundPackages, changedPackages }, context ); for (const { name: packageName } of releaseOrder) { const newVersion = newVersions[packageName]; const releaseType = releaseTypes[packageName]; const oldVersion = latestReleases[packageName]; if (!isPackageChanged(newVersion, oldVersion, releaseType)) { continue; } await releaseChain.publish(packagesInfo[packageName], scopedContexts[packageName]); const bumpedDeps = []; for (const dep of packagesWithDeps[packageName].dependsOn) { const depOldVersion = latestReleases[dep.name]; const depNewVersion = newVersions[dep.name]; const depReleaseType = releaseTypes[dep.name]; if (!depNewVersion || (0, import_isEqual2.default)(depNewVersion, depOldVersion) || depReleaseType === "none") { continue; } bumpedDeps.push({ ...packagesInfo[dep.name], oldVersion: depOldVersion, newVersion: depNewVersion, releaseType: depReleaseType }); } const releasedPackageInfo = { ...packagesInfo[packageName], oldVersion, newVersion, releaseType, commits: newCommits[packageName], bumpedDeps }; await releaseChain.postPublish(releasedPackageInfo, scopedContexts[packageName]); scopedContexts[packageName].logger.success("Package is successfully published!"); } } //# sourceMappingURL=index.js.map // fix-cjs-exports if (module.exports.default) { Object.assign(module.exports.default, module.exports); module.exports = module.exports.default; delete module.exports.default; }