@changesets/cli
Version:
Organise your package versioning and publishing to make both contributors and maintainers happy
1,060 lines (912 loc) • 34.4 kB
JavaScript
// CommonJS/ESM interop shim: unwrap the `default` export when the required
// module is an object carrying one; pass everything else through untouched.
function _interopDefault(ex) {
  if (ex && typeof ex === "object" && "default" in ex) {
    return ex["default"];
  }
  return ex;
}
var meow = _interopDefault(require('meow'));
var config = require('@changesets/config');
var fs = _interopDefault(require('fs-extra'));
var path = _interopDefault(require('path'));
var getWorkspaces$1 = _interopDefault(require('get-workspaces'));
var chalk = _interopDefault(require('chalk'));
var util = _interopDefault(require('util'));
var pkgDir = _interopDefault(require('pkg-dir'));
var uuid = _interopDefault(require('uuid/v1'));
var termSize = _interopDefault(require('term-size'));
var enquirer = require('enquirer');
var git = require('@changesets/git');
var _objectSpread = _interopDefault(require('@babel/runtime/helpers/objectSpread'));
var prettier = _interopDefault(require('prettier'));
var humanId = _interopDefault(require('human-id'));
var semver = _interopDefault(require('semver'));
var boxen = _interopDefault(require('boxen'));
var outdent = _interopDefault(require('outdent'));
var applyReleasePlan = _interopDefault(require('@changesets/apply-release-plan'));
var readChangesets = _interopDefault(require('@changesets/read'));
var getDependentsgraph = _interopDefault(require('get-dependents-graph'));
var assembleReleasePlan = _interopDefault(require('@changesets/assemble-release-plan'));
var pLimit = _interopDefault(require('p-limit'));
var spawn = _interopDefault(require('spawndamnit'));
var isCI = _interopDefault(require('is-ci'));
var table = _interopDefault(require('tty-table'));
var getReleasePlan = _interopDefault(require('@changesets/get-release-plan'));
// Every log line is branded with the changesets butterfly.
let prefix = "🦋 ";
// Prefix each line of the formatted message with the butterfly (plus an
// optional severity label such as "info"/"warn"). Note util.format is called
// with a leading "" so each message line starts with a single space.
function format(args, customPrefix) {
  const fullPrefix =
    customPrefix === undefined ? prefix : prefix + " " + customPrefix;
  const body = util.format("", ...args);
  return fullPrefix + body.split("\n").join("\n" + fullPrefix + " ");
}
// Plain, unlabelled line on stdout.
function log(...messages) {
  console.log(format(messages));
}
// Labelled lines go to stderr so machine-readable stdout stays clean.
function info(...messages) {
  console.error(format(messages, chalk.cyan("info")));
}
function warn(...messages) {
  console.error(format(messages, chalk.yellow("warn")));
}
function error(...messages) {
  console.error(format(messages, chalk.red("error")));
}
// Success messages are user-facing, so they go to stdout.
function success(...messages) {
  console.log(format(messages, chalk.green("success")));
}
// Bundled logging facade used throughout the CLI.
var logger = {
  log,
  info,
  warn,
  error,
  success,
  format
};
// Walk up from `cwd` to the nearest directory containing a package.json.
// Throws when no project root can be found.
async function getProjectDirectory(cwd) {
  const projectDir = await pkgDir(cwd);
  if (projectDir) {
    return projectDir;
  }
  throw new Error("Could not find project directory");
}
// The .changeset folder always lives at the project root.
async function getChangesetBase(cwd) {
  return path.resolve(await getProjectDirectory(cwd), ".changeset");
}
// Root of the installed @changesets/cli package; used to locate the bundled
// default files that get copied into freshly-initialized projects.
const pkgPath = path.dirname(require.resolve("@changesets/cli/package.json"));
/**
 * `changeset init`: set up the `.changeset` folder.
 * - No folder yet: copy the bundled default files and write the default v2 config.
 * - Folder exists with a v1 `config.js` (or no config at all): write the
 *   default v2 `config.json` and tell the user to migrate their options.
 * - Folder exists with `config.json`: warn that setup is already complete.
 */
async function init(cwd) {
  const changesetBase = await getChangesetBase(cwd);
  if (fs.existsSync(changesetBase)) {
    if (!fs.existsSync(path.join(changesetBase, "config.json"))) {
      if (fs.existsSync(path.join(changesetBase, "config.js"))) {
        logger.error("It looks like you're using the version 1 `.changeset/config.js` file");
        logger.error("The format of the config object has significantly changed in v2 as well");
        logger.error(" - we thoroughly recommend looking at the changelog for this package for what has changed");
        logger.error("Changesets will write the defaults for the new config, remember to transfer your options into the new config at `.changeset/config.json`");
      } else {
        logger.error("It looks like you don't have a config file");
        logger.error("The default config file will be written at `.changeset/config.json`");
      }
      // Either way, drop in the default v2 config for the user to edit.
      await fs.writeFile(path.resolve(changesetBase, "config.json"), JSON.stringify(config.defaultWrittenConfig, null, 2));
    } else {
      logger.warn("It looks like you already have changesets initialized. You should be able to run changeset commands no problems.");
    }
  } else {
    // Fresh project: copy README/default files then write the default config.
    await fs.copy(path.resolve(pkgPath, "./default-files"), changesetBase);
    await fs.writeFile(path.resolve(changesetBase, "config.json"), JSON.stringify(config.defaultWrittenConfig, null, 2));
    logger.log(chalk`Thanks for choosing {green changesets} to help manage your versioning and publishing\n`);
    logger.log("You should be set up to start using changesets now!\n");
    logger.info("We have added a `.changeset` folder, and a couple of files to help you out:");
    logger.info(chalk`- {blue .changeset/README.md} contains information about using changesets`);
    logger.info(chalk`- {blue .changeset/config.json} is our default config`);
  }
}
/* Notes on using inquirer:
* Each question needs a key, as inquirer is assembling an object behind-the-scenes.
* At each call, the entire responses object is returned, so we need a unique
* identifier for the name every time. This is why we are using UUIDs.
*/
// Cap list prompts at the visible terminal height (minus some breathing
// room), but never show fewer than 10 rows.
const limit = Math.max(10, termSize().rows - 5);
// Shared Ctrl-C handler for every interactive prompt.
let cancelFlow = () => {
  logger.success("Cancelled... 👋 ");
  process.exit();
};
// Multi-select autocomplete prompt. Each prompt gets a unique name because
// enquirer accumulates all answers into one object keyed by name.
async function askCheckboxPlus(message, choices, format) {
  const name = `CheckboxPlus-${uuid()}`;
  try {
    const responses = await enquirer.prompt({
      type: "autocomplete",
      name,
      message,
      // @ts-ignore
      prefix,
      multiple: true,
      choices,
      format,
      limit,
      onCancel: cancelFlow
    });
    return responses[name];
  } catch (err) {
    // Mirror the original .catch behavior: log and resolve undefined.
    logger.error(err);
  }
}
// Free-text input prompt; resolves to the entered string.
async function askQuestion(message) {
  const name = `Question-${uuid()}`;
  try {
    const responses = await enquirer.prompt([{
      type: "input",
      message,
      name,
      // @ts-ignore
      prefix,
      onCancel: cancelFlow
    }]);
    return responses[name];
  } catch (err) {
    // Mirror the original .catch behavior: log and resolve undefined.
    logger.error(err);
  }
}
// Yes/no confirmation prompt, defaulting to "yes".
async function askConfirm(message) {
  const name = `Confirm-${uuid()}`;
  try {
    const responses = await enquirer.prompt([{
      message,
      name,
      // @ts-ignore
      prefix,
      type: "confirm",
      initial: true,
      onCancel: cancelFlow
    }]);
    return responses[name];
  } catch (err) {
    // Mirror the original .catch behavior: log and resolve undefined.
    logger.error(err);
  }
}
/**
 * Persist a changeset to `.changeset/<human-id>.md` and return its id.
 * The file is front matter (one `"pkg": bumpType` line per release) followed
 * by the free-form summary, formatted with the project's prettier config.
 */
async function writeChangeset(changeset, cwd) {
  const {
    summary,
    releases
  } = changeset;
  const changesetBase = await getChangesetBase(cwd); // Worth understanding that the ID merely needs to be a unique hash to avoid git conflicts
  // experimenting with human readable ids to make finding changesets easier
  const changesetID = humanId({
    separator: "-",
    capitalize: false
  });
  const prettierConfig = await prettier.resolveConfig(cwd);
  const newChangesetPath = path.resolve(changesetBase, `${changesetID}.md`); // NOTE: The quotation marks in here are really important even though they are
  // not spec for yaml. This is because package names can contain special
  // characters that will otherwise break the parsing step
  const changesetContents = `---
${releases.map(release => `"${release.name}": ${release.type}`).join("\n")}
---
${summary}
`;
  // Run the markdown through prettier so the written file matches repo style.
  await fs.writeFile(newChangesetPath, prettier.format(changesetContents, _objectSpread({}, prettierConfig, {
    parser: "markdown"
  })));
  return changesetID;
}
// Wrapper around get-workspaces pinned to the tools changesets supports.
// A `null` result (no workspaces found) is normalized to an empty array.
async function getWorkspaces (opts) {
  const found = await getWorkspaces$1(_objectSpread({
    tools: ["yarn", "bolt", "root"]
  }, opts));
  return found === null ? [] : found;
}
// @flow
// Chalk color helpers used by the interactive changeset-creation flow.
const { green, yellow, red, bold, blue, cyan } = chalk;
/**
 * Interactively pick which packages this changeset covers.
 * With a single workspace package there is nothing to choose, so its name is
 * returned directly; otherwise a multi-select is shown with packages grouped
 * under "changed"/"unchanged" headings and re-prompted until non-empty.
 */
async function getPackagesToRelease(changedPackages, allPackages) {
  function askInitialReleaseQuestion(defaultChoiceList) {
    return askCheckboxPlus( // TODO: Make this wording better
    `Which packages would you like to include?`, defaultChoiceList, x => {
      // this removes changed packages and unchanged packages from the list
      // of packages shown after selection
      if (Array.isArray(x)) {
        return x.filter(x => x !== "changed packages" && x !== "unchanged packages").map(x => cyan(x)).join(", ");
      }
      return x;
    });
  }
  if (allPackages.length > 1) {
    const unchangedPackagesNames = allPackages.map(({
      name
    }) => name).filter(name => !changedPackages.includes(name));
    // The group headings double as enquirer choices, so they get filtered
    // back out of the final answer below. Empty groups are dropped.
    const defaultChoiceList = [{
      name: "changed packages",
      choices: changedPackages
    }, {
      name: "unchanged packages",
      choices: unchangedPackagesNames
    }].filter(({
      choices
    }) => choices.length !== 0);
    let packagesToRelease = await askInitialReleaseQuestion(defaultChoiceList);
    if (packagesToRelease.length === 0) {
      // Keep re-asking until at least one real package is selected.
      do {
        logger.error("You must select at least one package to release");
        logger.error("(You most likely hit enter instead of space!)");
        packagesToRelease = await askInitialReleaseQuestion(defaultChoiceList);
      } while (packagesToRelease.length === 0);
    }
    return packagesToRelease.filter(pkgName => pkgName !== "changed packages" && pkgName !== "unchanged packages");
  }
  return [allPackages[0].name];
}
// "name@version" with both halves bolded for terminal emphasis.
function formatPkgNameAndVersion(pkgName, version) {
  return [bold(pkgName), bold(version)].join("@");
}
/**
 * Interactively assemble a changeset: pick the packages to release, choose a
 * bump type for each (anything not picked as major/minor defaults to patch),
 * and collect a non-empty summary for the changelog.
 * Returns { summary, releases: [{ name, type }] }.
 */
async function createChangeset(changedPackages, cwd) {
  const allPackages = await getWorkspaces({
    cwd
  });
  const packagesToRelease = await getPackagesToRelease(changedPackages, allPackages);
  let pkgJsonsByName = new Map(allPackages.map(({
    name,
    config
  }) => [name, config]));
  const releases = [];
  let pkgsLeftToGetBumpTypeFor = new Set(packagesToRelease);
  let pkgsThatShouldBeMajorBumped = await askCheckboxPlus(bold(`Which packages should have a ${red("major")} bump?`), packagesToRelease.map(pkgName => {
    return {
      name: pkgName,
      message: formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version)
    };
  }));
  for (const pkgName of pkgsThatShouldBeMajorBumped) {
    // for packages that are under v1, we want to make sure major releases are intended,
    // as some repo-wide sweeping changes have mistakenly release first majors
    // of packages.
    let {
      version
    } = pkgJsonsByName.get(pkgName);
    if (semver.lt(version, "1.0.0")) {
      // prettier-ignore
      logger.log(yellow(`WARNING: Releasing a major version for ${green(pkgName)} will be its ${red('first major release')}.`));
      // BUGFIX: removed the stray "$" that trailed "maintainers" in this message.
      logger.log(yellow(`If you are unsure if this is correct, contact the package's maintainers ${red("before committing this changeset")}.`));
      // BUGFIX: fixed the duplicated "want" ("you want still want") in this prompt.
      let shouldReleaseFirstMajor = await askConfirm(bold(`Are you sure you still want to release the ${red("first major release")} of ${pkgName}?`));
      if (!shouldReleaseFirstMajor) {
        continue;
      }
    }
    pkgsLeftToGetBumpTypeFor.delete(pkgName);
    releases.push({
      name: pkgName,
      type: "major"
    });
  }
  if (pkgsLeftToGetBumpTypeFor.size !== 0) {
    let pkgsThatShouldBeMinorBumped = await askCheckboxPlus(bold(`Which packages should have a ${green("minor")} bump?`), [...pkgsLeftToGetBumpTypeFor].map(pkgName => {
      return {
        name: pkgName,
        message: formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version)
      };
    }));
    for (const pkgName of pkgsThatShouldBeMinorBumped) {
      pkgsLeftToGetBumpTypeFor.delete(pkgName);
      releases.push({
        name: pkgName,
        type: "minor"
      });
    }
  }
  // Everything not selected as major or minor is patch-bumped without prompting.
  if (pkgsLeftToGetBumpTypeFor.size !== 0) {
    logger.log(`The following packages will be ${blue("patch")} bumped:`);
    pkgsLeftToGetBumpTypeFor.forEach(pkgName => {
      logger.log(formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version));
    });
    for (const pkgName of pkgsLeftToGetBumpTypeFor) {
      releases.push({
        name: pkgName,
        type: "patch"
      });
    }
  }
  logger.log("Please enter a summary for this change (this will be in the changelogs)");
  let summary = await askQuestion("Summary");
  while (summary.length === 0) {
    logger.error("A summary is required for the changelog! 😪");
    summary = await askQuestion("Summary");
  }
  return {
    summary,
    releases
  };
}
// Print a grouped summary of the releases in this changeset, plus a boxed
// reminder that incompatible dependents get patch-bumped automatically.
function printConfirmationMessage(changeset) {
  // Names of every release with the given bump type.
  const getReleasesOfType = type =>
    changeset.releases.filter(release => release.type === type).map(release => release.name);
  logger.log("=== Releasing the following packages ===");
  const majorReleases = getReleasesOfType("major");
  const minorReleases = getReleasesOfType("minor");
  const patchReleases = getReleasesOfType("patch");
  if (majorReleases.length > 0) {
    logger.log(`${chalk.green("[Major]")}\n ${majorReleases.join(", ")}`);
  }
  if (minorReleases.length > 0) {
    logger.log(`${chalk.green("[Minor]")}\n ${minorReleases.join(", ")}`);
  }
  if (patchReleases.length > 0) {
    logger.log(`${chalk.green("[Patch]")}\n ${patchReleases.join(", ")}`);
  }
  const message = outdent`
${chalk.red("========= NOTE ========")}
All dependents of these packages that will be incompatible with the new version will be ${chalk.red("patch bumped")} when this changeset is applied.`;
  logger.log(boxen(message, {
    borderStyle: "double",
    align: "center"
  }));
}
/**
 * `changeset add` (also the default command): interactively build a
 * changeset, confirm it, write it to disk, and optionally commit it when
 * `config.commit` is set.
 */
async function add(cwd, config) {
  const changesetBase = await getChangesetBase(cwd);
  if (!fs.existsSync(changesetBase)) {
    logger.warn("There is no .changeset folder. ");
    logger.warn("If this is the first time `changesets` have been used in this project, run `yarn changesets init` to get set up.");
    logger.warn("If you expected there to be changesets, you should check git history for when the folder was removed to ensure you do not lose any configuration.");
    return;
  }
  // Pre-select packages that differ from master as the likely release set.
  const changedPackages = await git.getChangedPackagesSinceMaster(cwd);
  const changePackagesName = changedPackages.filter(a => a).map(pkg => pkg.name);
  const newChangeset = await createChangeset(changePackagesName, cwd);
  printConfirmationMessage(newChangeset);
  const confirmChangeset = await askConfirm("Is this your desired changeset?");
  if (confirmChangeset) {
    const changesetID = await writeChangeset(newChangeset, cwd);
    if (config.commit) {
      await git.add(path.resolve(changesetBase, `${changesetID}.md`), cwd);
      await git.commit(`CHANGESET: ${changesetID}. ${newChangeset.summary}`, cwd);
      logger.log(chalk.green("Changeset added and committed"));
    } else {
      logger.log(chalk.green("Changeset added! - you can now commit it\n"));
    }
    // Nudge authors of breaking changes to flesh out the summary.
    let hasMajorChange = [...newChangeset.releases].find(c => c.type === "major");
    if (hasMajorChange) {
      logger.warn("This Changeset includes a major change and we STRONGLY recommend adding more information to the changeset:");
      logger.warn("WHAT the breaking change is");
      logger.warn("WHY the change was made");
      logger.warn("HOW a consumer should update their code");
    } else {
      logger.log(chalk.green("If you want to modify or expand on the changeset summary, you can find it here"));
    }
    logger.info(chalk.blue(path.resolve(changesetBase, `${changesetID}.md`)));
  }
}
// @flow
// folder, and tidy up the subfolders
// Delete every immediate subdirectory of folderPath that contains no entries.
// Non-directory entries and non-empty directories are left untouched.
const removeEmptyFolders = folderPath => {
  for (const entry of fs.readdirSync(folderPath)) {
    const candidate = path.resolve(folderPath, entry);
    const isDir = fs.statSync(candidate).isDirectory();
    if (isDir && fs.readdirSync(candidate).length === 0) {
      fs.rmdirSync(candidate);
    }
  }
};
// Remove every subdirectory of folderPath along with its contents.
// A missing folderPath is a no-op; loose files at the top level are kept.
const removeFolders = folderPath => {
  if (!fs.existsSync(folderPath)) return;
  for (const entry of fs.readdirSync(folderPath)) {
    const target = path.resolve(folderPath, entry);
    if (fs.statSync(target).isDirectory()) {
      // fs-extra: empty the directory first so rmdir succeeds.
      fs.emptyDirSync(target);
      fs.rmdirSync(target);
    }
  }
};
// TODO take in cwd, and fetch changesetBase ourselves
// Read all v1 (folder-based) changesets under changesetBase. Each changeset
// folder holds changes.md (summary) and changes.json (releases); the commit
// that added it is looked up from git. Resolves to an array of changesets.
async function getChangesets(changesetBase, sinceMasterOnly) {
  if (!fs.existsSync(changesetBase)) {
    throw new Error("There is no .changeset directory in this project");
  }
  // this needs to support just not dealing with dirs that aren't set up properly
  const entries = fs.readdirSync(changesetBase);
  let changesetDirs = entries.filter(entry => fs.lstatSync(path.join(changesetBase, entry)).isDirectory());
  if (sinceMasterOnly) {
    const newChangesets = await git.getChangedChangesetFilesSinceMaster(changesetBase);
    const newHashes = newChangesets.map(c => c.split("/")[1]);
    changesetDirs = changesetDirs.filter(dir => newHashes.includes(dir));
  }
  const reads = changesetDirs.map(async changesetDir => {
    const summary = fs.readFileSync(path.join(changesetBase, changesetDir, "changes.md"), "utf-8");
    const jsonPath = path.join(changesetBase, changesetDir, "changes.json");
    const json = require(jsonPath);
    const commit = await git.getCommitThatAddsFile(jsonPath, changesetBase);
    return _objectSpread({}, json, {
      summary,
      commit,
      id: changesetDir
    });
  });
  return Promise.all(reads);
}
let importantSeparator = chalk.red("===============================IMPORTANT!==============================="); // this function only exists while we wait for v1 changesets to be obsoleted
// and should be deleted before v3
/**
 * Collect any legacy (v1, folder-based) changesets and loudly warn that v2 is
 * still applying them. Returns [] when none exist, otherwise an array of
 * { releases, id, summary } stripped to the fields the release plan needs.
 */
async function getOldChangesetsAndWarn(cwd) {
  let changesetBase = await getChangesetBase(cwd);
  removeEmptyFolders(changesetBase);
  let unreleasedChangesets = await getChangesets(changesetBase, false);
  if (unreleasedChangesets.length === 0) {
    return [];
  }
  logger.warn(importantSeparator);
  logger.warn("There were old changesets from version 1 found");
  // BUGFIX: corrected the typo "Theses" -> "These" in this warning.
  logger.warn("These are being applied now but the dependents graph may have changed");
  logger.warn("Make sure you validate all your dependencies");
  logger.warn("In a future version, we will no longer apply these old changesets, and will instead throw here");
  logger.warn("----------------------------------------------------------------------");
  let thing = unreleasedChangesets.map(({
    releases,
    id,
    summary
  }) => ({
    releases,
    id,
    summary
  }));
  return thing;
} // this function only exists while we wait for v1 changesets to be obsoleted
// and should be deleted before v3
// Delete all legacy v1 changeset folders, committing the removal when the
// project config asks for commits.
async function cleanupOldChangesets(cwd, config) {
  const changesetBase = await getChangesetBase(cwd);
  removeFolders(changesetBase);
  if (!config.commit) return;
  await git.add(changesetBase, cwd);
  logger.log("Committing removing old changesets...");
  await git.commit(`removing legacy changesets`, cwd);
}
/**
 * `changeset version`: consume all pending changesets (legacy v1 + v2),
 * assemble a release plan, and apply the version bumps and changelogs.
 * Legacy changeset folders are cleaned up only after being applied.
 */
async function version(cwd, config) {
  let oldChangesets = await getOldChangesetsAndWarn(cwd);
  let newChangesets = await readChangesets(cwd, false);
  let changesets = [...oldChangesets, ...newChangesets];
  if (changesets.length === 0) {
    logger.warn("No unreleased changesets found, exiting.");
    return;
  }
  let workspaces = await getWorkspaces$1({
    cwd,
    tools: ["yarn", "bolt", "root"]
  });
  // BUGFIX: corrected the typo "workspaes" in this error message.
  if (!workspaces) throw new Error("Could not resolve workspaces for current working directory");
  let dependentsGraph = await getDependentsgraph({
    cwd
  }); // NOTE: in v3 when we are not support the old changeset format we can use `getReleasePlan` here
  let releasePlan = await assembleReleasePlan(changesets, workspaces, dependentsGraph, config);
  await applyReleasePlan(releasePlan, cwd, config);
  if (oldChangesets.length > 0) {
    await cleanupOldChangesets(cwd, config);
  }
  if (config.commit) {
    logger.log("All files have been updated and committed. You're ready to publish!");
  } else {
    logger.log("All files have been updated. Review them and commit at your leisure");
  }
  logger.warn("If you alter version changes in package.jsons, make sure to run bolt before publishing to ensure the repo is in a valid state");
}
// Error carrying a numeric exit code; the top-level handler turns it into
// `process.exit(code)` instead of printing a stack trace.
class ExitError extends Error {
  constructor(code) {
    super(`the process exited with code: ${code}`);
    this.code = code;
  }
}
// Throttle concurrent npm child processes (info/publish) to 40 at a time.
const npmRequestLimit = pLimit(40);
// Resolve the registry to pass to npm child processes. Yarn's own registry is
// treated as "unset" (undefined) so npm falls back to its real default; any
// other explicitly-configured registry is passed through unchanged.
function getCorrectRegistry() {
  const configured = process.env.npm_config_registry;
  return configured === "https://registry.yarnpkg.com" ? undefined : configured;
}
/**
 * Ask npm (`npm profile get --json`) whether the current user has 2FA enabled
 * for writes, which determines whether publishes must supply an OTP.
 * @throws {ExitError} when `npm profile get` reports an error.
 */
async function getTokenIsRequired() {
  // Due to a super annoying issue in yarn, we have to manually override this env variable
  // See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
  const envOverride = {
    npm_config_registry: getCorrectRegistry()
  };
  let result = await spawn("npm", ["profile", "get", "--json"], {
    env: Object.assign({}, process.env, envOverride)
  });
  let json = JSON.parse(result.stdout.toString());
  if (json.error) {
    logger.error(`an error occurred while running \`npm profile get\`: ${json.error.code}`);
    logger.error(json.error.summary);
    // BUGFIX: this previously re-logged the summary a second time; log the
    // optional extra detail instead, matching the handling in infoAllow404.
    if (json.error.detail) logger.error(json.error.detail);
    throw new ExitError(1);
  }
  return json.tfa.mode === "auth-and-writes";
}
// Fetch `npm info <pkg> --json`, throttled through the shared request limiter.
function info$1(pkgName) {
  return npmRequestLimit(async () => {
    logger.info(`npm info ${pkgName}`);
    // Due to a couple of issues with yarnpkg, we also want to override the npm registry when doing
    // npm info.
    // Issues: We sometimes get back cached responses, i.e old data about packages which causes
    // `publish` to behave incorrectly. It can also cause issues when publishing private packages
    // as they will always give a 404, which will tell `publish` to always try to publish.
    // See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
    const env = Object.assign({}, process.env, {
      npm_config_registry: getCorrectRegistry()
    });
    const result = await spawn("npm", ["info", pkgName, "--json"], { env });
    return JSON.parse(result.stdout.toString());
  });
}
/**
 * Run `npm info` for a package, treating a 404 (package never published) as a
 * valid "not published yet" result instead of a failure.
 * Returns { published, pkgInfo }.
 * @throws {ExitError} on any npm error other than E404.
 */
async function infoAllow404(pkgName) {
  let pkgInfo = await info$1(pkgName);
  if (pkgInfo.error && pkgInfo.error.code === "E404") {
    // BUGFIX: corrected the typo "Recieved" -> "Received" in both messages below.
    logger.warn(`Received 404 for npm info ${chalk.cyan(`"${pkgName}"`)}`);
    return {
      published: false,
      pkgInfo: {}
    };
  }
  if (pkgInfo.error) {
    logger.error(`Received an unknown error code: ${pkgInfo.error.code} for npm info ${chalk.cyan(`"${pkgName}"`)}`);
    logger.error(pkgInfo.error.summary);
    if (pkgInfo.error.detail) logger.error(pkgInfo.error.detail);
    throw new ExitError(1);
  }
  return {
    published: true,
    pkgInfo
  };
}
// Only one OTP prompt may be on screen at a time.
let otpAskLimit = pLimit(1);
// Prompt for a one-time password and cache it on the shared 2FA state.
let askForOtpCode = twoFactorState => otpAskLimit(async () => {
  // Another queued caller may have filled the token in while we waited.
  if (twoFactorState.token !== null) return twoFactorState.token;
  logger.info("This operation requires a one-time password from your authenticator.");
  const code = await askQuestion("Enter one-time password:");
  twoFactorState.token = code;
  return code;
});
// Return the cached OTP token when available, otherwise prompt for one.
let getOtpCode = async twoFactorState => {
  if (twoFactorState.token !== null) return twoFactorState.token;
  return askForOtpCode(twoFactorState);
}; // we have this so that we can do try a publish again after a publish without
// the call being wrapped in the npm request limit and causing the publishes to potentially never run
/**
 * Run `npm publish` in a package directory. Appends --otp when 2FA is
 * required (never in CI), and on an EOTP failure clears the stale token and
 * recurses so the user is prompted for a fresh code.
 * Returns { published: boolean }.
 */
async function internalPublish(pkgName, opts, twoFactorState) {
  let publishFlags = opts.access ? ["--access", opts.access] : [];
  if ((await twoFactorState.isRequired) && !isCI) {
    let otpCode = await getOtpCode(twoFactorState);
    publishFlags.push("--otp", otpCode);
  } // Due to a super annoying issue in yarn, we have to manually override this env variable
  // See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
  const envOverride = {
    npm_config_registry: getCorrectRegistry()
  };
  let {
    stdout
  } = await spawn("npm", ["publish", "--json", ...publishFlags], {
    cwd: opts.cwd,
    env: Object.assign({}, process.env, envOverride)
  });
  let json = JSON.parse(stdout.toString());
  if (json.error) {
    // EOTP: the one-time password was missing or has expired; retry interactively.
    if (json.error.code === "EOTP" && !isCI) {
      if (twoFactorState.token !== null) {
        // the current otp code must be invalid since it errored
        twoFactorState.token = null;
      } // just in case this isn't already true
      twoFactorState.isRequired = Promise.resolve(true);
      return internalPublish(pkgName, opts, twoFactorState);
    }
    logger.error(`an error occurred while publishing ${pkgName}: ${json.error.code}`, json.error.summary, json.error.detail ? "\n" + json.error.detail : "");
    return {
      published: false
    };
  }
  return {
    published: true
  };
}
// Public entry for publishing one package: the actual publish is throttled
// through the shared npm request limiter.
function publish(pkgName, opts, twoFactorState) {
  return npmRequestLimit(() => internalPublish(pkgName, opts, twoFactorState));
}
/**
 * Publish every public workspace package whose local version is ahead of (or
 * absent from) the registry. 2FA state is shared across publishes so the OTP
 * prompt happens at most once; a provided --otp short-circuits the check.
 * Returns [{ name, newVersion, published }].
 */
async function publishPackages({
  cwd,
  access,
  otp
}) {
  const packages = await getWorkspaces({
    cwd
  });
  const publicPackages = packages.filter(pkg => !pkg.config.private);
  let twoFactorState = otp === undefined ? {
    token: null,
    // note: we're not awaiting this here, we want this request to happen in parallel with getUnpublishedPackages
    isRequired: getTokenIsRequired()
  } : {
    token: otp,
    isRequired: Promise.resolve(true)
  };
  const unpublishedPackagesInfo = await getUnpublishedPackages(publicPackages);
  const unpublishedPackages = publicPackages.filter(pkg => {
    return unpublishedPackagesInfo.some(p => pkg.name === p.name);
  });
  if (unpublishedPackagesInfo.length === 0) {
    logger.warn("No unpublished packages to publish");
  }
  const publishedPackages = await Promise.all(unpublishedPackages.map(pkg => publishAPackage(pkg, access, twoFactorState)));
  return publishedPackages;
}
// Publish a single workspace package from its own directory and report the
// outcome as { name, newVersion, published }.
async function publishAPackage(pkg, access, twoFactorState) {
  const { name, version } = pkg.config;
  logger.info(`Publishing ${chalk.cyan(`"${name}"`)} at ${chalk.green(`"${version}"`)}`);
  const publishConfirmation = await publish(name, {
    cwd: pkg.dir,
    access
  }, twoFactorState);
  return {
    name,
    newVersion: version,
    published: publishConfirmation.published
  };
}
/**
 * Query npm for every package and keep the ones that should be published:
 * never published before, or locally ahead of the registry version. A local
 * version *behind* npm is only warned about — publish would fail anyway.
 */
async function getUnpublishedPackages(packages) {
  // All registry lookups run in parallel (bounded by npmRequestLimit).
  const results = await Promise.all(packages.map(async pkg => {
    const config = pkg.config;
    const response = await infoAllow404(config.name);
    return {
      name: config.name,
      localVersion: config.version,
      isPublished: response.published,
      publishedVersion: response.pkgInfo.version || ""
    };
  }));
  const packagesToPublish = [];
  for (const pkgInfo of results) {
    const {
      name,
      isPublished,
      localVersion,
      publishedVersion
    } = pkgInfo;
    if (!isPublished) {
      packagesToPublish.push(pkgInfo);
    } else if (semver.gt(localVersion, publishedVersion)) {
      packagesToPublish.push(pkgInfo);
      logger.info(`${name} is being published because our local version (${localVersion}) is ahead of npm's (${publishedVersion})`);
    } else if (semver.lt(localVersion, publishedVersion)) {
      // If the local version is behind npm, something is wrong, we warn here, and by not getting published later, it will fail
      logger.warn(`${name} is not being published because version ${publishedVersion} is already published on npm and we are trying to publish version ${localVersion}`);
    }
  }
  return packagesToPublish;
}
// Print one "name@newVersion" line per package.
function logReleases(pkgs) {
  logger.log(pkgs.map(p => `${p.name}@${p.newVersion}`).join("\n"));
}
/**
 * `changeset publish`: publish all pending packages, create a git tag for
 * each success, and fail the command (exit 1) if any package did not publish.
 */
async function run(cwd, {
  otp
}, config) {
  const response = await publishPackages({
    cwd: cwd,
    // if not public, we wont pass the access, and it works as normal
    access: config.access,
    otp: otp
  });
  const successful = response.filter(p => p.published);
  const unsuccessful = response.filter(p => !p.published);
  if (successful.length > 0) {
    logger.success("packages published successfully:");
    logReleases(successful); // We create the tags after the push above so that we know that HEAD wont change and that pushing
    // wont suffer from a race condition if another merge happens in the mean time (pushing tags wont
    // fail if we are behind master).
    logger.log("Creating tags...");
    for (const pkg of successful) {
      const tag = `${pkg.name}@${pkg.newVersion}`;
      logger.log("New tag: ", tag);
      await git.tag(tag, cwd);
    }
  }
  if (unsuccessful.length > 0) {
    logger.error("packages failed to publish:");
    logReleases(unsuccessful);
    // A partial failure still fails the whole command.
    throw new ExitError(1);
  }
}
/**
 * `changeset status`: print the release plan implied by the current
 * changesets (or dump it to a JSON file with --output). Exits 1 when no
 * changesets are present.
 */
async function getStatus(cwd, {
  sinceMaster,
  verbose,
  output
}, config) {
  // TODO: Check if we are no master and give a different error message if we are
  const releasePlan = await getReleasePlan(cwd, sinceMaster, config);
  const {
    changesets,
    releases
  } = releasePlan;
  if (changesets.length < 1) {
    logger.error("No changesets present");
    process.exit(1);
  }
  // --output writes the raw plan to disk instead of rendering it.
  if (output) {
    await fs.writeFile(path.join(cwd, output), JSON.stringify(releasePlan, undefined, 2));
    return;
  }
  const print = verbose ? verbosePrint : SimplePrint;
  print("patch", releases);
  logger.log("---");
  print("minor", releases);
  logger.log("---");
  print("major", releases);
  return releasePlan;
}
// Compact status output: a bullet list of package names for one bump type,
// or a "NO packages" notice when nothing matches.
function SimplePrint(type, releases) {
  const packages = releases.filter(r => r.type === type);
  if (packages.length === 0) {
    logger.info(chalk`{red NO} packages to be bumped at {green ${type}}`);
    return;
  }
  logger.info(chalk`Packages to be bumped at {green ${type}}:\n`);
  const pkgs = packages.map(({ name }) => `- ${name}`).join("\n");
  logger.log(chalk.green(pkgs));
}
/**
 * Verbose status output: one tty-table per bump type listing package name,
 * new version, and the changeset files that contributed to the bump.
 */
function verbosePrint(type, releases) {
  const packages = releases.filter(r => r.type === type);
  if (packages.length) {
    logger.info(chalk`Packages to be bumped at {green ${type}}`);
    // One row per package: [name, new version, "+"-joined changeset paths].
    const columns = packages.map(({
      name,
      newVersion: version,
      changesets
    }) => [chalk.green(name), version, changesets.map(c => chalk.blue(` .changeset/${c}/changes.md`)).join(" +")]);
    const t1 = table([{
      value: "Package Name",
      width: 20
    }, {
      value: "New Version",
      width: 20
    }, {
      value: "Related Changeset Summaries",
      width: 70
    }], columns, {
      paddingLeft: 1,
      paddingRight: 0,
      headerAlign: "center",
      align: "left"
    });
    logger.log(t1.render() + "\n");
  } else {
    logger.info(chalk`Running release would release {red NO} packages as a {green ${type}}`);
  }
}
// Parse CLI arguments. `input` holds the sub-command (if any) and `flags`
// the options. `--otp` deliberately defaults to undefined so an unset flag
// can never override values from the project config.
const {
  input,
  flags
} = meow(`
Usage
$ changesets [command]
Commands
init
add
version
publish [--otp=code]
status [--since-master --verbose --output=JSON_FILE.json]
`, {
  flags: {
    sinceMaster: {
      type: "boolean"
    },
    verbose: {
      type: "boolean",
      alias: "v"
    },
    output: {
      type: "string",
      alias: "o"
    },
    otp: {
      type: "string",
      default: undefined
    }
  }
});
const cwd = process.cwd();
// CLI entry point: resolve workspaces, load the project config, then
// dispatch on the sub-command. An ExitError propagates its exit code; any
// other error is logged and exits 1.
(async () => {
  const workspaces = await getWorkspaces$1({
    cwd,
    tools: ["yarn", "bolt", "root"]
  });
  if (!workspaces) {
    throw new Error("We could not resolve workspaces - check you are running this command from the correct directory");
  }
  // `init` must work before any config file exists, so handle it first.
  if (input[0] === "init") {
    await init(cwd);
    return;
  }
  let config$1;
  try {
    config$1 = await config.read(cwd, workspaces);
  } catch (e) {
    // A v1 config can't be read by v2: give a targeted migration message
    // instead of the raw parse error.
    let oldConfigExists = await fs.pathExists(path.resolve(cwd, ".changeset/config.js"));
    if (oldConfigExists) {
      logger.error("It looks like you're using the version 1 `.changeset/config.js` file");
      logger.error("You'll need to convert it to a `.changeset/config.json` file");
      logger.error("The format of the config object has significantly changed in v2 as well");
      logger.error(" - we thoroughly recommend looking at the changelog for this package for what has changed");
      process.exit(1);
    } else {
      throw e;
    }
  }
  if (input.length < 1) {
    // No sub-command: default to `add`.
    // @ts-ignore if this is undefined, we have already exited
    await add(cwd, config$1);
  } else if (input.length > 1) {
    logger.error("Too many arguments passed to changesets - we only accept the command name as an argument");
  } else {
    const {
      sinceMaster,
      verbose,
      output,
      otp
    } = flags;
    // Flags that existed in v1 but moved into the config file in v2.
    const deadFlags = ["updateChangelog", "isPublic", "skipCI", "commit"];
    deadFlags.forEach(flag => {
      if (flags[flag]) {
        logger.error(`the flag ${flag} has been removed from changesets for version 2`);
        logger.error(`Please encode the desired value into your config`);
        logger.error(`See our changelog for more details`);
        throw new ExitError(1);
      }
    }); // Command line options need to be undefined, otherwise their
    // default value overrides the user's provided config in their
    // config file. For this reason, we only assign them to this
    // object as and when they exist.
    switch (input[0]) {
      case "add":
        {
          // @ts-ignore if this is undefined, we have already exited
          await add(cwd, config$1);
          return;
        }
      case "version":
        {
          // @ts-ignore if this is undefined, we have already exited
          await version(cwd, config$1);
          return;
        }
      case "publish":
        {
          // @ts-ignore if this is undefined, we have already exited
          await run(cwd, {
            otp
          }, config$1);
          return;
        }
      case "status":
        {
          // @ts-ignore if this is undefined, we have already exited
          await getStatus(cwd, {
            sinceMaster,
            verbose,
            output
          }, config$1);
          return;
        }
      // Renamed v1 commands get an explicit migration error.
      case "bump":
        {
          logger.error('In version 2 of changesets, "bump" has been renamed to "version" - see our changelog for an explanation');
          logger.error("To fix this, use `changeset version` instead, and update any scripts that use changesets");
          throw new ExitError(1);
        }
      case "release":
        {
          logger.error('In version 2 of changesets, "release" has been renamed to "publish" - see our changelog for an explanation');
          logger.error("To fix this, use `changeset publish` instead, and update any scripts that use changesets");
          throw new ExitError(1);
        }
      default:
        {
          logger.error(`Invalid command ${input[0]} was provided`);
          throw new ExitError(1);
        }
    }
  }
})().catch(err => {
  // ExitError carries an intentional exit code; anything else is unexpected.
  if (err instanceof ExitError) {
    return process.exit(err.code);
  }
  logger.error(err);
  process.exit(1);
});
;