eas-cli
EAS command line tool
JavaScript
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const timeago_js_1 = require("@expo/timeago.js");
const core_1 = require("@oclif/core");
const chalk_1 = tslib_1.__importDefault(require("chalk"));
const node_fs_1 = tslib_1.__importDefault(require("node:fs"));
const path = tslib_1.__importStar(require("node:path"));
const EasCommand_1 = tslib_1.__importDefault(require("../../commandUtils/EasCommand"));
const flags_1 = require("../../commandUtils/flags");
const log_1 = tslib_1.__importDefault(require("../../log"));
const ora_1 = require("../../ora");
const json_1 = require("../../utils/json");
const progress_1 = require("../../utils/progress");
const WorkerAssets = tslib_1.__importStar(require("../../worker/assets"));
const deployment_1 = require("../../worker/deployment");
const upload_1 = require("../../worker/upload");
const logs_1 = require("../../worker/utils/logs");
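// Resolves to true if the given path exists and is a directory; any stat error resolves to false.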
const isDirectory = (directoryPath) => node_fs_1.default.promises
.stat(directoryPath)
.then(stat => stat.isDirectory())
.catch(() => false);
class WorkerDeploy extends EasCommand_1.default {
static description = 'Deploy your Expo web build';
static aliases = ['deploy'];
static usage = [(0, chalk_1.default) `deploy {dim [options]}`, `deploy --prod`];
static state = 'beta';
static flags = {
prod: core_1.Flags.boolean({
aliases: ['production'],
description: 'Create a new production deployment.',
default: false,
}),
alias: core_1.Flags.string({
description: 'Custom alias to assign to the new deployment.',
helpValue: 'name',
}),
id: core_1.Flags.string({
description: 'Custom unique identifier for the new deployment.',
helpValue: 'xyz123',
}),
'export-dir': core_1.Flags.string({
description: 'Directory where the Expo project was exported.',
helpValue: 'dir',
default: 'dist',
}),
'dry-run': core_1.Flags.boolean({
description: 'Outputs a tarball of the new deployment instead of uploading it.',
default: false,
}),
...flags_1.EASEnvironmentFlag,
...flags_1.EasNonInteractiveAndJsonFlags,
};
static contextDefinition = {
...this.ContextOptions.DynamicProjectConfig,
...this.ContextOptions.ProjectDir,
...this.ContextOptions.LoggedIn,
};
async runAsync() {
const { flags: rawFlags } = await this.parse(WorkerDeploy);
const flags = this.sanitizeFlags(rawFlags);
if (flags.json) {
(0, json_1.enableJsonOutput)();
}
log_1.default.warn('EAS Hosting is still in beta and subject to changes.');
const { getDynamicPrivateProjectConfigAsync, loggedIn: { graphqlClient }, projectDir, } = await this.getContextAsync(WorkerDeploy, { ...flags, withServerSideEnvironment: null });
const projectDist = await resolveExportedProjectAsync(flags, projectDir);
const { projectId } = await getDynamicPrivateProjectConfigAsync();
logExportedProjectInfo(projectDist);
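// Async generator yielding [path, contents] entries for the deployment tarball:
// assets.json, manifest.json, and (for server exports) every file under server/.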
async function* emitWorkerTarballAsync(params) {
yield ['assets.json', JSON.stringify(params.assetMap)];
yield ['manifest.json', JSON.stringify(params.manifest)];
if (projectDist.type === 'server' && projectDist.serverPath) {
const workerFiles = WorkerAssets.listWorkerFilesAsync(projectDist.serverPath);
for await (const workerFile of workerFiles) {
yield [`server/${workerFile.normalizedPath}`, workerFile.data];
}
}
}
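// Marks the deployment's uploads as complete by POSTing to /deploy/finalize with the deployment token.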
async function finalizeDeployAsync(deployParams) {
const finalizeDeployUrl = new URL('/deploy/finalize', deployParams.baseURL);
finalizeDeployUrl.searchParams.set('token', deployParams.token);
const result = await (0, upload_1.callUploadApiAsync)(finalizeDeployUrl, {
method: 'POST',
headers: {
accept: 'application/json',
},
});
if (!result || typeof result !== 'object' || !('success' in result) || !result.success) {
throw new Error('Deploy failed: Incomplete asset uploads. Please try again');
}
}
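// Uploads the packed tarball to the signed deployment URL and validates the response,
// returning the deployment id, script name, base URL, and upload token.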
async function uploadTarballAsync(tarPath, uploadUrl) {
const { response } = await (0, upload_1.uploadAsync)({
url: uploadUrl,
filePath: tarPath,
compress: false,
headers: {
accept: 'application/json',
},
});
if (response.status === 413) {
throw new Error('Upload failed! (Payload too large)\n' +
`The files in "${path.relative(projectDir, projectDist.path)}" (at: ${projectDir}) exceed the maximum file size (10MB gzip).`);
}
else if (!response.ok) {
throw new Error(`Upload failed! (${response.statusText})`);
}
else {
const json = await response.json();
if (!json.success || !json.result || typeof json.result !== 'object') {
throw new Error(json.message ? `Upload failed: ${json.message}` : 'Upload failed!');
}
const { id, fullName, token } = json.result;
if (typeof token !== 'string') {
throw new Error('Upload failed: API failed to return a deployment token');
}
else if (typeof id !== 'string') {
throw new Error('Upload failed: API failed to return a deployment identifier');
}
else if (typeof fullName !== 'string') {
throw new Error('Upload failed: API failed to return a script name');
}
const baseURL = new URL('/', uploadUrl).toString();
return { id, fullName, baseURL, token };
}
}
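// Uploads every file from the exported asset directory, addressed by SHA-512 hash,
// in batches while reporting progress in the CLI.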
async function uploadAssetsAsync(assetMap, deployParams) {
const uploadParams = [];
const assetPath = projectDist.type === 'server' ? projectDist.clientPath : projectDist.path;
if (!assetPath) {
return;
}
for await (const asset of WorkerAssets.listAssetMapFilesAsync(assetPath, assetMap)) {
const uploadURL = new URL(`/asset/${asset.sha512}`, deployParams.baseURL);
uploadURL.searchParams.set('token', deployParams.token);
uploadParams.push({ url: uploadURL.toString(), filePath: asset.path });
}
const progress = {
total: uploadParams.length,
pending: 0,
percent: 0,
transferred: 0,
};
const updateProgress = (0, progress_1.createProgressTracker)({
total: progress.total,
message(ratio) {
const percent = `${Math.floor(ratio * 100)}`;
const details = chalk_1.default.dim(`(${progress.pending} Pending, ${progress.transferred} Completed, ${progress.total} Total)`);
return `Uploading assets: ${percent.padStart(3)}% ${details}`;
},
completedMessage: 'Uploaded assets',
});
try {
for await (const signal of (0, upload_1.batchUploadAsync)(uploadParams)) {
if ('response' in signal) {
progress.pending--;
progress.percent = ++progress.transferred / progress.total;
}
else {
progress.pending++;
}
updateProgress({ progress });
}
}
catch (error) {
updateProgress({ isComplete: true, error });
throw error;
}
updateProgress({ isComplete: true });
}
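// Main deploy flow: build the manifest and asset map, pack the tarball,
// then upload it (or only write it to disk when --dry-run is set).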
let assetMap;
let tarPath;
let deployResult;
let progress = (0, ora_1.ora)('Preparing project').start();
try {
const manifestResult = await WorkerAssets.createManifestAsync({
environment: flags.environment,
projectDir,
projectId,
}, graphqlClient);
if (manifestResult.conflictingVariableNames?.length) {
log_1.default.warn('> The following environment variables were present in local .env files as well as EAS environment variables. ' +
'In case of conflict, the EAS environment variable values will be used: ' +
manifestResult.conflictingVariableNames.join(' '));
}
assetMap = await WorkerAssets.createAssetMapAsync(projectDist.type === 'server' ? projectDist.clientPath : projectDist.path);
tarPath = await WorkerAssets.packFilesIterableAsync(emitWorkerTarballAsync({
assetMap,
manifest: manifestResult.manifest,
}));
if (flags.dryRun) {
const DRY_RUN_OUTPUT_PATH = 'deploy.tar.gz';
await node_fs_1.default.promises.copyFile(tarPath, DRY_RUN_OUTPUT_PATH);
progress.succeed('Saved deploy.tar.gz tarball');
if (flags.json) {
(0, json_1.printJsonOnlyOutput)({ tarPath: DRY_RUN_OUTPUT_PATH });
}
return;
}
const uploadUrl = await (0, deployment_1.getSignedDeploymentUrlAsync)(graphqlClient, {
appId: projectId,
deploymentIdentifier: flags.deploymentIdentifier,
// NOTE(cedric): this function might ask the user for a dev-domain name,
// when that happens, no ora spinner should be running.
onSetupDevDomain: () => progress.stop(),
nonInteractive: flags.nonInteractive,
});
progress.start('Creating deployment');
deployResult = await uploadTarballAsync(tarPath, uploadUrl);
progress.succeed('Created deployment');
}
catch (error) {
progress.fail('Failed to create deployment');
throw error;
}
await uploadAssetsAsync(assetMap, deployResult);
await finalizeDeployAsync(deployResult);
let deploymentAlias = null;
if (flags.aliasName) {
progress = (0, ora_1.ora)((0, chalk_1.default) `Assigning alias {bold ${flags.aliasName}} to deployment`).start();
try {
deploymentAlias = await (0, deployment_1.assignWorkerDeploymentAliasAsync)({
graphqlClient,
appId: projectId,
deploymentId: deployResult.id,
aliasName: flags.aliasName,
});
// Only stop the spinner when not promoting to production
if (!flags.isProduction) {
progress.succeed((0, chalk_1.default) `Assigned alias {bold ${flags.aliasName}} to deployment`);
}
}
catch (error) {
progress.fail((0, chalk_1.default) `Failed to assign {bold ${flags.aliasName}} alias to deployment`);
throw error;
}
}
let deploymentProdAlias = null;
if (flags.isProduction) {
try {
if (!flags.aliasName) {
progress = (0, ora_1.ora)((0, chalk_1.default) `Promoting deployment to {bold production}`).start();
}
else {
progress.text = (0, chalk_1.default) `Promoting deployment to {bold production}`;
}
deploymentProdAlias = await (0, deployment_1.assignWorkerDeploymentProductionAsync)({
graphqlClient,
appId: projectId,
deploymentId: deployResult.id,
});
progress.succeed(!flags.aliasName
? (0, chalk_1.default) `Promoted deployment to {bold production}`
: (0, chalk_1.default) `Promoted deployment to {bold production} with alias {bold ${flags.aliasName}}`);
}
catch (error) {
progress.fail('Failed to promote deployment to production');
throw error;
}
}
if (flags.json) {
(0, json_1.printJsonOnlyOutput)((0, logs_1.formatWorkerDeploymentJson)({
projectId,
deployment: {
deploymentIdentifier: deployResult.id,
url: (0, logs_1.getDeploymentUrlFromFullName)(deployResult.fullName),
},
aliases: [deploymentAlias],
production: deploymentProdAlias,
}));
return;
}
log_1.default.addNewLineIfNone();
log_1.default.log(`🎉 Your deployment is ready`);
log_1.default.addNewLineIfNone();
log_1.default.log((0, logs_1.formatWorkerDeploymentTable)({
projectId,
deployment: {
deploymentIdentifier: deployResult.id,
url: (0, logs_1.getDeploymentUrlFromFullName)(deployResult.fullName),
},
aliases: [deploymentAlias],
production: deploymentProdAlias,
}));
if (!deploymentProdAlias) {
log_1.default.addNewLineIfNone();
log_1.default.log('🚀 When you are ready to deploy to production:');
log_1.default.log((0, chalk_1.default) ` $ eas deploy {bold --prod}`);
}
}
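// Normalizes raw kebab-case CLI flags into the camelCase options object used by runAsync.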
sanitizeFlags(flags) {
return {
nonInteractive: flags['non-interactive'],
json: flags['json'],
isProduction: !!flags.prod,
aliasName: flags.alias?.trim().toLowerCase(),
deploymentIdentifier: flags.id?.trim(),
exportDir: flags['export-dir'],
environment: flags['environment'],
dryRun: flags['dry-run'],
};
}
}
exports.default = WorkerDeploy;
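// Inspects the export directory: a server/_expo/routes.json file marks a server export
// (with an optional client/ directory); otherwise the export is treated as static.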
async function resolveExportedProjectAsync(flags, projectDir) {
const exportPath = path.join(projectDir, flags.exportDir);
const serverPath = path.join(exportPath, 'server');
const clientPath = path.join(exportPath, 'client');
const [exportDirStat, expoRoutesStat, hasClientDir] = await Promise.all([
node_fs_1.default.promises.stat(exportPath).catch(() => null),
node_fs_1.default.promises.stat(path.join(serverPath, '_expo/routes.json')).catch(() => null),
isDirectory(clientPath),
]);
if (!exportDirStat?.isDirectory()) {
throw new Error(`No "${flags.exportDir}/" folder found. Export your app with "npx expo export --platform web"`);
}
if (expoRoutesStat?.isFile()) {
return {
type: 'server',
path: exportPath,
modifiedAt: exportDirStat.mtime,
serverPath,
clientPath: hasClientDir ? clientPath : undefined,
};
}
return { type: 'static', path: exportPath, modifiedAt: exportDirStat.mtime };
}
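// Logs the export type; warns with a relative timestamp when the export is older than one minute.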
function logExportedProjectInfo(project) {
let modifiedAgo = '';
// Only show the timestamp for exports older than 1 minute
if (project.modifiedAt && Date.now() - project.modifiedAt.getTime() > 60000) {
modifiedAgo = ` - exported ${(0, timeago_js_1.format)(project.modifiedAt)}`;
log_1.default.warn(`> Project export: ${project.type}${modifiedAgo}`);
}
else {
log_1.default.log((0, chalk_1.default) `{dim > Project export: ${project.type}}`);
}
}