@amplience/dc-cli

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.handler = exports.builder = exports.steps = exports.LOG_FILENAME = exports.desc = exports.command = void 0; exports.getDefaultMappingPath = getDefaultMappingPath; const log_helpers_1 = require("../../common/log-helpers"); const path_1 = require("path"); const configure_1 = require("../configure"); const cli_1 = require("../../cli"); const directory_utils_1 = require("../../common/import/directory-utils"); const content_clone_step_1 = require("./steps/content-clone-step"); const schema_clone_step_1 = require("./steps/schema-clone-step"); const settings_clone_step_1 = require("./steps/settings-clone-step"); const type_clone_step_1 = require("./steps/type-clone-step"); const index_clone_step_1 = require("./steps/index-clone-step"); const archive_log_1 = require("../../common/archive/archive-log"); const extension_clone_step_1 = require("./steps/extension-clone-step"); const event_clone_step_1 = require("./steps/event-clone-step"); function getDefaultMappingPath(name, platform = process.platform) { return (0, path_1.join)(process.env[platform == 'win32' ? 'USERPROFILE' : 'HOME'] || __dirname, '.amplience', `clone/`, `${name}.json`); } exports.command = 'clone <dir>'; exports.desc = 'Clone an entire hub. The active account and hub are the source for the copy. Exported data from the source hub will be placed in the specified folder.'; const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('hub', 'clone', platform); exports.LOG_FILENAME = LOG_FILENAME; exports.steps = [ new settings_clone_step_1.SettingsCloneStep(), new extension_clone_step_1.ExtensionCloneStep(), new schema_clone_step_1.SchemaCloneStep(), new type_clone_step_1.TypeCloneStep(), new index_clone_step_1.IndexCloneStep(), new content_clone_step_1.ContentCloneStep(), new event_clone_step_1.EventCloneStep() ]; const builder = (yargs) => { yargs .options(configure_1.configureCommandOptions) .config('config', cli_1.readConfig) .positional('dir', { describe: 'Directory to export content to, then import from. This must be set to the previous directory for a revert.', type: 'string' }) .options({ acceptSnapshotLimits: { type: 'boolean', boolean: true, describe: 'Must be passed to use the event clone step. Only use this argument if you fully understand its limitations.' }, dstHubId: { type: 'string', describe: 'Destination hub ID. If not specified, it will be the same as the source.' }, dstClientId: { type: 'string', describe: "Destination account's client ID. If not specified, it will be the same as the source." }, dstSecret: { type: 'string', describe: "Destination account's secret. Must be used alongside dstClientId." }, mapFile: { type: 'string', describe: 'Mapping file to use when updating content that already exists. Updated with any new mappings that are generated. If not present, will be created.' }, force: { type: 'boolean', boolean: true, describe: 'Overwrite content, create and assign content types, and ignore content with missing types/references without asking.', alias: 'f' }, validate: { type: 'boolean', boolean: true, describe: 'Only recreate folder structure - content is validated but not imported.', alias: 'v' }, skipIncomplete: { type: 'boolean', boolean: true, describe: 'Skip any content item that has one or more missing dependancy.' }, lastPublish: { type: 'boolean', boolean: true, describe: 'When available, export the last published version of a content item rather than its newest version.' 
}, publish: { type: 'boolean', boolean: true, describe: 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }, batchPublish: { type: 'boolean', boolean: true, describe: 'Batch publish requests up to the rate limit. (35/min)' }, republish: { type: 'boolean', boolean: true, describe: 'Republish content items regardless of whether the import changed them or not. (--publish not required)' }, excludeKeys: { type: 'boolean', boolean: true, describe: 'Exclude delivery keys when importing content items.' }, media: { type: 'boolean', boolean: true, describe: "Detect and rewrite media links to match assets in the target account's DAM. Your client must have DAM permissions configured." }, revertLog: { type: 'string', describe: 'Revert a previous clone using a given revert log and given directory. Reverts steps in reverse order, starting at the specified one.', coerce: log_helpers_1.openRevertLog }, step: { type: 'string', describe: 'Start at a specific step. Steps after the one you specify will also run.', choices: exports.steps.map(step => step.getId()) }, logFile: { type: 'string', default: exports.LOG_FILENAME, describe: 'Path to a log file to write to.', coerce: log_helpers_1.createLog }, ignoreSchemaValidation: { type: 'boolean', boolean: false, describe: 'Ignore content item schema validation during clone' } }); }; exports.builder = builder; const handler = async (argv) => { const log = argv.logFile.open(); const tempFolder = argv.dir; if (argv.mapFile == null) { argv.mapFile = getDefaultMappingPath(`hub-${argv.dstHubId}`); } const { hubId, clientId, clientSecret, acceptSnapshotLimits, patToken } = argv; const dstHubId = argv.dstHubId || hubId; const dstClientId = argv.dstClientId || clientId; const dstSecret = argv.dstSecret || clientSecret; const dstPatToken = argv.dstPatToken || patToken; const argvCore = { $0: argv.$0, _: argv._ }; const state = { argv: argv, from: { clientId: clientId, clientSecret: clientSecret, patToken: patToken, hubId: hubId, ...argvCore }, to: { clientId: dstClientId, clientSecret: dstSecret, patToken: dstPatToken, hubId: dstHubId, ...argvCore }, path: tempFolder, logFile: log }; await (0, directory_utils_1.ensureDirectoryExists)(tempFolder); const revertLog = await argv.revertLog; const stepIndex = Math.max(0, exports.steps.findIndex(step => step.getId() === argv.step)); if (revertLog) { if (revertLog.errorLevel === archive_log_1.LogErrorLevel.INVALID) { log.error('Could not read the revert log.'); await log.close(); return; } state.revertLog = revertLog; for (let i = stepIndex; i < exports.steps.length; i++) { const step = exports.steps[i]; if (step.isLimited && !acceptSnapshotLimits) { continue; } log.switchGroup(step.getName()); revertLog.switchGroup(step.getName()); log.appendLine(`=== Reverting Step ${i} - ${step.getName()} ===`); const success = await step.revert(state); if (!success) { log.appendLine(`Reverting step ${i} ('${step.getId()}': ${step.getName()}) Failed. 
Terminating.`); log.appendLine(''); log.appendLine('To continue the revert from this point, use the option:'); log.appendLine(`--step ${step.getId()}`); break; } } } else { for (let i = stepIndex; i < exports.steps.length; i++) { const step = exports.steps[i]; if (step.isLimited && !acceptSnapshotLimits) { continue; } log.switchGroup(step.getName()); log.appendLine(`=== Running Step ${i} - ${step.getName()} ===`); const success = await step.run(state); if (!success) { log.appendLine(`Step ${i} ('${step.getId()}': ${step.getName()}) Failed. Terminating.`); log.appendLine(''); log.appendLine('To continue the clone from this point, use the option:'); log.appendLine(`--step ${step.getId()}`); break; } } } await log.close(); }; exports.handler = handler;
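
This file exports a standard yargs command module (command, desc, builder, handler). Below is a minimal sketch of how it could be registered with yargs directly; the require path and the dc-cli invocation shown in the comments are assumptions based on the package name and the exports above, not something this file defines.

// Sketch only: wiring this command module into a yargs parser.
// In the published CLI the equivalent invocation is expected to look roughly like:
//   dc-cli hub clone ./my-export --dstHubId <hub-id> --dstClientId <client-id> --dstSecret <secret>
const yargs = require('yargs/yargs');
const { hideBin } = require('yargs/helpers');
const clone = require('./clone'); // this module; adjust the path to the built file

yargs(hideBin(process.argv))
    .command(clone.command, clone.desc, clone.builder, clone.handler)
    .demandCommand(1)
    .parse();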
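The handler falls back to the source credentials whenever a destination value is omitted, and derives a default mapping file per destination hub. A small sketch of that behaviour, using only getDefaultMappingPath from this file (the hub ID and values below are made up for illustration):

// Sketch only: mirrors the fallback and default-map-file logic in the handler above.
const { getDefaultMappingPath } = require('./clone'); // this module

const argv = { hubId: 'src-hub', clientId: 'src-client', clientSecret: 'src-secret' };
const dstHubId = argv.dstHubId || argv.hubId;          // -> 'src-hub'
const dstClientId = argv.dstClientId || argv.clientId; // -> 'src-client'

// When --mapFile is omitted, mappings are stored per destination hub under
// ~/.amplience/clone/hub-<dstHubId>.json (or %USERPROFILE% on Windows).
console.log(getDefaultMappingPath(`hub-${dstHubId}`));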