UNPKG

@amplience/dc-cli

Version:
229 lines (228 loc) 9.21 kB
"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.handler = exports.builder = exports.LOG_FILENAME = exports.desc = exports.command = void 0; exports.getTempFolder = getTempFolder; const log_helpers_1 = require("../../common/log-helpers"); const path_1 = require("path"); const rimraf_1 = __importDefault(require("rimraf")); const export_1 = require("./export"); const import_1 = require("./import"); const directory_utils_1 = require("../../common/import/directory-utils"); const import_revert_1 = require("./import-revert"); const file_log_1 = require("../../common/file-log"); const archive_log_1 = require("../../common/archive/archive-log"); const facet_1 = require("../../common/filter/facet"); function getTempFolder(name, platform = process.platform) { return (0, path_1.join)(process.env[platform == 'win32' ? 'USERPROFILE' : 'HOME'] || __dirname, '.amplience', `copy-${name}/`); } exports.command = 'copy'; exports.desc = 'Copy content items. The active account and hub are the source for the copy.'; const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('item', 'copy', platform); exports.LOG_FILENAME = LOG_FILENAME; const builder = (yargs) => { yargs .option('revertLog', { type: 'string', describe: 'Path to a log file to revert a copy for. This will archive the most recently copied resources, and revert updated ones.', coerce: log_helpers_1.openRevertLog }) .option('srcRepo', { type: 'string', describe: 'Copy content from within a given repository. Directory structure will start at the specified repository. Will automatically export all contained folders.' }) .option('srcFolder', { type: 'string', describe: 'Copy content from within a given folder. Directory structure will start at the specified folder. Can be used in addition to repoId.' }) .option('dstRepo', { type: 'string', describe: 'Copy matching the given repository to the source base directory, by ID. Folder structure will be followed and replicated from there.' }) .option('dstFolder', { type: 'string', describe: 'Copy matching the given folder to the source base directory, by ID. Folder structure will be followed and replicated from there.' }) .option('dstHubId', { type: 'string', describe: 'Destination hub ID. If not specified, it will be the same as the source.' }) .option('dstClientId', { type: 'string', describe: "Destination account's client ID. If not specified, it will be the same as the source." }) .option('dstSecret', { type: 'string', describe: "Destination account's secret. Must be used alongside dstClientId." }) .option('facet', { type: 'string', describe: "Copy content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." }) .option('mapFile', { type: 'string', describe: 'Mapping file to use when updating content that already exists. Updated with any new mappings that are generated. If not present, will be created.' }) .alias('f', 'force') .option('f', { type: 'boolean', boolean: true, describe: 'Overwrite content, create and assign content types, and ignore content with missing types/references without asking.' }) .alias('v', 'validate') .option('v', { type: 'boolean', boolean: true, describe: 'Only recreate folder structure - content is validated but not imported.' }) .option('skipIncomplete', { type: 'boolean', boolean: true, describe: 'Skip any content item that has one or more missing dependancy.' }) .option('lastPublish', { type: 'boolean', boolean: true, describe: 'When available, export the last published version of a content item rather than its newest version.' }) .option('publish', { type: 'boolean', boolean: true, describe: 'Publish any content items that either made a new version on import, or were published more recently in the JSON.' }) .option('batchPublish', { type: 'boolean', boolean: true, describe: 'Batch publish requests up to the rate limit. (35/min)' }) .option('republish', { type: 'boolean', boolean: true, describe: 'Republish content items regardless of whether the import changed them or not. (--publish not required)' }) .option('excludeKeys', { type: 'boolean', boolean: true, describe: 'Exclude delivery keys when importing content items.' }) .option('media', { type: 'boolean', boolean: true, describe: "Detect and rewrite media links to match assets in the target account's DAM. Your client must have DAM permissions configured." }) .option('logFile', { type: 'string', default: exports.LOG_FILENAME, describe: 'Path to a log file to write to.', coerce: log_helpers_1.createLog }) .option('name', { type: 'string', hidden: true }) .option('schemaId', { type: 'string', hidden: true }) .option('ignoreSchemaValidation', { type: 'boolean', boolean: false, describe: 'Ignore content item schema validation during copy' }); }; exports.builder = builder; function rimraf(dir) { return new Promise((resolve) => { (0, rimraf_1.default)(dir, resolve); }); } const handler = async (argv) => { const log = argv.logFile.open(); const tempFolder = getTempFolder(Date.now().toString()); const yargArgs = { $0: '', _: [], json: true }; let result = false; const { hubId, clientId, clientSecret, patToken } = argv; const dstHubId = argv.dstHubId || hubId; const dstClientId = argv.dstClientId || clientId; const dstSecret = argv.dstSecret || clientSecret; const dstPatToken = argv.dstPatToken || patToken; const revertLog = await argv.revertLog; if (revertLog) { if (revertLog.errorLevel === archive_log_1.LogErrorLevel.INVALID) { log.error('Could not read the revert log.'); await log.close(); return false; } result = await (0, import_revert_1.revert)({ ...yargArgs, hubId: dstHubId, clientId: dstClientId, clientSecret: dstSecret, patToken: dstPatToken, dir: tempFolder, logFile: new file_log_1.FileLog(), revertLog: argv.revertLog, ignoreSchemaValidation: argv.ignoreSchemaValidation }); } else { await (0, directory_utils_1.ensureDirectoryExists)(tempFolder); try { log.appendLine('=== Exporting from source... ==='); await (0, export_1.handler)({ ...yargArgs, hubId: hubId, clientId: clientId, clientSecret: clientSecret, patToken: patToken, folderId: argv.srcFolder, repoId: argv.srcRepo, facet: (0, facet_1.withOldFilters)(argv.facet, argv), logFile: log, dir: tempFolder, exportedIds: argv.exportedIds, publish: argv.lastPublish }); log.appendLine('=== Importing to destination... ==='); const importResult = await (0, import_1.handler)({ ...yargArgs, hubId: dstHubId, clientId: dstClientId, clientSecret: dstSecret, patToken: patToken, dir: tempFolder, baseRepo: argv.dstRepo, baseFolder: argv.dstFolder, mapFile: argv.mapFile, force: argv.force, validate: argv.validate, skipIncomplete: argv.skipIncomplete, republish: argv.republish, publish: argv.publish, batchPublish: argv.batchPublish, excludeKeys: argv.excludeKeys, media: argv.media, logFile: log, revertLog: Promise.resolve(undefined), ignoreSchemaValidation: argv.ignoreSchemaValidation }); if (importResult) { log.appendLine('=== Done! ==='); result = true; } } catch (e) { log.appendLine('An unexpected error occurred: \n' + e.toString()); } await rimraf(tempFolder); } await log.close(); return result; }; exports.handler = handler;