// Source: UNPKG — @amplience/dc-cli
// File: dist/commands/content-item/move.js (version not captured)
// 248 lines (247 loc), 9.85 kB
"use strict";
// Compiled (tsc) CommonJS module implementing the `content-item move` command
// for the Amplience dc-cli. A "move" is a copy from the source hub to the
// destination hub followed by archiving the moved items at the source; a
// revert log allows the operation to be undone.

// --- Standard TypeScript CommonJS interop shims (emitted by tsc; unchanged) ---
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = exports.builder = exports.LOG_FILENAME = exports.desc = exports.command = void 0;

// Project-local helpers and the Amplience management SDK.
const log_helpers_1 = require("../../common/log-helpers");
const copy = __importStar(require("./copy"));
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const import_revert_1 = require("./import-revert");
const archive_log_1 = require("../../common/archive/archive-log");

exports.command = 'move';
// FIX: in the captured page this string literal was broken across a raw
// newline (invalid JS); rejoined into a single literal.
exports.desc = 'Move content items. The active account and hub are the source for the move.';

/**
 * Default log path for this command, e.g. used as the `logFile` option default.
 * @param {string} [platform=process.platform] - OS platform key passed through to getDefaultLogPath.
 * @returns {string} default log file path for an 'item move' run.
 */
const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('item', 'move', platform);
exports.LOG_FILENAME = LOG_FILENAME;

/**
 * yargs builder: declares every CLI option the move command accepts.
 * Options mirror the copy command's options, since move delegates to copy.
 * @param {import('yargs').Argv} yargs
 */
const builder = (yargs) => {
    yargs
        .option('revertLog', {
        type: 'string',
        describe: 'Path to a log file to revert a move for. This will archive the most recently moved resources from the destination, unarchive from the source, and revert updated ones.',
        // Coerced to a promise of a parsed log (awaited in the handler).
        coerce: log_helpers_1.openRevertLog
    })
        .option('srcRepo', {
        type: 'string',
        describe: 'Copy content from within a given repository. Directory structure will start at the specified repository. Will automatically export all contained folders.'
    })
        .option('srcFolder', {
        type: 'string',
        describe: 'Copy content from within a given folder. Directory structure will start at the specified folder. Can be used in addition to repoId.'
    })
        .option('dstRepo', {
        type: 'string',
        describe: 'Copy matching the given repository to the source base directory, by ID. Folder structure will be followed and replicated from there.'
    })
        .option('dstFolder', {
        type: 'string',
        describe: 'Copy matching the given folder to the source base directory, by ID. Folder structure will be followed and replicated from there.'
    })
        .option('dstHubId', {
        type: 'string',
        describe: 'Destination hub ID. If not specified, it will be the same as the source.'
    })
        .option('dstClientId', {
        type: 'string',
        describe: "Destination account's client ID. If not specified, it will be the same as the source."
    })
        .option('dstSecret', {
        type: 'string',
        describe: "Destination account's secret. Must be used alongside dstClientId."
    })
        .option('facet', {
        type: 'string',
        describe: "Move content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme."
    })
        .option('mapFile', {
        type: 'string',
        describe: 'Mapping file to use when updating content that already exists. Updated with any new mappings that are generated. If not present, will be created.'
    })
        .alias('f', 'force')
        .option('f', {
        type: 'boolean',
        boolean: true,
        describe: 'Overwrite content, create and assign content types, and ignore content with missing types/references without asking.'
    })
        .alias('v', 'validate')
        .option('v', {
        type: 'boolean',
        boolean: true,
        describe: 'Only recreate folder structure - content is validated but not imported.'
    })
        .option('skipIncomplete', {
        type: 'boolean',
        boolean: true,
        // FIX: typo in help text ("dependancy" -> "dependency").
        describe: 'Skip any content item that has one or more missing dependency.'
    })
        .option('lastPublish', {
        type: 'boolean',
        boolean: true,
        describe: 'When available, export the last published version of a content item rather than its newest version.'
    })
        .option('publish', {
        type: 'boolean',
        boolean: true,
        describe: 'Publish any content items that either made a new version on import, or were published more recently in the JSON.'
    })
        .option('batchPublish', {
        type: 'boolean',
        boolean: true,
        describe: 'Batch publish requests up to the rate limit. (35/min)'
    })
        .option('republish', {
        type: 'boolean',
        boolean: true,
        describe: 'Republish content items regardless of whether the import changed them or not. (--publish not required)'
    })
        .option('excludeKeys', {
        type: 'boolean',
        boolean: true,
        describe: 'Exclude delivery keys when importing content items.'
    })
        .option('media', {
        type: 'boolean',
        boolean: true,
        describe: "Detect and rewrite media links to match assets in the target account's DAM. Your client must have DAM permissions configured."
    })
        .option('logFile', {
        type: 'string',
        default: exports.LOG_FILENAME,
        describe: 'Path to a log file to write to.',
        coerce: log_helpers_1.createLog
    })
        // Hidden passthrough filters (legacy; superseded by --facet).
        .option('name', { type: 'string', hidden: true })
        .option('schemaId', { type: 'string', hidden: true })
        .option('ignoreSchemaValidation', {
        type: 'boolean',
        // FIX: was `boolean: false`, contradicting `type: 'boolean'` and
        // inconsistent with every other boolean option in this builder.
        boolean: true,
        describe: 'Ignore content item schema validation during move'
    });
};
exports.builder = builder;

/**
 * Command handler.
 *
 * Revert mode (--revertLog): unarchives the previously moved items at the
 * source hub, then delegates to import-revert against the destination to
 * archive/restore what the move created there.
 *
 * Move mode: delegates to the copy handler (which fills argv.exportedIds),
 * then archives each successfully copied item at the source, recording a
 * 'MOVED' action per item in the log so the operation can be reverted.
 *
 * @param {object} argv - parsed yargs arguments plus connection settings
 *   (hubId/clientId/clientSecret). Mutated: exportedIds is attached and
 *   logFile is replaced by its opened log object in move mode.
 */
const handler = async (argv) => {
    // Populated by copy.handler with the ids of items created/updated at the destination.
    argv.exportedIds = [];
    const { hubId, clientId, clientSecret } = argv;
    // openRevertLog coerces --revertLog to a promise; resolves to undefined when absent.
    const revertLog = await argv.revertLog;
    // Destination credentials fall back to the source account's.
    const dstHubId = argv.dstHubId || hubId;
    const dstClientId = argv.dstClientId || clientId;
    const dstSecret = argv.dstSecret || clientSecret;
    if (revertLog) {
        if (revertLog.errorLevel === archive_log_1.LogErrorLevel.INVALID) {
            console.error('Could not read the revert log.');
            return;
        }
        // Client for the SOURCE hub: moved items were archived there and must be unarchived.
        const client = (0, dynamic_content_client_factory_1.default)({ ...argv, hubId: hubId, clientId: clientId, clientSecret: clientSecret });
        const toUnarchive = revertLog.getData('MOVED');
        for (let i = 0; i < toUnarchive.length; i++) {
            const id = toUnarchive[i];
            let item;
            try {
                item = await client.contentItems.get(id);
            }
            catch {
                // Best effort: a missing item must not abort the whole revert.
                console.log(`Could not find item with id ${id}, skipping.`);
                continue;
            }
            if (item.status !== dc_management_sdk_js_1.Status.ACTIVE) {
                try {
                    await item.related.unarchive();
                }
                catch {
                    console.log(`Could not unarchive item with id ${id}, skipping.`);
                    continue;
                }
            }
            else {
                console.log(`Item with id ${id} is already unarchived, skipping.`);
            }
        }
        // Minimal yargs-shaped arguments for the programmatic revert call.
        const yargArgs = { $0: '', _: [], json: true };
        // Revert the import at the DESTINATION hub (archives what the move created there).
        await (0, import_revert_1.revert)({ ...yargArgs, hubId: dstHubId, clientId: dstClientId, clientSecret: dstSecret, dir: '', logFile: argv.logFile, revertLog: argv.revertLog, ignoreSchemaValidation: argv.ignoreSchemaValidation });
    }
    else {
        // Open the log here so copy.handler and the archive loop share one log file.
        const log = argv.logFile.open();
        argv.logFile = log;
        const copySuccess = await copy.handler(argv);
        if (!copySuccess) {
            return;
        }
        // Copy succeeded: archive the originals at the source to complete the "move".
        const client = (0, dynamic_content_client_factory_1.default)({ ...argv, hubId: hubId, clientId: clientId, clientSecret: clientSecret });
        const exported = argv.exportedIds;
        for (let i = 0; i < exported.length; i++) {
            const item = await client.contentItems.get(exported[i]);
            try {
                await item.related.archive();
                // 'MOVED' entries are what revert mode reads back via getData('MOVED').
                log.addAction('MOVED', item.id);
            }
            catch (e) {
                // Record the failure but keep archiving the remaining items.
                log.addComment(`ARCHIVE FAILED: ${item.id}`);
                log.addComment(e.toString());
            }
        }
        await log.close();
    }
};
exports.handler = handler;