@amplience/dc-cli

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.handler = exports.processItems = exports.getContentItems = exports.builder = exports.coerceLog = exports.LOG_FILENAME = exports.desc = exports.command = void 0; const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory")); const archive_log_1 = require("../../common/archive/archive-log"); const confirm_all_content_1 = require("../../common/content-item/confirm-all-content"); const dc_management_sdk_js_1 = require("dc-management-sdk-js"); const log_helpers_1 = require("../../common/log-helpers"); const facet_1 = require("../../common/filter/facet"); const fetch_content_1 = require("../../common/filter/fetch-content"); const progress_bar_1 = require("../../common/progress-bar/progress-bar"); exports.command = 'archive [id]'; exports.desc = 'Archive Content Items'; const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('content-item', 'archive', platform); exports.LOG_FILENAME = LOG_FILENAME; const coerceLog = (logFile) => (0, log_helpers_1.createLog)(logFile, 'Content Items Archive Log'); exports.coerceLog = coerceLog; const builder = (yargs) => { yargs .positional('id', { type: 'string', describe: 'The ID of a content item to be archived. If id is not provided, this command will archive ALL content items through all content repositories in the hub.' }) .option('repoId', { type: 'string', describe: 'The ID of a content repository to search items in to be archived.', requiresArg: false }) .option('folderId', { type: 'string', describe: 'The ID of a folder to search items in to be archived.', requiresArg: false }) .option('facet', { type: 'string', describe: "Archive content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme." }) .option('revertLog', { type: 'string', describe: 'Path to a log file containing content items unarchived in a previous run of the unarchive command.\nWhen provided, archives all content items listed as UNARCHIVE in the log file.', requiresArg: false }) .alias('f', 'force') .option('f', { type: 'boolean', boolean: true, describe: 'If present, there will be no confirmation prompt before archiving the found content.' }) .alias('s', 'silent') .option('s', { type: 'boolean', boolean: true, describe: 'If present, no log file will be produced.' }) .option('ignoreError', { type: 'boolean', boolean: true, describe: 'If present, archive requests that fail will not abort the process.' 
}) .option('logFile', { type: 'string', default: exports.LOG_FILENAME, describe: 'Path to a log file to write to.', coerce: exports.coerceLog }) .option('name', { type: 'string', hidden: true }) .option('schemaId', { type: 'string', hidden: true }) .option('ignoreSchemaValidation', { type: 'boolean', boolean: false, describe: 'Ignore content item schema validation during archive' }); }; exports.builder = builder; const getContentItems = async ({ client, id, hubId, repoId, folderId, revertLog, facet }) => { try { let contentItems = []; if (revertLog != null) { const log = await new archive_log_1.ArchiveLog().loadFromFile(revertLog); id = log.getData('UNARCHIVE'); } if (id != null) { const itemIds = Array.isArray(id) ? id : [id]; const items = []; for (const id of itemIds) { try { items.push(await client.contentItems.get(id)); } catch { } } contentItems.push(...items.filter(item => item.status === dc_management_sdk_js_1.Status.ACTIVE)); return { contentItems, missingContent: contentItems.length != itemIds.length }; } const hub = await client.hubs.get(hubId); contentItems = await (0, fetch_content_1.getContent)(client, hub, facet, { repoId, folderId, status: dc_management_sdk_js_1.Status.ACTIVE, enrichItems: true }); return { contentItems, missingContent: false }; } catch (err) { console.log(err); return { contentItems: [], missingContent: false }; } }; exports.getContentItems = getContentItems; const processItems = async ({ contentItems, force, silent, logFile, allContent, missingContent, ignoreError, ignoreSchemaValidation }) => { if (contentItems.length == 0) { console.log('Nothing found to archive, aborting.'); return; } console.log('The following content items will be archived:'); contentItems.forEach((contentItem) => { console.log(` ${contentItem.label} (${contentItem.id})`); }); console.log(`Total: ${contentItems.length}`); if (!force) { const yes = await (0, confirm_all_content_1.confirmAllContent)('archive', 'content item', allContent, missingContent); if (!yes) { return; } } const log = logFile.open(); const progress = (0, progress_bar_1.progressBar)(contentItems.length, 0, { title: 'Archiving content items' }); let successCount = 0; for (let i = 0; i < contentItems.length; i++) { try { const deliveryKey = contentItems[i].body._meta.deliveryKey; let args = contentItems[i].id; if (deliveryKey) { contentItems[i].body._meta.deliveryKey = null; const updateParams = { ...(ignoreSchemaValidation ? 
{ ignoreSchemaValidation: true } : {}) }; contentItems[i] = await contentItems[i].related.update(contentItems[i], updateParams); args += ` ${deliveryKey}`; } await contentItems[i].related.archive(); progress.increment(); log.addAction('ARCHIVE', `${args}`); successCount++; } catch (e) { progress.increment(); log.addComment(`ARCHIVE FAILED: ${contentItems[i].id}`); log.addComment(e.toString()); if (ignoreError) { log.warn(`Failed to archive ${contentItems[i].label} (${contentItems[i].id}), continuing.`, e); } else { progress.stop(); log.error(`Failed to archive ${contentItems[i].label} (${contentItems[i].id}), aborting.`, e); break; } } } progress.stop(); await log.close(!silent); console.log(`Archived ${successCount} content items.`); }; exports.processItems = processItems; const handler = async (argv) => { const { id, logFile, force, silent, ignoreError, hubId, revertLog, repoId, folderId, ignoreSchemaValidation } = argv; const client = (0, dynamic_content_client_factory_1.default)(argv); const facet = (0, facet_1.withOldFilters)(argv.facet, argv); const allContent = !id && !facet && !revertLog && !folderId && !repoId; if (repoId && id) { console.log('ID of content item is specified, ignoring repository ID'); } if (id && facet) { console.log('Please specify either a facet or an ID - not both.'); return; } if (repoId && folderId) { console.log('Folder is specified, ignoring repository ID'); } if (allContent) { console.log('No filter was given, archiving all content'); } const { contentItems, missingContent } = await (0, exports.getContentItems)({ client, id, hubId, repoId, folderId, revertLog, facet }); await (0, exports.processItems)({ contentItems, force, silent, logFile, allContent, missingContent, ignoreError, ignoreSchemaValidation }); }; exports.handler = handler;
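
This file exports the pieces of a standard yargs command module (command, desc, builder, handler), so it can be registered on a yargs instance directly. The sketch below is illustrative only: it assumes yargs v17 and a local require path to this compiled file, and is not part of the package itself, where the CLI entry point wires commands up for you.

// Minimal sketch (assumptions: yargs v17, a local './archive' path to this compiled module).
const yargs = require('yargs/yargs');
const { hideBin } = require('yargs/helpers');
const archiveCommand = require('./archive'); // path assumed for illustration

yargs(hideBin(process.argv))
    .command(archiveCommand) // picks up exports.command, exports.desc, exports.builder, exports.handler
    .demandCommand(1)
    .help()
    .parse();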