// @amplience/dc-cli — transpiled command module (served via UNPKG)
// 159 lines (158 loc), 6.85 kB
// yargs command module: `content-type archive`.
// Archives Dynamic Content content types selected by ID, by schema ID
// (exact match or regex), by a previous unarchive log, or — with no filter
// at all — every ACTIVE content type in the hub.
"use strict";
// tsc interop helper: wraps a plain CommonJS export so it can be consumed
// through a `default` property, matching ES-module `import x from ...`.
var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; };
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = exports.builder = exports.coerceLog = exports.LOG_FILENAME = exports.desc = exports.command = void 0;
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const archive_log_1 = require("../../common/archive/archive-log");
const filter_1 = require("../../common/filter/filter");
const confirm_all_content_1 = require("../../common/content-item/confirm-all-content");
const log_helpers_1 = require("../../common/log-helpers");
const paginate_with_progress_1 = require("../../common/dc-management-sdk-js/paginate-with-progress");
const progress_bar_1 = require("../../common/progress-bar/progress-bar");
// yargs command metadata.
exports.command = 'archive [id]';
exports.desc = 'Archive Content Types';
/**
 * Default log-file path for this command.
 * @param {string} [platform=process.platform] - OS platform key forwarded to
 *   getDefaultLogPath; defaulted so it can be overridden in tests.
 * @returns {string} platform-specific default path for the archive log.
 */
const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('type', 'archive', platform);
exports.LOG_FILENAME = LOG_FILENAME;
/**
 * yargs `coerce` hook for --logFile: turns the raw path string into a log
 * object (the handler later calls `.open()` / `.close()` on it).
 * @param {string} logFile - path given on the command line (or the default).
 * @returns the createLog wrapper titled 'Content Type Archive Log'.
 */
const coerceLog = (logFile) => (0, log_helpers_1.createLog)(logFile, 'Content Type Archive Log');
exports.coerceLog = coerceLog;
/**
 * Declares the command's positional argument and options on the yargs
 * instance. Note -f/--force and -s/--silent are registered via alias + option
 * pairs, and --schemaId may be repeated (the handler accepts an array).
 */
const builder = (yargs) => {
    yargs
        .positional('id', {
        type: 'string',
        describe: 'The ID of a content type to be archived. If neither this or schemaId are provided, this command will archive ALL content types in the hub.'
    })
        .option('schemaId', {
        type: 'string',
        describe: "The Schema ID of a Content Type's Schema to be archived.\nA regex can be provided to select multiple types with similar or matching schema IDs (eg /.header.\\.json/).\nA single --schemaId option may be given to match a single content type schema.\nMultiple --schemaId options may be given to match multiple content type schemas at the same time, or even multiple regex."
    })
        .option('revertLog', {
        type: 'string',
        describe: 'Path to a log file containing content unarchived in a previous run of the unarchive command.\nWhen provided, archives all types listed as unarchived in the log file.',
        requiresArg: false
    })
        .alias('f', 'force')
        .option('f', {
        type: 'boolean',
        boolean: true,
        describe: 'If present, there will be no confirmation prompt before archiving the found content.'
    })
        .alias('s', 'silent')
        .option('s', {
        type: 'boolean',
        boolean: true,
        describe: 'If present, no log file will be produced.'
    })
        .option('ignoreError', {
        type: 'boolean',
        boolean: true,
        describe: 'If present, archive requests that fail will not abort the process.'
    })
        .option('logFile', {
        type: 'string',
        default: exports.LOG_FILENAME,
        describe: 'Path to a log file to write to.',
        coerce: exports.coerceLog
    });
};
exports.builder = builder;
/**
 * Command handler: resolves the set of content types to archive, confirms
 * with the user (unless --force), then archives each type in sequence,
 * recording the outcome in the log file.
 *
 * Selection precedence (visible in the branches below): an explicit `id`
 * wins; otherwise all ACTIVE types are fetched and then narrowed by
 * --revertLog, then by --schemaId, else everything is archived.
 * Note: when `id` is given, --revertLog and --schemaId narrowing is skipped
 * entirely (only the id/schemaId combination is rejected up front).
 */
const handler = async (argv) => {
    const { id, logFile, force, silent, ignoreError, revertLog } = argv;
    const { schemaId } = argv;
    const client = (0, dynamic_content_client_factory_1.default)(argv);
    // `id` and `schemaId` are mutually exclusive selectors.
    if (id != null && schemaId != null) {
        console.log('Please specify either a schema ID or an ID - not both.');
        return;
    }
    let types;
    let allContent = false; // true when no filter at all was supplied
    let missingContent = false; // true when a revert log lists ids not found in the hub
    if (id != null) {
        try {
            // yargs may deliver one id (string) or several (array); normalize
            // then fetch each type directly by id, in parallel.
            const typeIds = Array.isArray(id) ? id : [id];
            types = await Promise.all(typeIds.map(id => client.contentTypes.get(id)));
        }
        catch (e) {
            // Promise.all fails fast, so a single bad id aborts the command.
            console.log(`Fatal error: could not find content type with ID ${id}`);
            return;
        }
    }
    else {
        try {
            // No explicit id: page through every ACTIVE content type in the hub,
            // showing a progress bar while fetching.
            const hub = await client.hubs.get(argv.hubId);
            types = await (0, paginate_with_progress_1.paginateWithProgress)(hub.related.contentTypes.list, { status: dc_management_sdk_js_1.Status.ACTIVE }, { title: 'Retrieving active content types' });
        }
        catch (e) {
            console.log(`Fatal error: could not retrieve content types to archive`);
            return;
        }
        if (revertLog != null) {
            try {
                // Keep only the types whose ids appear as UNARCHIVE actions in the
                // prior unarchive run's log.
                const log = await new archive_log_1.ArchiveLog().loadFromFile(revertLog);
                const ids = log.getData('UNARCHIVE');
                types = types.filter(type => ids.indexOf(type.id) !== -1);
                if (types.length !== ids.length) {
                    // Some logged ids were not found among the active types; surface
                    // this in the confirmation prompt.
                    missingContent = true;
                }
            }
            catch (e) {
                console.log(`Fatal error - could not read unarchive log`);
                return;
            }
        }
        else if (schemaId != null) {
            // --schemaId may be given multiple times; each entry is an exact
            // schema URI or a /regex/ matched via equalsOrRegex.
            const schemaIdArray = Array.isArray(schemaId) ? schemaId : [schemaId];
            types = types.filter(type => schemaIdArray.findIndex(id => (0, filter_1.equalsOrRegex)(type.contentTypeUri, id)) !== -1);
        }
        else {
            // No narrowing at all — archive every active type in the hub.
            allContent = true;
            console.log('No filter, ID or log file was given, so archiving all content.');
        }
    }
    if (types.length === 0) {
        console.log('Nothing found to archive, aborting.');
        return;
    }
    // Preview: list the label of each type about to be archived.
    console.log('The following content will be archived:');
    types.forEach(type => {
        const settings = type.settings;
        console.log(' ' + (settings === undefined ? 'unknown' : settings.label));
    });
    // Interactive confirmation, skipped with -f/--force.
    if (!force) {
        const yes = await (0, confirm_all_content_1.confirmAllContent)('archive', 'content types', allContent, missingContent);
        if (!yes) {
            return;
        }
    }
    // `logFile` here is the coerced log object (see coerceLog), not a path.
    const log = logFile.open();
    const progress = (0, progress_bar_1.progressBar)(types.length, 0, { title: 'Archiving content types' });
    let successCount = 0;
    // Archive sequentially so failures can abort the run (unless
    // --ignoreError) and the progress bar advances one item at a time.
    for (let i = 0; i < types.length; i++) {
        const settings = types[i].settings;
        const label = settings === undefined ? 'unknown' : settings.label;
        try {
            await types[i].related.archive();
            progress.increment();
            log.addAction('ARCHIVE', types[i].id || 'unknown');
            successCount++;
        }
        catch (e) {
            // Still count the item in the progress bar, and record the failure
            // in the log before deciding whether to continue.
            progress.increment();
            log.addComment(`ARCHIVE FAILED: ${types[i].id}`);
            log.addComment(e.toString());
            if (ignoreError) {
                log.warn(`Failed to archive ${label}, continuing.`, e);
            }
            else {
                log.error(`Failed to archive ${label}, aborting.`, e);
                break;
            }
        }
    }
    progress.stop();
    // close(!silent): presumably suppresses writing the log file when
    // -s/--silent was given — confirm against log-helpers.
    await log.close(!silent);
    console.log(`Archived ${successCount} content types.`);
};
exports.handler = handler;