// @amplience/dc-cli: Dynamic Content CLI Tool
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = exports.builder = exports.coerceLog = exports.LOG_FILENAME = exports.desc = exports.command = void 0;
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const archive_log_1 = require("../../common/archive/archive-log");
const filter_1 = require("../../common/filter/filter");
const confirm_all_content_1 = require("../../common/content-item/confirm-all-content");
const log_helpers_1 = require("../../common/log-helpers");
const paginate_with_progress_1 = require("../../common/dc-management-sdk-js/paginate-with-progress");
const progress_bar_1 = require("../../common/progress-bar/progress-bar");
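// Archives content type schemas via the Dynamic Content management API.
// Example invocations (a sketch only: the parent command group is assumed to be
// `content-type-schema`, IDs and file names are illustrative, and hub credentials
// are assumed to be supplied by the CLI's global options or configuration):
//
//   dc-cli content-type-schema archive <schema-id>
//   dc-cli content-type-schema archive --schemaId "/.header.\.json/" --force
//   dc-cli content-type-schema archive --revertLog unarchive.log --ignoreError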
exports.command = 'archive [id]';
exports.desc = 'Archive Content Type Schemas';
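// Resolves the default log file path for this command (varies by platform).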
const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('schema', 'archive', platform);
exports.LOG_FILENAME = LOG_FILENAME;
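// Coerces the --logFile argument into a titled log object that the handler later opens.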
const coerceLog = (logFile) => (0, log_helpers_1.createLog)(logFile, 'Content Type Schema Archive Log');
exports.coerceLog = coerceLog;
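// Declares the yargs positional argument and options for the archive command.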
const builder = (yargs) => {
yargs
.positional('id', {
type: 'string',
describe: 'The ID of a schema to be archived. Note that this is different from the schema ID, which is in URL format. If neither this nor schemaId is provided, this command will archive ALL content type schemas in the hub.'
})
.option('schemaId', {
type: 'string',
describe: 'The Schema ID of a Content Type Schema to be archived.\nA regex can be provided to select multiple schemas with similar IDs (e.g. /.header.\\.json/).\nA single --schemaId option may be given to archive a single content type schema.\nMultiple --schemaId options may be given to archive multiple content type schemas at the same time, or even multiple regular expressions.'
})
.option('revertLog', {
type: 'string',
describe: 'Path to a log file containing content unarchived in a previous run of the unarchive command.\nWhen provided, archives all schemas listed as unarchived in the log file.',
requiresArg: false
})
.alias('f', 'force')
.option('f', {
type: 'boolean',
boolean: true,
describe: 'If present, there will be no confirmation prompt before archiving the found content.'
})
.alias('s', 'silent')
.option('s', {
type: 'boolean',
boolean: true,
describe: 'If present, no log file will be produced.'
})
.option('ignoreError', {
type: 'boolean',
boolean: true,
describe: 'If present, archive requests that fail will not abort the process.'
})
.option('logFile', {
type: 'string',
default: exports.LOG_FILENAME,
describe: 'Path to a log file to write to.',
coerce: exports.coerceLog
});
};
exports.builder = builder;
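// Command handler: resolves the set of schemas to archive (by ID, by --schemaId
// filter, from a revert log, or every active schema in the hub), asks for
// confirmation unless --force is given, then archives each schema and records
// the result in the archive log.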
const handler = async (argv) => {
const { id, logFile, force, silent, ignoreError, hubId, revertLog, schemaId } = argv;
const client = (0, dynamic_content_client_factory_1.default)(argv);
if (id != null && schemaId != null) {
console.log('Please specify either a schema ID or an ID - not both.');
return;
}
let schemas;
let allContent = false;
let missingContent = false;
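// Resolve the schemas to archive: explicit IDs are fetched directly; otherwise
// all ACTIVE schemas in the hub are listed and then filtered below.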
if (id != null) {
try {
const schemasIds = Array.isArray(id) ? id : [id];
schemas = await Promise.all(schemasIds.map(id => client.contentTypeSchemas.get(id)));
}
catch (e) {
console.log(`Fatal error: could not find schema with ID ${id}`);
return;
}
}
else {
try {
const hub = await client.hubs.get(hubId);
schemas = await (0, paginate_with_progress_1.paginateWithProgress)(hub.related.contentTypeSchema.list, { status: dc_management_sdk_js_1.Status.ACTIVE }, { title: 'Retrieving active content type schemas' });
}
catch (e) {
console.log(`Fatal error: could not retrieve content type schemas to archive`);
return;
}
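// Narrow the active-schema list: a revert log takes precedence, then any
// --schemaId values (exact match or regex); with neither, archive everything.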
if (revertLog != null) {
try {
const log = await new archive_log_1.ArchiveLog().loadFromFile(revertLog);
const ids = log.getData('UNARCHIVE');
schemas = schemas.filter(schema => ids.indexOf(schema.schemaId) !== -1);
if (schemas.length !== ids.length) {
missingContent = true;
}
}
catch (e) {
console.log(`Fatal error: could not read the unarchive log`);
return;
}
}
else if (schemaId != null) {
const schemaIdArray = Array.isArray(schemaId) ? schemaId : [schemaId];
schemas = schemas.filter(schema => schemaIdArray.findIndex(id => (0, filter_1.equalsOrRegex)(schema.schemaId, id)) !== -1);
}
else {
console.log('No filter, ID or log file was given, so archiving all content.');
allContent = true;
}
}
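// Nothing to do if no schemas matched; otherwise list the targets and confirm
// with the user unless --force was supplied.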
if (schemas.length === 0) {
console.log('Nothing found to archive, aborting.');
return;
}
console.log('The following content will be archived:');
schemas.forEach(schema => {
console.log(' ' + schema.schemaId);
});
if (!force) {
const yes = await (0, confirm_all_content_1.confirmAllContent)('archive', 'content type schema', allContent, missingContent);
if (!yes) {
return;
}
}
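// Archive each schema sequentially, logging an ARCHIVE action per success.
// Failures are logged and either skipped (--ignoreError) or abort the run.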
const log = logFile.open();
const progress = (0, progress_bar_1.progressBar)(schemas.length, 0, { title: 'Archiving content type schemas' });
let successCount = 0;
for (let i = 0; i < schemas.length; i++) {
try {
await schemas[i].related.archive();
progress.increment();
log.addAction('ARCHIVE', `${schemas[i].schemaId}`);
successCount++;
}
catch (e) {
progress.increment();
log.addComment(`ARCHIVE FAILED: ${schemas[i].schemaId}`);
log.addComment(e.toString());
if (ignoreError) {
log.warn(`Failed to archive ${schemas[i].schemaId}, continuing.`, e);
}
else {
log.error(`Failed to archive ${schemas[i].schemaId}, aborting.`, e);
break;
}
}
}
progress.stop();
await log.close(!silent);
console.log(`Archived ${successCount} content type schemas.`);
};
exports.handler = handler;