// @amplience/dc-cli
// Version: (unspecified)
// Dynamic Content CLI Tool
// 140 lines (139 loc) • 6.6 kB
// JavaScript
// Compiled CommonJS output (generated from TypeScript) for the `content-item sync` command.
"use strict";
// TypeScript emit helper: wraps a CommonJS module so default-imports resolve via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public surface of this yargs command module (assigned further down the file).
exports.handler = exports.builder = exports.desc = exports.command = exports.coerceLog = exports.LOG_FILENAME = void 0;
// Shared logging helpers (default log path + log factory).
const log_helpers_1 = require("../../common/log-helpers");
// Factory producing an authenticated Dynamic Content management API client.
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const facet_1 = require("../../common/filter/facet");
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const fetch_content_1 = require("../../common/filter/fetch-content");
const confirm_all_content_1 = require("../../common/content-item/confirm-all-content");
const progress_bar_1 = require("../../common/progress-bar/progress-bar");
const sync_service_1 = require("./sync.service");
const dedupe_content_items_1 = require("../../common/content-item/dedupe-content-items");
const get_content_items_by_ids_1 = require("../../common/content-item/get-content-items-by-ids");
// Resolve the default log file path for the content-item sync command,
// keyed by the current platform unless one is supplied explicitly.
const LOG_FILENAME = (platform = process.platform) => {
    return (0, log_helpers_1.getDefaultLogPath)('content-item', 'sync', platform);
};
exports.LOG_FILENAME = LOG_FILENAME;
// Turn a --logFile path into an opened, titled log writer instance.
const coerceLog = (logFile) => {
    return (0, log_helpers_1.createLog)(logFile, 'Content Items Sync Log');
};
exports.coerceLog = coerceLog;
// yargs command signature: `sync` with an optional positional content item id.
exports.command = 'sync [id]';
// Short description shown in the CLI help output.
exports.desc = 'Sync Content Items';
/**
 * yargs builder for `content-item sync`: declares the optional positional id
 * and all command options.
 *
 * Fix over previous revision: the help text for --facet and -f was copy-pasted
 * from the publish command ("Publish content…", "before publishing…") and the
 * repoId/folderId descriptions said "to be sync"; all now describe syncing.
 *
 * @param yargs the yargs instance to configure
 */
const builder = (yargs) => {
yargs
.positional('id', {
type: 'string',
describe: `The ID of a content item to sync. If id is not provided, this command will sync ALL content items through all content repositories in the hub.`
})
.option('repoId', {
type: 'string',
describe: 'The ID of a content repository to search items in to be synced.',
requiresArg: false
})
.option('folderId', {
type: 'string',
describe: 'The ID of a folder to search items in to be synced.',
requiresArg: false
})
.option('facet', {
type: 'string',
describe: "Sync content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme."
})
.alias('f', 'force')
.option('f', {
type: 'boolean',
boolean: true,
describe: 'If present, there will be no confirmation prompt before syncing the found content.'
})
.alias('s', 'silent')
.option('s', {
type: 'boolean',
boolean: true,
describe: 'If present, no log file will be produced.'
})
.option('logFile', {
type: 'string',
default: exports.LOG_FILENAME,
describe: 'Path to a log file to write to.',
coerce: exports.coerceLog
})
.option('destinationHubId', {
type: 'string',
describe: 'The ID of a destination hub to sync with.',
requiresArg: true,
demandOption: true
})
.option('ignoreSchemaValidation', {
type: 'boolean',
boolean: true,
describe: 'Ignore schema validation when syncing content items.'
})
.option('forceSync', {
type: 'boolean',
boolean: true,
describe: 'Sync destination content item when modified (overwrite destination modifications).'
});
};
exports.builder = builder;
/**
 * Handler for the `content-item sync` command.
 *
 * Resolves the set of content items to sync (explicit id(s), a
 * facet/folder/repository filter, or the whole hub), optionally prompts for
 * confirmation, queues one sync job per deduplicated item against the
 * destination hub, then waits for all jobs to settle and reports failures.
 *
 * Fix over previous revision: the log opened at the top is now closed (and
 * persisted, unless --silent) on every early-return path, not only on the
 * successful path — previously early exits leaked the open log.
 *
 * @param argv parsed yargs arguments (see `builder` for the option set)
 */
const handler = async (argv) => {
    const { id, logFile, force, silent, hubId, repoId, folderId, destinationHubId, ignoreSchemaValidation, forceSync } = argv;
    const log = logFile.open();
    // Close/flush the log on every exit path; persisted unless --silent.
    const closeLog = () => log.close(!silent);
    const client = (0, dynamic_content_client_factory_1.default)(argv);
    // Merge the --facet option with any legacy filter flags.
    const facet = (0, facet_1.withOldFilters)(argv.facet, argv);
    // No filter of any kind means "sync everything in the hub".
    const allContent = !id && !facet && !folderId && !repoId;
    if (repoId && id) {
        log.appendLine('ID of content item is specified, ignoring repository ID');
    }
    if (id && facet) {
        log.appendLine('Please specify either a facet or an ID - not both');
        await closeLog();
        return;
    }
    if (repoId && folderId) {
        log.appendLine('Folder is specified, ignoring repository ID');
    }
    if (allContent) {
        log.appendLine('No filter was given, syncing all content');
    }
    const hub = await client.hubs.get(hubId);
    // `id` may be a single value or an array when the argument is repeated.
    let ids = [];
    if (id) {
        ids = Array.isArray(id) ? id : [id];
    }
    const contentItems = ids.length > 0
        ? await (0, get_content_items_by_ids_1.getContentByIds)(client, ids)
        : await (0, fetch_content_1.getContent)(client, hub, facet, { repoId, folderId, status: dc_management_sdk_js_1.Status.ACTIVE, enrichItems: true });
    if (!contentItems.length) {
        log.appendLine('Nothing found to sync, aborting');
        await closeLog();
        return;
    }
    // Child items can be reachable through multiple parents; sync each only once.
    const dedupedContentItems = (0, dedupe_content_items_1.dedupeContentItems)(contentItems);
    log.appendLine(`Found ${dedupedContentItems.length} item(s) to sync (ignoring ${contentItems.length - dedupedContentItems.length} duplicate child item(s))`);
    // Only meaningful when explicit ids were requested: were any of them not found?
    const missingContentItems = ids.length > 0 && ids.length !== contentItems.length;
    if (!force) {
        const yes = await (0, confirm_all_content_1.confirmAllContent)('sync', 'content items', allContent, missingContentItems);
        if (!yes) {
            await closeLog();
            return;
        }
    }
    log.appendLine(`Syncing ${dedupedContentItems.length} item(s)`);
    const progress = (0, progress_bar_1.progressBar)(dedupedContentItems.length, 0, { title: 'Syncing content items' });
    const syncService = new sync_service_1.ContentItemSyncService();
    // Queue every sync job; the service runs them and `onIdle` awaits completion.
    dedupedContentItems.forEach(contentItem => {
        log.addComment(`Requesting content item sync: ${contentItem.label}`);
        syncService.sync(destinationHubId, hub, contentItem, (syncJob) => {
            progress.increment();
            const logComment = syncJob.status === 'FAILED'
                ? `Failed content item sync job ${syncJob.id}: ${JSON.stringify(syncJob.errors)}`
                : `Content item synced: ${contentItem.label} (jobId: ${syncJob.id})`;
            log.addComment(logComment);
        }, { ignoreSchemaValidation, forceSync });
    });
    await syncService.onIdle();
    progress.stop();
    const failedJobCount = syncService.failedJobs.length;
    const failedJobsMsg = failedJobCount ? `with ${failedJobCount} failed jobs - check logs for details` : ``;
    log.appendLine(`Sync complete ${failedJobsMsg}`);
    await closeLog();
};
exports.handler = handler;