// @amplience/dc-cli: Dynamic Content CLI Tool
// Compiled "event import" command module.
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = exports.trySaveMapping = exports.importEvents = exports.importEditions = exports.skipScheduleIfNeeded = exports.scheduleEdition = exports.prepareEditionForSchedule = exports.moveDateToFuture = exports.isScheduled = exports.importSlots = exports.rewriteSnapshots = exports.shouldUpdateEdition = exports.shouldUpdateEvent = exports.shouldUpdateSlot = exports.boundTimeRange = exports.builder = exports.getDefaultMappingPath = exports.LOG_FILENAME = exports.desc = exports.command = exports.ScheduleSecondsAllowance = exports.EventSecondsAllowance = exports.EditionSecondsAllowance = exports.InstantSecondsAllowance = void 0;
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const paginator_1 = __importDefault(require("../../common/dc-management-sdk-js/paginator"));
const import_service_1 = require("../../services/import.service");
const log_helpers_1 = require("../../common/log-helpers");
const export_1 = require("./export");
const content_mapping_1 = require("../../common/content-mapping");
const path_1 = require("path");
const content_dependancy_tree_1 = require("../../common/content-item/content-dependancy-tree");
const SnapshotCreator_1 = require("dc-management-sdk-js/build/main/lib/model/SnapshotCreator");
const lodash_1 = require("lodash");
const date_helpers_1 = require("../../common/import/date-helpers");
const event_schedule_error_1 = require("../../common/dc-management-sdk-js/event-schedule-error");
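// Minimum offsets (in seconds) from the current time, used with the date helpers below
// when past dates must be pulled into the near future before creation or scheduling.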
exports.InstantSecondsAllowance = 5;
exports.EditionSecondsAllowance = 5;
exports.EventSecondsAllowance = 60;
exports.ScheduleSecondsAllowance = 5;
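// Typical invocation (assuming the dc-cli binary and an "event" command group; paths are examples only):
//   dc-cli event import ./exported-events --acceptSnapshotLimits --mapFile ./event-mapping.json
// The --acceptSnapshotLimits flag is required; the handler exits early without it.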
exports.command = 'import <dir>';
exports.desc = 'Import Events';
const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('event', 'import', platform);
exports.LOG_FILENAME = LOG_FILENAME;
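// Default mapping file location: <home>/.amplience/imports/<name>.json, where <home> is
// USERPROFILE on Windows, HOME elsewhere, falling back to this module's directory.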
const getDefaultMappingPath = (name, platform = process.platform) => {
return (0, path_1.join)(process.env[platform == 'win32' ? 'USERPROFILE' : 'HOME'] || __dirname, '.amplience', `imports/`, `${name}.json`);
};
exports.getDefaultMappingPath = getDefaultMappingPath;
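// yargs builder: declares the <dir> positional and the command's options
// (acceptSnapshotLimits, mapFile, force, schedule, catchup, originalIds, logFile).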
const builder = (yargs) => {
yargs
.positional('dir', {
describe: 'Directory containing Events',
type: 'string'
})
.option('acceptSnapshotLimits', {
type: 'boolean',
boolean: true,
describe: 'Must be passed to use the event import command. Only use this command if you fully understand its limitations.'
})
.option('mapFile', {
type: 'string',
describe: 'Mapping file to use when updating content that already exists. Updated with any new mappings that are generated. If not present, will be created.'
})
.alias('f', 'force')
.option('f', {
type: 'boolean',
boolean: true,
describe: 'Overwrite existing events, editions, slots and snapshots without asking.'
})
.option('schedule', {
type: 'boolean',
boolean: true,
describe: 'Schedule events in the destination repo if they are scheduled in the source. If any new or updated scheduled events started in the past, they will be moved to happen at the time of import. If they ended in the past, they will be skipped by default.'
})
.option('catchup', {
type: 'boolean',
boolean: true,
describe: 'When scheduling, events that ended in the past will be moved to the current date so that their publishes run.'
})
.option('originalIds', {
type: 'boolean',
boolean: true,
describe: 'Use original ids'
})
.option('logFile', {
type: 'string',
default: exports.LOG_FILENAME,
describe: 'Path to a log file to write to.',
coerce: log_helpers_1.createLog
});
};
exports.builder = builder;
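// Constrains an imported time range against the existing ("real") one: never move the
// end date earlier, and keep the real start date if the imported start is later than it
// or the real range has already started (within the instant allowance).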
const boundTimeRange = (realRange, range) => {
const eventStart = new Date(range.start);
const realEventStart = new Date(realRange.start);
const nowOffset = (0, date_helpers_1.dateOffset)(exports.InstantSecondsAllowance);
if (new Date(range.end) < new Date(realRange.end)) {
range.end = realRange.end;
}
if (eventStart > realEventStart || realEventStart < nowOffset) {
range.start = realRange.start;
}
};
exports.boundTimeRange = boundTimeRange;
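// A slot needs updating when its imported content differs from the existing slot's content.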
const shouldUpdateSlot = (realSlot, slot) => {
return !(0, lodash_1.isEqual)(slot.content, realSlot.content);
};
exports.shouldUpdateSlot = shouldUpdateSlot;
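// Bounds the imported event's time range against the existing event, then reports whether
// any compared field (name, brief, comment, start, end) differs.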
const shouldUpdateEvent = (realEvent, event) => {
(0, exports.boundTimeRange)(realEvent, event);
return (event.name !== realEvent.name ||
event.brief !== realEvent.brief ||
event.comment !== realEvent.comment ||
event.start !== realEvent.start ||
event.end !== realEvent.end);
};
exports.shouldUpdateEvent = shouldUpdateEvent;
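// Bounds the imported edition's time range, then reports whether the edition fields,
// the slot count, or any individual slot's content differ from the existing edition.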
const shouldUpdateEdition = (realEdition, realSlots, edition) => {
(0, exports.boundTimeRange)(realEdition, edition);
return (edition.name !== realEdition.name ||
edition.start !== realEdition.start ||
edition.end !== realEdition.end ||
edition.comment !== realEdition.comment ||
edition.activeEndDate !== realEdition.activeEndDate ||
edition.slots.length != realSlots.length ||
edition.slots.map((x, i) => (0, exports.shouldUpdateSlot)(x, realSlots[i])).reduce((a, b) => a || b, false));
};
exports.shouldUpdateEdition = shouldUpdateEdition;
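// Walks the dependency tree of a slot's content body, creating a generated snapshot for
// each referenced content item that has no mapping yet, and rewrites each dependency to
// point at the snapshot id (and the mapped root content item id).
// Returns true if any new snapshot was created.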
const rewriteSnapshots = async (content, mapping, hub, log) => {
const dummyRepo = new dc_management_sdk_js_1.ContentRepository();
const tree = new content_dependancy_tree_1.ContentDependancyTree([{ repo: dummyRepo, content }], new content_mapping_1.ContentMapping());
const dependencies = tree.all[0].dependancies;
let snapshotCreated = false;
for (const dep of dependencies) {
const entry = dep.dependancy;
let snapshotId = mapping.getSnapshot(entry.id);
const itemId = mapping.getContentItem(entry._meta.rootContentItemId) || entry._meta.rootContentItemId;
if (snapshotId == null) {
const result = await hub.related.snapshots.create([
new dc_management_sdk_js_1.Snapshot({
contentRoot: itemId,
comment: '',
createdFrom: SnapshotCreator_1.SnapshotCreator.ContentItem,
type: dc_management_sdk_js_1.SnapshotType.GENERATED
})
]);
const snapshot = result.snapshots[0];
snapshotId = snapshot.id;
mapping.registerSnapshot(entry.id, snapshotId);
log.addAction('SNAPSHOT-CREATE', snapshotId);
snapshotCreated = true;
}
dep.dependancy.id = snapshotId;
entry._meta.rootContentItemId = itemId;
}
return snapshotCreated;
};
exports.rewriteSnapshots = rewriteSnapshots;
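// Imports the slots of an edition: reuses existing slots via the mapping (or original ids
// when --originalIds is set), creates missing ones, rewrites snapshot references in each
// slot's content, and pushes the content to the slot.
// Returns true if any snapshot was created along the way.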
const importSlots = async (slots, mapping, hub, edition, argv, log) => {
const editionSlots = await (0, paginator_1.default)(edition.related.slots.list);
let snapshot = false;
for (const slot of slots) {
let realSlot = undefined;
const slotId = mapping.getSlot(slot.id);
if (slotId == null) {
if (argv.originalIds && slot.id) {
realSlot = editionSlots.find(editionSlot => editionSlot.id === slot.id);
}
}
else {
realSlot = editionSlots.find(editionSlot => editionSlot.id === slotId);
}
const itemId = mapping.getContentItem(slot.slotId) || slot.slotId;
const updated = realSlot != null;
if (realSlot == null) {
const slotPage = await edition.related.slots.create([{ slot: itemId }]);
const items = slotPage.getItems();
realSlot = items[0];
mapping.registerSlot(slot.id, realSlot.id);
}
snapshot = (await (0, exports.rewriteSnapshots)(slot.content, mapping, hub, log)) || snapshot;
realSlot = await realSlot.related.content(slot.content);
log.addComment(`${updated ? 'Updated' : 'Created'} slot ${realSlot.slotId}.`);
log.addAction(`SLOT-${updated ? 'UPDATE' : 'CREATE'}`, realSlot.id);
}
return snapshot;
};
exports.importSlots = importSlots;
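// An edition counts as scheduled if it is published, publishing, scheduling or scheduled.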
const isScheduled = (edition) => edition.publishingStatus === dc_management_sdk_js_1.PublishingStatus.PUBLISHED ||
edition.publishingStatus === dc_management_sdk_js_1.PublishingStatus.PUBLISHING ||
edition.publishingStatus === dc_management_sdk_js_1.PublishingStatus.SCHEDULING ||
edition.publishingStatus === dc_management_sdk_js_1.PublishingStatus.SCHEDULED;
exports.isScheduled = isScheduled;
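// Moves a date forward to at least the given offset from now. If the adjusted date falls
// after the event's end, the event's end is extended and saved first.
// Returns the adjusted date as an ISO string.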
const moveDateToFuture = async (date, event, offset) => {
const newDate = (0, date_helpers_1.dateMax)(new Date(date), (0, date_helpers_1.dateOffset)(offset));
if (newDate > new Date(event.end)) {
event.end = (0, date_helpers_1.dateMax)((0, date_helpers_1.dateOffset)(exports.EventSecondsAllowance), newDate).toISOString();
await event.related.update(event);
}
return newDate.toISOString();
};
exports.moveDateToFuture = moveDateToFuture;
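// If the edition is scheduled (or force is set), pulls its start and end dates into the
// future so that the schedule request can be accepted.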
const prepareEditionForSchedule = async (edition, event, force = false) => {
if (force || (0, exports.isScheduled)(edition)) {
edition.start = await (0, exports.moveDateToFuture)(edition.start, event, exports.EditionSecondsAllowance);
edition.end = await (0, exports.moveDateToFuture)(edition.end, event, exports.ScheduleSecondsAllowance);
}
};
exports.prepareEditionForSchedule = prepareEditionForSchedule;
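// Attempts to schedule an edition. If the request fails with a structured error response,
// each reported issue is logged as a warning or error and the schedule call is retried
// with its first argument set to true; errors without a structured response are re-thrown.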
const scheduleEdition = async (edition, log) => {
try {
await edition.related.schedule(false, edition.lastModifiedDate);
}
catch (e) {
if (e.response && e.response.data && typeof e.response.data === 'object') {
const warning = new event_schedule_error_1.EditionScheduleStatus(e.response.data);
if (warning.errors) {
for (const error of warning.errors) {
if (error.level === 'WARNING') {
let message = `${error.code}: ${error.message}`;
if (error.overlaps) {
message += ` (${error.overlaps
.map(overlap => `${overlap.name} - ${overlap.editionId} ${overlap.start}`)
.join(', ')})`;
}
log.warn(message);
}
else {
log.error(`${error.code}: ${error.message}`);
}
}
await edition.related.schedule(true, edition.lastModifiedDate);
}
}
else {
throw e;
}
}
};
exports.scheduleEdition = scheduleEdition;
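// Without --catchup, a scheduled edition whose end date is already in the past is marked
// as draft so that it is skipped when scheduling.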
const skipScheduleIfNeeded = (edition, catchup) => {
if (!catchup && (0, exports.isScheduled)(edition) && new Date(edition.end) < new Date()) {
edition.publishingStatus = dc_management_sdk_js_1.PublishingStatus.DRAFT;
}
};
exports.skipScheduleIfNeeded = skipScheduleIfNeeded;
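// Imports the editions of an event: matches existing editions via the mapping (or original
// ids), creates or updates them as needed (unscheduling first where required), imports
// their slots, and reschedules editions that were scheduled in the source when --schedule is set.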
const importEditions = async (editions, mapping, client, hub, event, argv, log) => {
for (const edition of editions) {
let realEdition = null;
const editionId = mapping.getEdition(edition.id);
if (editionId == null) {
if (argv.originalIds && edition.id) {
realEdition = await client.editions.get(edition.id);
}
}
else {
realEdition = await client.editions.get(editionId);
}
const filteredEdition = new dc_management_sdk_js_1.Edition({
name: edition.name,
start: edition.start,
end: edition.end,
comment: edition.comment,
activeEndDate: edition.activeEndDate,
publishingStatus: edition.publishingStatus
});
let update = true;
let schedule = argv.schedule;
(0, exports.skipScheduleIfNeeded)(edition, argv.catchup);
if (realEdition == null) {
await (0, exports.prepareEditionForSchedule)(filteredEdition, event);
realEdition = await event.related.editions.create(filteredEdition);
log.addComment(`Created edition ${realEdition.name}.`);
log.addAction('EDITION-CREATE', realEdition.id);
mapping.registerEdition(edition.id, realEdition.id);
}
else {
const slots = await (0, paginator_1.default)(realEdition.related.slots.list);
if ((0, exports.shouldUpdateEdition)(realEdition, slots, edition) ||
(schedule && !(0, exports.isScheduled)(realEdition) && (0, exports.isScheduled)(edition))) {
filteredEdition.start = edition.start;
filteredEdition.end = edition.end;
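// Scheduled (or currently scheduling) editions are unscheduled before updating;
// poll until the unschedule completes, then reschedule afterwards.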
if (realEdition.publishingStatus == dc_management_sdk_js_1.PublishingStatus.SCHEDULED ||
realEdition.publishingStatus == dc_management_sdk_js_1.PublishingStatus.SCHEDULING) {
try {
await realEdition.related.unschedule();
realEdition.publishingStatus = dc_management_sdk_js_1.PublishingStatus.UNSCHEDULING;
schedule = true;
while (realEdition.publishingStatus === dc_management_sdk_js_1.PublishingStatus.UNSCHEDULING) {
realEdition = await client.editions.get(realEdition.id);
}
}
catch {
update = false;
}
}
else if ((0, exports.isScheduled)(realEdition)) {
update = false;
}
if (update) {
await (0, exports.prepareEditionForSchedule)(filteredEdition, event);
realEdition = await realEdition.related.update(filteredEdition);
log.addComment(`Updated edition ${realEdition.name}.`);
log.addAction('EDITION-UPDATE', realEdition.id);
}
else {
log.appendLine(`Skipped updating ${realEdition.name}, as it has already been published.`);
}
}
else {
update = false;
}
}
let createdSnapshots = false;
if (update) {
createdSnapshots = await (0, exports.importSlots)(edition.slots, mapping, hub, realEdition, argv, log);
}
if (schedule && !(0, exports.isScheduled)(realEdition) && (0, exports.isScheduled)(edition)) {
if (update && edition.slots.length > 0) {
realEdition = await client.editions.get(realEdition.id);
if (createdSnapshots) {
const lastStart = realEdition.start;
await (0, exports.prepareEditionForSchedule)(realEdition, event, true);
if (realEdition.start != lastStart) {
realEdition = await realEdition.related.update(realEdition);
}
}
}
await (0, exports.scheduleEdition)(realEdition, log);
}
}
};
exports.importEditions = importEditions;
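// Imports each event: matches existing events via the mapping (or original ids), creates
// or updates them, then imports their editions sorted by end date.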
const importEvents = async (events, mapping, client, hub, argv, log) => {
for (const event of events) {
let realEvent = null;
const eventId = mapping.getEvent(event.id);
if (eventId == null) {
if (argv.originalIds && event.id) {
realEvent = await client.events.get(event.id);
}
}
else {
realEvent = await client.events.get(eventId);
}
const filteredEvent = new dc_management_sdk_js_1.Event({
name: event.name,
start: event.start,
end: event.end,
comment: event.comment,
brief: event.brief
});
if (realEvent == null) {
realEvent = await hub.related.events.create(filteredEvent);
log.addComment(`Created event ${realEvent.name}.`);
log.addAction('EVENT-CREATE', realEvent.id);
mapping.registerEvent(event.id, realEvent.id);
}
else if ((0, exports.shouldUpdateEvent)(realEvent, event)) {
realEvent = await realEvent.related.update(filteredEvent);
log.addComment(`Updated event ${realEvent.name}.`);
log.addAction('EVENT-UPDATE', realEvent.id);
}
await (0, exports.importEditions)((0, date_helpers_1.sortByEndDate)(event.editions), mapping, client, hub, realEvent, argv, log);
}
};
exports.importEvents = importEvents;
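// Saves the content mapping back to disk, logging (rather than throwing) on failure.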
const trySaveMapping = async (mapFile, mapping, log) => {
if (mapFile != null) {
try {
await mapping.save(mapFile);
}
catch (e) {
log.appendLine(`Failed to save the mapping. ${e.toString()}`);
}
}
};
exports.trySaveMapping = trySaveMapping;
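// Command handler: requires --acceptSnapshotLimits, loads the exported events from <dir>,
// loads or creates the mapping file, imports the events, then saves the updated mapping.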
const handler = async (argv) => {
const { dir, logFile, acceptSnapshotLimits } = argv;
if (!acceptSnapshotLimits) {
console.log('Event import may result in a different state from the export due to snapshots of referenced content items being taken at the time of creation. Only use it if you fully understand its limitations. To use this command, pass the --acceptSnapshotLimits flag.');
return;
}
const client = (0, dynamic_content_client_factory_1.default)(argv);
const log = logFile.open();
const hub = await client.hubs.get(argv.hubId);
const events = await (0, import_service_1.loadJsonFromDirectory)(dir, export_1.EventWithEditions);
const importTitle = `hub-${hub.id}`;
const mapFile = argv.mapFile || (0, exports.getDefaultMappingPath)(importTitle);
const mapping = new content_mapping_1.ContentMapping();
if (await mapping.load(mapFile)) {
log.appendLine(`Existing mapping loaded from '${mapFile}', changes will be saved back to it.`);
}
else {
log.appendLine(`Creating new mapping file at '${mapFile}'.`);
}
try {
await (0, exports.importEvents)((0, date_helpers_1.sortByEndDate)(Object.values(events)), mapping, client, hub, argv, log);
}
catch (e) {
log.error('Failed to import events.', e);
}
await (0, exports.trySaveMapping)(mapFile, mapping, log);
log.appendLine('Done.');
await log.close();
};
exports.handler = handler;