@amplience/dc-cli: event export command (compiled output)

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.handler = exports.filterEvents = exports.processEvents = exports.getEventExports = exports.getExportRecordForEvent = exports.enrichEvents = exports.enrichEditions = exports.locateAndExportSnapshots = exports.locateSnapshots = exports.exportSnapshots = exports.EventWithEditions = exports.EditionWithSlots = exports.builder = exports.LOG_FILENAME = exports.desc = exports.command = void 0; const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory")); const paginator_1 = __importDefault(require("../../common/dc-management-sdk-js/paginator")); const dc_management_sdk_js_1 = require("dc-management-sdk-js"); const table_1 = require("table"); const table_consts_1 = require("../../common/table/table.consts"); const chalk_1 = __importDefault(require("chalk")); const export_service_1 = require("../../services/export.service"); const import_service_1 = require("../../services/import.service"); const directory_utils_1 = require("../../common/import/directory-utils"); const facet_1 = require("../../common/filter/facet"); const log_helpers_1 = require("../../common/log-helpers"); const content_dependancy_tree_1 = require("../../common/content-item/content-dependancy-tree"); const content_mapping_1 = require("../../common/content-mapping"); const path_1 = require("path"); exports.command = 'export <dir>'; exports.desc = 'Export Events'; const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('event', 'export', platform); exports.LOG_FILENAME = LOG_FILENAME; const builder = (yargs) => { yargs .positional('dir', { describe: 'Output directory for the exported Events.', type: 'string' }) .option('id', { describe: 'Export a single event by ID, rather then fetching all of them.', type: 'string' }) .option('fromDate', { describe: 'Start date for filtering events. Either "NOW" or in the format "<number>:<unit>", example: "-7:DAYS".', type: 'string' }) .option('toDate', { describe: 'To date for filtering events. 
Either "NOW" or in the format "<number>:<unit>", example: "-7:DAYS".', type: 'string' }) .option('snapshots', { describe: 'Save content snapshots with events, in subfolder "snapshots/".', type: 'boolean', boolean: true }) .option('logFile', { type: 'string', default: exports.LOG_FILENAME, describe: 'Path to a log file to write to.', coerce: log_helpers_1.createLog }); }; exports.builder = builder; class EditionWithSlots extends dc_management_sdk_js_1.Edition { } exports.EditionWithSlots = EditionWithSlots; class EventWithEditions extends dc_management_sdk_js_1.Event { } exports.EventWithEditions = EventWithEditions; const exportSnapshots = async (client, outputDir, snapshots, log) => { const baseDir = (0, path_1.join)(outputDir, 'snapshots/'); await (0, directory_utils_1.ensureDirectoryExists)(baseDir); log.appendLine(`Saving ${snapshots.size} snapshots to './snapshots/'.`); for (const id of snapshots) { log.appendLine(`Fetching snapshot ${id}.`); let snapshot; let snapshotJson; try { snapshot = await client.snapshots.get(id); snapshotJson = snapshot.toJSON(); const content = await Promise.all(snapshotJson.rootContentItems.map((item) => snapshot.related.snapshotContentItem(item.id))); for (const item of content) { const itemTree = new content_dependancy_tree_1.ContentDependancyTree([{ repo: new dc_management_sdk_js_1.ContentRepository(), content: item }], new content_mapping_1.ContentMapping()); for (const subItem of itemTree.all[0].dependancies) { log.appendLine('... scanning item ' + subItem.dependancy.id + ' ' + subItem.dependancy._meta.schema); try { await snapshot.related.snapshotContentItem(subItem.dependancy.id); log.appendLine('yep'); } catch { log.appendLine('nope!'); } } } snapshotJson.content = content; } catch (e) { log.warn(`Could not fetch snapshot ${id}, continuing: `, e); continue; } const filename = (0, path_1.join)(baseDir, id + '.json'); try { (0, export_service_1.writeJsonToFile)(filename, snapshotJson); } catch (e) { log.warn(`Could not write snapshot ${id}, continuing: `, e); } } }; exports.exportSnapshots = exportSnapshots; const locateSnapshots = (slots, snapshots) => { for (const slot of slots) { if (slot.content.body) { const item = { repo: new dc_management_sdk_js_1.ContentRepository(), content: slot.content }; const tree = new content_dependancy_tree_1.ContentDependancyTree([item], new content_mapping_1.ContentMapping()); const dependencies = tree.all[0].dependancies; for (const link of dependencies) { if (link.dependancy.id) { snapshots.add(link.dependancy.id); } } } } }; exports.locateSnapshots = locateSnapshots; const locateAndExportSnapshots = async (client, outputDir, events, log) => { const snapshots = new Set(); log.appendLine(`Scanning slots for snapshots.`); for (const event of events) { for (const edition of event.editions) { (0, exports.locateSnapshots)(edition.slots, snapshots); } } await (0, exports.exportSnapshots)(client, outputDir, snapshots, log); }; exports.locateAndExportSnapshots = locateAndExportSnapshots; const enrichEditions = async (editions) => { for (const edition of editions) { const withEditions = edition; const slots = await (0, paginator_1.default)(edition.related.slots.list); withEditions.slots = slots; } return editions; }; exports.enrichEditions = enrichEditions; const enrichEvents = async (events, log) => { for (const event of events) { if (log) { log.appendLine(`Fetching ${event.name} with editions.`); } const withEditions = event; try { const editions = await (0, paginator_1.default)(event.related.editions.list); 
withEditions.editions = await (0, exports.enrichEditions)(editions); } catch (e) { if (log) { log.warn(`Failed to fetch editions for ${event.name}, skipping.`, e); } } } const result = events; return result.filter(event => event.editions != undefined); }; exports.enrichEvents = enrichEvents; const getExportRecordForEvent = (event, outputDir, previouslyExportedEvents) => { const indexOfExportedEvent = Object.values(previouslyExportedEvents).findIndex(c => c.id === event.id); if (indexOfExportedEvent < 0) { const filename = (0, export_service_1.uniqueFilenamePath)(outputDir, event.name, 'json', Object.keys(previouslyExportedEvents)); previouslyExportedEvents[filename] = event; return { filename: filename, status: 'CREATED', event }; } const filename = Object.keys(previouslyExportedEvents)[indexOfExportedEvent]; return { filename, status: 'UPDATED', event }; }; exports.getExportRecordForEvent = getExportRecordForEvent; const getEventExports = (outputDir, previouslyExportedEvents, eventsBeingExported) => { const allExports = []; const updatedExportsMap = []; for (const event of eventsBeingExported) { if (!event.id) { continue; } const exportRecord = (0, exports.getExportRecordForEvent)(event, outputDir, previouslyExportedEvents); allExports.push(exportRecord); if (exportRecord.status === 'UPDATED') { updatedExportsMap.push({ uri: event.id, filename: exportRecord.filename }); } } return [allExports, updatedExportsMap]; }; exports.getEventExports = getEventExports; const processEvents = async (outputDir, previouslyExportedEvents, enrichedEvents, log) => { if (enrichedEvents.length === 0) { (0, export_service_1.nothingExportedExit)(log, 'No events to export from this hub, exiting.'); return; } const [allExports, updatedExportsMap] = (0, exports.getEventExports)(outputDir, previouslyExportedEvents, enrichedEvents); if (allExports.length === 0 || (Object.keys(updatedExportsMap).length > 0 && !(await (0, export_service_1.promptToOverwriteExports)(updatedExportsMap, log)))) { (0, export_service_1.nothingExportedExit)(log); return; } await (0, directory_utils_1.ensureDirectoryExists)(outputDir); const tableStream = (0, table_1.createStream)(table_consts_1.streamTableOptions); tableStream.write([chalk_1.default.bold('File'), chalk_1.default.bold('Schema ID'), chalk_1.default.bold('Result')]); for (const { filename, status, event } of allExports) { if (status !== 'UP-TO-DATE') { (0, export_service_1.writeJsonToFile)(filename, event); } tableStream.write([filename, event.name || '', status]); } process.stdout.write('\n'); }; exports.processEvents = processEvents; const filterEvents = (events, from, to) => { return events.filter(event => { const eventStart = new Date(event.start); const eventEnd = new Date(event.end); if (from && eventEnd < from) { return false; } if (to && eventStart > to) { return false; } return true; }); }; exports.filterEvents = filterEvents; const handler = async (argv) => { const { dir, fromDate, toDate, logFile, id, snapshots } = argv; const log = logFile.open(); const from = fromDate === undefined ? undefined : (0, facet_1.relativeDate)(fromDate); const to = toDate === undefined ? 
undefined : (0, facet_1.relativeDate)(toDate); const previouslyExportedEvents = (0, import_service_1.loadJsonFromDirectory)(dir, EventWithEditions); const client = (0, dynamic_content_client_factory_1.default)(argv); let hub; try { hub = await client.hubs.get(argv.hubId); } catch (e) { log.error(`Couldn't get hub with id ${argv.hubId}, aborting.`, e); await log.close(); return; } let filteredEvents; if (id) { try { filteredEvents = [await client.events.get(id)]; log.appendLine(`Exporting single event ${filteredEvents[0].name}.`); } catch (e) { log.error(`Failed to get event with id ${id}, aborting.`, e); await log.close(); return; } } else { try { const storedEvents = await (0, paginator_1.default)(hub.related.events.list); filteredEvents = (0, exports.filterEvents)(storedEvents, from, to); log.appendLine(`Exporting ${filteredEvents.length} of ${storedEvents.length} events...`); } catch (e) { log.error(`Failed to list events.`, e); filteredEvents = []; } } const enrichedEvents = await (0, exports.enrichEvents)(filteredEvents, log); await (0, exports.processEvents)(dir, previouslyExportedEvents, enrichedEvents, log); if (snapshots) { await (0, exports.locateAndExportSnapshots)(client, dir, enrichedEvents, log); } log.appendLine(`Done.`); await log.close(); }; exports.handler = handler;
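
Usage note (a sketch, not taken from the package docs): assuming this module is registered as the "event export" subcommand of the dc-cli binary, as its log path getDefaultLogPath('event', 'export', ...) suggests, and that hub credentials (hubId, client ID/secret) are supplied through the CLI's usual global options or configuration, an invocation built from the options defined in the builder above might look like:

    dc-cli event export ./my-events --fromDate "-7:DAYS" --toDate "NOW" --snapshots

Here "./my-events" and the date values are illustrative. "--id <eventId>" can be passed instead to export a single event, and "--logFile" overrides the default path returned by LOG_FILENAME.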