// @amplience/dc-cli - Dynamic Content CLI Tool
"use strict";
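// Transpiled module for the 'content-item publish' command: it exports the yargs
// command definition (command, desc, builder) plus the handler and its helpers
// (getContentItems, processItems).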
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.handler = exports.processItems = exports.getContentItems = exports.builder = exports.coerceLog = exports.LOG_FILENAME = exports.desc = exports.command = void 0;
const dynamic_content_client_factory_1 = __importDefault(require("../../services/dynamic-content-client-factory"));
const confirm_all_content_1 = require("../../common/content-item/confirm-all-content");
const dc_management_sdk_js_1 = require("dc-management-sdk-js");
const log_helpers_1 = require("../../common/log-helpers");
const facet_1 = require("../../common/filter/facet");
const fetch_content_1 = require("../../common/filter/fetch-content");
const publish_queue_1 = require("../../common/import/publish-queue");
const question_helpers_1 = require("../../common/question-helpers");
const content_dependancy_tree_1 = require("../../common/content-item/content-dependancy-tree");
const content_mapping_1 = require("../../common/content-mapping");
exports.command = 'publish [id]';
exports.desc = 'Publish Content Items';
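// Illustrative invocation (the exact binary name and any required auth flags
// depend on how the CLI is installed and configured):
//   dc-cli content-item publish --facet "label:banner,locale:en-GB" --batchPublish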
const LOG_FILENAME = (platform = process.platform) => (0, log_helpers_1.getDefaultLogPath)('content-item', 'publish', platform);
exports.LOG_FILENAME = LOG_FILENAME;
const coerceLog = (logFile) => (0, log_helpers_1.createLog)(logFile, 'Content Items Publish Log');
exports.coerceLog = coerceLog;
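// LOG_FILENAME builds the platform-specific default log path for this command
// ('content-item'/'publish'), and coerceLog turns a log file path into a log
// writer titled 'Content Items Publish Log'.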
const builder = (yargs) => {
yargs
.positional('id', {
type: 'string',
describe: 'The ID of a content item to be published. If no ID is provided, this command will publish ALL content items across all content repositories in the hub.'
})
.option('repoId', {
type: 'string',
describe: 'The ID of a content repository to search for items to be published.',
requiresArg: false
})
.option('folderId', {
type: 'string',
describe: 'The ID of a folder to search for items to be published.',
requiresArg: false
})
.option('facet', {
type: 'string',
describe: "Publish content matching the given facets. Provide facets in the format 'label:example name,locale:en-GB', spaces are allowed between values. A regex can be provided for text filters, surrounded with forward slashes. For more examples, see the readme."
})
.option('batchPublish', {
type: 'boolean',
boolean: true,
describe: 'Batch publish requests up to the rate limit. (35/min)'
})
.option('publishRateLimit', {
type: 'number',
describe: `Set the number of publishes per minute (max = ${publish_queue_1.MAX_PUBLISH_RATE_LIMIT})`
})
.alias('f', 'force')
.option('f', {
type: 'boolean',
boolean: true,
describe: 'If present, there will be no confirmation prompt before publishing the found content.'
})
.alias('s', 'silent')
.option('s', {
type: 'boolean',
boolean: true,
describe: 'If present, no log file will be produced.'
})
.option('logFile', {
type: 'string',
default: exports.LOG_FILENAME,
describe: 'Path to a log file to write to.',
coerce: exports.coerceLog
})
.option('name', {
type: 'string',
hidden: true
});
};
exports.builder = builder;
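// getContentItems resolves the set of items to publish. When one or more IDs are
// provided, each is fetched individually (IDs that cannot be resolved are skipped)
// and only ACTIVE items are kept; missingContent is true when any requested ID was
// not found. Otherwise all matching items are fetched from the hub using the
// optional facet, repoId and folderId filters.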
const getContentItems = async ({ client, id, hubId, repoId, folderId, facet }) => {
try {
let contentItems = [];
if (id != null) {
const itemIds = Array.isArray(id) ? id : [id];
const items = [];
for (const id of itemIds) {
try {
items.push(await client.contentItems.get(id));
}
catch {
}
}
contentItems.push(...items.filter(item => item.status === dc_management_sdk_js_1.Status.ACTIVE));
return {
contentItems,
missingContent: contentItems.length != itemIds.length
};
}
const hub = await client.hubs.get(hubId);
contentItems = await (0, fetch_content_1.getContent)(client, hub, facet, { repoId, folderId, status: dc_management_sdk_js_1.Status.ACTIVE, enrichItems: true });
return { contentItems, missingContent: false };
}
catch (err) {
console.log(err);
return {
contentItems: [],
missingContent: false
};
}
};
exports.getContentItems = getContentItems;
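// processItems publishes the selected items. It builds a dependency tree and
// filters out items that are dependants of other selected items (counted as
// publishChildren), submitting only the root items. Unless --force is set, the
// user is asked to confirm; publishes are then queued through a rate-limited
// PublishQueue and the command waits for all jobs, logging any unresolved or
// failed ones before closing the log.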
const processItems = async ({ contentItems, force, silent, logFile, allContent, missingContent, argv }) => {
if (contentItems.length == 0) {
console.log('Nothing found to publish, aborting.');
return;
}
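// Wrap each item with a placeholder ContentRepository and build a dependency
// tree; any node reachable as a dependant of another selected item is not a
// top-level (root) item and is excluded from the direct publish list.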
const repoContentItems = contentItems.map(content => ({ repo: new dc_management_sdk_js_1.ContentRepository(), content }));
const contentTree = new content_dependancy_tree_1.ContentDependancyTree(repoContentItems, new content_mapping_1.ContentMapping());
let publishChildren = 0;
const rootContentItems = contentTree.all
.filter(node => {
let isTopLevel = true;
contentTree.traverseDependants(node, dependant => {
if (dependant != node && contentTree.all.findIndex(entry => entry === dependant) !== -1) {
isTopLevel = false;
}
}, true);
if (!isTopLevel) {
publishChildren++;
}
return isTopLevel;
})
.map(node => node.owner.content);
const log = logFile.open();
log.appendLine(`Found ${rootContentItems.length} items to publish. (${publishChildren} children included)`);
if (!force) {
const yes = await (0, confirm_all_content_1.confirmAllContent)('publish', 'content items', allContent, missingContent);
if (!yes) {
return;
}
}
const pubQueue = new publish_queue_1.PublishQueue(argv);
log.appendLine(`Publishing ${rootContentItems.length} items.`);
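// Without --batchPublish, allow only one in-flight publish job at a time.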
if (!argv.batchPublish) {
pubQueue.maxWaiting = 1;
}
for (const item of rootContentItems) {
try {
await pubQueue.publish(item);
log.appendLine(`Initiating publish for "${item.label}"`);
}
catch (e) {
log.appendLine(`Failed to initiate publish for ${item.label}: ${e.toString()}`);
}
}
log.appendLine(`Waiting for all publish jobs to complete...`);
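// Wait for the queue to drain; if jobs remain unresolved after a wait cycle,
// ask the user whether to keep waiting.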
let keepWaiting = true;
while (!pubQueue.isEmpty() && keepWaiting) {
await pubQueue.waitForAll();
if (pubQueue.unresolvedJobs.length > 0) {
keepWaiting = await (0, question_helpers_1.asyncQuestion)('Some publishes are taking longer than expected, would you like to continue waiting? (Y/n)');
}
}
log.appendLine(`Finished publishing, with ${pubQueue.unresolvedJobs.length} unresolved publish jobs`);
pubQueue.unresolvedJobs.forEach(job => {
log.appendLine(` - ${job.item.label}`);
});
log.appendLine(`Finished publishing, with ${pubQueue.failedJobs.length} failed publish jobs`);
pubQueue.failedJobs.forEach(job => {
log.appendLine(` - ${job.item.label}`);
});
log.appendLine(`Publish complete`);
await log.close(!silent);
};
exports.processItems = processItems;
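// handler creates a Dynamic Content client from the CLI arguments, converts any
// legacy filter flags into a facet via withOldFilters, validates mutually
// exclusive options, then fetches the matching items and hands them to
// processItems.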
const handler = async (argv) => {
const { id, logFile, force, silent, hubId, repoId, folderId } = argv;
const client = (0, dynamic_content_client_factory_1.default)(argv);
const facet = (0, facet_1.withOldFilters)(argv.facet, argv);
const allContent = !id && !facet && !folderId && !repoId;
if (repoId && id) {
console.log('ID of content item is specified, ignoring repository ID');
}
if (id && facet) {
console.log('Please specify either a facet or an ID - not both.');
return;
}
if (repoId && folderId) {
console.log('Folder is specified, ignoring repository ID');
}
if (allContent) {
console.log('No filter was given, publishing all content');
}
const { contentItems, missingContent } = await (0, exports.getContentItems)({
client,
id,
hubId,
repoId,
folderId,
facet
});
await (0, exports.processItems)({
contentItems,
force,
silent,
logFile,
allContent,
missingContent,
argv
});
};
exports.handler = handler;