@backstage/plugin-catalog-backend
Version:
The Backstage backend plugin that provides the Backstage catalog
380 lines (374 loc) • 14.5 kB
JavaScript
'use strict';
var catalogModel = require('@backstage/catalog-model');
var errors = require('@backstage/errors');
var stableStringify = require('fast-json-stable-stringify');
var api = require('@opentelemetry/api');
var metrics = require('../util/metrics.cjs.js');
var types = require('../stitching/types.cjs.js');
var TaskPipeline = require('./TaskPipeline.cjs.js');
var opentelemetry = require('../util/opentelemetry.cjs.js');
var deleteOrphanedEntities = require('../database/operations/util/deleteOrphanedEntities.cjs.js');
var constants = require('../constants.cjs.js');
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
var stableStringify__default = /*#__PURE__*/_interopDefaultCompat(stableStringify);
// Number of processing runs that a cached processor state entry survives:
// written as the initial ttl on cache refresh, and counted down on failed
// runs until the cached state is dropped (see updateEntityCache usage below).
const CACHE_TTL = 5;
// Tracer used to wrap each entity processing run in a "ProcessingRun" span.
const tracer = api.trace.getTracer(opentelemetry.TRACER_ID);
/**
 * Drives the catalog's continuous processing loop.
 *
 * The engine repeatedly pulls entities that are due for processing from the
 * processing database, runs them through the processing orchestrator,
 * persists results (processed entity, relations, errors, deferred entities),
 * and triggers stitching of every entity whose output may have changed. It
 * also runs a periodic cleanup of orphaned entities.
 *
 * Lifecycle: call start() once to launch the pipeline and the orphan
 * cleanup; call stop() to tear both down.
 */
class DefaultCatalogProcessingEngine {
config;
scheduler;
logger;
knex;
processingDatabase;
orchestrator;
stitcher;
// Factory for hash builders (update()/digest()) used to fingerprint results.
createHash;
pollingIntervalMs;
orphanCleanupIntervalMs;
// Optional listener invoked (fire-and-forget) when a run produced errors.
onProcessingError;
// Progress/metrics tracker; defaults to the module-local progressTracker().
tracker;
// Optional broker used to publish processing errors to the catalog errors topic.
eventBroker;
// Teardown callback set by start(); undefined while the engine is stopped.
stopFunc;
constructor(options) {
this.config = options.config;
this.scheduler = options.scheduler;
this.logger = options.logger;
this.knex = options.knex;
this.processingDatabase = options.processingDatabase;
this.orchestrator = options.orchestrator;
this.stitcher = options.stitcher;
this.createHash = options.createHash;
// Defaults: poll for new work every 1 s, clean up orphans every 30 s.
this.pollingIntervalMs = options.pollingIntervalMs ?? 1e3;
this.orphanCleanupIntervalMs = options.orphanCleanupIntervalMs ?? 3e4;
this.onProcessingError = options.onProcessingError;
this.tracker = options.tracker ?? progressTracker();
this.eventBroker = options.eventBroker;
this.stopFunc = void 0;
}
/**
 * Starts the processing pipeline and the orphan cleanup task.
 * Throws if the engine is already running.
 */
async start() {
if (this.stopFunc) {
throw new Error("Processing engine is already started");
}
const stopPipeline = this.startPipeline();
const stopCleanup = this.startOrphanCleanup();
this.stopFunc = () => {
stopPipeline();
stopCleanup();
};
}
/**
 * Stops the pipeline and cleanup if running; safe to call when stopped.
 */
async stop() {
if (this.stopFunc) {
this.stopFunc();
this.stopFunc = void 0;
}
}
/**
 * Launches the task pipeline that loads processable entities from the
 * database and processes each one. Returns a function that stops it.
 */
startPipeline() {
return TaskPipeline.startTaskPipeline({
// Refill the in-flight task buffer when it drops below 5, up to 10.
lowWatermark: 5,
highWatermark: 10,
pollingIntervalMs: this.pollingIntervalMs,
loadTasks: async (count) => {
try {
const { items } = await this.processingDatabase.getProcessableEntities(this.knex, {
processBatchSize: count
});
return items;
} catch (error) {
// Best-effort: a failed load is logged and retried on the next poll.
this.logger.warn("Failed to load processing items", error);
return [];
}
},
processTask: async (item) => {
// Each run is wrapped in an OpenTelemetry span tagged with the entity.
await opentelemetry.withActiveSpan(tracer, "ProcessingRun", async (span) => {
const track = this.tracker.processStart(item, this.logger);
opentelemetry.addEntityAttributes(span, item.unprocessedEntity);
try {
const {
id,
state,
unprocessedEntity,
entityRef,
locationKey,
resultHash: previousResultHash
} = item;
const result = await this.orchestrator.process({
entity: unprocessedEntity,
state
});
track.markProcessorsCompleted(result);
if (result.ok) {
// Refresh the processor-state cache only when the state actually
// changed; the ttl bookkeeping field is excluded from the comparison.
const { ttl: _, ...stateWithoutTtl } = state ?? {};
if (stableStringify__default.default(stateWithoutTtl) !== stableStringify__default.default(result.state)) {
await this.processingDatabase.transaction(async (tx) => {
await this.processingDatabase.updateEntityCache(tx, {
id,
state: {
ttl: CACHE_TTL,
...result.state
}
});
});
}
} else {
// Failed run: keep the stale cached state but decrement its ttl,
// dropping it entirely once the ttl is exhausted.
const maybeTtl = state?.ttl;
const ttl = Number.isInteger(maybeTtl) ? maybeTtl : 0;
await this.processingDatabase.transaction(async (tx) => {
await this.processingDatabase.updateEntityCache(tx, {
id,
state: ttl > 0 ? { ...state, ttl: ttl - 1 } : {}
});
});
}
const location = unprocessedEntity?.metadata?.annotations?.[catalogModel.ANNOTATION_LOCATION];
// Surface processing errors to subscribers (if a broker is wired up).
if (result.errors.length) {
this.eventBroker?.publish({
topic: constants.CATALOG_ERRORS_TOPIC,
eventPayload: {
entity: entityRef,
location,
errors: result.errors
}
});
}
const errorsString = JSON.stringify(
result.errors.map((e) => errors.serializeError(e))
);
// Fingerprint the run's full output (errors, and on success also the
// completed entity, deferred entities, relations, refresh keys, and
// parent refs) so unchanged results can short-circuit below.
let hashBuilder = this.createHash().update(errorsString);
if (result.ok) {
const { entityRefs: parents } = await this.processingDatabase.transaction(
(tx) => this.processingDatabase.listParents(tx, {
entityRefs: [
entityRef,
...result.deferredEntities.map(
(e) => catalogModel.stringifyEntityRef(e.entity)
)
]
})
);
hashBuilder = hashBuilder.update(stableStringify__default.default({ ...result.completedEntity })).update(stableStringify__default.default([...result.deferredEntities])).update(stableStringify__default.default([...result.relations])).update(stableStringify__default.default([...result.refreshKeys])).update(stableStringify__default.default([...parents]));
}
const resultHash = hashBuilder.digest("hex");
// Nothing changed since the previous run: skip all writes and stitching.
if (resultHash === previousResultHash) {
track.markSuccessfulWithNoChanges();
return;
}
if (!result.ok) {
// Notify the error listener on a later microtask so a slow or
// throwing listener cannot affect the run; its failures are only
// logged at debug level.
Promise.resolve(void 0).then(
() => this.onProcessingError?.({
unprocessedEntity,
errors: result.errors
})
).catch((error) => {
this.logger.debug(
`Processing error listener threw an exception, ${errors.stringifyError(
error
)}`
);
});
// Persist the errors and new hash, then re-stitch just this entity.
await this.processingDatabase.transaction(async (tx) => {
await this.processingDatabase.updateProcessedEntityErrors(tx, {
id,
errors: errorsString,
resultHash
});
});
await this.stitcher.stitch({
entityRefs: [catalogModel.stringifyEntityRef(unprocessedEntity)]
});
track.markSuccessfulWithErrors();
return;
}
// Successful run with changes: pin the stable uid and persist everything.
result.completedEntity.metadata.uid = id;
let oldRelationSources;
await this.processingDatabase.transaction(async (tx) => {
const { previous } = await this.processingDatabase.updateProcessedEntity(tx, {
id,
processedEntity: result.completedEntity,
resultHash,
errors: errorsString,
relations: result.relations,
deferredEntities: result.deferredEntities,
locationKey,
refreshKeys: result.refreshKeys
});
// Index the pre-update relations by "source:type->target" so the diff
// below can find edges that were added or removed.
oldRelationSources = new Map(
previous.relations.map((r) => [
`${r.source_entity_ref}:${r.type}->${r.target_entity_ref}`,
r.source_entity_ref
])
);
});
const newRelationSources = new Map(
result.relations.map((relation) => {
const sourceEntityRef = catalogModel.stringifyEntityRef(relation.source);
const targetEntityRef = catalogModel.stringifyEntityRef(relation.target);
return [
`${sourceEntityRef}:${relation.type}->${targetEntityRef}`,
sourceEntityRef
];
})
);
// Stitch the processed entity itself plus the source of every relation
// edge that appeared or disappeared in this run.
const setOfThingsToStitch = /* @__PURE__ */ new Set([
catalogModel.stringifyEntityRef(result.completedEntity)
]);
newRelationSources.forEach((sourceEntityRef, uniqueKey) => {
if (!oldRelationSources.has(uniqueKey)) {
setOfThingsToStitch.add(sourceEntityRef);
}
});
oldRelationSources.forEach((sourceEntityRef, uniqueKey) => {
if (!newRelationSources.has(uniqueKey)) {
setOfThingsToStitch.add(sourceEntityRef);
}
});
await this.stitcher.stitch({
entityRefs: setOfThingsToStitch
});
track.markSuccessfulWithChanges();
} catch (error) {
errors.assertError(error);
track.markFailed(error);
}
});
}
});
}
/**
 * Starts the periodic orphaned-entity cleanup, honoring the configured
 * catalog.orphanStrategy (cleanup only runs for the default "delete").
 * Uses the scheduler when available, else a plain setInterval. Returns a
 * function that cancels the cleanup.
 */
startOrphanCleanup() {
const orphanStrategy = this.config.getOptionalString("catalog.orphanStrategy") ?? "delete";
if (orphanStrategy !== "delete") {
return () => {
};
}
const stitchingStrategy = types.stitchingStrategyFromConfig(this.config);
const runOnce = async () => {
try {
const n = await deleteOrphanedEntities.deleteOrphanedEntities({
knex: this.knex,
strategy: stitchingStrategy
});
if (n > 0) {
this.logger.info(`Deleted ${n} orphaned entities`);
}
} catch (error) {
// Best-effort: failures are logged and retried on the next interval.
this.logger.warn(`Failed to delete orphaned entities`, error);
}
};
if (this.scheduler) {
const abortController = new AbortController();
// Cap each run at 80% of the interval so runs cannot pile up.
this.scheduler.scheduleTask({
id: "catalog_orphan_cleanup",
frequency: { milliseconds: this.orphanCleanupIntervalMs },
timeout: { milliseconds: this.orphanCleanupIntervalMs * 0.8 },
fn: runOnce,
signal: abortController.signal
});
return () => {
abortController.abort();
};
}
const intervalKey = setInterval(runOnce, this.orphanCleanupIntervalMs);
return () => {
clearInterval(intervalKey);
};
}
}
/**
 * Builds the default progress tracker, which reports each processing run to
 * both the legacy Prometheus metrics and their OpenTelemetry replacements,
 * and logs run start/failure via the provided logger.
 */
function progressTracker() {
  // Legacy Prometheus metrics, retained for dashboards that have not yet
  // migrated to OpenTelemetry (see the DEPRECATED notes in the help texts).
  const legacyProcessedCount = metrics.createCounterMetric({
    name: "catalog_processed_entities_count",
    help: "Amount of entities processed, DEPRECATED, use OpenTelemetry metrics instead",
    labelNames: ["result"]
  });
  const legacyOverallDuration = metrics.createSummaryMetric({
    name: "catalog_processing_duration_seconds",
    help: "Time spent executing the full processing flow, DEPRECATED, use OpenTelemetry metrics instead",
    labelNames: ["result"]
  });
  const legacyProcessorsDuration = metrics.createSummaryMetric({
    name: "catalog_processors_duration_seconds",
    help: "Time spent executing catalog processors, DEPRECATED, use OpenTelemetry metrics instead",
    labelNames: ["result"]
  });
  const legacyQueueDelay = metrics.createSummaryMetric({
    name: "catalog_processing_queue_delay_seconds",
    help: "The amount of delay between being scheduled for processing, and the start of actually being processed, DEPRECATED, use OpenTelemetry metrics instead"
  });
  // OpenTelemetry counterparts of the metrics above.
  const meter = api.metrics.getMeter("default");
  const otelProcessedCount = meter.createCounter(
    "catalog.processed.entities.count",
    { description: "Amount of entities processed" }
  );
  const otelOverallDuration = meter.createHistogram(
    "catalog.processing.duration",
    {
      description: "Time spent executing the full processing flow",
      unit: "seconds"
    }
  );
  const otelProcessorsDuration = meter.createHistogram(
    "catalog.processors.duration",
    {
      description: "Time spent executing catalog processors",
      unit: "seconds"
    }
  );
  const otelQueueDelay = meter.createHistogram(
    "catalog.processing.queue.delay",
    {
      description: "The amount of delay between being scheduled for processing, and the start of actually being processed",
      unit: "seconds"
    }
  );
  /**
   * Begins tracking a single processing run for the given item. Returns the
   * set of callbacks the engine uses to report how the run progressed.
   */
  function processStart(item, logger) {
    const begunAt = process.hrtime();
    const stopOverallTimer = legacyOverallDuration.startTimer();
    const stopProcessorsTimer = legacyProcessorsDuration.startTimer();
    logger.debug(`Processing ${item.entityRef}`);
    if (item.nextUpdateAt) {
      // How long past its scheduled time this item waited before starting.
      const waited = -item.nextUpdateAt.diffNow().as("seconds");
      legacyQueueDelay.observe(waited);
      otelQueueDelay.record(waited);
    }
    // Seconds elapsed since processStart, with sub-second precision.
    const elapsedSeconds = () => {
      const [secs, nanos] = process.hrtime(begunAt);
      return secs + nanos / 1e9;
    };
    // Records end-to-end duration and bumps the processed-entity counters
    // under the given result label.
    const finish = (result) => {
      stopOverallTimer({ result });
      legacyProcessedCount.inc({ result }, 1);
      otelOverallDuration.record(elapsedSeconds(), { result });
      otelProcessedCount.add(1, { result });
    };
    return {
      markProcessorsCompleted(result) {
        const label = result.ok ? "ok" : "failed";
        stopProcessorsTimer({ result: label });
        otelProcessorsDuration.record(elapsedSeconds(), { result: label });
      },
      markSuccessfulWithNoChanges() {
        finish("unchanged");
      },
      markSuccessfulWithErrors() {
        finish("errors");
      },
      markSuccessfulWithChanges() {
        finish("changed");
      },
      markFailed(error) {
        // Failed runs count toward the totals but record no duration.
        legacyProcessedCount.inc({ result: "failed" }, 1);
        otelProcessedCount.add(1, { result: "failed" });
        logger.warn(`Processing of ${item.entityRef} failed`, error);
      }
    };
  }
  return { processStart };
}
exports.DefaultCatalogProcessingEngine = DefaultCatalogProcessingEngine;
//# sourceMappingURL=DefaultCatalogProcessingEngine.cjs.js.map