// @backstage/plugin-catalog-backend
// The Backstage backend plugin that provides the Backstage catalog
var catalogModel = require('@backstage/catalog-model');
var lodash = require('lodash');
var uuid = require('uuid');
var conversion = require('./conversion.cjs.js');
var deleteWithEagerPruningOfChildren = require('./operations/provider/deleteWithEagerPruningOfChildren.cjs.js');
var refreshByRefreshKeys = require('./operations/provider/refreshByRefreshKeys.cjs.js');
var checkLocationKeyConflict = require('./operations/refreshState/checkLocationKeyConflict.cjs.js');
var insertUnprocessedEntity = require('./operations/refreshState/insertUnprocessedEntity.cjs.js');
var updateUnprocessedEntity = require('./operations/refreshState/updateUnprocessedEntity.cjs.js');
var util = require('./util.cjs.js');
var backendPluginApi = require('@backstage/backend-plugin-api');
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
var lodash__default = /*#__PURE__*/_interopDefaultCompat(lodash);
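// Number of rows written per statement when batch-inserting refresh state rows.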
const BATCH_SIZE = 50;
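/**
 * A provider-facing view of the catalog database, used by entity providers
 * to mirror their current set of entities into the `refresh_state` and
 * `refresh_state_references` tables.
 */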
class DefaultProviderDatabase {
constructor(options) {
this.options = options;
}
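/**
 * Runs the given function inside a database transaction and returns its
 * result. Explicit rollbacks do not reject; any other error is logged at
 * debug level and rethrown in normalized form.
 */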
async transaction(fn) {
try {
let result = void 0;
await this.options.database.transaction(
async (tx) => {
result = await fn(tx);
},
{
// If we explicitly trigger a rollback, don't fail.
doNotRejectOnRollback: true
}
);
return result;
} catch (e) {
this.options.logger.debug(`Error during transaction, ${e}`);
throw conversion.rethrowError(e);
}
}
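/**
 * Replaces the set of entities belonging to the provider identified by
 * options.sourceKey, by computing a delta against the stored refresh state
 * and then applying the removals, insertions, and updates it calls for.
 */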
async replaceUnprocessedEntities(txOpaque, options) {
const tx = txOpaque;
const { toAdd, toUpsert, toRemove } = await this.createDelta(tx, options);
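// Removed entities are deleted along with any orphaned children they own.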
if (toRemove.length) {
const removedCount = await deleteWithEagerPruningOfChildren.deleteWithEagerPruningOfChildren({
knex: tx,
entityRefs: toRemove,
sourceKey: options.sourceKey
});
this.options.logger.debug(
`Removed ${removedCount} entities: ${JSON.stringify(toRemove)}`
);
}
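// Fast path: brand new entities are batch-inserted straight into the
// refresh state, along with a reference row tying them to this source.
// A database conflict means the entity ref already exists (for example,
// written concurrently), so the chunk is retried on the slow path below.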
if (toAdd.length) {
for (const chunk of lodash__default.default.chunk(toAdd, BATCH_SIZE)) {
try {
await tx.batchInsert(
"refresh_state",
chunk.map((item) => ({
entity_id: uuid.v4(),
entity_ref: catalogModel.stringifyEntityRef(item.deferred.entity),
unprocessed_entity: JSON.stringify(item.deferred.entity),
unprocessed_hash: item.hash,
errors: "",
location_key: item.deferred.locationKey,
next_update_at: tx.fn.now(),
last_discovery_at: tx.fn.now()
})),
BATCH_SIZE
);
await tx.batchInsert(
"refresh_state_references",
chunk.map((item) => ({
source_key: options.sourceKey,
target_entity_ref: catalogModel.stringifyEntityRef(item.deferred.entity)
})),
BATCH_SIZE
);
} catch (error) {
if (!backendPluginApi.isDatabaseConflictError(error)) {
throw error;
} else {
this.options.logger.debug(
`Fast insert path failed, falling back to slow path, ${error}`
);
toUpsert.push(...chunk);
}
}
}
}
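// Slow path: update-or-insert each entity individually and repoint its
// reference to this source. When neither update nor insert succeeds, the
// entity ref is already claimed under a different location key, so this
// source's reference is dropped and the conflict is logged.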
if (toUpsert.length) {
for (const {
deferred: { entity, locationKey },
hash
} of toUpsert) {
const entityRef = catalogModel.stringifyEntityRef(entity);
try {
let ok = await updateUnprocessedEntity.updateUnprocessedEntity({
tx,
entity,
hash,
locationKey
});
if (!ok) {
ok = await insertUnprocessedEntity.insertUnprocessedEntity({
tx,
entity,
hash,
locationKey,
logger: this.options.logger
});
}
if (ok) {
await tx("refresh_state_references").where("target_entity_ref", entityRef).delete();
await tx(
"refresh_state_references"
).insert({
source_key: options.sourceKey,
target_entity_ref: entityRef
});
} else {
await tx("refresh_state_references").where("target_entity_ref", entityRef).andWhere({ source_key: options.sourceKey }).delete();
const conflictingKey = await checkLocationKeyConflict.checkLocationKeyConflict({
tx,
entityRef,
locationKey
});
if (conflictingKey) {
this.options.logger.warn(
`Source ${options.sourceKey} detected conflicting entityRef ${entityRef} already referenced by ${conflictingKey} and now also ${locationKey}`
);
}
}
} catch (error) {
this.options.logger.error(
`Failed to add '${entityRef}' from source '${options.sourceKey}', ${error}`
);
}
}
}
}
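/**
 * Lists all distinct source keys that currently have entity references
 * registered in the refresh state.
 */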
async listReferenceSourceKeys(txOpaque) {
const tx = txOpaque;
const rows = await tx(
"refresh_state_references"
).distinct("source_key").whereNotNull("source_key");
return rows.map((row) => row.source_key).filter((key) => !!key);
}
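/**
 * Schedules a refresh of all entities whose refresh keys match the given
 * keys.
 */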
async refreshByRefreshKeys(txOpaque, options) {
const tx = txOpaque;
await refreshByRefreshKeys.refreshByRefreshKeys({ tx, keys: options.keys });
}
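/**
 * Computes the sets of entities to add, upsert, and remove. Delta mutations
 * trust the provider's added/removed lists and only consult the stored
 * state to distinguish inserts from updates; full mutations diff the
 * provider's complete item set against everything currently referenced by
 * the source key.
 */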
async createDelta(tx, options) {
if (options.type === "delta") {
const toAdd2 = [];
const toUpsert2 = [];
const toRemove2 = options.removed.map((e) => e.entityRef);
for (const chunk of lodash__default.default.chunk(options.added, 1000)) {
const entityRefs = chunk.map((e) => catalogModel.stringifyEntityRef(e.entity));
const rows = await tx("refresh_state").select(["entity_ref", "unprocessed_hash", "location_key"]).whereIn("entity_ref", entityRefs);
const oldStates = new Map(
rows.map((row) => [
row.entity_ref,
{
unprocessed_hash: row.unprocessed_hash,
location_key: row.location_key
}
])
);
chunk.forEach((deferred, i) => {
const entityRef = entityRefs[i];
const newHash = util.generateStableHash(deferred.entity);
const oldState = oldStates.get(entityRef);
if (oldState === void 0) {
toAdd2.push({ deferred, hash: newHash });
} else if ((deferred.locationKey ?? null) !== (oldState.location_key ?? null)) {
toRemove2.push(entityRef);
toAdd2.push({ deferred, hash: newHash });
} else if (newHash !== oldState.unprocessed_hash) {
toUpsert2.push({ deferred, hash: newHash });
}
});
}
return { toAdd: toAdd2, toUpsert: toUpsert2, toRemove: toRemove2 };
}
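// Full mutation: load every ref currently attributed to this source key,
// together with its stored location key and unprocessed hash.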
const oldRefs = await tx(
"refresh_state_references"
).leftJoin("refresh_state", {
target_entity_ref: "entity_ref"
}).where({ source_key: options.sourceKey }).select({
target_entity_ref: "refresh_state_references.target_entity_ref",
location_key: "refresh_state.location_key",
unprocessed_hash: "refresh_state.unprocessed_hash"
});
const items = options.items.map((deferred) => ({
deferred,
ref: catalogModel.stringifyEntityRef(deferred.entity),
hash: util.generateStableHash(deferred.entity)
}));
const oldRefsMap = new Map(
oldRefs.map((r) => [
r.target_entity_ref,
{
locationKey: r.location_key,
oldEntityHash: r.unprocessed_hash
}
])
);
const newRefsSet = new Set(items.map((item) => item.ref));
const toAdd = [];
const toUpsert = [];
const toRemove = oldRefs.map((row) => row.target_entity_ref).filter((ref) => !newRefsSet.has(ref));
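// Classify each incoming item: unknown refs are added, refs whose location
// key changed are removed and re-added, and refs whose unprocessed hash
// changed are upserted; unchanged items need no work.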
for (const item of items) {
const oldRef = oldRefsMap.get(item.ref);
const upsertItem = { deferred: item.deferred, hash: item.hash };
if (!oldRef) {
toAdd.push(upsertItem);
} else if ((oldRef.locationKey ?? void 0) !== (item.deferred.locationKey ?? void 0)) {
toRemove.push(item.ref);
toAdd.push(upsertItem);
} else if (oldRef.oldEntityHash !== item.hash) {
toUpsert.push(upsertItem);
}
}
return { toAdd, toUpsert, toRemove };
}
}
exports.DefaultProviderDatabase = DefaultProviderDatabase;
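// Illustrative usage — a minimal sketch, not the plugin's documented API.
// `knex` and `logger` are assumed to be a Knex instance and a Backstage
// logger, and `deferredEntities` is a hypothetical array of
// `{ entity, locationKey }` items produced by an entity provider:
//
//   const db = new DefaultProviderDatabase({ database: knex, logger });
//   await db.transaction(async (tx) => {
//     await db.replaceUnprocessedEntities(tx, {
//       sourceKey: 'example-provider',
//       type: 'full',
//       items: deferredEntities,
//     });
//   });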
//# sourceMappingURL=DefaultProviderDatabase.cjs.js.map