// @grouparoo/core: The Grouparoo Core
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.RecordOps = void 0;
const GrouparooRecord_1 = require("../../models/GrouparooRecord");
const RecordProperty_1 = require("../../models/RecordProperty");
const Property_1 = require("../../models/Property");
const Source_1 = require("../../models/Source");
const Group_1 = require("../../models/Group");
const Destination_1 = require("../../models/Destination");
const Export_1 = require("../../models/Export");
const GroupMember_1 = require("../../models/GroupMember");
const actionhero_1 = require("actionhero");
const sequelize_1 = __importStar(require("sequelize"));
const locks_1 = require("../locks");
const recordProperty_1 = require("./recordProperty");
const GroupRule_1 = require("../../models/GroupRule");
const Import_1 = require("../../models/Import");
const Mapping_1 = require("../../models/Mapping");
const source_1 = require("./source");
const GrouparooModel_1 = require("../../models/GrouparooModel");
const cls_1 = require("../cls");
const destination_1 = require("./destination");
const propertiesCache_1 = require("../caches/propertiesCache");
const destinationsCache_1 = require("../caches/destinationsCache");
const modelsCache_1 = require("../caches/modelsCache");
var RecordOps;
(function (RecordOps) {
/**
 * Get the Properties of this GrouparooRecord
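 *
 * @example
 * // A hedged usage sketch; `record` is assumed to be a loaded GrouparooRecord.
 * // The returned hash is keyed by Property key, e.g.:
 * // const hash = await RecordOps.getProperties(record);
 * // hash["email"] => { values: ["person@example.com"], type: "email", unique: true, ... }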
*/
async function getProperties(record) {
const recordProperties = record.recordProperties ||
(await RecordProperty_1.RecordProperty.scope(null).findAll({
where: { recordId: record.id },
order: [["position", "ASC"]],
}));
const properties = await propertiesCache_1.PropertiesCache.findAllWithCache(record.modelId);
const hash = {};
for (const i in recordProperties) {
const property = properties.find((r) => r.id === recordProperties[i].propertyId);
if (!property) {
await recordProperties[i].destroy();
continue;
}
const key = property.key;
if (!hash[key]) {
hash[key] = {
id: recordProperties[i].propertyId,
sourceId: property.sourceId,
state: recordProperties[i].state,
values: [],
invalidValue: recordProperties[i].invalidValue,
invalidReason: recordProperties[i].invalidReason,
configId: property.getConfigId(),
type: property.type,
unique: property.unique,
isPrimaryKey: property.isPrimaryKey,
isArray: property.isArray,
valueChangedAt: recordProperties[i].valueChangedAt,
confirmedAt: recordProperties[i].confirmedAt,
stateChangedAt: recordProperties[i].stateChangedAt,
startedAt: recordProperties[i].startedAt,
createdAt: recordProperties[i].createdAt,
updatedAt: recordProperties[i].updatedAt,
};
}
hash[key].values.push(await recordProperties[i].getValue());
const timeFields = [
"valueChangedAt",
"confirmedAt",
"stateChangedAt",
"createdAt",
"updatedAt",
];
timeFields.forEach((field) => {
if (hash[key][field] < recordProperties[i][field]) {
hash[key][field] = recordProperties[i][field];
}
});
}
return hash;
}
RecordOps.getProperties = getProperties;
/**
 * Search & List GrouparooRecords
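 *
 * @example
 * // A hedged sketch; the property key and value are illustrative.
 * // A searchValue containing "%" is matched with LIKE; otherwise equality is used.
 * // const { records, total } = await RecordOps.search({
 * //   searchKey: "email",
 * //   searchValue: "%@example.com",
 * //   caseSensitive: false,
 * // });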
*/
async function search({ limit, offset, state, groupId, modelId, searchKey, searchValue, order, caseSensitive, }) {
if (!limit)
limit = 100;
if (!offset)
offset = 0;
if (!order)
order = [["createdAt", "asc"]];
if (caseSensitive === undefined || caseSensitive === null)
caseSensitive = true;
const ands = [];
const include = [];
let countRequiresIncludes = false;
// Are we searching for GrouparooRecords in a specific state?
if (state && state !== "invalid") {
ands.push({ state });
}
// Are we searching for a specific RecordProperty?
if (searchKey && searchValue) {
include.push(RecordProperty_1.RecordProperty);
countRequiresIncludes = true;
const property = await propertiesCache_1.PropertiesCache.findOneWithCache(`${searchKey}`, undefined, "ready", "key");
if (!property)
throw new Error(`cannot find a property for ${searchKey}`);
ands.push(sequelize_1.default.where(sequelize_1.default.col("recordProperties.propertyId"), property.id));
if (searchValue.toLowerCase() === "null" || searchValue === "") {
ands.push(sequelize_1.default.where(sequelize_1.default.col("recordProperties.rawValue"), null));
}
else {
const op = searchValue.includes("%") ? sequelize_1.Op.like : sequelize_1.Op.eq;
ands.push(sequelize_1.default.where(!caseSensitive
? sequelize_1.default.fn("LOWER", sequelize_1.default.col("recordProperties.rawValue"))
: sequelize_1.default.col("recordProperties.rawValue"), {
[op]: !caseSensitive
? sequelize_1.default.fn("LOWER", searchValue)
: searchValue,
}));
}
}
// Are we limiting the search to only members of a specific Group?
if (groupId) {
countRequiresIncludes = true;
include.push(GroupMember_1.GroupMember);
ands.push(sequelize_1.default.where(sequelize_1.default.col("groupMembers.groupId"), groupId));
}
// Are we limiting to a certain modelId?
if (modelId) {
ands.push({ modelId });
}
// Are we looking for invalid records?
if (state === "invalid") {
countRequiresIncludes = true;
ands.push({
invalid: true,
});
}
// Load the records in full now that we know the relevant records
const recordIds = (await GrouparooRecord_1.GrouparooRecord.findAll({
attributes: ["id"],
include,
where: { [sequelize_1.Op.and]: ands },
limit,
offset,
order,
subQuery: false,
})).map((p) => p.id);
const records = await GrouparooRecord_1.GrouparooRecord.findAll({
where: { id: recordIds },
order,
include: RecordProperty_1.RecordProperty,
});
const total = await GrouparooRecord_1.GrouparooRecord.count({
include: countRequiresIncludes ? include : undefined,
where: { [sequelize_1.Op.and]: ands },
distinct: true,
});
return { records, total };
}
RecordOps.search = search;
/**
 * Add or Update a Property on GrouparooRecords
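 *
 * `records` and `recordProperties` are parallel arrays: recordProperties[i] is a
 * hash of property keys (or ids) to values, applied to records[i].
 *
 * @example
 * // A hedged sketch; the keys shown are assumptions for illustration.
 * // Array values are only valid for array properties:
 * // await RecordOps.addOrUpdateProperties(
 * //   [recordA, recordB],
 * //   [{ email: "a@example.com" }, { purchaseAmounts: [10, 20] }]
 * // );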
*/
async function addOrUpdateProperties(records, recordProperties, toLock = true, ignoreMissingProperties = false) {
var _a;
if (records.length === 0)
return;
if (records.length !== recordProperties.length) {
throw new Error("GrouparooRecords and RecordProperty arrays are not the same length");
}
const bulkCreates = [];
const bulkDeletes = { where: { id: [] } };
const now = new Date();
// load existing record properties
const existingRecordProperties = await RecordProperty_1.RecordProperty.findAll({
attributes: [
"id",
"recordId",
"propertyId",
"position",
"rawValue",
"valueChangedAt",
],
where: { recordId: { [sequelize_1.Op.in]: records.map((p) => p.id) } },
});
let recordOffset = 0;
for (const record of records) {
const properties = await propertiesCache_1.PropertiesCache.findAllWithCache(record.modelId);
const keys = Object.keys(recordProperties[recordOffset]);
checkKeys: for (const key of keys) {
// this special, internal-only key is used to send extra information through an Import. `_meta` is prevented from being a valid Property key
if (key === "_meta")
continue checkKeys;
const h = {};
h[key] = Array.isArray(recordProperties[recordOffset][key])
? recordProperties[recordOffset][key]
: [recordProperties[recordOffset][key]];
const property = (_a = properties.find((p) => p.id === key)) !== null && _a !== void 0 ? _a : properties.find((p) => p.key === key);
if (!property) {
if (ignoreMissingProperties)
continue;
throw new Error(`cannot find a property for id or key \`${key}\``);
}
// add new GrouparooRecord Properties to batch
let position = 0;
buildQueries: for (const value of h[key]) {
if (position > 0 && !property.isArray) {
throw new Error("cannot set multiple record properties for a non-array property");
}
const existingRecordProperty = existingRecordProperties.find((p) => p.recordId === record.id &&
p.propertyId === property.id &&
p.position === position);
let { rawValue, invalidValue, invalidReason } = await recordProperty_1.RecordPropertyOps.buildRawValue(value, property.type, existingRecordProperty);
const bulkCreate = {
id: existingRecordProperty ? existingRecordProperty.id : undefined,
recordId: record.id,
propertyId: property.id,
position,
rawValue,
invalidValue,
invalidReason,
state: "ready",
stateChangedAt: now,
confirmedAt: now,
startedAt: null,
valueChangedAt: !existingRecordProperty ||
!existingRecordProperty.valueChangedAt ||
!existingRecordProperty.rawValue ||
rawValue !== existingRecordProperty.rawValue
? now
: existingRecordProperty.valueChangedAt,
unique: property.unique,
};
const matchIdx = bulkCreates.findIndex((bc) => bc.recordId === record.id &&
bc.propertyId === property.id &&
bc.position === position);
if (matchIdx >= 0) {
bulkCreates.splice(matchIdx, 1, bulkCreate);
}
else {
bulkCreates.push(bulkCreate);
}
position++;
}
// delete old entries (for array properties) whose position is at or beyond the count of values we just wrote
existingRecordProperties
.filter((p) => p.recordId === record.id &&
p.propertyId === property.id &&
p.position >= position)
.map((p) => bulkDeletes.where.id.push(p.id));
}
recordOffset++;
}
// TODO: This select may be slower than allowing the upsert to throw like we used to
// However, the previous upsert method was really hard to deal with because it would throw and ruin/end the transaction
const duplicateRecordRawValues = bulkCreates
.filter((bc) => bc.unique && !bc.invalidValue)
.map((bc) => bc.rawValue);
const duplicateRecordPropertyMatches = duplicateRecordRawValues.length > 0
? await RecordProperty_1.RecordProperty.findAll({
attributes: ["rawValue", "recordId", "propertyId"],
where: {
rawValue: duplicateRecordRawValues,
unique: true,
},
})
: [];
for (const duplicate of duplicateRecordPropertyMatches) {
const getMatchIdx = () => bulkCreates.findIndex((bc) => !bc.invalidValue &&
bc.rawValue === duplicate.rawValue &&
bc.propertyId === duplicate.propertyId &&
bc.recordId !== duplicate.recordId);
let matchIdx = getMatchIdx();
while (matchIdx >= 0) {
bulkCreates[matchIdx].invalidReason = RecordProperty_1.InvalidReasons.Duplicate;
bulkCreates[matchIdx].invalidValue = bulkCreates[matchIdx].rawValue;
bulkCreates[matchIdx].rawValue = null;
matchIdx = getMatchIdx();
}
}
while (bulkCreates.length > 0) {
await RecordProperty_1.RecordProperty.bulkCreate(bulkCreates.splice(0, actionhero_1.config.batchSize.internalWrite), {
updateOnDuplicate: [
"state",
"unique",
"stateChangedAt",
"confirmedAt",
"valueChangedAt",
"startedAt",
"rawValue",
"invalidValue",
"invalidReason",
"updatedAt",
],
});
}
if (bulkDeletes.where.id.length > 0) {
await RecordProperty_1.RecordProperty.destroy(bulkDeletes);
}
}
RecordOps.addOrUpdateProperties = addOrUpdateProperties;
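/**
 * Mark the pending RecordProperties of a GrouparooRecord as "ready", clearing
 * their raw values, optionally limited to the Properties of a single Source.
 */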
async function resolvePendingProperties(record, sourceId) {
const pendingProperties = await RecordProperty_1.RecordProperty.findAll({
where: { recordId: record.id, state: "pending" },
});
const clearRecordPropertyIds = [];
for (let recordProperty of pendingProperties) {
const property = await propertiesCache_1.PropertiesCache.findOneWithCache(recordProperty.propertyId, record.modelId, "ready");
if (!sourceId || property.sourceId === sourceId) {
clearRecordPropertyIds.push(recordProperty.id);
}
}
await RecordProperty_1.RecordProperty.update({
state: "ready",
rawValue: null,
stateChangedAt: new Date(),
valueChangedAt: new Date(),
confirmedAt: new Date(),
}, { where: { id: clearRecordPropertyIds } });
}
/**
 * Remove a Property on this GrouparooRecord
*/
async function removeProperty(record, key) {
const property = await Property_1.Property.findOne({ where: { key } });
if (!property)
return;
return RecordProperty_1.RecordProperty.destroy({
where: { recordId: record.id, propertyId: property.id },
});
}
RecordOps.removeProperty = removeProperty;
/**
 * Remove the given Properties from this GrouparooRecord
*/
async function removeProperties(record, properties) {
for (const i in properties) {
await record.removeProperty(properties[i]);
}
}
RecordOps.removeProperties = removeProperties;
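/**
 * Create placeholder (null) RecordProperties for every ready Property these
 * GrouparooRecords are missing. Returns the number of RecordProperties built.
 */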
async function buildNullProperties(records, state = "pending", skipPropertyLookup = false) {
const bulkArgs = [];
const now = new Date();
for (const record of records) {
const properties = await propertiesCache_1.PropertiesCache.findAllWithCache(record.modelId, "ready");
const recordProperties = skipPropertyLookup
? {}
: await record.getProperties();
for (const key in properties) {
const property = properties[key];
if (!recordProperties[property.key]) {
bulkArgs.push({
recordId: record.id,
propertyId: property.id,
state,
stateChangedAt: now,
valueChangedAt: now,
confirmedAt: now,
});
}
}
}
const total = bulkArgs.length;
while (bulkArgs.length > 0) {
await RecordProperty_1.RecordProperty.bulkCreate(bulkArgs.splice(0, actionhero_1.config.batchSize.internalWrite));
}
return total;
}
RecordOps.buildNullProperties = buildNullProperties;
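/**
 * Recalculate Group membership for these GrouparooRecords against every
 * non-draft Group. Returns a hash of { [recordId]: { [groupId]: belongs } }.
 */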
async function updateGroupMemberships(records) {
const results = {};
const groups = await Group_1.Group.scope("notDraft").findAll({
include: [GroupRule_1.GroupRule],
});
for (const record of records)
results[record.id] = {};
for (const group of groups) {
const belongs = await group.updateRecordsMembership(records);
for (const recordId of Object.keys(belongs)) {
results[recordId][group.id] = belongs[recordId];
}
}
return results;
}
RecordOps.updateGroupMemberships = updateGroupMemberships;
/**
 * Import the properties of this GrouparooRecord
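 *
 * Sources are imported in dependency order so a property loaded from one
 * source can be used as the mapping value for the next.
 *
 * @example
 * // A hedged sketch: skip the lock when the caller already holds it.
 * // await RecordOps._import(record, true, false); // toSave = true, toLock = false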
*/
async function _import(record, toSave = true, toLock = true) {
let releaseLock;
if (toLock) {
const lockObject = await (0, locks_1.waitForLock)(`record:${record.id}`);
releaseLock = lockObject.releaseLock;
}
try {
const sources = await Source_1.Source.findAll({
where: { state: "ready", modelId: record.modelId },
include: [Mapping_1.Mapping, Property_1.Property],
});
const sortedSources = source_1.SourceOps.sortByDependencies(sources);
for (const source of sortedSources) {
const { canImport, properties } = await source.import(record);
// We need to save each property as it is loaded so it can be used as a mapping for the next source
// We also don't want to save more than one recordProperty at a time so we can isolate any problem values
if (canImport && toSave) {
for (const [k, property] of Object.entries(properties)) {
await addOrUpdateProperties([record], [{ [k]: property }], false);
}
await resolvePendingProperties(record, source.id);
}
}
if (toSave) {
await buildNullProperties([record]);
await RecordProperty_1.RecordProperty.update({ state: "ready" }, { where: { recordId: record.id } });
}
return record;
}
finally {
if (toLock)
await releaseLock();
}
}
RecordOps._import = _import;
/**
 * Export this GrouparooRecord to all relevant Destinations
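 *
 * @example
 * // A hedged sketch; the arguments shown are the typical non-delete case:
 * // await RecordOps._export(record, false, [], true, true, false);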
*/
async function _export(record, force = false, additionalGroups = [], saveExports = true, sync = true, toDelete) {
const groups = [...additionalGroups, ...(await record.$get("groups"))];
const destinations = await destination_1.DestinationOps.relevantFor(record, groups);
// We want to find destinations which aren't in the above set and already have an Export for this GrouparooRecord.
// That's a sign that the GrouparooRecord is about to get a toDelete export
const existingExportNotDeleted = await actionhero_1.api.sequelize.query(`
SELECT * from "exports"
JOIN (
SELECT "destinationId", MAX("createdAt") as "createdAt"
FROM "exports"
WHERE "recordId" = '${record.id}'
GROUP BY "destinationId"
) AS "latest"
ON "latest"."destinationId" = "exports"."destinationId" AND "latest"."createdAt" = "exports"."createdAt"
WHERE "recordId" = '${record.id}'
AND "toDelete" = false
;
`, {
type: sequelize_1.QueryTypes.SELECT,
model: Export_1.Export,
});
for (const _export of existingExportNotDeleted) {
if (!destinations.map((d) => d.id).includes(_export.destinationId)) {
const destination = await Destination_1.Destination.findOne({
where: { id: _export.destinationId, state: "ready" },
});
// the destination may have been deleted or become not-ready
if (destination)
destinations.push(destination);
}
}
return Promise.all(destinations.map((destination) => destination.exportRecord(record, sync, // sync = true -> do the export in-line
force, // force = true -> do the export even if it looks like the data hasn't changed
saveExports, // saveExports = true -> should we really save these exports, or do we just want examples for the next export?
toDelete // are we deleting this record and should we ensure that all exports are toDelete=true?
)));
}
RecordOps._export = _export;
/**
 * Fully Import and Export a record
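 *
 * Marks the record pending, imports it, recalculates group membership, marks
 * it ready, and exports it. The record's old groups are included so
 * destinations it just left still receive the export.
 *
 * @example
 * // const exports = await RecordOps.sync(record); // force defaults to true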
*/
async function sync(record, force = true, toExport = true) {
const oldGroups = await record.$get("groups");
await record.markPending();
await record.import();
await record.updateGroupMembership();
await record.update({ state: "ready" });
return RecordOps._export(record, force, oldGroups, toExport);
}
RecordOps.sync = sync;
/**
 * This is used for manually importing Records from the UI
 *
 * **WARNING**: This method expects NOT to be used within a CLS-wrapped context
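 *
 * @example
 * // Each response entry reports per-record success:
 * // const response = await RecordOps.opportunisticallyImportAndUpdateInline(records);
 * // response[0] => { recordId: records[0].id, success: true, error: undefined }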
*/
async function opportunisticallyImportAndUpdateInline(records) {
const response = [];
if (records.length === 0)
return response;
await cls_1.CLS.wrap(async () => {
await RecordOps.buildNullProperties(records);
await RecordOps.markPendingByIds(records.map((r) => r.id));
});
// This method should not be wrapped in a transaction because we want to allow multiple sources to throw and recover their imports
// We cannot Promise.all because SQLite cannot write in parallel
for (const record of records) {
let errorMessage;
try {
await record.import();
}
catch (error) {
errorMessage = error.message;
}
finally {
response.push({
recordId: record.id,
success: errorMessage ? false : true,
error: errorMessage,
});
}
}
await cls_1.CLS.wrap(async () => {
const successfulRecordIds = response
.filter((r) => r.success)
.map((r) => r.recordId);
const successfulRecords = records.filter((r) => successfulRecordIds.includes(r.id));
if (successfulRecords.length > 0) {
await RecordOps.computeRecordsValidity(successfulRecords);
await GrouparooRecord_1.GrouparooRecord.update({ state: "ready" }, { where: { id: successfulRecords.map((r) => r.id) } });
await RecordOps.updateGroupMemberships(successfulRecords);
}
});
return response;
}
RecordOps.opportunisticallyImportAndUpdateInline = opportunisticallyImportAndUpdateInline;
/**
 * The method you'll be using to create records with arbitrary data.
 * Hash looks like {email: "person@example.com", id: 123}
 *
 * This method today always returns a record by finding it or making a new one... unless it throws because the source isn't allowed to make new records.
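 *
 * @example
 * // A hedged sketch; `source` is assumed to be a ready Source that owns a
 * // unique "email" property. `hashes` and `referenceIds` are parallel arrays:
 * // const response = await RecordOps.findOrCreateByUniqueRecordProperties(
 * //   [{ email: "person@example.com" }],
 * //   ["import-1"],
 * //   source
 * // );
 * // response[0] => { referenceId, record, isNew, hash, uniquePropertiesHash, error }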
*/
async function findOrCreateByUniqueRecordProperties(hashes, referenceIds, source, includeAllProperties = false) {
const response = [];
const uniqueProperties = (await propertiesCache_1.PropertiesCache.findAllWithCache(source instanceof Source_1.Source ? source.modelId : undefined, "ready")).filter((p) => p.unique);
let i = 0;
const rawValues = [];
for (const hash of hashes) {
const uniquePropertiesHash = {};
uniqueProperties.forEach((property) => {
if ((hash[property.key] !== null && hash[property.key] !== undefined) ||
(hash[property.id] !== null && hash[property.id] !== undefined)) {
uniquePropertiesHash[property.id] = Array.isArray(hash[property.key])
? hash[property.key].map((v) => String(v))
: [String(hash[property.key])];
Array.isArray(hash[property.key])
? hash[property.key].forEach((v) => rawValues.push(String(v)))
: rawValues.push(String(hash[property.key]));
}
});
let error;
if (Object.keys(uniquePropertiesHash).length === 0) {
error = Error(`there are no unique record properties provided in ${JSON.stringify(hash)} (${referenceIds[i]})`);
}
response.push({
referenceId: referenceIds[i],
record: undefined,
isNew: undefined,
hash,
uniquePropertiesHash,
error,
});
i++;
}
const recordProperties = await RecordProperty_1.RecordProperty.findAll({
where: {
propertyId: uniqueProperties.map((p) => p.id),
unique: true,
rawValue: rawValues,
},
include: [GrouparooRecord_1.GrouparooRecord],
});
const properties = await propertiesCache_1.PropertiesCache.findAllWithCache(source instanceof Source_1.Source ? source.modelId : undefined, "ready");
for (const data of response) {
if (data.error)
continue;
// the record already exists in the DB and we can find it by property value
for (const [propertyId, matchValues] of Object.entries(data.uniquePropertiesHash)) {
const recordProperty = recordProperties.find((rp) => propertyId === rp.propertyId && matchValues.includes(rp.rawValue));
if (recordProperty) {
data.record = recordProperty.record;
data.isNew = false;
}
}
// another import in this batch has already created the record
if (!data.record) {
const match = response.find((r) => r.referenceId !== data.referenceId &&
r.isNew === true &&
JSON.stringify(r.uniquePropertiesHash) ===
JSON.stringify(data.uniquePropertiesHash));
if (match) {
data.record = match.record;
data.isNew = false;
}
}
// we need to create a new record
if (!data.record) {
let modelId;
if (source instanceof Source_1.Source) {
modelId = source.modelId;
}
else {
const models = await modelsCache_1.ModelsCache.findAllWithCache();
if (models.length > 1)
data.error = Error(`indeterminate model`);
modelId = models[0].id;
}
const canCreateNewRecord = typeof source === "boolean"
? source
: source instanceof Source_1.Source
? properties
.filter((p) => p.source.modelId === modelId &&
p.unique === true &&
p.sourceId === source.id)
.map((p) => p.key)
.filter((key) => !!data.hash[key]).length > 0
: false;
if (!canCreateNewRecord) {
data.error = Error(`could not create a new record because no record property in ${JSON.stringify(data.hash)} is unique and owned by the source`);
}
// we want to avoid running buildNullProperties per-record; we will do it in bulk below
const record = GrouparooRecord_1.GrouparooRecord.build({ modelId });
GrouparooRecord_1.GrouparooRecord.generateId(record);
data.record = record;
data.isNew = true;
}
}
const newRecords = await GrouparooRecord_1.GrouparooRecord.bulkCreate(response
.filter((d) => d.isNew)
.map((d) => {
return {
id: d.record.id,
state: d.record.state,
modelId: d.record.modelId,
};
}));
for (const data of response) {
if (data.isNew) {
data.record = newRecords.find((r) => r.id === data.record.id);
}
}
await buildNullProperties(response.filter((d) => !d.error && d.isNew).map((d) => d.record), undefined, true);
await addOrUpdateProperties(response.filter((d) => !d.error).map((d) => d.record), response
.filter((d) => !d.error)
.map((d) => (includeAllProperties ? d.hash : d.uniquePropertiesHash)), false, true);
return response;
}
RecordOps.findOrCreateByUniqueRecordProperties = findOrCreateByUniqueRecordProperties;
/**
 * Mark many GrouparooRecords and all of their properties as pending
*/
async function markPendingByIds(recordIds, includeProperties = true) {
if (recordIds.length === 0)
return;
await GrouparooRecord_1.GrouparooRecord.update({ state: "pending" }, { where: { id: { [sequelize_1.Op.in]: recordIds } } });
if (includeProperties) {
await RecordProperty_1.RecordProperty.update({ state: "pending", startedAt: null }, { where: { recordId: { [sequelize_1.Op.in]: recordIds } } });
}
}
RecordOps.markPendingByIds = markPendingByIds;
/**
 * Look for records by model that don't have a primary key property and are done importing/exporting.
*/
async function getRecordsToDestroy() {
const limit = actionhero_1.config.batchSize.imports;
let recordsToDestroy = [];
let models = await GrouparooModel_1.GrouparooModel.scope(null).findAll();
let modelIdsToClear = [];
for (const model of models) {
const propertiesByModel = await propertiesCache_1.PropertiesCache.findAllWithCache(model.id, "ready");
const primaryKeyProperties = propertiesByModel.filter((property) => {
return property.isPrimaryKey == true;
});
if (primaryKeyProperties.length === 0) {
modelIdsToClear.push(model.id);
}
}
for (const modelId of modelIdsToClear) {
// We have no directly mapped Property and every record for this model should be removed
// It's safe to assume that if there are no Properties, we aren't exporting
recordsToDestroy = await GrouparooRecord_1.GrouparooRecord.findAll({
attributes: ["id"],
where: { state: ["ready", "deleted"], modelId: modelId },
limit,
});
}
// Also search all records for a "null" value in the directly mapped property
return recordsToDestroy.concat(await actionhero_1.api.sequelize.query(`
SELECT "id" FROM "records"
WHERE "state" IN ('ready', 'deleted')
AND "id" IN (
SELECT DISTINCT("recordId") FROM "recordProperties"
JOIN properties ON "properties"."id"="recordProperties"."propertyId"
WHERE
"properties"."isPrimaryKey"=true
AND "rawValue" IS NULL
)
LIMIT ${limit};
`, {
model: GrouparooRecord_1.GrouparooRecord,
}));
}
RecordOps.getRecordsToDestroy = getRecordsToDestroy;
/**
 * Import records whose primary key property has not been confirmed since a given date.
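 *
 * @example
 * // A hedged sketch: re-confirm up to 100 records not checked in the last day.
 * // const dayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
 * // const count = await RecordOps.confirmExistence(100, dayAgo);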
*/
async function confirmExistence(limit, fromDate, sourceId) {
const primaryKeyProperties = sourceId
? await Property_1.Property.findAll({
where: { isPrimaryKey: true, sourceId },
})
: await Property_1.Property.findAll({
where: { isPrimaryKey: true },
});
const recordProperties = await RecordProperty_1.RecordProperty.findAll({
where: {
state: "ready",
confirmedAt: {
[sequelize_1.Op.lt]: fromDate,
},
rawValue: {
[sequelize_1.Op.ne]: null,
},
propertyId: primaryKeyProperties.map((p) => p.id),
},
limit,
});
const recordIds = recordProperties.map((pp) => pp.recordId);
// Only mark record and primary key property pending
await markPendingByIds(recordIds, false);
await RecordProperty_1.RecordProperty.update({ state: "pending", startedAt: null }, { where: { id: recordProperties.map((pp) => pp.id) } });
const uniqueRecordIds = recordIds.filter((val, idx, arr) => arr.indexOf(val) === idx);
return uniqueRecordIds.length;
}
RecordOps.confirmExistence = confirmExistence;
/**
 * Merge 2 GrouparooRecords, favoring the first GrouparooRecord
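 *
 * Both records are locked while merging. The other record's values are kept
 * only when the first record lacks the property or the other's values are
 * newer and non-null; the other record is then destroyed.
 *
 * @example
 * // const keptRecord = await RecordOps.merge(record, duplicateRecord);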
*/
async function merge(record, otherRecord) {
var _a, _b, _c, _d;
const { releaseLock: releaseLockForRecord } = await (0, locks_1.waitForLock)(`record:${record.id}`);
const { releaseLock: releaseLockForOtherRecord } = await (0, locks_1.waitForLock)(`record:${otherRecord.id}`);
try {
// transfer properties, keeping the newest values
const properties = await record.getProperties();
const otherProperties = await otherRecord.getProperties();
const newProperties = {};
for (const key in otherProperties) {
if (!properties[key] ||
(((_b = (_a = otherProperties[key]) === null || _a === void 0 ? void 0 : _a.updatedAt) === null || _b === void 0 ? void 0 : _b.getTime()) >
((_d = (_c = properties[key]) === null || _c === void 0 ? void 0 : _c.updatedAt) === null || _d === void 0 ? void 0 : _d.getTime()) &&
otherProperties[key].values.length > 0 &&
!(otherProperties[key].values.length === 1 &&
(otherProperties[key].values[0] === null ||
otherProperties[key].values[0] === undefined)))) {
newProperties[key] = otherProperties[key].values;
}
}
// delete other record so unique record properties will be available
await otherRecord.destroy();
await addOrUpdateProperties([record], [newProperties], false);
// re-import and update groups
delete record.recordProperties;
await record.buildNullProperties();
await record.markPending();
return record;
}
finally {
await releaseLockForRecord();
await releaseLockForOtherRecord();
}
}
RecordOps.merge = merge;
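/**
 * Reset the invalid flag for these GrouparooRecords, then mark invalid any
 * record that still has a RecordProperty with an invalidReason set.
 */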
async function computeRecordsValidity(records) {
const recordIds = records.map((r) => r.id);
await GrouparooRecord_1.GrouparooRecord.update({ invalid: false }, { where: { id: { [sequelize_1.Op.in]: recordIds } } });
// Update records to invalid if any associated properties are invalid.
await actionhero_1.api.sequelize.query(`
UPDATE
"records"
SET
"invalid" = TRUE
WHERE
"records"."id" IN(
SELECT
"recordProperties"."recordId" FROM "recordProperties"
WHERE
"recordProperties"."recordId" IN(${recordIds.map((id) => `'${id}'`)})
AND "recordProperties"."invalidReason" IS NOT NULL);
`);
}
RecordOps.computeRecordsValidity = computeRecordsValidity;
/**
 * Find records that are not ready but whose properties are, and make them ready.
 * Then, process the related imports.
*/
async function makeReady(limit = 100) {
const partialRecords = await actionhero_1.api.sequelize.query(`
SELECT id from "records" where "state" = 'pending'
EXCEPT
SELECT DISTINCT("recordId") FROM "recordProperties" WHERE "state" = 'pending'
LIMIT ${limit};
`, {
type: sequelize_1.QueryTypes.SELECT,
model: GrouparooRecord_1.GrouparooRecord,
});
if (!partialRecords.length)
return [];
await GrouparooRecord_1.GrouparooRecord.update({ state: "ready" }, {
where: {
id: { [sequelize_1.Op.in]: partialRecords.map((p) => p.id) },
state: "pending",
},
});
const now = new Date();
await Import_1.Import.update({ state: "processing", importedAt: now }, {
where: {
recordId: partialRecords.map((r) => r.id),
state: ["importing", "processing"],
recordAssociatedAt: { [sequelize_1.Op.lt]: now },
},
});
await RecordOps.updateGroupMemberships(partialRecords);
await computeRecordsValidity(partialRecords);
return partialRecords;
}
RecordOps.makeReady = makeReady;
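/**
 * Build Exports for ready GrouparooRecords across all relevant Destinations
 * (including any destination requested explicitly via an import's _meta data),
 * then mark the processed Imports complete.
 */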
async function makeExports(recordIds, toExport, force = false) {
var _a, _b, _c, _d;
const records = await GrouparooRecord_1.GrouparooRecord.findAll({
where: { id: recordIds, state: "ready" },
include: [RecordProperty_1.RecordProperty],
});
const groupMembers = await GroupMember_1.GroupMember.findAll({
where: { recordId: recordIds },
include: [Group_1.Group],
});
const imports = await Import_1.Import.findAll({
where: { state: "processing", recordId: recordIds },
});
const destinationRecords = {};
const destinations = await destinationsCache_1.DestinationsCache.findAllWithCache();
for (const { id } of destinations)
destinationRecords[id] = [];
for (const record of records) {
const destinations = await destination_1.DestinationOps.relevantFor(record, groupMembers
.filter((gm) => gm.recordId === record.id)
.map((gm) => gm.group));
// check for explicit destinations to export to from each import
for (const _import of imports) {
if (_import.recordId === record.id &&
((_b = (_a = _import.data) === null || _a === void 0 ? void 0 : _a._meta) === null || _b === void 0 ? void 0 : _b.destinationId) &&
!destinations
.map((d) => d.id)
.includes((_d = (_c = _import.data) === null || _c === void 0 ? void 0 : _c._meta) === null || _d === void 0 ? void 0 : _d.destinationId)) {
const destination = await destinationsCache_1.DestinationsCache.findOneWithCache(_import.data._meta.destinationId);
if (destination)
destinations.push(destination);
}
}
for (const { id } of destinations)
destinationRecords[id].push(record);
}
if (toExport) {
for (const destination of destinations) {
await destination_1.DestinationOps.exportRecords(destination, destinationRecords[destination.id], false, force);
}
}
await Import_1.Import.update({ processedAt: new Date(), state: "complete" }, { where: { id: imports.map((i) => i.id) } });
return records;
}
RecordOps.makeExports = makeExports;
})(RecordOps = exports.RecordOps || (exports.RecordOps = {}));