// @grouparoo/core: ImportOps module (compiled JavaScript, 105 lines, 4.95 kB)
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.ImportOps = void 0; const actionhero_1 = require("actionhero"); const Import_1 = require("../../models/Import"); const Run_1 = require("../../models/Run"); const record_1 = require("./record"); const sequelize_1 = require("sequelize"); const Schedule_1 = require("../../models/Schedule"); const sourcesCache_1 = require("../../modules/caches/sourcesCache"); const arrayUtils_1 = require("../arrayUtils"); var ImportOps; (function (ImportOps) { const defaultImportProcessingDelay = 1000 * 60 * 5; function retryStartedAt(delayMs = defaultImportProcessingDelay) { return new Date(new Date().getTime() - delayMs + actionhero_1.config.tasks.timeout * 2); } ImportOps.retryStartedAt = retryStartedAt; async function processPendingImportsForAssociation(limit = 100, delayMs = defaultImportProcessingDelay) { var _a; if (!delayMs || delayMs < defaultImportProcessingDelay) { delayMs = defaultImportProcessingDelay; } const imports = await Import_1.Import.findAll({ where: { state: "associating", startedAt: { [sequelize_1.Op.or]: [null, { [sequelize_1.Op.lt]: new Date().getTime() - delayMs }], }, }, limit, }); if (imports.length === 0) return []; await Import_1.Import.update({ startedAt: new Date() }, { where: { id: imports.map((i) => i.id) } }); const runIds = imports .filter((i) => i.creatorType === "run") .map((i) => i.creatorId) .filter(arrayUtils_1.uniqueArrayValues); const sources = await sourcesCache_1.SourcesCache.findAllWithCache(); const runs = await Run_1.Run.findAll({ where: { id: runIds } }); const schedules = await Schedule_1.Schedule.findAll({ where: { id: runs .filter((r) => r.creatorType === "schedule") .map((r) => r.creatorId), }, }); const bulkImportUpdates = []; const importCollections = {}; for (const _import of imports) { let source; if (runIds.includes(_import.creatorId)) { const run = runs.find((r) => r.id === _import.creatorId); const schedule = schedules.find((s) => s.id === run.creatorId); source = sources.find((s) => s.id === schedule.sourceId); } const sourceId = (_a = source === null || source === void 0 ? void 0 : source.id) !== null && _a !== void 0 ? 
_a : "no-source"; if (!importCollections[sourceId]) importCollections[sourceId] = []; importCollections[sourceId].push(_import); } for (const [sourceId, collectionImports] of Object.entries(importCollections)) { const response = await record_1.RecordOps.findOrCreateByUniqueRecordProperties(collectionImports.map((i) => i.data), collectionImports.map((i) => i.id), sources.find((s) => s.id === sourceId), true); const now = new Date(); for (const { referenceId, isNew, record, error } of response) { const _import = collectionImports.find((i) => i.id === referenceId); if (error) { if (actionhero_1.env !== "test") (0, actionhero_1.log)(`[ASSOCIATE IMPORT ERROR] ${error}`, "alert"); await _import.setError(error, "processPendingImportsForAssociation"); } else { bulkImportUpdates.push({ id: _import.id, state: "importing", creatorType: _import.creatorType, creatorId: _import.creatorId, createdRecord: isNew, recordId: record.id, recordAssociatedAt: now, startedAt: _import.startedAt, createdAt: _import.createdAt, updatedAt: now, }); } } } await record_1.RecordOps.markPendingByIds(bulkImportUpdates.map((i) => i.recordId)); while (bulkImportUpdates.length > 0) { await Import_1.Import.bulkCreate(bulkImportUpdates.splice(0, actionhero_1.config.batchSize.internalWrite), { updateOnDuplicate: [ "state", "createdRecord", "recordId", "recordAssociatedAt", "updatedAt", ], }); } return imports; } ImportOps.processPendingImportsForAssociation = processPendingImportsForAssociation; })(ImportOps = exports.ImportOps || (exports.ImportOps = {}));