// n8n
// Version:
// n8n Workflow Automation Tool
// 571 lines • 25.2 kB
// JavaScript
// TypeScript-emitted helper: applies `decorators` to a class (fewer than 3
// meaningful args) or to a member described by `target`/`key`/`desc`.
// Prefers Reflect.decorate when a polyfill provides it; otherwise replays
// the decorators manually in reverse (bottom-up) order, letting each
// decorator replace the class/descriptor by returning a value.
// Generated boilerplate — do not edit by hand.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: produces a metadata decorator via
// Reflect.metadata when the reflect-metadata polyfill is loaded;
// without the polyfill it returns undefined and metadata is skipped.
var __metadata = (this && this.__metadata) || function (k, v) {
    var canEmitMetadata = typeof Reflect === "object" && typeof Reflect.metadata === "function";
    if (canEmitMetadata) {
        return Reflect.metadata(k, v);
    }
    return undefined;
};
// TypeScript-emitted helper: passes genuine ES modules through untouched
// and wraps plain CommonJS exports so they can be consumed via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExecutionRepository = void 0;
const typedi_1 = require("typedi");
const typeorm_1 = require("@n8n/typeorm");
const DateUtils_1 = require("@n8n/typeorm/util/DateUtils");
const flatted_1 = require("flatted");
const config_1 = require("@n8n/config");
const n8n_workflow_1 = require("n8n-workflow");
const n8n_core_1 = require("n8n-core");
const n8n_workflow_2 = require("n8n-workflow");
const config_2 = __importDefault(require("../../config"));
const ExecutionEntity_1 = require("../entities/ExecutionEntity");
const ExecutionMetadata_1 = require("../entities/ExecutionMetadata");
const executionData_repository_1 = require("./executionData.repository");
const Logger_1 = require("../../Logger");
const postgres_live_rows_retrieval_error_1 = require("../../errors/postgres-live-rows-retrieval.error");
const utils_1 = require("../../utils");
/**
 * Translates an execution-list filter object into WHERE clauses on the
 * given TypeORM query builder. Mutates `qb` in place; a nullish `filters`
 * (or one with no recognized keys) is a no-op.
 */
function parseFiltersToQueryBuilder(qb, filters) {
    if (filters?.status) {
        // `status` is a list of execution statuses, e.g. ['success', 'error'].
        qb.andWhere('execution.status IN (:...workflowStatus)', {
            workflowStatus: filters.status,
        });
    }
    if (filters?.finished) {
        qb.andWhere({ finished: filters.finished });
    }
    if (filters?.metadata) {
        // Each metadata entry must match a row in the execution_metadata table.
        qb.leftJoin(ExecutionMetadata_1.ExecutionMetadata, 'md', 'md.executionId = execution.id');
        for (const md of filters.metadata) {
            qb.andWhere('md.key = :key AND md.value = :value', md);
        }
    }
    if (filters?.startedAfter) {
        const afterUtc = DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedAfter));
        qb.andWhere({ startedAt: (0, typeorm_1.MoreThanOrEqual)(afterUtc) });
    }
    if (filters?.startedBefore) {
        const beforeUtc = DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(new Date(filters.startedBefore));
        qb.andWhere({ startedAt: (0, typeorm_1.LessThanOrEqual)(beforeUtc) });
    }
    if (filters?.workflowId) {
        qb.andWhere({ workflowId: filters.workflowId });
    }
}
// Wraps a date in TypeORM's LessThanOrEqual operator, normalized to the
// UTC datetime-string format the database driver expects.
const lessThanOrEqual = (date) => (0, typeorm_1.LessThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(new Date(date)));
// Wraps a date in TypeORM's MoreThanOrEqual operator, normalized to the
// UTC datetime-string format the database driver expects.
const moreThanOrEqual = (date) => (0, typeorm_1.MoreThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(new Date(date)));
/**
 * TypeORM repository for `ExecutionEntity`. The heavy execution payload
 * (run data + workflow snapshot) lives in a separate table managed by
 * `ExecutionDataRepository`; run data is serialized with `flatted` to
 * survive circular references. (Compiled output of execution.repository.ts.)
 */
let ExecutionRepository = class ExecutionRepository extends typeorm_1.Repository {
    constructor(dataSource, globalConfig, logger, executionDataRepository, binaryDataService) {
        super(ExecutionEntity_1.ExecutionEntity, dataSource.manager);
        this.globalConfig = globalConfig;
        this.logger = logger;
        this.executionDataRepository = executionDataRepository;
        this.binaryDataService = binaryDataService;
        // Hard deletions are chunked so individual DELETE statements stay small.
        this.hardDeletionBatchSize = 100;
        // Columns selected for execution summaries (see toQueryBuilder).
        this.summaryFields = {
            id: true,
            workflowId: true,
            mode: true,
            retryOf: true,
            status: true,
            startedAt: true,
            stoppedAt: true,
        };
    }
    /**
     * Finds executions matching `queryParams`. With `options.includeData` the
     * executionData/metadata relations are joined (note: mutates
     * `queryParams.relations`); with `options.unflattenData` the flatted
     * payload is parsed back into objects. Executions whose executionData row
     * is missing are dropped and reported.
     */
    async findMultipleExecutions(queryParams, options) {
        if (options === null || options === void 0 ? void 0 : options.includeData) {
            if (!queryParams.relations) {
                queryParams.relations = [];
            }
            queryParams.relations.push('executionData', 'metadata');
        }
        const executions = await this.find(queryParams);
        if ((options === null || options === void 0 ? void 0 : options.includeData) && (options === null || options === void 0 ? void 0 : options.unflattenData)) {
            const [valid, invalid] = (0, utils_1.separate)(executions, (e) => e.executionData !== null);
            this.reportInvalidExecutions(invalid);
            return valid.map((execution) => {
                const { executionData, metadata, ...rest } = execution;
                return {
                    ...rest,
                    data: (0, flatted_1.parse)(executionData.data),
                    workflowData: executionData.workflowData,
                    customData: Object.fromEntries(metadata.map((m) => [m.key, m.value])),
                };
            });
        }
        else if (options === null || options === void 0 ? void 0 : options.includeData) {
            const [valid, invalid] = (0, utils_1.separate)(executions, (e) => e.executionData !== null);
            this.reportInvalidExecutions(invalid);
            return valid.map((execution) => {
                const { executionData, metadata, ...rest } = execution;
                return {
                    ...rest,
                    // Still flatted-serialized; caller opted out of unflattening.
                    data: execution.executionData.data,
                    workflowData: execution.executionData.workflowData,
                    customData: Object.fromEntries(metadata.map((m) => [m.key, m.value])),
                };
            });
        }
        return executions.map((execution) => {
            const { executionData, ...rest } = execution;
            return rest;
        });
    }
    // Reports (without throwing) executions that lack an executionData row.
    reportInvalidExecutions(executions) {
        if (executions.length === 0)
            return;
        n8n_workflow_2.ErrorReporterProxy.error(new n8n_workflow_1.ApplicationError('Found executions without executionData', {
            extra: { executionIds: executions.map(({ id }) => id) },
        }));
    }
    /**
     * Finds one execution by id (plus any extra `options.where` constraints).
     * Returns undefined when not found; same includeData/unflattenData
     * semantics as findMultipleExecutions.
     */
    async findSingleExecution(id, options) {
        const findOptions = {
            where: {
                id,
                ...options === null || options === void 0 ? void 0 : options.where,
            },
        };
        if (options === null || options === void 0 ? void 0 : options.includeData) {
            findOptions.relations = ['executionData', 'metadata'];
        }
        const execution = await this.findOne(findOptions);
        if (!execution) {
            return undefined;
        }
        const { executionData, metadata, ...rest } = execution;
        if ((options === null || options === void 0 ? void 0 : options.includeData) && (options === null || options === void 0 ? void 0 : options.unflattenData)) {
            return {
                ...rest,
                data: (0, flatted_1.parse)(execution.executionData.data),
                workflowData: execution.executionData.workflowData,
                customData: Object.fromEntries(metadata.map((m) => [m.key, m.value])),
            };
        }
        else if (options === null || options === void 0 ? void 0 : options.includeData) {
            return {
                ...rest,
                data: execution.executionData.data,
                workflowData: execution.executionData.workflowData,
                customData: Object.fromEntries(metadata.map((m) => [m.key, m.value])),
            };
        }
        return rest;
    }
    /**
     * Inserts a new execution row plus its executionData row (workflow
     * snapshot trimmed to the fields needed for re-runs, run data
     * flatted-serialized). Returns the new execution id as a string.
     */
    async createNewExecution(execution) {
        const { data, workflowData, ...rest } = execution;
        const { identifiers: inserted } = await this.insert(rest);
        const { id: executionId } = inserted[0];
        const { connections, nodes, name, settings } = workflowData !== null && workflowData !== void 0 ? workflowData : {};
        await this.executionDataRepository.insert({
            executionId,
            workflowData: { connections, nodes, name, settings, id: workflowData.id },
            data: (0, flatted_1.stringify)(data),
        });
        return String(executionId);
    }
    // Marks the given execution(s) as crashed and stamps stoppedAt.
    async markAsCrashed(executionIds) {
        if (!Array.isArray(executionIds))
            executionIds = [executionIds];
        await this.update({ id: (0, typeorm_1.In)(executionIds) }, {
            status: 'crashed',
            stoppedAt: new Date(),
        });
        this.logger.info('Marked executions as `crashed`', { executionIds });
    }
    // Permanently deletes one execution row and its binary data.
    async hardDelete(ids) {
        return await Promise.all([
            this.delete(ids.executionId),
            this.binaryDataService.deleteMany([ids]),
        ]);
    }
    async updateStatus(executionId, status) {
        await this.update({ id: executionId }, { status });
    }
    async resetStartedAt(executionId) {
        await this.update({ id: executionId }, { startedAt: new Date() });
    }
    /**
     * Updates an existing execution: scalar fields on the execution row,
     * and data/workflowData on the executionData row. `id`, `workflowId`,
     * `startedAt` and `customData` are deliberately excluded from the update.
     */
    async updateExistingExecution(executionId, execution) {
        const { id, data, workflowId, workflowData, startedAt, customData, ...executionInformation } = execution;
        if (Object.keys(executionInformation).length > 0) {
            await this.update({ id: executionId }, executionInformation);
        }
        if (data || workflowData) {
            const executionData = {};
            if (workflowData) {
                executionData.workflowData = workflowData;
            }
            if (data) {
                executionData.data = (0, flatted_1.stringify)(data);
            }
            await this.executionDataRepository.update({ executionId }, executionData);
        }
    }
    /**
     * Deletes executions matching either a cutoff date (`deleteBefore`, with
     * optional extra filters) or an explicit id list, restricted to
     * workflows the caller may access. Deletion happens in batches of
     * `hardDeletionBatchSize`, removing binary data alongside each batch.
     */
    async deleteExecutionsByFilter(filters, accessibleWorkflowIds, deleteConditions) {
        if (!(deleteConditions === null || deleteConditions === void 0 ? void 0 : deleteConditions.deleteBefore) && !(deleteConditions === null || deleteConditions === void 0 ? void 0 : deleteConditions.ids)) {
            throw new n8n_workflow_1.ApplicationError('Either "deleteBefore" or "ids" must be present in the request body');
        }
        const query = this.createQueryBuilder('execution')
            .select(['execution.id', 'execution.workflowId'])
            .andWhere('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds });
        if (deleteConditions.deleteBefore) {
            query.andWhere('execution.startedAt <= :deleteBefore', {
                deleteBefore: deleteConditions.deleteBefore,
            });
            parseFiltersToQueryBuilder(query, filters);
        }
        else if (deleteConditions.ids) {
            query.andWhere('execution.id IN (:...executionIds)', { executionIds: deleteConditions.ids });
        }
        const executions = await query.getMany();
        if (!executions.length) {
            if (deleteConditions.ids) {
                // Ids were requested but none were visible to this caller.
                this.logger.error('Failed to delete an execution due to insufficient permissions', {
                    executionIds: deleteConditions.ids,
                });
            }
            return;
        }
        const ids = executions.map(({ id, workflowId }) => ({
            executionId: id,
            workflowId,
        }));
        do {
            // splice() consumes `ids` batch by batch until empty.
            const batch = ids.splice(0, this.hardDeletionBatchSize);
            await Promise.all([
                this.delete(batch.map(({ executionId }) => executionId)),
                this.binaryDataService.deleteMany(batch),
            ]);
        } while (ids.length > 0);
    }
    // Returns the ids of all executions started at or after `date`.
    async getIdsSince(date) {
        return await this.find({
            select: ['id'],
            where: {
                startedAt: (0, typeorm_1.MoreThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(date)),
            },
        }).then((executions) => executions.map(({ id }) => id));
    }
    /**
     * Soft-deletes prunable executions: those stopped longer ago than
     * `pruneDataMaxAge` hours, or (when `pruneDataMaxCount` > 0) beyond the
     * newest N executions. Executions that are new/running/waiting, or
     * already soft-deleted, are never touched.
     */
    async softDeletePrunableExecutions() {
        const maxAge = config_2.default.getEnv('executions.pruneDataMaxAge');
        const maxCount = config_2.default.getEnv('executions.pruneDataMaxCount');
        const date = new Date();
        date.setHours(date.getHours() - maxAge);
        const toPrune = [
            { stoppedAt: (0, typeorm_1.LessThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(date)) },
        ];
        if (maxCount > 0) {
            // Find the id just past the newest `maxCount` executions; everything
            // at or below it exceeds the count limit.
            const executions = await this.find({
                select: ['id'],
                skip: maxCount,
                take: 1,
                order: { id: 'DESC' },
            });
            if (executions[0]) {
                toPrune.push({ id: (0, typeorm_1.LessThanOrEqual)(executions[0].id) });
            }
        }
        const [timeBasedWhere, countBasedWhere] = toPrune;
        return await this.createQueryBuilder()
            .update(ExecutionEntity_1.ExecutionEntity)
            .set({ deletedAt: new Date() })
            .where({
            deletedAt: (0, typeorm_1.IsNull)(),
            status: (0, typeorm_1.Not)((0, typeorm_1.In)(['new', 'running', 'waiting'])),
        })
            .andWhere(new typeorm_1.Brackets((qb) => countBasedWhere
            ? qb.where(timeBasedWhere).orWhere(countBasedWhere)
            : qb.where(timeBasedWhere)))
            .execute();
    }
    /**
     * Returns up to one batch of soft-deleted executions whose grace period
     * (`pruneDataHardDeleteBuffer` hours) has elapsed, as
     * { workflowId, executionId } pairs ready for hard deletion.
     */
    async hardDeleteSoftDeletedExecutions() {
        const date = new Date();
        date.setHours(date.getHours() - config_2.default.getEnv('executions.pruneDataHardDeleteBuffer'));
        const workflowIdsAndExecutionIds = (await this.find({
            select: ['workflowId', 'id'],
            where: {
                deletedAt: (0, typeorm_1.LessThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(date)),
            },
            take: this.hardDeletionBatchSize,
            withDeleted: true,
        })).map(({ id: executionId, workflowId }) => ({ workflowId, executionId }));
        return workflowIdsAndExecutionIds;
    }
    async deleteByIds(executionIds) {
        return await this.delete({ id: (0, typeorm_1.In)(executionIds) });
    }
    /**
     * Returns non-crashed executions whose waitTill is due within the next
     * 70 seconds. On sqlite the comparison value must be converted to the
     * UTC datetime string format stored in the DB.
     */
    async getWaitingExecutions() {
        const waitTill = new Date(Date.now() + 70000);
        const where = {
            waitTill: (0, typeorm_1.LessThanOrEqual)(waitTill),
            status: (0, typeorm_1.Not)('crashed'),
        };
        const dbType = this.globalConfig.database.type;
        if (dbType === 'sqlite') {
            where.waitTill = (0, typeorm_1.LessThanOrEqual)(DateUtils_1.DateUtils.mixedDateToUtcDatetimeString(waitTill));
        }
        return await this.findMultipleExecutions({
            select: ['id', 'waitTill'],
            where,
            order: {
                waitTill: 'ASC',
            },
        });
    }
    // Public-API execution count with optional cursor/status/workflow filters.
    async getExecutionsCountForPublicApi(data) {
        const executions = await this.count({
            where: {
                ...(data.lastId && { id: (0, typeorm_1.LessThan)(data.lastId) }),
                ...(data.status && { ...this.getStatusCondition(data.status) }),
                ...(data.workflowIds && { workflowId: (0, typeorm_1.In)(data.workflowIds) }),
                ...(data.excludedWorkflowIds && { workflowId: (0, typeorm_1.Not)((0, typeorm_1.In)(data.excludedWorkflowIds)) }),
            },
            take: data.limit,
        });
        return executions;
    }
    // Maps a public-API status filter to a DB condition; 'error' also
    // matches 'crashed'. Unknown statuses yield an empty condition.
    getStatusCondition(status) {
        const condition = {};
        if (status === 'success') {
            condition.status = 'success';
        }
        else if (status === 'waiting') {
            condition.status = 'waiting';
        }
        else if (status === 'error') {
            condition.status = (0, typeorm_1.In)(['error', 'crashed']);
        }
        return condition;
    }
    /**
     * Public-API execution listing with cursor pagination (`lastId`),
     * exclusion list, status and workflow filters. Always unflattens run
     * data when `includeData` is set.
     */
    async getExecutionsForPublicApi(params) {
        var _a, _b;
        let where = {};
        if (params.lastId && ((_a = params.excludedExecutionsIds) === null || _a === void 0 ? void 0 : _a.length)) {
            // Raw condition combines the cursor and the exclusion list in one clause.
            where.id = (0, typeorm_1.Raw)((id) => `${id} < :lastId AND ${id} NOT IN (:...excludedExecutionsIds)`, {
                lastId: params.lastId,
                excludedExecutionsIds: params.excludedExecutionsIds,
            });
        }
        else if (params.lastId) {
            where.id = (0, typeorm_1.LessThan)(params.lastId);
        }
        else if ((_b = params.excludedExecutionsIds) === null || _b === void 0 ? void 0 : _b.length) {
            where.id = (0, typeorm_1.Not)((0, typeorm_1.In)(params.excludedExecutionsIds));
        }
        if (params.status) {
            where = { ...where, ...this.getStatusCondition(params.status) };
        }
        if (params.workflowIds) {
            where = { ...where, workflowId: (0, typeorm_1.In)(params.workflowIds) };
        }
        return await this.findMultipleExecutions({
            select: [
                'id',
                'mode',
                'retryOf',
                'retrySuccessId',
                'startedAt',
                'stoppedAt',
                'workflowId',
                'waitTill',
                'finished',
            ],
            where,
            order: { id: 'DESC' },
            take: params.limit,
            relations: ['executionData'],
        }, {
            includeData: params.includeData,
            unflattenData: true,
        });
    }
    async getExecutionInWorkflowsForPublicApi(id, workflowIds, includeData) {
        return await this.findSingleExecution(id, {
            where: {
                workflowId: (0, typeorm_1.In)(workflowIds),
            },
            includeData,
            unflattenData: true,
        });
    }
    async findWithUnflattenedData(executionId, accessibleWorkflowIds) {
        return await this.findSingleExecution(executionId, {
            where: {
                workflowId: (0, typeorm_1.In)(accessibleWorkflowIds),
            },
            includeData: true,
            unflattenData: true,
        });
    }
    async findIfShared(executionId, sharedWorkflowIds) {
        return await this.findSingleExecution(executionId, {
            where: {
                workflowId: (0, typeorm_1.In)(sharedWorkflowIds),
            },
            includeData: true,
            unflattenData: false,
        });
    }
    async findIfAccessible(executionId, accessibleWorkflowIds) {
        return await this.findSingleExecution(executionId, {
            where: { workflowId: (0, typeorm_1.In)(accessibleWorkflowIds) },
        });
    }
    // Cancels an execution that has not started running yet.
    async stopBeforeRun(execution) {
        execution.status = 'canceled';
        execution.stoppedAt = new Date();
        await this.update({ id: execution.id }, { status: execution.status, stoppedAt: execution.stoppedAt });
        return execution;
    }
    /**
     * Cancels a running execution: records a cancellation error in its run
     * data, clears waitTill, and persists the updated state.
     */
    async stopDuringRun(execution) {
        var _a;
        const error = new n8n_workflow_2.ExecutionCancelledError(execution.id);
        // Initialize run data if the execution has none yet.
        (_a = execution.data) !== null && _a !== void 0 ? _a : (execution.data = { resultData: { runData: {} } });
        execution.data.resultData.error = {
            ...error,
            message: error.message,
            stack: error.stack,
        };
        execution.stoppedAt = new Date();
        execution.waitTill = null;
        execution.status = 'canceled';
        await this.updateExistingExecution(execution.id, execution);
        return execution;
    }
    async cancelMany(executionIds) {
        await this.update({ id: (0, typeorm_1.In)(executionIds) }, { status: 'canceled', stoppedAt: new Date() });
    }
    // Runs a range query (see toQueryBuilder) and normalizes each raw row
    // into a summary shape.
    async findManyByRangeQuery(query) {
        var _a;
        if (((_a = query === null || query === void 0 ? void 0 : query.accessibleWorkflowIds) === null || _a === void 0 ? void 0 : _a.length) === 0) {
            throw new n8n_workflow_1.ApplicationError('Expected accessible workflow IDs');
        }
        const executions = await this.toQueryBuilder(query).getRawMany();
        return executions.map((execution) => this.toSummary(execution));
    }
    /**
     * Normalizes a raw DB row in place: stringifies the id and converts
     * startedAt/waitTill/stoppedAt to ISO-8601 strings. Raw rows may hold
     * Dates or driver-specific 'YYYY-MM-DD HH:MM:SS' strings.
     */
    toSummary(execution) {
        execution.id = execution.id.toString();
        const normalizeDateString = (date) => {
            if (date.includes(' '))
                return date.replace(' ', 'T') + 'Z';
            return date;
        };
        if (execution.startedAt) {
            execution.startedAt =
                execution.startedAt instanceof Date
                    ? execution.startedAt.toISOString()
                    : normalizeDateString(execution.startedAt);
        }
        if (execution.waitTill) {
            execution.waitTill =
                execution.waitTill instanceof Date
                    ? execution.waitTill.toISOString()
                    : normalizeDateString(execution.waitTill);
        }
        if (execution.stoppedAt) {
            execution.stoppedAt =
                execution.stoppedAt instanceof Date
                    ? execution.stoppedAt.toISOString()
                    : normalizeDateString(execution.stoppedAt);
        }
        return execution;
    }
    async fetchCount(query) {
        return await this.toQueryBuilder(query).getCount();
    }
    /**
     * Approximate live-row count of the executions table, read from Postgres
     * statistics (pg_stat_all_tables.n_live_tup). Returns -1 on any failure.
     */
    async getLiveExecutionRowsOnPostgres() {
        const tableName = `${this.globalConfig.database.tablePrefix}execution_entity`;
        // Parameterized query: avoids interpolating the config-sourced table
        // prefix directly into SQL.
        const pgSql = 'SELECT n_live_tup as result FROM pg_stat_all_tables WHERE relname = $1;';
        try {
            const rows = (await this.query(pgSql, [tableName]));
            if (rows.length !== 1)
                throw new postgres_live_rows_retrieval_error_1.PostgresLiveRowsRetrievalError(rows);
            const [row] = rows;
            return parseInt(row.result, 10);
        }
        catch (error) {
            if (error instanceof Error)
                this.logger.error(error.message, { error });
            return -1;
        }
    }
    /**
     * Builds the summary-listing query: summary fields plus waitTill,
     * retrySuccessId and the workflow name, restricted to accessible
     * workflows, with optional range pagination, ordering, and filters.
     */
    toQueryBuilder(query) {
        var _a, _b;
        const { accessibleWorkflowIds, status, finished, workflowId, startedBefore, startedAfter, metadata, } = query;
        const fields = Object.keys(this.summaryFields)
            .concat(['waitTill', 'retrySuccessId'])
            .map((key) => `execution.${key} AS "${key}"`)
            .concat('workflow.name AS "workflowName"');
        const qb = this.createQueryBuilder('execution')
            .select(fields)
            .innerJoin('execution.workflow', 'workflow')
            .where('execution.workflowId IN (:...accessibleWorkflowIds)', { accessibleWorkflowIds });
        if (query.kind === 'range') {
            const { limit, firstId, lastId } = query.range;
            qb.limit(limit);
            if (firstId)
                qb.andWhere('execution.id > :firstId', { firstId });
            if (lastId)
                qb.andWhere('execution.id < :lastId', { lastId });
            if (((_a = query.order) === null || _a === void 0 ? void 0 : _a.stoppedAt) === 'DESC') {
                qb.orderBy({ 'execution.stoppedAt': 'DESC' });
            }
            else if ((_b = query.order) === null || _b === void 0 ? void 0 : _b.top) {
                // NOTE(review): `order.top` is interpolated into the ORDER BY
                // expression — presumably always an internal status enum value,
                // never user-controlled; confirm against callers.
                qb.orderBy(`(CASE WHEN execution.status = '${query.order.top}' THEN 0 ELSE 1 END)`);
            }
            else {
                qb.orderBy({ 'execution.id': 'DESC' });
            }
        }
        if (status)
            qb.andWhere('execution.status IN (:...status)', { status });
        if (finished)
            qb.andWhere({ finished });
        if (workflowId)
            qb.andWhere({ workflowId });
        if (startedBefore)
            qb.andWhere({ startedAt: lessThanOrEqual(startedBefore) });
        if (startedAfter)
            qb.andWhere({ startedAt: moreThanOrEqual(startedAfter) });
        if ((metadata === null || metadata === void 0 ? void 0 : metadata.length) === 1) {
            const [{ key, value }] = metadata;
            qb.innerJoin(ExecutionMetadata_1.ExecutionMetadata, 'md', 'md.executionId = execution.id AND md.key = :key AND md.value = :value');
            qb.setParameter('key', key);
            qb.setParameter('value', value);
        }
        return qb;
    }
    async getAllIds() {
        const executions = await this.find({ select: ['id'], order: { id: 'ASC' } });
        return executions.map(({ id }) => id);
    }
    // Returns ids of up to `batchSize` of the most recently started
    // executions that are still new or running.
    async getInProgressExecutionIds(batchSize) {
        const executions = await this.find({
            select: ['id'],
            where: { status: (0, typeorm_1.In)(['new', 'running']) },
            order: { startedAt: 'DESC' },
            take: batchSize,
        });
        return executions.map(({ id }) => id);
    }
};
exports.ExecutionRepository = ExecutionRepository;
// Compiled decorator wiring: registers the repository with typedi's DI
// container via @Service() and records the constructor parameter types
// (design:paramtypes) so the container can resolve them for injection.
exports.ExecutionRepository = ExecutionRepository = __decorate([
    (0, typedi_1.Service)(),
    __metadata("design:paramtypes", [typeorm_1.DataSource,
        config_1.GlobalConfig,
        Logger_1.Logger,
        executionData_repository_1.ExecutionDataRepository,
        n8n_core_1.BinaryDataService])
], ExecutionRepository);
//# sourceMappingURL=execution.repository.js.map
;