n8n (n8n Workflow Automation Tool)
1630419189837-UpdateWorkflowCredentials.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.UpdateWorkflowCredentials1630419189837 = void 0; const config = require("../../../../config"); const MigrationHelpers_1 = require("../../MigrationHelpers"); class UpdateWorkflowCredentials1630419189837 { constructor() { this.name = 'UpdateWorkflowCredentials1630419189837'; } async up(queryRunner) { console.log('Start migration', this.name); console.time(this.name); let tablePrefix = config.get('database.tablePrefix'); const schema = config.get('database.postgresdb.schema'); if (schema) { tablePrefix = schema + '.' + tablePrefix; } const helpers = new MigrationHelpers_1.MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity `; await helpers.runChunked(workflowsQuery, (workflows) => { workflows.forEach(async (workflow) => { const nodes = workflow.nodes; let credentialsUpdated = false; nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, name] of allNodeCredentials) { if (typeof name === 'string') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.name === name && credentials.type === type); node.credentials[type] = { id: (matchingCredentials === null || matchingCredentials === void 0 ? void 0 : matchingCredentials.id.toString()) || null, name }; credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}workflow_entity SET nodes = :nodes WHERE id = '${workflow.id}' `, { nodes: JSON.stringify(nodes) }, {}); queryRunner.query(updateQuery, updateParams); } }); }); const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NOT NULL AND finished = FALSE `; await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { waitingExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; data.nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, name] of allNodeCredentials) { if (typeof name === 'string') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.name === name && credentials.type === type); node.credentials[type] = { id: (matchingCredentials === null || matchingCredentials === void 0 ? 
void 0 : matchingCredentials.id.toString()) || null, name }; credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}execution_entity SET "workflowData" = :data WHERE id = '${execution.id}' `, { data: JSON.stringify(data) }, {}); queryRunner.query(updateQuery, updateParams); } }); }); const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry' ORDER BY "startedAt" DESC LIMIT 200 `); retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; data.nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, name] of allNodeCredentials) { if (typeof name === 'string') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.name === name && credentials.type === type); node.credentials[type] = { id: (matchingCredentials === null || matchingCredentials === void 0 ? void 0 : matchingCredentials.id.toString()) || null, name }; credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}execution_entity SET "workflowData" = :data WHERE id = '${execution.id}' `, { data: JSON.stringify(data) }, {}); queryRunner.query(updateQuery, updateParams); } }); console.timeEnd(this.name); } async down(queryRunner) { let tablePrefix = config.get('database.tablePrefix'); const schema = config.get('database.postgresdb.schema'); if (schema) { tablePrefix = schema + '.' 
+ tablePrefix; } const helpers = new MigrationHelpers_1.MigrationHelpers(queryRunner); const credentialsEntities = await queryRunner.query(` SELECT id, name, type FROM ${tablePrefix}credentials_entity `); const workflowsQuery = ` SELECT id, nodes FROM ${tablePrefix}workflow_entity `; await helpers.runChunked(workflowsQuery, (workflows) => { workflows.forEach(async (workflow) => { const nodes = workflow.nodes; let credentialsUpdated = false; nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, creds] of allNodeCredentials) { if (typeof creds === 'object') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.id === creds.id && credentials.type === type); if (matchingCredentials) { node.credentials[type] = matchingCredentials.name; } else { node.credentials[type] = creds.name; } credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}workflow_entity SET nodes = :nodes WHERE id = '${workflow.id}' `, { nodes: JSON.stringify(nodes) }, {}); queryRunner.query(updateQuery, updateParams); } }); }); const waitingExecutionsQuery = ` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NOT NULL AND finished = FALSE `; await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => { waitingExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; data.nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, creds] of allNodeCredentials) { if (typeof creds === 'object') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.id === creds.id && credentials.type === type); if (matchingCredentials) { node.credentials[type] = matchingCredentials.name; } else { node.credentials[type] = creds.name; } credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}execution_entity SET "workflowData" = :data WHERE id = '${execution.id}' `, { data: JSON.stringify(data) }, {}); queryRunner.query(updateQuery, updateParams); } }); }); const retryableExecutions = await queryRunner.query(` SELECT id, "workflowData" FROM ${tablePrefix}execution_entity WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry' ORDER BY "startedAt" DESC LIMIT 200 `); retryableExecutions.forEach(async (execution) => { const data = execution.workflowData; let credentialsUpdated = false; data.nodes.forEach((node) => { if (node.credentials) { const allNodeCredentials = Object.entries(node.credentials); for (const [type, creds] of allNodeCredentials) { if (typeof creds === 'object') { const matchingCredentials = credentialsEntities.find((credentials) => credentials.id === creds.id && credentials.type === type); if (matchingCredentials) { node.credentials[type] = matchingCredentials.name; } else { node.credentials[type] = creds.name; } credentialsUpdated = true; } } } }); if (credentialsUpdated) { const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(` UPDATE ${tablePrefix}execution_entity SET "workflowData" = :data WHERE id = '${execution.id}' `, { data: JSON.stringify(data) }, {}); queryRunner.query(updateQuery, updateParams); } }); } } 
exports.UpdateWorkflowCredentials1630419189837 = UpdateWorkflowCredentials1630419189837; //# sourceMappingURL=1630419189837-UpdateWorkflowCredentials.js.map
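For reference, a minimal sketch of the data shape this migration rewrites. The node and credential values below are hypothetical examples, not data taken from the package: up() replaces a credential reference stored as a plain name string with an { id, name } object resolved against credentials_entity (the id is stored as a string, or null when no match is found), and down() restores the name string.

// Hypothetical workflow node before the migration: credentials keyed by type, value is a name string.
const nodeBefore = {
    name: 'HTTP Request',
    type: 'n8n-nodes-base.httpRequest',
    credentials: {
        httpBasicAuth: 'My Basic Auth',
    },
};

// After up(), assuming credentials_entity contains a row { id: 42, name: 'My Basic Auth', type: 'httpBasicAuth' }:
const nodeAfter = {
    name: 'HTTP Request',
    type: 'n8n-nodes-base.httpRequest',
    credentials: {
        httpBasicAuth: { id: '42', name: 'My Basic Auth' },
    },
};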