@hotmeshio/hotmesh

Serverless Workflow
"use strict"; var _a; Object.defineProperty(exports, "__esModule", { value: true }); exports.WorkerService = void 0; const enums_1 = require("../../modules/enums"); const errors_1 = require("../../modules/errors"); const storage_1 = require("../../modules/storage"); const utils_1 = require("../../modules/utils"); const hotmesh_1 = require("../hotmesh"); const stream_1 = require("../../types/stream"); const search_1 = require("./search"); const factory_1 = require("./schemas/factory"); /** * The *Worker* service Registers worker functions and connects them to the mesh, * using the target backend provider/s (Redis, Postgres, NATS, etc). * * @example * ```typescript * import { MeshFlow } from '@hotmeshio/hotmesh'; * import { Client as Postgres } from 'pg'; * import * as workflows from './workflows'; * * async function run() { * const worker = await MeshFlow.Worker.create({ * connection: { * class: Postgres, * options: { connectionString: 'postgres://user:password@localhost:5432/db' } * }, * taskQueue: 'default', * workflow: workflows.example, * }); * * await worker.run(); * } * ``` */ class WorkerService { static hashOptions(connection) { if ('options' in connection) { //shorthand format return (0, utils_1.hashOptions)(connection.options); } else { //longhand format (sub, store, stream, pub, search) const response = []; for (const p in connection) { if (connection[p].options) { response.push((0, utils_1.hashOptions)(connection[p].options)); } } return response.join(''); } } /** * @private */ constructor() { } /** * @private */ static async activateWorkflow(hotMesh) { const app = await hotMesh.engine.store.getApp(hotMesh.engine.appId); const appVersion = app?.version; if (!appVersion) { try { await hotMesh.deploy((0, factory_1.getWorkflowYAML)(hotMesh.engine.appId, factory_1.APP_VERSION)); await hotMesh.activate(factory_1.APP_VERSION); } catch (err) { hotMesh.engine.logger.error('meshflow-worker-deploy-activate-err', err); throw err; } } else if (app && !app.active) { try { await hotMesh.activate(factory_1.APP_VERSION); } catch (err) { hotMesh.engine.logger.error('meshflow-worker-activate-err', err); throw err; } } } /** * @private */ static registerActivities(activities) { if (typeof activities === 'function' && typeof WorkerService.activityRegistry[activities.name] !== 'function') { WorkerService.activityRegistry[activities.name] = activities; } else { Object.keys(activities).forEach((key) => { if (activities[key].name && typeof WorkerService.activityRegistry[activities[key].name] !== 'function') { WorkerService.activityRegistry[activities[key].name] = activities[key]; } else if (typeof activities[key] === 'function') { WorkerService.activityRegistry[key] = activities[key]; } }); } return WorkerService.activityRegistry; } /** * Connects a worker to the mesh. 
    /**
     * Connects a worker to the mesh.
     *
     * @example
     * ```typescript
     * import { MeshFlow } from '@hotmeshio/hotmesh';
     * import { Client as Postgres } from 'pg';
     * import * as workflows from './workflows';
     *
     * async function run() {
     *   const worker = await MeshFlow.Worker.create({
     *     connection: {
     *       class: Postgres,
     *       options: {
     *         connectionString: 'postgres://user:password@localhost:5432/db'
     *       },
     *     },
     *     taskQueue: 'default',
     *     workflow: workflows.example,
     *   });
     *
     *   await worker.run();
     * }
     * ```
     */
    static async create(config) {
        const workflow = config.workflow;
        const [workflowFunctionName, workflowFunction] = WorkerService.resolveWorkflowTarget(workflow);
        const baseTopic = `${config.taskQueue}-${workflowFunctionName}`;
        const activityTopic = `${baseTopic}-activity`;
        const workflowTopic = `${baseTopic}`;
        //initialize supporting workflows
        const worker = new WorkerService();
        worker.activityRunner = await worker.initActivityWorker(config, activityTopic);
        worker.workflowRunner = await worker.initWorkflowWorker(config, workflowTopic, workflowFunction);
        search_1.Search.configureSearchIndex(worker.workflowRunner, config.search);
        await WorkerService.activateWorkflow(worker.workflowRunner);
        return worker;
    }
    /**
     * @private
     */
    static resolveWorkflowTarget(workflow, name) {
        let workflowFunction;
        if (typeof workflow === 'function') {
            workflowFunction = workflow;
            return [workflowFunction.name ?? name, workflowFunction];
        }
        else {
            const workflowFunctionNames = Object.keys(workflow);
            const lastFunctionName = workflowFunctionNames[workflowFunctionNames.length - 1];
            workflowFunction = workflow[lastFunctionName];
            return WorkerService.resolveWorkflowTarget(workflowFunction, lastFunctionName);
        }
    }
    /**
     * Runs the connected worker; a no-op, as processing begins when the worker
     * is created (calling this is unnecessary).
     */
    async run() {
        this.workflowRunner.engine.logger.info('meshflow-worker-running');
    }
    /**
     * @private
     */
    async initActivityWorker(config, activityTopic) {
        const providerConfig = config.connection;
        const targetNamespace = config?.namespace ?? factory_1.APP_ID;
        const optionsHash = WorkerService.hashOptions(config?.connection);
        const targetTopic = `${optionsHash}.${targetNamespace}.${activityTopic}`;
        const hotMeshWorker = await hotmesh_1.HotMesh.init({
            guid: config.guid ? `${config.guid}XA` : undefined,
            logLevel: config.options?.logLevel ?? enums_1.HMSH_LOGLEVEL,
            appId: targetNamespace,
            engine: { connection: providerConfig },
            workers: [
                {
                    topic: activityTopic,
                    connection: providerConfig,
                    callback: this.wrapActivityFunctions().bind(this),
                },
            ],
        });
        WorkerService.instances.set(targetTopic, hotMeshWorker);
        return hotMeshWorker;
    }
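    // Illustrative note (the values are assumptions): for taskQueue 'default'
    // and a workflow function named 'example', `create` derives:
    //
    //   workflow topic: 'default-example'
    //   activity topic: 'default-example-activity'
    //
    // Each topic gets its own HotMesh instance, cached in `WorkerService.instances`
    // under the key `${optionsHash}.${namespace}.${topic}`.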
    /**
     * @private
     */
    wrapActivityFunctions() {
        return async (data) => {
            try {
                //always run the activity function when instructed; return the response
                const activityInput = data.data;
                const activityName = activityInput.activityName;
                const activityFunction = WorkerService.activityRegistry[activityName];
                const pojoResponse = await activityFunction.apply(this, activityInput.arguments);
                return {
                    status: stream_1.StreamStatus.SUCCESS,
                    metadata: { ...data.metadata },
                    data: { response: pojoResponse },
                };
            }
            catch (err) {
                this.activityRunner.engine.logger.error('meshflow-worker-activity-err', {
                    name: err.name,
                    message: err.message,
                    stack: err.stack,
                });
                if (!(err instanceof errors_1.MeshFlowTimeoutError) &&
                    !(err instanceof errors_1.MeshFlowMaxedError) &&
                    !(err instanceof errors_1.MeshFlowFatalError)) {
                    //use code 599 as a proxy for all retryable errors
                    // (basically anything not 596, 597, 598)
                    return {
                        status: stream_1.StreamStatus.SUCCESS,
                        code: enums_1.HMSH_CODE_MESHFLOW_RETRYABLE,
                        metadata: { ...data.metadata },
                        data: {
                            $error: {
                                message: err.message,
                                stack: err.stack,
                                timestamp: (0, utils_1.formatISODate)(new Date()),
                            },
                        },
                    };
                }
                return {
                    //always return success (the MeshFlow module is just fine);
                    // it's the user's function that has failed
                    status: stream_1.StreamStatus.SUCCESS,
                    code: err.code,
                    stack: err.stack,
                    metadata: { ...data.metadata },
                    data: {
                        $error: {
                            message: err.message,
                            stack: err.stack,
                            timestamp: (0, utils_1.formatISODate)(new Date()),
                            code: err.code,
                        },
                    },
                };
            }
        };
    }
    /**
     * @private
     */
    async initWorkflowWorker(config, workflowTopic, workflowFunction) {
        const providerConfig = config.connection;
        const targetNamespace = config?.namespace ?? factory_1.APP_ID;
        const optionsHash = WorkerService.hashOptions(config?.connection);
        const targetTopic = `${optionsHash}.${targetNamespace}.${workflowTopic}`;
        const hotMeshWorker = await hotmesh_1.HotMesh.init({
            guid: config.guid,
            logLevel: config.options?.logLevel ?? enums_1.HMSH_LOGLEVEL,
            appId: config.namespace ?? factory_1.APP_ID,
            engine: { connection: providerConfig },
            workers: [
                {
                    topic: workflowTopic,
                    connection: providerConfig,
                    callback: this.wrapWorkflowFunction(workflowFunction, workflowTopic, config).bind(this),
                },
            ],
        });
        WorkerService.instances.set(targetTopic, hotMeshWorker);
        return hotMeshWorker;
    }
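    // Note (restating the convention in `wrapActivityFunctions` above): activity
    // failures are always surfaced as stream SUCCESS so the mesh itself keeps
    // flowing. MeshFlow timeout/maxed/fatal errors keep their own codes
    // (596, 597, 598) and are not retried; every other error is mapped to
    // HMSH_CODE_MESHFLOW_RETRYABLE (599) so the engine retries it.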
    /**
     * @private
     */
    wrapWorkflowFunction(workflowFunction, workflowTopic, config) {
        return async (data) => {
            const counter = { counter: 0 };
            const interruptionRegistry = [];
            let isProcessing = false;
            try {
                //incoming data payload has arguments and workflowId
                const workflowInput = data.data;
                const context = new Map();
                context.set('canRetry', workflowInput.canRetry);
                context.set('expire', workflowInput.expire);
                context.set('counter', counter);
                context.set('interruptionRegistry', interruptionRegistry);
                context.set('connection', config.connection);
                context.set('namespace', config.namespace ?? factory_1.APP_ID);
                context.set('raw', data);
                context.set('workflowId', workflowInput.workflowId);
                if (workflowInput.originJobId) {
                    //if present, there is an origin job to which this job is subordinated;
                    // garbage collect (expire) this job when originJobId is expired
                    context.set('originJobId', workflowInput.originJobId);
                }
                //TODO: the query is provider-specific;
                // refactor as an abstract interface the provider must implement
                let replayQuery = '';
                if (workflowInput.workflowDimension) {
                    //every hook function runs in an isolated dimension controlled
                    //by the index assigned when the signal was received; even if the
                    //hook function re-runs, its scope will always remain constant
                    context.set('workflowDimension', workflowInput.workflowDimension);
                    replayQuery = `-*${workflowInput.workflowDimension}-*`;
                }
                else {
                    //match on the last letter of replayable operation names: 'hook',
                    // 'sleep', 'wait', 'signal', 'search', 'start', 'proxy', 'child',
                    // 'collator', 'trace', 'enrich', 'publish'
                    replayQuery = '-*[ehklptydr]-*';
                }
                context.set('workflowTopic', workflowTopic);
                context.set('workflowName', workflowTopic.split('-').pop());
                context.set('workflowTrace', data.metadata.trc);
                context.set('workflowSpan', data.metadata.spn);
                const store = this.workflowRunner.engine.store;
                const [cursor, replay] = await store.findJobFields(workflowInput.workflowId, replayQuery, 50000, 5000);
                context.set('replay', replay);
                context.set('cursor', cursor); // if != 0, more remain
                const workflowResponse = await storage_1.asyncLocalStorage.run(context, async () => {
                    return await workflowFunction.apply(this, workflowInput.arguments);
                });
                return {
                    code: 200,
                    status: stream_1.StreamStatus.SUCCESS,
                    metadata: { ...data.metadata },
                    data: { response: workflowResponse, done: true },
                };
            }
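            // Note (summarizing the branches below): workflow functions signal
            // interruptions (Promise.all/waitFor, sleep, proxyActivity, child)
            // by throwing typed MeshFlow*Error instances; each branch below
            // converts one into a SUCCESS stream message carrying an interruption
            // code and payload, so the engine can persist state and re-run the
            // function, replaying completed steps via the `replay` fields
            // fetched above.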
            catch (err) {
                if (isProcessing) {
                    return;
                }
                if (err instanceof errors_1.MeshFlowWaitForError ||
                    interruptionRegistry.length > 1) {
                    isProcessing = true;
                    //NOTE: this type is spawned when `Promise.all` is used OR if the
                    // interruption is a `waitFor`
                    const workflowInput = data.data;
                    const execIndex = counter.counter - interruptionRegistry.length + 1;
                    const {
                        workflowId,
                        workflowTopic,
                        workflowDimension,
                        originJobId,
                        expire,
                    } = workflowInput;
                    const collatorFlowId = `${(0, utils_1.guid)()}$C`;
                    return {
                        status: stream_1.StreamStatus.SUCCESS,
                        code: enums_1.HMSH_CODE_MESHFLOW_ALL,
                        metadata: { ...data.metadata },
                        data: {
                            code: enums_1.HMSH_CODE_MESHFLOW_ALL,
                            items: [...interruptionRegistry],
                            size: interruptionRegistry.length,
                            workflowDimension: workflowDimension || '',
                            index: execIndex,
                            originJobId: originJobId || workflowId,
                            parentWorkflowId: workflowId,
                            workflowId: collatorFlowId,
                            workflowTopic: workflowTopic,
                            expire,
                        },
                    };
                }
                else if (err instanceof errors_1.MeshFlowSleepError) {
                    //return the sleep interruption
                    isProcessing = true;
                    return {
                        status: stream_1.StreamStatus.SUCCESS,
                        code: err.code,
                        metadata: { ...data.metadata },
                        data: {
                            code: err.code,
                            message: JSON.stringify({
                                duration: err.duration,
                                index: err.index,
                                workflowDimension: err.workflowDimension,
                            }),
                            duration: err.duration,
                            index: err.index,
                            workflowDimension: err.workflowDimension,
                        },
                    };
                }
                else if (err instanceof errors_1.MeshFlowProxyError) {
                    //return the proxyActivity interruption
                    isProcessing = true;
                    return {
                        status: stream_1.StreamStatus.SUCCESS,
                        code: err.code,
                        metadata: { ...data.metadata },
                        data: {
                            code: err.code,
                            message: JSON.stringify({
                                message: err.message,
                                workflowId: err.workflowId,
                                activityName: err.activityName,
                                dimension: err.workflowDimension,
                            }),
                            arguments: err.arguments,
                            workflowDimension: err.workflowDimension,
                            index: err.index,
                            originJobId: err.originJobId,
                            parentWorkflowId: err.parentWorkflowId,
                            expire: err.expire,
                            workflowId: err.workflowId,
                            workflowTopic: err.workflowTopic,
                            activityName: err.activityName,
                            backoffCoefficient: err.backoffCoefficient,
                            maximumAttempts: err.maximumAttempts,
                            maximumInterval: err.maximumInterval,
                        },
                    };
                }
                else if (err instanceof errors_1.MeshFlowChildError) {
                    //return the child interruption
                    isProcessing = true;
                    const msg = {
                        message: err.message,
                        workflowId: err.workflowId,
                        dimension: err.workflowDimension,
                    };
                    return {
                        status: stream_1.StreamStatus.SUCCESS,
                        code: err.code,
                        metadata: { ...data.metadata },
                        data: {
                            arguments: err.arguments,
                            await: err.await,
                            backoffCoefficient: err.backoffCoefficient || enums_1.HMSH_MESHFLOW_EXP_BACKOFF,
                            code: err.code,
                            index: err.index,
                            message: JSON.stringify(msg),
                            maximumAttempts: err.maximumAttempts || enums_1.HMSH_MESHFLOW_MAX_ATTEMPTS,
                            maximumInterval: err.maximumInterval || (0, utils_1.s)(enums_1.HMSH_MESHFLOW_MAX_INTERVAL),
                            originJobId: err.originJobId,
                            parentWorkflowId: err.parentWorkflowId,
                            expire: err.expire,
                            persistent: err.persistent,
                            signalIn: err.signalIn,
                            workflowDimension: err.workflowDimension,
                            workflowId: err.workflowId,
                            workflowTopic: err.workflowTopic,
                        },
                    };
                }
                // ALL other errors are actual fatal errors (598, 597, 596)
                // OR will be retried (599)
                isProcessing = true;
                return {
                    status: stream_1.StreamStatus.SUCCESS,
                    code: err.code || new errors_1.MeshFlowRetryError(err.message).code,
                    metadata: { ...data.metadata },
                    data: {
                        $error: {
                            message: err.message,
                            type: err.name,
                            name: err.name,
                            stack: err.stack,
                            code: err.code || new errors_1.MeshFlowRetryError(err.message).code,
                        },
                    },
                };
            }
        };
    }
    /**
     * @private
     */
    static async shutdown() {
        for (const [_, hotMeshInstance] of WorkerService.instances) {
            (await hotMeshInstance).stop();
        }
    }
}
_a = WorkerService;
/**
 * @private
 */
WorkerService.activityRegistry = {}; //user's activities
/**
 * @private
 */
WorkerService.instances = new Map();
/**
 * @private
 */
WorkerService.getHotMesh = async (workflowTopic, config, options) => {
    const targetNamespace = config?.namespace ?? factory_1.APP_ID;
    const optionsHash = WorkerService.hashOptions(config?.connection);
    const targetTopic = `${optionsHash}.${targetNamespace}.${workflowTopic}`;
    if (WorkerService.instances.has(targetTopic)) {
        return await WorkerService.instances.get(targetTopic);
    }
    const hotMeshClient = hotmesh_1.HotMesh.init({
        logLevel: options?.logLevel ?? enums_1.HMSH_LOGLEVEL,
        appId: targetNamespace,
        engine: {
            connection: { ...config?.connection },
        },
    });
    WorkerService.instances.set(targetTopic, hotMeshClient);
    await WorkerService.activateWorkflow(await hotMeshClient);
    return hotMeshClient;
};
/**
 * @private
 */
WorkerService.Context = {
    info: () => {
        return {
            workflowId: '',
            workflowTopic: '',
        };
    },
};
exports.WorkerService = WorkerService;
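// Usage sketch (an assumption, not from this file: it presumes `shutdown` is
// reachable through the `MeshFlow.Worker` alias used in the class JSDoc).
// `shutdown` stops every HotMesh instance this process created, so a graceful
// exit can be wired as:
//
//   process.on('SIGTERM', async () => {
//     await MeshFlow.Worker.shutdown();
//     process.exit(0);
//   });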