UNPKG

@hotmeshio/hotmesh

Version:

Serverless Workflow

372 lines (367 loc) 15.7 kB
"use strict"; var _a; Object.defineProperty(exports, "__esModule", { value: true }); exports.ClientService = void 0; const enums_1 = require("../../modules/enums"); const utils_1 = require("../../modules/utils"); const hotmesh_1 = require("../hotmesh"); const key_1 = require("../../modules/key"); const types_1 = require("../../types"); const search_1 = require("./search"); const handle_1 = require("./handle"); const factory_1 = require("./schemas/factory"); /** * The MeshFlow `Client` service is functionally * equivalent to the Temporal `Client` service. * Start a new workflow execution by calling * `workflow.start`. Note the direct connection to * Postgres. * * NATS can be used as the message broker if advanced * messaging is required (i.e, patterned subscriptions). * @example * ```typescript * //client.ts * import { Client, HotMesh } from '@hotmeshio/hotmesh'; * import { Client as Postgres } from 'pg'; * async function run(): Promise<string> { * const client = new Client({ * connection: { * class: Postgres, * options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' } * } * }); * const handle = await client.workflow.start({ * args: ['HotMesh'], * taskQueue: 'default', * workflowName: 'example', * workflowId: HotMesh.guid() * }); * return await handle.result(); * //returns ['Hello HotMesh', '¡Hola, HotMesh!'] * } * ``` */ class ClientService { /** * @private */ constructor(config) { /** * @private */ this.getHotMeshClient = async (workflowTopic, namespace) => { //namespace isolation requires the connection options to be hashed //as multiple intersecting databases can be used by the same service //hashing options allows for reuse of the same connection without risk of //overwriting data in another namespace. const optionsHash = this.hashOptions(); const targetNS = namespace ?? factory_1.APP_ID; const connectionNS = `${optionsHash}.${targetNS}`; if (ClientService.instances.has(connectionNS)) { const hotMeshClient = await ClientService.instances.get(connectionNS); await this.verifyWorkflowActive(hotMeshClient, targetNS); return hotMeshClient; } //init, but don't await const readonly = this.connection.readonly ?? undefined; let hotMeshClient = hotmesh_1.HotMesh.init({ appId: targetNS, logLevel: enums_1.HMSH_LOGLEVEL, engine: { readonly, connection: this.connection, }, }); //synchronously cache the promise (before awaiting) ClientService.instances.set(connectionNS, hotMeshClient); //resolve, activate, and return the client const resolvedClient = await hotMeshClient; if (!readonly) { resolvedClient.engine.logger.info('meshflow-readonly-client', { guid: resolvedClient.engine.guid, appId: targetNS, }); await this.activateWorkflow(resolvedClient, targetNS); } return resolvedClient; }; /** * It is possible for a client to invoke a workflow without first * creating the stream. This method will verify that the stream * exists and if not, create it. * @private */ this.verifyStream = async (hotMeshClient, workflowTopic, namespace) => { const optionsHash = this.hashOptions(); const targetNS = namespace ?? factory_1.APP_ID; const targetTopic = `${optionsHash}.${targetNS}.${workflowTopic}`; if (!ClientService.topics.includes(targetTopic)) { ClientService.topics.push(targetTopic); await ClientService.createStream(hotMeshClient, workflowTopic, namespace); } }; /** * @private */ this.search = async (hotMeshClient, index, query) => { const searchClient = hotMeshClient.engine.search; return await searchClient.sendIndexedQuery(index, query); }; /** * The MeshFlow `Client` service is functionally * equivalent to the Temporal `Client` service. * Starting a workflow is the primary use case and * is accessed by calling workflow.start(). */ this.workflow = { /** * Starts a workflow, verifies the idempotent id, and * adds searchable data to the record. */ start: async (options) => { const taskQueueName = options.taskQueue ?? options.entity; const workflowName = options.entity ?? options.workflowName; const trc = options.workflowTrace; const spn = options.workflowSpan; //hotmesh `topic` is equivalent to `queue+workflowname` pattern in other systems const workflowTopic = `${taskQueueName}-${workflowName}`; const hotMeshClient = await this.getHotMeshClient(workflowTopic, options.namespace); //verify that the stream channel exists before enqueueing await this.verifyStream(hotMeshClient, workflowTopic, options.namespace); const payload = { arguments: [...options.args], originJobId: options.originJobId, expire: options.expire ?? enums_1.HMSH_EXPIRE_JOB_SECONDS, persistent: options.persistent, signalIn: options.signalIn, parentWorkflowId: options.parentWorkflowId, workflowId: options.workflowId || hotmesh_1.HotMesh.guid(), workflowTopic: workflowTopic, backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_MESHFLOW_EXP_BACKOFF, maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_MESHFLOW_MAX_ATTEMPTS, maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_MESHFLOW_MAX_INTERVAL), }; const context = { metadata: { trc, spn }, data: {} }; const jobId = await hotMeshClient.pub(`${options.namespace ?? factory_1.APP_ID}.execute`, payload, context, { search: options?.search?.data, marker: options?.marker, pending: options?.pending, }); return new handle_1.WorkflowHandleService(hotMeshClient, workflowTopic, jobId); }, /** * Sends a message payload to a running workflow that is paused and awaiting the signal */ signal: async (signalId, data, namespace) => { const topic = `${namespace ?? factory_1.APP_ID}.wfs.signal`; return await (await this.getHotMeshClient(topic, namespace)).hook(topic, { id: signalId, data }); }, /** * Spawns an a new, isolated execution cycle within the same job. * Similar to `worker` functions, `hook` functions have a linked * function. But hooks do not start a new job and instead read/write * their isolated activity data to an existing Job record (HASH). * * This example spawns a hook that will update workflow `guid123`. * * @example * ```typescript * await client.workflow.hook({ * namespace: 'demo', * taskQueue: 'default', * workflowName: 'myDemoFunction', * workflowId: 'guid123', * args: ['Hello'], * }); * ``` */ hook: async (options) => { const workflowTopic = `${options.taskQueue ?? options.entity}-${options.entity ?? options.workflowName}`; const payload = { arguments: [...options.args], id: options.workflowId, workflowTopic, backoffCoefficient: options.config?.backoffCoefficient || enums_1.HMSH_MESHFLOW_EXP_BACKOFF, maximumAttempts: options.config?.maximumAttempts || enums_1.HMSH_MESHFLOW_MAX_ATTEMPTS, maximumInterval: (0, utils_1.s)(options.config?.maximumInterval || enums_1.HMSH_MESHFLOW_MAX_INTERVAL), }; //seed search data before entering const hotMeshClient = await this.getHotMeshClient(workflowTopic, options.namespace); const msgId = await hotMeshClient.hook(`${hotMeshClient.appId}.flow.signal`, payload, types_1.StreamStatus.PENDING, 202); //todo: commit search data BEFORE enqueuing hook if (options.search?.data) { const searchSessionId = `-search-${hotmesh_1.HotMesh.guid()}-0`; const search = new search_1.Search(options.workflowId, hotMeshClient, searchSessionId); const entries = Object.entries(options.search.data).flat(); await search.set(...entries); } return msgId; }, /** * Returns a reference to a running workflow, * allowing callers to check the status of the workflow, * interrupt it, and even await its eventual response * if still in a pending state. * * @example * ```typescript * const handle = await client.workflow.getHandle( * 'default', * 'myFunction', * 'someGuid123', * 'demo', * ); * ``` */ getHandle: async (taskQueue, workflowName, workflowId, namespace) => { const workflowTopic = `${taskQueue}-${workflowName}`; const hotMeshClient = await this.getHotMeshClient(workflowTopic, namespace); return new handle_1.WorkflowHandleService(hotMeshClient, workflowTopic, workflowId); }, /** * Provides direct access to the SEARCH backend when making * queries. Taskqueues and workflow names are * used to identify the point of presence to use. `...args` is * the tokenized query. When querying Redis/FTSEARCH, the trailing * ...args might be `'@_custom1:meshflow'`. For postgres, * the trailing ...args would be: `'_custom', 'meshflow'`. In each case, * the query looks for all job data where the field `_custom` is * equal to `meshflow`. * * @example * ```typescript * await client.workflow.search( * 'someTaskQueue' * 'someWorkflowName', * 'meshflow', * 'user', * ...args, * ); * //returns [count, [id, fields[]], [id, fields[]], [id, fields[]], ...]] * ``` */ search: async (taskQueue, workflowName, namespace, index, ...query) => { const workflowTopic = `${taskQueue}-${workflowName}`; const hotMeshClient = await this.getHotMeshClient(workflowTopic, namespace); try { return await this.search(hotMeshClient, index, query); } catch (error) { hotMeshClient.engine.logger.error('meshflow-client-search-err', { error, }); throw error; } }, }; this.connection = config.connection; } hashOptions() { if ('options' in this.connection) { //shorthand format return (0, utils_1.hashOptions)(this.connection.options); } else { //longhand format (sub, store, stream, pub, search) const response = []; for (const p in this.connection) { if (this.connection[p].options) { response.push((0, utils_1.hashOptions)(this.connection[p].options)); } } return response.join(''); } } /** * Any router can be used to deploy and activate the HotMesh * distributed executable to the active quorum EXCEPT for * those routers in `readonly` mode. */ async deployAndActivate(namespace = factory_1.APP_ID, version = factory_1.APP_VERSION) { if (isNaN(Number(version))) { throw new Error('Invalid version number'); } const hotMesh = await this.getHotMeshClient('', namespace); await this.activateWorkflow(hotMesh, namespace, version); } /** * @private */ async verifyWorkflowActive(hotMesh, appId = factory_1.APP_ID, count = 0) { const app = await hotMesh.engine.store.getApp(appId); const appVersion = app?.version; if (isNaN(appVersion)) { if (count > 10) { throw new Error('Workflow failed to activate'); } await (0, utils_1.sleepFor)(enums_1.HMSH_QUORUM_DELAY_MS * 2); return await this.verifyWorkflowActive(hotMesh, appId, count + 1); } return true; } /** * @private */ async activateWorkflow(hotMesh, appId = factory_1.APP_ID, version = factory_1.APP_VERSION) { const app = await hotMesh.engine.store.getApp(appId); const appVersion = app?.version; if (appVersion === version && !app.active) { try { await hotMesh.activate(version); } catch (error) { hotMesh.engine.logger.error('meshflow-client-activate-err', { error, }); throw error; } } else if (isNaN(Number(appVersion)) || appVersion < version) { try { await hotMesh.deploy((0, factory_1.getWorkflowYAML)(appId, version)); await hotMesh.activate(version); } catch (error) { hotMesh.engine.logger.error('meshflow-client-deploy-activate-err', { error, }); throw error; } } } /** * @private */ static async shutdown() { for (const [_, hotMeshInstance] of ClientService.instances) { (await hotMeshInstance).stop(); } } } _a = ClientService; /** * @private */ ClientService.topics = []; /** * @private */ ClientService.instances = new Map(); /** * Creates a stream where messages can be published to ensure there is a * channel in place when the message arrives (a race condition for those * platforms without implicit topic setup). * @private */ ClientService.createStream = async (hotMeshClient, workflowTopic, namespace) => { const params = { appId: namespace ?? factory_1.APP_ID, topic: workflowTopic }; const streamKey = hotMeshClient.engine.store.mintKey(key_1.KeyType.STREAMS, params); try { await hotMeshClient.engine.stream.createConsumerGroup(streamKey, 'WORKER'); } catch (err) { //ignore if already exists } }; exports.ClientService = ClientService;