"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemFlow = void 0;
const hotmesh_1 = require("../hotmesh");
const utils_1 = require("../../modules/utils");
const client_1 = require("./client");
const connection_1 = require("./connection");
const search_1 = require("./search");
const entity_1 = require("./entity");
const worker_1 = require("./worker");
const workflow_1 = require("./workflow");
const handle_1 = require("./handle");
const interruption_1 = require("./workflow/interruption");
const interceptor_1 = require("./interceptor");
/**
* The MemFlow service provides a Temporal-compatible workflow framework backed by
* Postgres. It offers durable execution, entity-based memory management,
* and composable workflows.
*
* ## Core Features
*
* ### 1. Entity-Based Memory Model
* Each workflow has a durable JSONB entity that serves as its memory:
* ```typescript
* export async function researchAgent(query: string) {
*   const agent = await MemFlow.workflow.entity();
*
*   // Initialize entity state
*   await agent.set({
*     query,
*     findings: [],
*     status: 'researching'
*   });
*
*   // Update state atomically
*   await agent.merge({ status: 'analyzing' });
*   await agent.append('findings', newFinding);
* }
* ```
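*
* A later workflow step (or a hook) can read the same entity back. This is a
* hedged sketch: it assumes the entity service exposes a `get()` accessor
* alongside the `set`, `merge`, and `append` methods shown above:
* ```typescript
* export async function summarizeFindings() {
*   const agent = await MemFlow.workflow.entity();
*   // Assumption: get() returns the current JSONB state as a plain object
*   const state = await agent.get();
*   return `Collected ${state.findings.length} findings for "${state.query}"`;
* }
* ```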
*
* ### 2. Hook Functions & Workflow Coordination
* Spawn hook functions to run parallel perspectives or phases, then coordinate them with signals:
* ```typescript
* // Launch parallel research perspectives
* await MemFlow.workflow.execHook({
*   taskQueue: 'research',
*   workflowName: 'optimisticView',
*   args: [query],
*   signalId: 'optimistic-complete'
* });
*
* await MemFlow.workflow.execHook({
*   taskQueue: 'research',
*   workflowName: 'skepticalView',
*   args: [query],
*   signalId: 'skeptical-complete'
* });
*
* // Wait for both perspectives
* await Promise.all([
*   MemFlow.workflow.waitFor('optimistic-complete'),
*   MemFlow.workflow.waitFor('skeptical-complete')
* ]);
* ```
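*
* The hooks themselves are plain exported functions served by a worker on the
* `research` task queue. A hedged sketch (the body is illustrative); it
* assumes hooks share the parent workflow's entity and that the `signalId`
* passed to `execHook` above is emitted when the hook finishes:
* ```typescript
* export async function optimisticView(query: string) {
*   const agent = await MemFlow.workflow.entity();
*   await agent.append('findings', {
*     perspective: 'optimistic',
*     summary: `Upside analysis for: ${query}`
*   });
* }
* ```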
*
* ### 3. Durable Activities & Proxies
* Define and execute durable activities with automatic retry:
* ```typescript
* // Default: activities use workflow's task queue
* const activities = MemFlow.workflow.proxyActivities<{
*   analyzeDocument: typeof analyzeDocument;
*   validateFindings: typeof validateFindings;
* }>({
*   activities: { analyzeDocument, validateFindings },
*   retryPolicy: {
*     maximumAttempts: 3,
*     backoffCoefficient: 2
*   }
* });
*
* // Activities are durable and automatically retried
* const analysis = await activities.analyzeDocument(data);
* const validation = await activities.validateFindings(analysis);
* ```
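*
* The proxied activities are ordinary async functions, typically kept in their
* own module so both the worker and the workflow's type parameters can import
* them. A minimal sketch (names and return shapes are illustrative, not part
* of the library API):
* ```typescript
* // activities.ts
* export async function analyzeDocument(data: string): Promise<{ score: number }> {
*   // Side effects (HTTP calls, DB writes) belong in activities, not in workflows
*   return { score: data.length % 100 };
* }
*
* export async function validateFindings(analysis: { score: number }): Promise<boolean> {
*   return analysis.score > 50;
* }
* ```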
*
* ### 4. Explicit Activity Registration
* Register activity workers explicitly before workflows start:
* ```typescript
* // Register shared activity pool for interceptors
* await MemFlow.registerActivityWorker({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'shared-activities'
* }, sharedActivities, 'shared-activities');
*
* // Register custom activity pool for specific use cases
* await MemFlow.registerActivityWorker({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'priority-activities'
* }, priorityActivities, 'priority-activities');
* ```
*
* ### 5. Workflow Composition
* Build complex workflows through composition:
* ```typescript
* // Start a child workflow and await its result
* const childResult = await MemFlow.workflow.execChild({
*   taskQueue: 'analysis',
*   workflowName: 'detailedAnalysis',
*   args: [data],
*   // Child workflow config
*   config: {
*     maximumAttempts: 5,
*     backoffCoefficient: 2
*   }
* });
*
* // Fire-and-forget child workflow
* await MemFlow.workflow.startChild({
*   taskQueue: 'notifications',
*   workflowName: 'sendUpdates',
*   args: [updates]
* });
* ```
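*
* The child workflow referenced above is just another exported workflow
* function registered by a worker on the `analysis` task queue. A minimal
* sketch (the `detailedAnalysis` body is illustrative; `Postgres` is the
* imported `pg` client as in the examples below):
* ```typescript
* // workflows.ts
* import { MemFlow } from '@hotmeshio/hotmesh';
*
* export async function detailedAnalysis(data: string) {
*   const entity = await MemFlow.workflow.entity();
*   await entity.set({ input: data, status: 'started' });
*   return { summary: `analyzed ${data.length} bytes` };
* }
*
* // worker bootstrap
* await MemFlow.Worker.create({
*   connection: { class: Postgres, options: { connectionString: '...' } },
*   taskQueue: 'analysis',
*   workflow: detailedAnalysis
* });
* ```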
*
* ### 6. Workflow Interceptors
* Add cross-cutting concerns through interceptors that run as durable functions:
* ```typescript
* // First register a shared activity worker for interceptors
* await MemFlow.registerActivityWorker({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'interceptor-activities'
* }, { auditLog }, 'interceptor-activities');
*
* // Add an audit interceptor that proxies activities with an explicit taskQueue
* MemFlow.registerInterceptor({
*   async execute(ctx, next) {
*     try {
*       // Interceptors use an explicit taskQueue to prevent per-workflow queues
*       const { auditLog } = MemFlow.workflow.proxyActivities<{
*         auditLog: (workflowId: string, status: string) => Promise<void>;
*       }>({
*         taskQueue: 'interceptor-activities', // Explicit shared queue
*         retryPolicy: { maximumAttempts: 3 }
*       });
*
*       await auditLog(ctx.get('workflowId'), 'started');
*
*       const result = await next();
*
*       await auditLog(ctx.get('workflowId'), 'completed');
*
*       return result;
*     } catch (err) {
*       // CRITICAL: Always check for HotMesh interruptions and rethrow them
*       // untouched so the replay system can resume the workflow
*       if (MemFlow.didInterrupt(err)) {
*         throw err;
*       }
*       // Handle or log true errors here before rethrowing
*       throw err;
*     }
*   }
* });
* ```
*
* ## Basic Usage Example
*
* ```typescript
* import { Client, Worker, MemFlow } from '@hotmeshio/hotmesh';
* import { Client as Postgres } from 'pg';
* import * as workflows from './workflows';
* import * as sharedActivities from './activities';
*
* // (Optional) Register shared activity workers for interceptors
* await MemFlow.registerActivityWorker({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'shared-activities'
* }, sharedActivities, 'shared-activities');
*
* // Initialize worker
* await Worker.create({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'default',
*   workflow: workflows.example
* });
*
* // Initialize client
* const client = new Client({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   }
* });
*
* // Start workflow
* const handle = await client.workflow.start({
*   args: ['input data'],
*   taskQueue: 'default',
*   workflowName: 'example',
*   workflowId: MemFlow.guid()
* });
*
* // Get result
* const result = await handle.result();
*
* // Cleanup
* await MemFlow.shutdown();
* ```
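*
* For completeness, `./workflows.ts` only needs to export the workflow
* function referenced above; a minimal sketch:
* ```typescript
* // workflows.ts
* import { MemFlow } from '@hotmeshio/hotmesh';
*
* export async function example(input: string) {
*   const entity = await MemFlow.workflow.entity();
*   await entity.set({ input, status: 'done' });
*   return `processed: ${input}`;
* }
* ```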
*/
class MemFlowClass {
    /**
     * @private
     */
    constructor() { }
    /**
     * Register a workflow interceptor
     * @param interceptor The interceptor to register
     */
    static registerInterceptor(interceptor) {
        MemFlowClass.interceptorService.register(interceptor);
    }
    /**
     * Clear all registered workflow interceptors
     */
    static clearInterceptors() {
        MemFlowClass.interceptorService.clear();
    }
    /**
     * Get the interceptor service instance
     * @internal
     */
    static getInterceptorService() {
        return MemFlowClass.interceptorService;
    }
    /**
     * Shuts down everything. All connections, workers, and clients will be closed.
     * Include this call in your signal handlers to ensure a clean shutdown.
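     *
     * @example
     * A minimal sketch: call `shutdown` from your process signal handlers.
     * ```typescript
     * process.on('SIGTERM', async () => {
     *   await MemFlow.shutdown();
     *   process.exit(0);
     * });
     * ```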
     */
    static async shutdown() {
        await MemFlowClass.Client.shutdown();
        await MemFlowClass.Worker.shutdown();
        await hotmesh_1.HotMesh.stop();
    }
}
exports.MemFlow = MemFlowClass;
/**
* The MemFlow `Client` service is functionally
* equivalent to the Temporal `Client` service.
*/
MemFlowClass.Client = client_1.ClientService;
/**
* The MemFlow `Connection` service is functionally
* equivalent to the Temporal `Connection` service.
*/
MemFlowClass.Connection = connection_1.ConnectionService;
/**
* @private
*/
MemFlowClass.Search = search_1.Search;
/**
* @private
*/
MemFlowClass.Entity = entity_1.Entity;
/**
* The Handle provides methods to interact with a running
* workflow. This includes exporting the workflow, sending signals, and
* querying the state of the workflow. An instance of the Handle service
* is typically accessed via the MemFlow.Client class (workflow.getHandle).
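*
* @example
* A hedged sketch; the exact `getHandle` parameters (task queue, workflow
* name, workflow id) are assumed here to follow the Temporal-style pattern.
* ```typescript
* const handle = await client.workflow.getHandle('default', 'example', workflowId);
* const result = await handle.result();
* ```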
*/
MemFlowClass.Handle = handle_1.WorkflowHandleService;
/**
* The MemFlow `Worker` service is functionally
* equivalent to the Temporal `Worker` service.
*/
MemFlowClass.Worker = worker_1.WorkerService;
/**
* Register activity workers for a task queue. Activities execute via a message
* queue and can run on different servers than the workflows that call them.
*
* @example
* ```typescript
* // Activity worker
* const activities = {
*   async processPayment(amount: number) { return `Processed $${amount}`; },
*   async sendEmail(to: string, msg: string) { /* ... *\/ }
* };
*
* await MemFlow.registerActivityWorker({
*   connection: {
*     class: Postgres,
*     options: { connectionString: 'postgresql://usr:pwd@localhost:5432/db' }
*   },
*   taskQueue: 'payment'
* }, activities, 'payment');
*
* // Workflow worker (can run on a different server)
* async function orderWorkflow(amount: number) {
*   const { processPayment, sendEmail } = MemFlow.workflow.proxyActivities<{
*     processPayment: (amount: number) => Promise<string>;
*     sendEmail: (to: string, msg: string) => Promise<void>;
*   }>({
*     taskQueue: 'payment',
*     retryPolicy: { maximumAttempts: 3 }
*   });
*
*   const result = await processPayment(amount);
*   await sendEmail('customer@example.com', result);
*   return result;
* }
*
* await MemFlow.Worker.create({
*   connection: { class: Postgres, options: { connectionString: '...' } },
*   taskQueue: 'orders',
*   workflow: orderWorkflow
* });
* ```
*/
MemFlowClass.registerActivityWorker = worker_1.WorkerService.registerActivityWorker;
/**
* The MemFlow `workflow` service is functionally
* equivalent to the Temporal `Workflow` service
* with additional methods for managing workflows,
* including `execChild`, `waitFor`, `sleep`, etc.
*/
MemFlowClass.workflow = workflow_1.WorkflowService;
/**
* Checks if an error is a HotMesh reserved error type that indicates
* a workflow interruption rather than a true error condition.
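*
* @example
* A typical guard inside a workflow or interceptor `try/catch` (the `doWork`
* step is hypothetical):
* ```typescript
* try {
*   await doWork();
* } catch (err) {
*   if (MemFlow.didInterrupt(err)) {
*     throw err; // rethrow so the replay system can resume the workflow
*   }
*   // handle true errors here
*   throw err;
* }
* ```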
*
* @see {@link utils/interruption.didInterrupt} for detailed documentation
*/
MemFlowClass.didInterrupt = interruption_1.didInterrupt;
MemFlowClass.interceptorService = new interceptor_1.InterceptorService();
/**
* Generate a unique identifier for workflow IDs
*/
MemFlowClass.guid = utils_1.guid;