@hotmeshio/hotmesh

Serverless Workflow

import { ILogger } from '../logger';
import { StoreService } from '../store';
import {
  DependencyExport,
  ExportOptions,
  JobActionExport,
  JobExport,
} from '../../types/exporter';
import { ProviderClient, ProviderTransaction } from '../../types/provider';
import { StringStringType, Symbols } from '../../types/serializer';

/**
 * Downloads job data from Redis (hscan, hmget, hgetall).
 * Expands process data and includes the dependency list.
 */
declare class ExporterService {
  appId: string;
  logger: ILogger;
  store: StoreService<ProviderClient, ProviderTransaction>;
  symbols: Promise<Symbols> | Symbols;

  constructor(appId: string, store: StoreService<ProviderClient, ProviderTransaction>, logger: ILogger);

  /**
   * Converts the job hash into a JobExport object.
   * This object contains various facets that describe the interaction
   * in terms relevant to narrative storytelling.
   */
  export(jobId: string, options?: ExportOptions): Promise<JobExport>;

  /**
   * Inflates the key from Redis, a 3-character symbol,
   * into a human-readable JSON path, reflecting the
   * tree-like structure of the unidimensional Hash.
   */
  inflateKey(key: string): string;

  /**
   * Inflates the job data from Redis into a JobExport object.
   * @param jobHash - the job data from Redis
   * @param dependencyList - the list of dependencies for the job
   * @returns - the inflated job data
   */
  inflate(jobHash: StringStringType, dependencyList: string[]): JobExport;

  /**
   * Inflates the dependency data from Redis into a JobExport object by
   * organizing the dimensional isolate in such a way as to interleave
   * into a story.
   * @param data - the dependency data from Redis
   * @param actions - the job action export
   * @returns - the organized dependency data
   */
  inflateDependencyData(data: string[], actions: JobActionExport): DependencyExport[];
}

export { ExporterService };
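
The declaration above only describes the shape of ExporterService. The sketch below shows one way it might be called: it assumes ExporterService is re-exported from the '@hotmeshio/hotmesh' package root and that a StoreService and ILogger have already been configured elsewhere (both are assumptions, not confirmed by this file); only the constructor and export() signatures declared above are relied upon.

// Hypothetical usage sketch; import path and the pre-built `store`/`logger`
// values are assumptions, not part of the declaration file above.
import { ExporterService } from '@hotmeshio/hotmesh';

async function exportJob(
  appId: string,
  store: any, // assumed: an initialized StoreService<ProviderClient, ProviderTransaction>
  logger: any, // assumed: an ILogger instance
  jobId: string,
) {
  const exporter = new ExporterService(appId, store, logger);
  // Reads the job hash from the backing store and inflates it into a
  // JobExport, dependency list included.
  const jobExport = await exporter.export(jobId);
  return jobExport;
}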