"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExporterService = void 0;
const key_1 = require("../../modules/key");
const utils_1 = require("../../modules/utils");
const serializer_1 = require("../serializer");
/**
 * Downloads job data, expanding the process state into a readable
 * hierarchy and including the job's dependency list.
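 *
 * @example
 * // a usage sketch; `store` and `logger` stand in for already-initialized
 * // HotMesh store and logger instances (names are illustrative)
 * const exporter = new ExporterService('myapp', store, logger);
 * const jobExport = await exporter.export('someJobId');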
*/
class ExporterService {
constructor(appId, store, logger) {
this.appId = appId;
this.logger = logger;
this.store = store;
}
/**
* Convert the job hash into a JobExport object.
* This object contains various facets that describe the interaction
* in terms relevant to narrative storytelling.
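 * @param jobId - the unique job identifier
 * @param options - optional export options (defaults to an empty object)
 * @returns - the inflated JobExport object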
*/
async export(jobId, options = {}) {
if (!this.symbols) {
//lazily fetch and cache the symbol table on first export
this.symbols = await this.store.getAllSymbols();
}
const depData = []; // await this.store.getDependencies(jobId);
const jobData = await this.store.getRaw(jobId);
const jobExport = this.inflate(jobData, depData);
return jobExport;
}
/**
* Inflates the key
* into a human-readable JSON path, reflecting the
* tree-like structure of the unidimensional Hash
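 * @param key - the abbreviated symbol key
 * @returns - the expanded path when the key exists in the symbol table;
 * otherwise the key itself
 *
 * @example
 * // hypothetical symbol table (actual abbreviations and paths are app-defined):
 * //   this.symbols = { aBa: 'data/title' }
 * // this.inflateKey('aBa') => 'data/title'
 * // this.inflateKey('zzz') => 'zzz'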
*/
inflateKey(key) {
return key in this.symbols ? this.symbols[key] : key;
}
/**
* Inflates the job data into a JobExport object
* @param jobHash - the job data
* @param dependencyList - the list of dependencies for the job
* @returns - the inflated job data
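 *
 * @example
 * // hypothetical hash entries (keys and serialized values are illustrative):
 * //   { 'aBa,0,1': <serialized>, 'abc': <serialized> }
 * // 'aBa,0,1' matches the activity regex: path = inflateKey('aBa'),
 * //   dimensions = '0/1', so process['0/1/<path>'] is set
 * // 'abc' (a 3-character key with no dimensions) is treated as job state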
*/
inflate(jobHash, dependencyList) {
//the list of actions taken in the workflow and hook functions
const actions = {
hooks: {},
main: {
cursor: -1,
items: [],
},
};
const process = {};
const dependencies = this.inflateDependencyData(dependencyList, actions);
const regex = /^([a-zA-Z]{3}),(\d+(?:,\d+)*)/;
Object.entries(jobHash).forEach(([key, value]) => {
const match = key.match(regex);
if (match) {
//activity process state
const [_, letters, numbers] = match;
const path = this.inflateKey(letters);
const dimensions = `${numbers.replace(/,/g, '/')}`;
const resolved = serializer_1.SerializerService.fromString(value);
process[`${dimensions}/${path}`] = resolved;
}
else if (key.length === 3) {
//job state
process[this.inflateKey(key)] = serializer_1.SerializerService.fromString(value);
}
});
return {
dependencies,
process: (0, utils_1.restoreHierarchy)(process),
status: jobHash[':'],
};
}
/**
 * Inflates the dependency data for inclusion in the JobExport,
 * organizing the dimensional isolates so they can be interleaved
 * into a single narrative
 * @param data - the dependency data
 * @param actions - the accumulator for main and hook action items
* @returns - the organized dependency data
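 *
 * @example
 * // hypothetical entry, with <SEP> standing in for the VALSEP delimiter:
 * //   'delist<SEP>my.topic<SEP>GID123<SEP>0<SEP>my-job-1'
 * // yields: { type: 'delist', topic: 'my.topic', gid: 'GID123', jid: 'my-job-1' }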
*/
inflateDependencyData(data, actions) {
const hookReg = /([0-9,]+)-(\d+)$/;
const flowReg = /-(\d+)$/;
return data.map((dependency, index) => {
const [action, topic, gid, _pd, ...jid] = dependency.split(key_1.VALSEP);
const jobId = jid.join(key_1.VALSEP);
const match = jobId.match(hookReg);
let prefix;
let type;
let dimensionKey = '';
if (match) {
//hook-originating dependency
const [_, dimension, counter] = match;
dimensionKey = dimension.split(',').join('/');
prefix = `${dimensionKey}[${counter}]`;
type = 'hook';
}
else {
const match = jobId.match(flowReg);
if (match) {
//main workflow-originating dependency
const [_, counter] = match;
prefix = `[${counter}]`;
type = 'flow';
}
else {
//'other' types like signal cleanup
prefix = '/';
type = 'other';
}
}
return {
type: action,
topic,
gid,
jid: jobId,
};
});
}
}
exports.ExporterService = ExporterService;