@godspeedsystems/core
Version:
> 4th Generation Declarative Microservice Framework
1,024 lines (1,023 loc) • 60 kB
JavaScript
/*
* You are allowed to study this software for learning and local development purposes only. Any other use without explicit permission by Mindgrep, is prohibited.
* © 2022 Mindgrep Technologies Pvt Ltd
*/ "use strict";
// Mark this CommonJS exports object as a transpiled ES module so that
// `import` interop treats its named exports correctly.
Object.defineProperty(exports, "__esModule", {
    value: true
});
// Expose every entry of `all` on `target` as an enumerable getter, so the
// exported bindings stay "live" (re-evaluated on each property access).
function _export(target, all) {
    for(const name in all){
        Object.defineProperty(target, name, {
            enumerable: true,
            get: all[name]
        });
    }
}
// Public API of this module: the GSFunction task hierarchy plus the core
// runtime value types (GSStatus, GSContext, GSCloudEvent, …), exported as
// live getters.
_export(exports, {
    GSActor: function() {
        return GSActor;
    },
    GSCloudEvent: function() {
        return GSCloudEvent;
    },
    GSContext: function() {
        return GSContext;
    },
    GSDynamicFunction: function() {
        return GSDynamicFunction;
    },
    GSEachParallelFunction: function() {
        return GSEachParallelFunction;
    },
    GSEachSeriesFunction: function() {
        return GSEachSeriesFunction;
    },
    GSFunction: function() {
        return GSFunction;
    },
    GSIFFunction: function() {
        return GSIFFunction;
    },
    GSLogEvent: function() {
        return GSLogEvent;
    },
    GSParallelFunction: function() {
        return GSParallelFunction;
    },
    GSSeriesFunction: function() {
        return GSSeriesFunction;
    },
    GSStatus: function() {
        return GSStatus;
    },
    GSSwitchFunction: function() {
        return GSSwitchFunction;
    }
});
const _api = /*#__PURE__*/ _interop_require_default(require("@opentelemetry/api"));
const _logger = require("../logger");
const _utils = require("./utils");
const _scriptRuntime = /*#__PURE__*/ _interop_require_default(require("./scriptRuntime"));
const _metrics = /*#__PURE__*/ _interop_require_default(require("@godspeedsystems/metrics"));
const _config = /*#__PURE__*/ _interop_require_default(require("config"));
const _caching = require("./caching");
// Drive one step of a generator-based async function: invoke gen.next/throw,
// reject the outer promise if the generator throws, resolve it when the
// generator finishes, and otherwise await the yielded value and recurse.
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
    let step;
    let result;
    try {
        step = gen[key](arg);
        result = step.value;
    } catch (error) {
        reject(error);
        return;
    }
    if (step.done) {
        resolve(result);
    } else {
        Promise.resolve(result).then(_next, _throw);
    }
}
// Wrap a generator-producing function so it behaves like an async function:
// calling the wrapper returns a Promise that is settled by stepping the
// generator via asyncGeneratorStep.
function _async_to_generator(fn) {
    return function() {
        const self = this;
        const callArgs = arguments;
        return new Promise(function(resolve, reject) {
            const gen = fn.apply(self, callArgs);
            const onFulfilled = function(value) {
                asyncGeneratorStep(gen, resolve, reject, onFulfilled, onRejected, "next", value);
            };
            const onRejected = function(err) {
                asyncGeneratorStep(gen, resolve, reject, onFulfilled, onRejected, "throw", err);
            };
            onFulfilled(undefined);
        });
    };
}
// Spread helper: brand-new keys are plain-assigned; keys already visible on
// the object (own or inherited) are redefined as enumerable/writable own
// properties via defineProperty.
function _define_property(obj, key, value) {
    if (!(key in obj)) {
        obj[key] = value;
    } else {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
    }
    return obj;
}
// Interop shim: ES-module namespaces pass through unchanged, while plain
// CommonJS exports get wrapped under a `default` key.
function _interop_require_default(obj) {
    if (obj && obj.__esModule) {
        return obj;
    }
    return {
        default: obj
    };
}
// Object-spread helper: copy own enumerable string keys (and, where symbols
// are supported, own enumerable symbol keys) from each source argument onto
// `target`, later sources winning.
function _object_spread(target) {
    for(let i = 1; i < arguments.length; i++){
        const source = arguments[i] == null ? {} : arguments[i];
        let keys = Object.keys(source);
        if (typeof Object.getOwnPropertySymbols === "function") {
            const enumerableSymbols = Object.getOwnPropertySymbols(source).filter(function(sym) {
                return Object.getOwnPropertyDescriptor(source, sym).enumerable;
            });
            keys = keys.concat(enumerableSymbols);
        }
        for (const key of keys){
            _define_property(target, key, source[key]);
        }
    }
    return target;
}
// Collect an object's own string keys plus its own symbol keys; when
// `enumerableOnly` is set, non-enumerable symbols are filtered out.
function ownKeys(object, enumerableOnly) {
    const keys = Object.keys(object);
    if (Object.getOwnPropertySymbols) {
        let symbols = Object.getOwnPropertySymbols(object);
        if (enumerableOnly) {
            symbols = symbols.filter(function(sym) {
                return Object.getOwnPropertyDescriptor(object, sym).enumerable;
            });
        }
        keys.push(...symbols);
    }
    return keys;
}
// Spread helper preserving descriptors (getters/setters survive): use the
// bulk getOwnPropertyDescriptors path where available, otherwise copy each
// key's descriptor individually.
function _object_spread_props(target, source) {
    source = source == null ? {} : source;
    if (Object.getOwnPropertyDescriptors) {
        Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
        return target;
    }
    ownKeys(Object(source)).forEach(function(key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
    });
    return target;
}
// Rest-pattern helper: shallow-copy `source` minus `excluded` keys. String
// keys come from the loose variant; enumerable symbol keys are then added
// unless excluded.
function _object_without_properties(source, excluded) {
    if (source == null) return {};
    const target = _object_without_properties_loose(source, excluded);
    if (Object.getOwnPropertySymbols) {
        for (const sym of Object.getOwnPropertySymbols(source)){
            if (excluded.indexOf(sym) >= 0) continue;
            if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
            target[sym] = source[sym];
        }
    }
    return target;
}
// Shallow-copy own enumerable string keys of `source`, skipping any key
// listed in `excluded`. Null/undefined sources yield an empty object.
function _object_without_properties_loose(source, excluded) {
    if (source == null) return {};
    const target = {};
    for (const key of Object.keys(source)){
        if (excluded.indexOf(key) < 0) {
            target[key] = source[key];
        }
    }
    return target;
}
// Shared OpenTelemetry tracer; used by GSFunction._observability to open a
// span per traced task.
const tracer = _api.default.trace.getTracer('my-service-tracer');
/**
 * Base runtime representation of a single workflow task. Instances are
 * callable: the Function super() body forwards invocations to
 * _observability(ctx, taskValue). Each instance carries the task's compiled
 * args/scripts plus its retry, on_error, caching, logging and metrics
 * configuration.
 */ class GSFunction extends Function {
    /**
     * Wraps _call with the task's configured before/after log entries and
     * metric timers / label-script evaluation.
     */ _internalCall(ctx, taskValue) {
        var _this = this;
        return _async_to_generator(function*() {
            var _this_logs, _this_logs1;
            // Optional `logs.before` entry at the configured (or default) level.
            if ((_this_logs = _this.logs) === null || _this_logs === void 0 ? void 0 : _this_logs.before) {
                var _config_log;
                const log = _this.logs.before;
                //@ts-ignore
                ctx.childLogger[log.level || ((_config_log = _config.default.log) === null || _config_log === void 0 ? void 0 : _config_log.level) || 'info'](log.attributes ? yield (0, _scriptRuntime.default)(ctx, log.attributes, taskValue) : null, `${log.message} %o`, log.params);
            }
            // Start one timer per metric that requested one.
            const timers = [];
            if (_this.metrics) {
                for (let metric of _this.metrics){
                    if (metric.timer) {
                        //@ts-ignore
                        timers.push(metric.obj.startTimer());
                    }
                }
            }
            const status = yield _this._call(ctx, taskValue);
            if (_this.metrics) {
                for (let timer of timers){
                    //@ts-ignore
                    timer();
                }
                // Every non-reserved metric key was compiled to a script in the
                // constructor; evaluate it and chain the result onto the metric
                // object by calling the method of the same name.
                for (let metric of _this.metrics){
                    let obj = metric.obj;
                    for (let key of Object.keys(metric)){
                        if (![
                            'type',
                            'name',
                            'obj',
                            'timer',
                            'help'
                        ].includes(key)) {
                            const val = yield (0, _scriptRuntime.default)(ctx, metric[key], taskValue);
                            obj = obj[key](val);
                        }
                    }
                }
            }
            // Optional `logs.after` entry.
            if ((_this_logs1 = _this.logs) === null || _this_logs1 === void 0 ? void 0 : _this_logs1.after) {
                var _config_log1;
                const log = _this.logs.after;
                //@ts-ignore
                ctx.childLogger[log.level || ((_config_log1 = _config.default.log) === null || _config_log1 === void 0 ? void 0 : _config_log1.level) || 'info'](log.attributes ? yield (0, _scriptRuntime.default)(ctx, log.attributes, taskValue) : null, `${log.message} %o`, log.params);
            }
            return status;
        })();
    }
    /**
     * Entry point for every invocation (wired up via super() in the
     * constructor). When the task's YAML asks for tracing, runs
     * _internalCall inside an active OpenTelemetry span; otherwise calls it
     * directly.
     */ _observability(ctx, taskValue) {
        var _this = this;
        return _async_to_generator(function*() {
            if (_this.yaml.trace) {
                let trace = _this.yaml.trace;
                return tracer.startActiveSpan(trace.name, /*#__PURE__*/ function() {
                    var _ref = _async_to_generator(function*(span) {
                        if (trace.attributes) {
                            trace.attributes.task_id = _this.id;
                            trace.attributes.workflow_name = _this.workflow_name;
                            for(let attr in trace.attributes){
                                span.setAttribute(attr, trace.attributes[attr]);
                            }
                        }
                        const status = yield _this._internalCall(ctx, taskValue);
                        // Mark the span as errored when the task failed.
                        if (!status.success) {
                            span.setStatus({
                                //@ts-ignore
                                code: _api.default.SpanStatusCode.ERROR,
                                message: 'Error'
                            });
                        }
                        span.end();
                        return status;
                    });
                    return function(span) {
                        return _ref.apply(this, arguments);
                    };
                }());
            } else {
                return _this._internalCall(ctx, taskValue);
            }
        })();
    }
    /**
     * Executes the task's bound native function `this.fn` with evaluated
     * args, normalizing whatever it returns into a GSStatus. Errors thrown
     * by the function become a 500 status rather than propagating.
     */ _executefn(ctx, taskValue) {
        var _this = this;
        return _async_to_generator(function*() {
            // final status to return
            let status;
            let args = _this.args;
            try {
                // Normalize ISO-8601-ish duration strings (e.g. "PT15s") in the
                // retry config to milliseconds.
                if (_this.retry) {
                    const parseDuration = yield (0, _utils.getParseDurationPkg)();
                    if (_this.retry.interval) {
                        _this.retry.interval = parseDuration(_this.retry.interval.replace(/^PT/i, ''));
                    }
                    if (_this.retry.min_interval) {
                        _this.retry.min_interval = parseDuration(_this.retry.min_interval.replace(/^PT/i, ''));
                    }
                    if (_this.retry.max_interval) {
                        _this.retry.max_interval = parseDuration(_this.retry.max_interval.replace(/^PT/i, ''));
                    }
                }
                ctx.childLogger.debug({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                }, 'Executing task %s with args %o', _this.id, _this.args);
                // Work on a shallow copy so this.args is not mutated below.
                if (Array.isArray(_this.args)) {
                    args = [
                        ..._this.args
                    ];
                } else if ((0, _utils.isPlainObject)(_this.args)) {
                    args = _object_spread({}, _this.args);
                }
                ctx.childLogger.setBindings({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                });
                if (String(_this.yaml.fn).startsWith('datasource.')) {
                    // If datasource is a script then evaluate it else load ctx.datasources as it is.
                    const [, datasourceName, entityType, method] = _this.yaml.fn.split('.');
                    // so that prisma plugin get the entityName and method in plugin to execute respective method.
                    args.meta = {
                        fnNameInWorkflow: _this.yaml.fn,
                        entityType,
                        method,
                        authzPerms: args.authzPerms
                    };
                    delete args.authzPerms;
                    // TODO: datasource header merging and authn/before-method hooks
                    // will move into the datasource plugins themselves.
                }
                // Generally all methods with retry will have some args
                if (args && _this.retry) {
                    args.retry = _this.retry;
                }
                let res;
                ctx.childLogger.setBindings({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                });
                // NOTE: the original code branched on Array.isArray(args) here but
                // both branches were identical; collapsed to a single call.
                res = yield _this.fn(ctx, args);
                if (res instanceof GSStatus) {
                    status = res;
                } else {
                    if (typeof res == 'object') {
                        //Some framework functions like HTTP return an object in following format. Check if that is the case.
                        //All framework functions are expected to set success as boolean variable. Can not be null.
                        if (res.success !== undefined || res.code !== undefined) {
                            let { success, code, data, message, headers, exitWithStatus } = res;
                            status = new GSStatus(success, code, message, data, headers);
                            //Check if exitWithStatus is set in the res object. If it is set then return by setting ctx.exitWithStatus else continue.
                            if (exitWithStatus) {
                                ctx.exitWithStatus = status;
                            }
                        } else {
                            // Plain object without success/code: wrap it as a 200,
                            // minus any exitWithStatus marker key.
                            const { exitWithStatus } = res, restObj = _object_without_properties(res, [
                                "exitWithStatus"
                            ]);
                            status = new GSStatus(true, 200, undefined, restObj);
                            if (exitWithStatus) {
                                ctx.exitWithStatus = status;
                            }
                        }
                    } else {
                        //This function gives a non GSStatus compliant return, then create a new GSStatus and set in the output for this function
                        ctx.childLogger.debug({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, `Result of task execution ${_this.id} %o`, res);
                        status = new GSStatus(true, 200, undefined, res);
                    }
                }
                ctx.childLogger[!status.success ? 'error' : 'debug']({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                }, `Result of task execution ${_this.id} %o`, res);
            } catch (err) {
                ctx.childLogger.error({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                }, 'Caught error from execution in task id: %s, error: %s', _this.id, err);
                status = new GSStatus(false, 500, err.message, {
                    message: 'Internal server error'
                });
            }
            // TODO: after_method_hook will move to the datasource plugin.
            return status;
        })();
    }
    /**
     * Post-processes a failed status according to the task's on_error
     * config: optional response transformation (script/static/tasks),
     * optional log attributes, and exit-vs-continue semantics. Always
     * records the final status in ctx.outputs[this.id].
     */ handleError(ctx, status, taskValue) {
        var _this = this;
        return _async_to_generator(function*() {
            if (!status.success) {
                /**
         * If the call had an error, set that in events so that we can send it to the telemetry backend.
         */ ctx.addLogEvent(new GSLogEvent('ERROR', ctx.outputs));
                if (_this.onError) {
                    var _this_onError_log, _ctx_config_defaults_on_error, _ctx_config_defaults, _ctx_config;
                    ctx.outputs[_this.id] = status;
                    if (_this.onError.response instanceof Function) {
                        //The script may need the output of the task so far, for the transformation logic.
                        //So set the status in outputs, against this task's id
                        const res = yield (0, _scriptRuntime.default)(ctx, _this.onError.response, taskValue);
                        if (typeof res === 'object' && !(res.success === undefined && res.code === undefined)) {
                            //Meaning the script is returning GS Status compatible response
                            let { success, code, data, message, headers } = res;
                            status = new GSStatus(success, code, message, data, headers);
                        } else {
                            //This function gives a non GSStatus compliant return, then create a new GSStatus and set in the output for this function
                            status = new GSStatus(true, 200, undefined, res);
                        }
                    } else if (_this.onError.response) {
                        status.data = _this.onError.response;
                    } else if (_this.onError.tasks) {
                        status = yield _this.onError.tasks(ctx);
                    }
                    // Evaluate on_error log attributes (either nesting style) and
                    // bind them onto the child logger.
                    if (((_this_onError_log = _this.onError.log) === null || _this_onError_log === void 0 ? void 0 : _this_onError_log.attributes) || _this.onError.log_attributes) {
                        var _this_onError_log1;
                        const error = {};
                        const logAttributes = ((_this_onError_log1 = _this.onError.log) === null || _this_onError_log1 === void 0 ? void 0 : _this_onError_log1.attributes) || _this.onError.log_attributes;
                        for(let key in logAttributes){
                            const script = (0, _utils.compileScript)(logAttributes[key], {
                                section: 'on_error.log.attributes'
                            });
                            error[key] = yield (0, _scriptRuntime.default)(ctx, script, taskValue);
                        }
                        ctx.childLogger.setBindings({
                            error
                        });
                    }
                    var _this_onError_continue, _ref;
                    // Task-level on_error.continue wins; otherwise fall back to the
                    // config default; otherwise stop the workflow (false).
                    const onErrorContinue = (_ref = (_this_onError_continue = _this.onError.continue) !== null && _this_onError_continue !== void 0 ? _this_onError_continue : (_ctx_config = ctx.config) === null || _ctx_config === void 0 ? void 0 : (_ctx_config_defaults = _ctx_config.defaults) === null || _ctx_config_defaults === void 0 ? void 0 : (_ctx_config_defaults_on_error = _ctx_config_defaults.on_error) === null || _ctx_config_defaults_on_error === void 0 ? void 0 : _ctx_config_defaults_on_error.continue) !== null && _ref !== void 0 ? _ref : false;
                    if (onErrorContinue === false) {
                        ctx.childLogger.error({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, 'exiting on error %s', _this.id);
                        ctx.exitWithStatus = status;
                    }
                } else {
                    if (ctx.exitWithStatus) {
                        ctx.exitWithStatus = status;
                    }
                }
            }
            ctx.outputs[_this.id] = status;
            return status;
        })();
    }
    /**
     * Core task execution: optional task-level authz workflow, cache
     * lookup (caching.before), args-script evaluation, dynamic function
     * resolution (fnScript), sub-workflow vs native-function dispatch,
     * error handling, and cache write-back (caching.after).
     *
     * @param ctx the GSContext for this invocation
     * @param taskValue per-iteration value for each/loop style tasks
     */ _call(ctx, taskValue) {
        var _this = this;
        return _async_to_generator(function*() {
            let status;
            let cachingInstruction = null;
            try {
                var _this_caching, _this_yaml_fn, _this_caching1;
                let datastoreAuthzArgs; /*
        This is when datasource needs to modify its SQL query or something
        else to ensure that the current user gets or mutates only the data
        which it has access to.
      */
                if (_this.yaml.authz) {
                    ctx.childLogger.setBindings({
                        workflow_name: _this.workflow_name,
                        task_id: _this.id
                    });
                    ctx.childLogger.debug(`Invoking authz workflow`);
                    ctx.forAuth = true;
                    let authzRes = yield _this.yaml.authz(ctx);
                    ctx.forAuth = false;
                    if (authzRes.code === 403) {
                        var _authzRes_data;
                        //Authorization task executed successfully and returned user is not authorized
                        authzRes.success = false;
                        if (!((_authzRes_data = authzRes.data) === null || _authzRes_data === void 0 ? void 0 : _authzRes_data.message)) {
                            (0, _utils.setAtPath)(authzRes, 'data.message', authzRes.message || 'Access Forbidden');
                        }
                        ctx.exitWithStatus = authzRes;
                        ctx.childLogger.debug('Authorization task failed at the task level with code 403');
                        //This task has failed and task must not be allowed to execute further
                        return authzRes;
                    } else if (authzRes.success !== true) {
                        var _authzRes_data1;
                        //Ensure success = false for no ambiguity further
                        authzRes.success = false;
                        // Clamp non-4xx/5xx codes to 403.
                        if (!authzRes.code || authzRes.code < 400 || authzRes.code > 599) {
                            authzRes.code = 403;
                        }
                        if (!((_authzRes_data1 = authzRes.data) === null || _authzRes_data1 === void 0 ? void 0 : _authzRes_data1.message)) {
                            (0, _utils.setAtPath)(authzRes, 'data.message', authzRes.message || 'Access Forbidden');
                        }
                        ctx.childLogger.debug(`Task level auth failed. Authorization task did not explicitly return success === true, hence failed with code ${authzRes.code}`);
                        ctx.exitWithStatus = authzRes;
                        return authzRes;
                    }
                    ctx.childLogger.debug('Authorization passed at the task level');
                    //Authorization successful.
                    //Whatever is in the data of the authzRes is to be passed on to
                    //the datasource plugin's execute function as it is.
                    datastoreAuthzArgs = authzRes.data;
                }
                ctx.childLogger.setBindings({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                });
                // Cache lookup: on a hit, short-circuit and return the cached status.
                if ((_this_caching = _this.caching) === null || _this_caching === void 0 ? void 0 : _this_caching.before) {
                    var _this_caching2;
                    cachingInstruction = yield (0, _caching.evaluateCachingInstAndInvalidates)(ctx, (_this_caching2 = _this.caching) === null || _this_caching2 === void 0 ? void 0 : _this_caching2.before, taskValue);
                    // check in cache and return
                    status = yield (0, _caching.fetchFromCache)(cachingInstruction);
                    if (status) {
                        ctx.childLogger.debug({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, 'Loading result from cache');
                        status = typeof status === 'string' && JSON.parse(status) || status;
                        ctx.outputs[_this.id] = status;
                        return status;
                    }
                }
                let args = _this.args;
                if (_this.args_script) {
                    args = yield (0, _scriptRuntime.default)(ctx, _this.args_script, taskValue);
                    if (ctx.exitWithStatus) {
                        throw ctx.exitWithStatus;
                    }
                }
                // Dynamic function name: evaluate the fn script, then resolve the
                // result against native functions, YAML workflows, then datasources.
                if (_this.fnScript) {
                    var _this_nativeFunctions;
                    ctx.childLogger.setBindings({
                        workflow_name: _this.workflow_name,
                        task_id: _this.id
                    });
                    let s = yield (0, _scriptRuntime.default)(ctx, _this.fnScript, taskValue);
                    ctx.childLogger.setBindings({
                        workflow_name: '',
                        task_id: ''
                    });
                    //First look in native functions
                    _this.fn = (_this_nativeFunctions = _this.nativeFunctions) === null || _this_nativeFunctions === void 0 ? void 0 : _this_nativeFunctions[s];
                    //Look in YAML workflow
                    if (!_this.fn) {
                        var _this_workflows;
                        _this.fn = (_this_workflows = _this.workflows) === null || _this_workflows === void 0 ? void 0 : _this_workflows[s];
                        if (_this.fn) {
                            _this.isSubWorkflow = true;
                        }
                    }
                    //Next, check if this is a datasource call
                    if (!_this.fn && s.startsWith('datasource.')) {
                        var _this_nativeFunctions1;
                        // Datasources register under "datasource.<name>"; keep the full
                        // dotted path in yaml.fn for the plugin's meta.
                        const fnName = s.split('.').splice(0, 2).join('.');
                        _this.fn = (_this_nativeFunctions1 = _this.nativeFunctions) === null || _this_nativeFunctions1 === void 0 ? void 0 : _this_nativeFunctions1[fnName];
                        if (_this.fn) {
                            _this.yaml.fn = s;
                        }
                    }
                    //If still is not found, the script evaluate to invalid function name
                    if (!_this.fn) {
                        ctx.childLogger.error(`Did not find any function by the name ${s}`);
                        status = new GSStatus(false, 500, undefined, 'Internal Server Error');
                    } else {
                        ctx.childLogger.debug({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, `invoking dynamic fn: ${s}`);
                    }
                }
                // Pass authz-derived permissions through to datasource calls; they
                // are relocated to args.meta inside _executefn.
                if (datastoreAuthzArgs && ((_this_yaml_fn = _this.yaml.fn) === null || _this_yaml_fn === void 0 ? void 0 : _this_yaml_fn.startsWith('datasource.'))) {
                    args.authzPerms = datastoreAuthzArgs;
                    ctx.childLogger.debug({
                        workflow_name: _this.workflow_name,
                        task_id: _this.id
                    }, 'merged args with authz args.data: %o', args);
                }
                ctx.childLogger.setBindings({
                    workflow_name: '',
                    task_id: ''
                });
                if (_this.fn instanceof GSFunction) {
                    if (_this.isSubWorkflow) {
                        ctx.childLogger.debug({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, 'isSubWorkflow, if subworkflow is creating new ctx, replacing inputs data with args data');
                        ctx.childLogger.setBindings({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        });
                        const newCtx = ctx.cloneWithNewData(args);
                        ctx.childLogger.setBindings({
                            workflow_name: '',
                            task_id: ''
                        });
                        status = yield _this.fn(newCtx, taskValue);
                    } else {
                        ctx.childLogger.debug({
                            workflow_name: _this.workflow_name,
                            task_id: _this.id
                        }, 'No isSubWorkflow, continuing in the same ctx');
                        status = yield _this.fn(ctx, taskValue);
                    }
                } else {
                    _this.args = args;
                    status = yield _this._executefn(ctx, taskValue);
                }
                status = yield _this.handleError(ctx, status, taskValue);
                // Inside an authz invocation, any non-success must abort the caller.
                if (ctx.forAuth) {
                    if (status.success !== true) {
                        ctx.exitWithStatus = status;
                    }
                }
                // Cache write-back per caching.after instruction.
                if ((_this_caching1 = _this.caching) === null || _this_caching1 === void 0 ? void 0 : _this_caching1.after) {
                    var _this_caching3;
                    cachingInstruction = yield (0, _caching.evaluateCachingInstAndInvalidates)(ctx, (_this_caching3 = _this.caching) === null || _this_caching3 === void 0 ? void 0 : _this_caching3.after, taskValue);
                    yield (0, _caching.setInCache)(ctx, cachingInstruction, status);
                }
            } catch (err) {
                ctx.childLogger.error({
                    workflow_name: _this.workflow_name,
                    task_id: _this.id
                }, 'Caught error in evaluation in task id: %s', _this.id);
                ctx.childLogger.debug('error: %o', err);
                status = new GSStatus(false, 500, err.message, {
                    message: 'Internal server error'
                });
            }
            return status;
        })();
    }
    /**
     * Builds a callable task from its YAML definition: compiles args /
     * on_error.response / log-attribute / metric-label scripts, registers
     * Prometheus-style metric objects, and compiles caching instructions.
     *
     * @param yaml            task definition (id, fn, on_error, logs, metrics, caching, …)
     * @param workflows       map of YAML workflows, for sub-workflow dispatch
     * @param nativeFunctions map of native/plugin functions
     * @param _fn             pre-resolved function to execute (if static)
     * @param args            raw task args (compiled to a script when templated)
     * @param isSubWorkflow   whether this task invokes another workflow
     * @param fnScript        compiled script resolving the fn name at runtime
     * @param location        source location info, used in compile errors
     */ constructor(yaml, workflows, nativeFunctions, _fn, args, isSubWorkflow, fnScript, location){
        // The super() body makes instances directly callable, delegating to
        // _observability. (Function-constructor bodies are sloppy-mode, so
        // arguments.callee is available here.)
        super('return arguments.callee._observability.apply(arguments.callee, arguments)'), _define_property(this, "yaml", void 0), _define_property(this, "id", void 0 // can be dot separated fqn
        ), _define_property(this, "args", void 0), _define_property(this, "args_script", void 0), _define_property(this, "fn", void 0), _define_property(this, "onError", void 0), _define_property(this, "retry", void 0), _define_property(this, "isSubWorkflow", void 0), _define_property(this, "logs", void 0), _define_property(this, "metrics", void 0), _define_property(this, "workflow_name", void 0), _define_property(this, "workflows", void 0), _define_property(this, "nativeFunctions", void 0), _define_property(this, "fnScript", void 0), _define_property(this, "caching", void 0);
        this.yaml = yaml;
        this.id = yaml.id || yaml.workflow_name;
        this.fn = _fn;
        this.workflow_name = yaml.workflow_name;
        this.workflows = workflows;
        this.nativeFunctions = nativeFunctions;
        this.fnScript = fnScript;
        this.args = args || {};
        // Compile args when they contain inline <% %> scripts or :path params.
        const str = JSON.stringify(this.args);
        if (str.match(/<(.*?)%/) && str.includes('%>') || str.match(/(^|\/):([^/]+)/)) {
            this.args_script = (0, _utils.compileScript)(this.args, location);
        }
        this.onError = yaml.on_error;
        if (this.onError && this.onError.response) {
            if (!(this.onError.response instanceof Function)) {
                this.onError.response = (0, _utils.compileScript)(this.onError.response, _object_spread_props(_object_spread({}, location), {
                    section: 'on_error'
                }));
            }
        }
        // retry
        this.retry = yaml.retry;
        this.isSubWorkflow = isSubWorkflow;
        // Compile logs.before/logs.after attribute objects (once), stamping in
        // the task id and workflow name.
        if (this.yaml.logs) {
            var _this_logs_before, _this_logs, _this_logs_after, _this_logs1;
            this.logs = this.yaml.logs;
            if ((_this_logs = this.logs) === null || _this_logs === void 0 ? void 0 : (_this_logs_before = _this_logs.before) === null || _this_logs_before === void 0 ? void 0 : _this_logs_before.attributes) {
                if (!(this.logs.before.attributes instanceof Function)) {
                    this.logs.before.attributes.task_id = this.id;
                    this.logs.before.attributes.workflow_name = this.workflow_name;
                    this.logs.before.attributes = (0, _utils.compileScript)(this.logs.before.attributes, _object_spread_props(_object_spread({}, location), {
                        section: 'logs.before.attributes'
                    }));
                }
            }
            if ((_this_logs1 = this.logs) === null || _this_logs1 === void 0 ? void 0 : (_this_logs_after = _this_logs1.after) === null || _this_logs_after === void 0 ? void 0 : _this_logs_after.attributes) {
                if (!(this.logs.after.attributes instanceof Function)) {
                    this.logs.after.attributes.task_id = this.id;
                    this.logs.after.attributes.workflow_name = this.workflow_name;
                    this.logs.after.attributes = (0, _utils.compileScript)(this.logs.after.attributes, _object_spread_props(_object_spread({}, location), {
                        section: 'logs.after.attributes'
                    }));
                }
            }
        }
        // metrics: instantiate the metric object per type and compile every
        // non-reserved key as a script (evaluated after each run).
        if (this.yaml.metrics) {
            this.metrics = this.yaml.metrics;
            // @ts-ignore
            for (let metric of this.metrics){
                metric.labels.task_id = this.id;
                metric.labels.workflow_name = this.workflow_name;
                switch(metric.type){
                    case 'counter':
                        metric.obj = new _metrics.default.Counter({
                            name: metric.name,
                            help: metric.help,
                            labelNames: Object.keys(metric.labels || {})
                        });
                        break;
                    case 'gauge':
                        metric.obj = new _metrics.default.Gauge({
                            name: metric.name,
                            help: metric.help,
                            labelNames: Object.keys(metric.labels || {})
                        });
                        break;
                    case 'histogram':
                        metric.obj = new _metrics.default.Histogram({
                            name: metric.name,
                            help: metric.help,
                            labelNames: Object.keys(metric.labels || {})
                        });
                        break;
                    case 'summary':
                        metric.obj = new _metrics.default.Summary({
                            name: metric.name,
                            help: metric.help,
                            labelNames: Object.keys(metric.labels || {})
                        });
                        break;
                    default:
                        // Invalid metric config is a startup-time fatal error.
                        _logger.logger.error({
                            workflow_name: this.workflow_name,
                            task_id: this.id
                        }, 'Invalid metric type %s, it should be one of counter,summary,histogram,gauge', metric.type);
                        process.exit(1);
                }
                for (let key of Object.keys(metric)){
                    if (![
                        'type',
                        'name',
                        'obj',
                        'timer',
                        'help'
                    ].includes(key)) {
                        metric[key] = (0, _utils.compileScript)(metric[key], _object_spread_props(_object_spread({}, location), {
                            section: 'metric'
                        }));
                    }
                }
            }
        }
        //caching
        if (this.yaml.caching) {
            this.caching = {};
            let cachingLocation;
            if (this.yaml.caching.before) {
                cachingLocation = _object_spread_props(_object_spread({}, location), {
                    section: 'caching.before'
                });
                (0, _caching.checkCachingDs)(this.yaml.caching.before, cachingLocation);
                this.caching.before = (0, _utils.compileScript)(this.yaml.caching.before, cachingLocation);
            }
            if (this.yaml.caching.after) {
                cachingLocation = _object_spread_props(_object_spread({}, location), {
                    section: 'caching.after'
                });
                (0, _caching.checkCachingDs)(this.yaml.caching.after, cachingLocation);
                this.caching.after = (0, _utils.compileScript)(this.yaml.caching.after, cachingLocation);
            }
        }
    }
}
/**
 * Runs its child tasks one after another. When a child requests workflow
 * exit (ctx.exitWithStatus), the series returns immediately — except for
 * parallel-batch children (isParallel), whose result is recorded while the
 * series keeps going.
 */ class GSSeriesFunction extends GSFunction {
    _call(ctx, taskValue) {
        const self = this;
        return _async_to_generator(function*() {
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, `GSSeriesFunction. Executing tasks with ids: ${self.args.map((task)=>task.id)}`);
            let lastResult;
            for (const child of self.args){
                lastResult = yield child(ctx, taskValue);
                if (!ctx.exitWithStatus) {
                    continue;
                }
                // An exit was requested while this child ran.
                if (child.yaml.isEachParallel) {
                    ctx.childLogger.debug({
                        workflow_name: self.workflow_name,
                        task_id: self.id
                    }, 'isEachParallel: %s, ret: %o', child.yaml.isEachParallel, lastResult);
                    ctx.outputs[self.id] = lastResult;
                    return lastResult;
                }
                if (child.yaml.isParallel) {
                    // Parallel batches record their result but do not stop the series.
                    ctx.childLogger.debug({
                        workflow_name: self.workflow_name,
                        task_id: self.id
                    }, 'isParallel: %s, ret: %o', child.yaml.isParallel, lastResult);
                    ctx.outputs[self.id] = lastResult;
                } else {
                    ctx.outputs[self.id] = lastResult;
                    return lastResult;
                }
            }
            ctx.childLogger.setBindings({
                workflow_name: self.workflow_name,
                task_id: self.id
            });
            // The series' own output is the last child's result.
            ctx.outputs[self.id] = lastResult;
            return lastResult;
        })();
    }
}
/**
 * Runs its child tasks in sequence, then interprets the final task's
 * string result as the name of a workflow to dispatch to dynamically.
 * Non-string / failed results are routed through handleError.
 */ class GSDynamicFunction extends GSFunction {
    _call(ctx, taskValue) {
        const self = this;
        return _async_to_generator(function*() {
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, `GSDynamicFunction. Executing tasks with ids: ${self.args.map((task)=>task.id)}`);
            let lastResult;
            for (const child of self.args){
                lastResult = yield child(ctx, taskValue);
                // Honor an exit request from any child immediately.
                if (ctx.exitWithStatus) {
                    ctx.outputs[self.id] = ctx.exitWithStatus;
                    return ctx.exitWithStatus;
                }
            }
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, 'this.id: %s, output: %s', self.id, lastResult.data);
            if (lastResult.success && typeof lastResult.data === 'string') {
                // Dispatch to the workflow named by the final task's result.
                ctx.outputs[self.id] = yield self.workflows[lastResult.data](ctx, taskValue);
            } else {
                return self.handleError(ctx, lastResult, taskValue);
            }
            return ctx.outputs[self.id];
        })();
    }
}
/**
 * Launches all child tasks concurrently and waits for every one of them.
 * Each child's result is read back from ctx.outputs; the function's own
 * status is always a 200 GSStatus wrapping the collected outputs.
 */ class GSParallelFunction extends GSFunction {
    _call(ctx, taskValue) {
        const self = this;
        return _async_to_generator(function*() {
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, `GSParallelFunction. Executing tasks with ids: ${self.args.map((task)=>task.id)}`);
            // Start every child before awaiting any of them.
            const promises = self.args.map((child)=>child(ctx, taskValue));
            yield Promise.all(promises);
            const outputs = [];
            const status = new GSStatus(true, 200, '', outputs);
            for (const child of self.args){
                outputs.push(ctx.outputs[child.id]);
            }
            ctx.outputs[self.id] = status;
            return status;
        })();
    }
}
/**
 * Switch/case task: evaluates a condition value (optionally via an inline
 * <% %> script compiled in the constructor) and executes the matching case
 * task, falling back to the `default` case, or a failed status when
 * nothing matches.
 */ class GSSwitchFunction extends GSFunction {
    _call(ctx, taskValue) {
        const self = this;
        return _async_to_generator(function*() {
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, 'inside switch executor: %o', self.args);
            // tasks incase of series, parallel and condition, cases should be converted to args
            let [conditionValue, cases] = self.args;
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, 'condition: %s', conditionValue);
            if (self.condition_script) {
                ctx.childLogger.setBindings({
                    workflow_name: self.workflow_name,
                    task_id: self.id
                });
                conditionValue = yield (0, _scriptRuntime.default)(ctx, self.condition_script, taskValue);
                ctx.childLogger.setBindings({
                    workflow_name: '',
                    task_id: ''
                });
            }
            // Prefer the exact case; otherwise fall back to `default` if present.
            const chosen = cases[conditionValue] ? cases[conditionValue] : cases.default;
            if (chosen) {
                yield chosen(ctx, taskValue);
                ctx.outputs[self.id] = ctx.outputs[chosen.id];
            } else {
                ctx.outputs[self.id] = new GSStatus(false, undefined, `case ${conditionValue} is missing and no default found in switch`);
            }
            return ctx.outputs[self.id];
        })();
    }
    constructor(yaml, workflows, nativeFunctions, _fn, args, isSubWorkflow, location){
        super(yaml, workflows, nativeFunctions, _fn, args, isSubWorkflow, undefined, location), _define_property(this, "condition_script", void 0);
        const [condition] = this.args;
        // Compile the condition only when it is an inline <% %> script string.
        if (typeof condition == 'string' && condition.match(/<(.*?)%/) && condition.includes('%>')) {
            this.condition_script = (0, _utils.compileScript)(condition, location);
        }
    }
}
/**
 * If/else task: evaluates a condition (optionally via an inline <% %>
 * script compiled in the constructor) and runs `this.task` when truthy,
 * otherwise `this.else_fn` when one was provided, otherwise returns a
 * failed status.
 */ class GSIFFunction extends GSFunction {
    _call(ctx, taskValue) {
        const self = this;
        return _async_to_generator(function*() {
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, 'inside GSIFFunction executor: %o', self.args);
            // tasks incase of series, parallel and condition, cases should be converted to args
            let [conditionValue] = self.args;
            ctx.childLogger.debug({
                workflow_name: self.workflow_name,
                task_id: self.id
            }, 'condition: %s', conditionValue);
            if (self.condition_script) {
                ctx.childLogger.setBindings({
                    workflow_name: self.workflow_name,
                    task_id: self.id
                });
                conditionValue = yield (0, _scriptRuntime.default)(ctx, self.condition_script, taskValue);
                ctx.childLogger.setBindings({
                    workflow_name: '',
                    task_id: ''
                });
            }
            // Truthiness of the (possibly script-evaluated) condition decides the branch.
            if (conditionValue) {
                ctx.outputs[self.id] = yield self.task(ctx, taskValue);
            } else if (self.else_fn) {
                ctx.outputs[self.id] = yield self.else_fn(ctx, taskValue);
            } else {
                ctx.outputs[self.id] = new GSStatus(false, undefined, `condition not matching and no else present`);
            }
            return ctx.outputs[self.id];
        })();
    }
    constructor(yaml, workflows, nativeFunctions, _fn, args, isSubWorkflow, location){
        super(yaml, workflows, nativeFunctions, _fn, args, isSubWorkflow, undefined, location), _define_property(this, "condition_script", void 0), _define_property(this, "task", void 0), _define_property(this, "else_fn", void 0);
        const [condition, thenTask, elseTask] = this.args;
        // Compile the condition only when it is an inline <% %> script string.
        if (typeof condition == 'string' && condition.match(/<(.*?)%/) && condition.includes('%>')) {
            this.condition_script = (0, _utils.compileScript)(condition, location);
        }
        this.task = thenTask;
        this.else_fn = elseTask;
    }
}
class GSEachParallelFunction extends GSFunction {
_call(ctx, taskValue) {
var _this = this;
return _async_to_generator(function*() {
ctx.childLogger.debug({
workflow_name: _this.workflow_name,
task_id: _this.id
}, `GSEachParallelFunction. Executing tasks with ids: ${_this.args.map((task)=>task.id)}`);
let [value, task] = _this.args;
ctx.childLogger.debug({
workflow_name: _this.workflow_name,
task_id: _this.id
}, 'value: %o', value);
if (_this.value_script) {
ctx.childLogger.setBindings({
workflow_name: _this.workflow_name,
task_id: _this.id
});
value = yield (0, _scriptRuntime.default)(ctx, _this.value_script, taskValue);
ctx.childLogger.setBindings({
workflow_name: '',
task_id: ''
});
}
let i = 0;
if (!Array.isArray(value)) {
ctx.outputs[_this.id] = new GSStatus(false, undefined, `GSEachParallel value is not an array`);
return ctx.outputs[_this.id];
}
const promises = [];
let outputs = [];
let status;
let failedTasksCount = 0;
for (const val of value){
promises.push(task(ctx, val));
}
outputs = yield Promise.all(promises);