/*
 * @getanthill/datastore — Event-Sourced Datastore runner
 * (compiled JavaScript output)
 */
;
/*
 * TypeScript-compiler-emitted CommonJS interop helpers (tslib-style).
 * The descriptor checks below match the canonical tsc emit — do not hand-edit.
 */
// Re-exports property `k` of module `m` onto `o` (optionally renamed to `k2`),
// preferring a live getter binding when property descriptors are usable.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches a CommonJS module as the `default` export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns from '…'` over a CommonJS module: copies own
// enumerable properties (except `default`) and sets the module as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// Emulates `import x from '…'`: wraps non-ESM modules as `{ default: mod }`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.replay = exports.replayHandler = exports.start = exports.buildHandler = exports.localEventHandler = exports.getProcessingStats = exports.shouldProcessData = exports.isDataProcessed = exports.waitForProcessing = exports.readyForProcessing = exports.mapReceivedData = exports.getDatastoreFromTrigger = exports.heartbeat = exports.stop = exports.destroy = exports.errorHandler = exports.signalHandler = exports.init = exports.log = exports.setServer = exports.getServer = void 0;
const commander_1 = require("commander");
const path_1 = __importDefault(require("path"));
const assert_1 = require("assert");
const express_1 = __importDefault(require("express"));
const omit_1 = __importDefault(require("lodash/omit"));
const telemetry = __importStar(require("@getanthill/telemetry"));
const utils = __importStar(require("./utils"));
const metrics = __importStar(require("../constants/metrics"));
// Module-level handle on the heartbeat HTTP server (set by heartbeat()/setServer()).
let server;
/**
 * Returns the currently registered heartbeat HTTP server.
 * @returns The server instance, or undefined/null when none is registered.
 */
function getServer() {
    return server;
}
exports.getServer = getServer;
/**
 * Registers the given instance as the module-wide heartbeat server.
 * @param _server - Server instance to register (or null to clear it).
 * @returns The newly registered server.
 */
function setServer(_server) {
    return (server = _server);
}
exports.setServer = setServer;
/**
 * Emits a log line through the services' telemetry logger (falling back to
 * the global telemetry logger) — but only when `options.verbose` is
 * explicitly `true`.
 *
 * @param services - Optional services bag; may carry `telemetry.logger`.
 * @param options - Runner options; logging is gated on `verbose === true`.
 * @param level - Logger method name ('debug' | 'info' | 'warn' | 'error').
 * @param message - Message to log.
 * @param context - Optional structured context object.
 */
function log(services, options, level, message, context) {
    const isVerbose = options?.verbose === true;
    if (!isVerbose) {
        return;
    }
    const activeLogger = services?.telemetry?.logger ?? telemetry.logger;
    /* @ts-ignore */
    activeLogger[level](message, context);
}
exports.log = log;
/**
 * Binds process-level signal and error listeners so the runner shuts down
 * gracefully. Each listener is registered with `process.once`, so it fires
 * at most one time. No-op when `options.skipProcessBinding === true`
 * (used by tests to avoid polluting the process).
 */
function init(services, stopHandler, options) {
    if (options.skipProcessBinding === true) {
        return;
    }
    for (const signal of ['SIGTERM', 'SIGINT']) {
        process.once(signal, signalHandler(services, stopHandler, options, signal));
    }
    for (const event of ['uncaughtException', 'unhandledRejection']) {
        process.once(event, errorHandler(services, stopHandler, options, event));
    }
}
exports.init = init;
/**
 * Builds a zero-argument process-signal listener that delegates to the
 * destroy routine with the captured context.
 * @param destroyHandler - Injectable for tests; defaults to `destroy`.
 */
function signalHandler(services, stopHandler, options, signal, destroyHandler = destroy) {
    return () => destroyHandler(services, stopHandler, options, signal);
}
exports.signalHandler = signalHandler;
/**
 * Builds a one-argument fatal-error listener (uncaughtException /
 * unhandledRejection) that forwards the error to the destroy routine.
 * @param destroyHandler - Injectable for tests; defaults to `destroy`.
 */
function errorHandler(services, stopHandler, options, signal, destroyHandler = destroy) {
    return (err) => destroyHandler(services, stopHandler, options, signal, err);
}
exports.errorHandler = errorHandler;
/**
 * Cleanup and stop the process properly, then exit the process.
 *
 * Records process-status metrics, logs the failure (if any), delegates to
 * `stop`, and finally schedules `process.exit` once `exitTimeout`
 * milliseconds have elapsed.
 *
 * @param signal - Signal to stop the process with
 * @param err - Error that caused the destruction of the process
 */
function destroy(services, stopHandler, options, signal, err) {
    metrics.incrementProcessStatus({ state: err ? 'crashing' : 'stopping' });
    /* istanbul ignore next */
    const exitTimeout = options.exitTimeout || services?.config?.exitTimeout;
    // Schedules the actual process.exit after the grace period.
    // NOTE(review): the exit code reflects only the ORIGINAL `err` — a
    // failure inside stop() alone still exits 0; confirm this is intended.
    const scheduleExit = () => new Promise((resolve) => {
        const timeout = setTimeout(() => {
            timeout.unref();
            process.exit(err ? 1 : 0);
            resolve(timeout);
        }, exitTimeout);
    });
    if (err) {
        log(services, { verbose: true }, 'error', '[runner] Application error', {
            err,
            signal,
        });
    }
    log(services, options, 'info', '[runner] Stopping application', {
        err,
        signal,
        exit_timeout: exitTimeout,
    });
    return stop(services, options, stopHandler)
        .then(() => {
            metrics.incrementProcessStatus({ state: err ? 'crashed' : 'stopped' });
            log(services, options, 'info', '[runner] Application stopped', err);
            return scheduleExit();
        })
        .catch((stopErr) => {
            console.error('[runner] Application crashed', {
                err: stopErr,
                firstErr: err,
            });
            return scheduleExit();
        });
}
exports.destroy = destroy;
/**
 * Stops the runner: invokes the handler-provided stop hook, closes the
 * heartbeat HTTP server (actually waiting for it to finish closing), and
 * closes every datastore stream.
 *
 * @param services - Services bag holding the datastores map (may be undefined).
 * @param options - Runner options (controls verbose logging).
 * @param stopHandler - Optional async stop hook supplied by the handler factory.
 */
async function stop(services, options, stopHandler) {
    log(services, options, 'debug', '[runner] Stopping');
    if (typeof stopHandler === 'function') {
        log(services, options, 'debug', '[runner] Calling handler stop...');
        await stopHandler();
    }
    if (server) {
        log(services, options, 'debug', '[runner] Closing the heartbeat...');
        // FIX: http.Server#close is callback-based and returns the server
        // synchronously, so `await server.close()` resolved immediately and
        // shutdown raced the actual close. Wrap it in a Promise so we really
        // wait; close errors (e.g. server already closed) are ignored, which
        // matches the previous best-effort behavior.
        await new Promise((resolve) => server.close(() => resolve()));
        server = null;
    }
    if (!services) {
        log(services, options, 'info', '[runner] Stopped');
        return;
    }
    // Close all Datastores streaming
    for (const ds in services.datastores) {
        services.datastores[ds].streams.closeAll();
    }
    log(services, options, 'info', '[runner] Stopped');
}
exports.stop = stop;
/**
 * Starts a minimal Express app exposing an unauthenticated liveness probe at
 * GET /heartbeat, and stores the created HTTP server in the module-level
 * `server` handle so `stop` can close it later.
 *
 * @param port - TCP port to bind; defaults to process.env.PORT.
 * @returns `{ app, server, port }`.
 */
async function heartbeat(port = process.env.PORT) {
    const app = (0, express_1.default)(); // You can also use Express
    app.disable('x-powered-by');
    /**
     * Heartbeat route (Unauthenticated)
     */
    app.get('/heartbeat', (_req, res) => {
        res.json({ is_alive: true });
    });
    // Listen
    // NOTE(review): app.listen returns an http.Server, not a Promise — the
    // await here is effectively a no-op; confirm callers don't rely on the
    // server being fully listening at this point.
    server = await app.listen(port);
    return { app, server, port };
}
exports.heartbeat = heartbeat;
/**
 * Validates a trigger's shape and resolves its datastore client.
 * Applies the request timeout and checks the datastore is reachable before
 * returning it.
 *
 * @param services - Services bag holding the datastores map.
 * @param trigger - Trigger configuration (`model`, `source`, `datastore`).
 * @param options - Runner options (`timeout` in milliseconds).
 * @returns The resolved, reachable datastore client.
 */
async function getDatastoreFromTrigger(services, trigger, options) {
    (0, assert_1.ok)(!!trigger.model, 'Model is not defined');
    const isKnownSource = trigger.source === 'entities' || trigger.source === 'events';
    (0, assert_1.ok)(isKnownSource, 'Source must be either `entities` or `events`');
    const datastore = services.datastores[trigger.datastore];
    // Setting the Timeout explicitly
    datastore.core.setTimeout(options.timeout);
    // Fail fast if the datastore is unreachable.
    await datastore.heartbeat();
    return datastore;
}
exports.getDatastoreFromTrigger = getDatastoreFromTrigger;
/**
 * Normalizes an incoming payload for the handler: parses JSON strings when
 * `raw === false`, stringifies non-string values when `raw === true`, and
 * passes everything else through untouched.
 *
 * @param input - Raw payload received from the stream.
 * @param raw - Trigger's raw flag (strict booleans only trigger conversion).
 * @returns The normalized payload.
 */
function mapReceivedData(input, raw) {
    const isString = typeof input === 'string';
    if (raw === false && isString) {
        return JSON.parse(input);
    }
    if (raw === true && !isString) {
        return JSON.stringify(input);
    }
    return input;
}
exports.mapReceivedData = mapReceivedData;
/**
 * True when starting one more event would stay within the adaptive
 * parallelism budget. (`maxParallelEvents` may be fractional, so the
 * `+ 1 <=` form is kept deliberately.)
 */
function readyForProcessing(stats) {
    const wouldBeProcessing = stats.processing + 1;
    return wouldBeProcessing <= stats.maxParallelEvents;
}
exports.readyForProcessing = readyForProcessing;
/**
 * Cooperative back-pressure: polls until this event's queue position is
 * reached, capacity frees up, or the maximum wait elapses. Updates the shared
 * `waiting`/`waited` counters at most once per call.
 *
 * @param stats - Shared mutable statistics accumulator.
 * @param queueId - Queue position assigned to the current event.
 * @param waitTimeInMilliseconds - Polling interval.
 * @param maxWaitInMilliseconds - Upper bound on the total wait (default 1h).
 */
async function waitForProcessing(stats, queueId, waitTimeInMilliseconds = 10, maxWaitInMilliseconds = 3600000) {
    const startedAt = Date.now();
    let elapsedInMilliseconds = 0;
    let hasWaited = false;
    while (elapsedInMilliseconds < maxWaitInMilliseconds &&
        stats.queuing <= queueId &&
        readyForProcessing(stats) !== true) {
        if (hasWaited === false) {
            stats.waiting += 1;
            hasWaited = true;
        }
        elapsedInMilliseconds = Date.now() - startedAt;
        await new Promise((resolve) => setTimeout(resolve, waitTimeInMilliseconds));
    }
    if (hasWaited === true) {
        stats.waited += 1;
        stats.waiting -= 1;
    }
}
exports.waitForProcessing = waitForProcessing;
/**
 * Records the processing outcome on the correlated entity by moving its
 * processing field to `processing.states[processingStateIndex]`
 * (index 2 = success, index 3 = failure in localEventHandler).
 *
 * @returns true when an update was issued; false when processing tracking is
 * disabled, the state index is unknown, or the input lacks required fields.
 */
async function isDataProcessed(services, datastore, model, input, processingStateIndex, processing) {
    const targetState = processing?.states?.[processingStateIndex];
    if (!targetState) {
        return false;
    }
    const { correlation_field: correlationField, field: processingField } = processing;
    const hasRequiredFields = input?.[correlationField] !== undefined &&
        input?.[processingField] !== undefined;
    if (!hasRequiredFields) {
        return false;
    }
    await services.datastores[datastore].update(model, input[correlationField], {
        [processingField]: targetState,
    });
    return true;
}
exports.isDataProcessed = isDataProcessed;
/**
 * Optimistic-lock gate deciding whether this worker may handle the event.
 *
 * When processing tracking is enabled, the entity must currently be in the
 * initial state (`states[0]`); the gate then tries a version-guarded update
 * to the in-progress state (`states[1]`). A version conflict (update throws)
 * means another worker won the race, so the event is skipped.
 *
 * @returns true when the event should be processed, false otherwise.
 */
async function shouldProcessData(services, datastore, model, input, processing) {
    if (!processing) {
        return true;
    }
    const { correlation_field: correlationField, field: processingField, states: processingStates } = processing;
    const isCandidate = input?.[correlationField] !== undefined &&
        input?.[processingField] === processingStates[0];
    if (!isCandidate) {
        return false;
    }
    try {
        // NOTE(review): assumes `input.version` is a number — undefined would
        // yield NaN here; confirm upstream guarantees it.
        await services.datastores[datastore].update(model, input[correlationField], {
            [processingField]: processingStates[1],
        }, {
            version: input.version + 1,
        });
        return true;
    }
    catch (err) {
        // Version conflict or update failure: another worker owns this event.
        return false;
    }
}
exports.shouldProcessData = shouldProcessData;
/**
 * Builds the mutable processing-statistics accumulator shared by the event
 * handlers, applying any caller-provided overrides on top of the defaults.
 *
 * @param from - Optional partial overrides.
 * @returns A fresh statistics object.
 */
function getProcessingStats(from) {
    const defaults = {
        processingTimeWindowInMilliseconds: Infinity,
        progress: 1000,
        queuing: 0,
        waiting: 0,
        waited: 0,
        processing: 0,
        processed: 0,
        totalWaitingDurationInMilliseconds: 0,
        averageWaitingDurationInMilliseconds: 0,
        totalProcessingDurationInMilliseconds: 0,
        averageProcessingDurationInMilliseconds: 0,
        maxParallelEvents: 10, // first 10 requests are used for sampling
    };
    return Object.assign(defaults, from);
}
exports.getProcessingStats = getProcessingStats;
/**
 * Builds the per-trigger event handler invoked for every streamed message.
 *
 * Responsibilities, in order:
 *  1. local back-pressure (queue id + adaptive parallelism window),
 *  2. payload normalization (mapReceivedData),
 *  3. optimistic-lock gating (shouldProcessData) so only one worker handles
 *     a given entity,
 *  4. invoking the user handler and recording success/failure state
 *     (isDataProcessed with state index 2/3),
 *  5. ack/nack semantics and structured error reporting,
 *  6. rolling statistics feeding the adaptive parallelism budget.
 *
 * @param services - Services bag (datastores + telemetry).
 * @param handler - User handler `(data, metadata) => Promise`.
 * @param handlerId - `path#factory` identifier of the handler.
 * @param datastore - Trigger's datastore key.
 * @param model - Trigger's model name.
 * @param source - 'entities' or 'events'.
 * @param raw - Trigger's raw (no JSON parsing) flag.
 * @param stats - Shared mutable statistics accumulator.
 * @param processing - Optional processing-state tracking configuration.
 * @returns Async `(input, route, headers, opts) => Promise` event handler.
 */
function localEventHandler(services, handler, handlerId, datastore, model, source, raw, stats = getProcessingStats(), processing) {
    const [handlerPath] = handlerId.split('#');
    const metadata = {
        handlerId,
        path: handlerPath,
        datastore,
        model,
        source,
        raw,
    };
    return async (input, route, headers, opts) => {
        const ticQueuing = Date.now();
        let ticProcessing = Date.now();
        let data;
        try {
            metrics.incrementProcessing({
                state: 'request',
                model,
            });
            // Handle queuing locally
            if (typeof stats.processingTimeWindowInMilliseconds === 'number' &&
                stats.processingTimeWindowInMilliseconds !== Infinity) {
                // 1ms pause so concurrent arrivals obtain distinct queue ids.
                await new Promise((resolve) => setTimeout(resolve, 1));
                stats.queuing += 1;
                const queueId = stats.queuing;
                await waitForProcessing(stats, queueId);
                ticProcessing = Date.now();
                stats.queuing -= 1;
            }
            stats.processing += 1;
            data = mapReceivedData(input, raw);
            // Redeliveries (opts.delivery > 0) bypass the optimistic-lock gate.
            const _shouldProcessData = (opts?.delivery ?? 0) > 0 ||
                (await shouldProcessData(services, datastore, model, data, processing));
            let res;
            if (_shouldProcessData === true) {
                res = await handler(data, metadata);
                // State index 2 marks successful processing.
                await isDataProcessed(services, datastore, model, data, 2, processing);
            }
            typeof opts?.ack === 'function' && (await opts.ack());
            metrics.incrementProcessing({
                state: 'success',
                model,
            });
            return res;
        }
        catch (err) {
            metrics.incrementProcessing({
                state: 'error',
                model,
            });
            // A redelivered event that fails again is acknowledged and dropped.
            if (typeof opts?.delivery === 'number' && opts?.delivery > 0) {
                services?.telemetry?.logger.warn('[runner] Event discarded', {
                    input,
                    route,
                    headers: (0, omit_1.default)(headers, 'authorization'),
                    deliver: opts?.delivery,
                });
                // State index 3 marks failed processing.
                await isDataProcessed(services, datastore, model, data, 3, processing);
                typeof opts?.ack === 'function' && (await opts.ack());
                return;
            }
            services?.telemetry?.logger.error('Event handler error', {
                // err,
                message: err.message,
                response: err?.response?.data,
                details: err?.response?.data?.details?.[0],
                msg: input,
                ...metadata,
            });
            // Prefer nack (requeue) when the transport supports it; otherwise
            // mark the entity as failed.
            const isRetriable = typeof opts?.nack === 'function';
            if (isRetriable === true) {
                await opts.nack();
            }
            else {
                await isDataProcessed(services, datastore, model, data, 3, processing);
            }
        }
        finally {
            const tac = Date.now();
            stats.processing -= 1;
            stats.processed += 1;
            stats.totalWaitingDurationInMilliseconds += ticProcessing - ticQueuing;
            stats.averageWaitingDurationInMilliseconds =
                stats.totalWaitingDurationInMilliseconds / stats.processed;
            stats.totalProcessingDurationInMilliseconds += tac - ticProcessing;
            stats.averageProcessingDurationInMilliseconds =
                stats.totalProcessingDurationInMilliseconds / stats.processed;
            // Adaptive budget: how many events fit in the configured time
            // window given the observed average duration (minimum 1).
            stats.maxParallelEvents = Math.max(1, stats.processingTimeWindowInMilliseconds /
                stats.averageProcessingDurationInMilliseconds);
            stats.processed % stats.progress === 0 &&
                services?.telemetry?.logger?.info('[runner] Processing statistics', {
                    queuing: stats.queuing,
                    waiting: stats.waiting,
                    processing: stats.processing,
                    waited: stats.waited,
                    processed: stats.processed,
                    average_waiting_duration_in_ms: stats.averageWaitingDurationInMilliseconds,
                    average_processing_duration_in_ms: stats.averageProcessingDurationInMilliseconds,
                    max_supported_parallel_events: stats.maxParallelEvents,
                });
        }
    };
}
exports.localEventHandler = localEventHandler;
/**
 * Loads a handler module from its `ds://`-style identifier, boots its
 * services, and produces the normalized trigger list.
 *
 * Identifier format: `<relative/path/to/module>#<factoryName>` — the URL hash
 * selects the exported factory (defaults to `main`).
 *
 * @param handlerId - Handler identifier (path + optional `#factory` hash).
 * @param cmd - CLI options; these override per-trigger configuration.
 * @param handlersForTest - Optional pre-loaded handlers map (tests only).
 * @returns `{ services, triggers, stop, handler }` ready to be wired.
 */
async function buildHandler(handlerId, cmd, handlersForTest) {
    const handlerUrl = new URL(handlerId, 'ds://handlers');
    const handlerPath = path_1.default.resolve(cmd.cwd, handlerUrl.pathname.slice(1));
    const handlers = handlersForTest ?? require(handlerPath);
    const factoryId = handlerUrl.hash.slice(1) || 'main';
    const factory = handlers[factoryId];
    (0, assert_1.ok)(typeof factory === 'function', 'A valid handler factory must be requested');
    const config = await factory(handlerUrl);
    (0, assert_1.ok)(typeof config === 'object', 'Handler configuration must be an object');
    (0, assert_1.ok)(typeof config.start === 'function', 'Handler configuration must have a `start` method');
    const services = await config.start();
    // Datastore Maps are normalized to plain objects for uniform access.
    if (services.datastores instanceof Map) {
        services.datastores = Object.fromEntries(services.datastores.entries());
    }
    // Legacy single-trigger configurations are wrapped into a list.
    const configuredTriggers = config.triggers ?? [
        {
            datastore: config.datastore,
            model: config.model,
            source: config.source,
            raw: config.raw,
            query: config.query,
            headers: config.headers,
            queryAsJSONSchema: false,
            processing: config.processing,
        },
    ];
    // Precedence: CLI flags > trigger-level settings > defaults.
    const triggers = configuredTriggers.map((trigger) => ({
        datastore: cmd.datastore ?? trigger.datastore,
        model: cmd.model ?? trigger.model,
        source: cmd.source ?? trigger.source,
        raw: cmd.raw ?? trigger.raw ?? false,
        query: cmd.query ?? trigger.query ?? {},
        headers: cmd.headers ?? trigger.headers ?? {},
        queryAsJSONSchema: cmd.queryAsJSONSchema ?? trigger.queryAsJSONSchema ?? false,
        processing: trigger.processing,
    }));
    return {
        services,
        triggers,
        stop: config.stop,
        handler: config.handler,
    };
}
exports.buildHandler = buildHandler;
/**
 * Builds the `start` CLI action: for each handler id, loads the handler,
 * wires a localEventHandler onto every trigger's stream, binds process
 * signal handlers, and begins listening. Optionally exposes the heartbeat
 * endpoint. Any initialization error tears the process down via `destroy`.
 *
 * @returns Async commander action `(handlerIds, options, command, handlersForTest)`.
 */
function start() {
    return async (handlerIds, options, command, handlersForTest) => {
        let services = undefined;
        try {
            metrics.incrementProcessStatus({ state: 'starting' });
            log(undefined, options, 'info', '[runner] Starting streaming', {
                handler_ids: handlerIds,
                options,
            });
            for (const handlerId of handlerIds) {
                const { services: _services, triggers, stop: stopHandler, handler, } = await buildHandler(handlerId, options, handlersForTest);
                services = _services;
                for (const trigger of triggers) {
                    const datastore = await getDatastoreFromTrigger(services, trigger, options);
                    const query = trigger.query;
                    // When processing tracking is on, only fetch entities still
                    // in the initial state.
                    if (trigger.processing?.field) {
                        query[trigger.processing?.field] = trigger.processing?.states[0];
                    }
                    const streamId = datastore.streams.getStreamId(trigger.model, trigger.source, query);
                    datastore.streams.on(streamId, localEventHandler(services, handler, handlerId, trigger.datastore, trigger.model, trigger.source, trigger.raw, getProcessingStats({
                        processingTimeWindowInMilliseconds: options.processingTimeWindow,
                        progress: options.progress,
                        maxParallelEvents: options.initParallelEvents,
                    }), trigger.processing));
                    init(services, stopHandler, options);
                    log(services, options, 'info', '[runner] Starting streaming', {
                        ...trigger,
                        opts: {
                            reconnectionMaxAttempts: options.maxReconnectionAttempts,
                            reconnectionInterval: options.reconnectionInterval,
                            connectionMaxLifeSpanInSeconds: options.connectionMaxLifeSpanInSeconds,
                        },
                    });
                    await datastore.streams.listen(trigger.model, trigger.source, trigger.query, {
                        reconnectionMaxAttempts: options.maxReconnectionAttempts,
                        reconnectionInterval: options.reconnectionInterval,
                        connectionMaxLifeSpanInSeconds: options.connectionMaxLifeSpanInSeconds,
                        queueName: handlerId,
                        queryAsJSONSchema: trigger.queryAsJSONSchema,
                    });
                    metrics.incrementProcessStatus({ state: 'started' });
                }
            }
            if (options.heartbeat === true) {
                /* istanbul ignore next */
                // Best-effort: heartbeat failure is logged but never fatal.
                heartbeat().catch((err) => {
                    /* istanbul ignore next */
                    log(services, options, 'warn', 'Failed to expose heartbeat', {
                        err,
                    });
                });
            }
        }
        catch (err) {
            log(services, { verbose: true }, 'error', 'Initialization error', err);
            await destroy(services, undefined, options, 'uncaughtException', err);
        }
    };
}
exports.start = start;
/**
 * Wraps a handler so that only the first event is processed; any later
 * invocation calls the stop handler instead. Used by `--debug` replays to
 * execute the handler exactly once.
 *
 * @param stopHandler - Invoked (and its result returned) after the first run.
 * @param handler - The real handler `(entity, metadata) => Promise`.
 * @param stats - Shared `{ count }` accumulator tracking executions.
 */
function debugHandler(stopHandler, handler, stats) {
    return async (entity, metadata) => {
        const alreadyRan = stats.count > 0;
        if (alreadyRan) {
            return stopHandler();
        }
        stats.count += 1;
        return handler(entity, metadata);
    };
}
/**
 * Replays stored entities/events through a handler by paginating over the
 * configured triggers (utils.walkMulti) instead of live streaming.
 * In `--debug` mode the page size is forced to 1 and the handler runs only
 * once (via debugHandler). Calls the handler's stop hook when done.
 *
 * @param handlerId - `path#factory` identifier of the handler.
 * @param options - CLI options (pageSize, sleep, debug, mutating, progress).
 * @param services - Booted services bag (datastores map).
 * @param triggers - Normalized triggers from buildHandler.
 * @param stopHandler - Handler-provided async stop hook.
 * @param handler - User handler `(data, metadata) => Promise`.
 */
async function replayHandler(handlerId, options, services, triggers, stopHandler, handler) {
    init(services, stopHandler, options);
    const pageSize = options.debug === true ? 1 : options.pageSize;
    log(services, options, 'info', '[runner] Starting the replay', {
        handler_id: handlerId,
        page_size: pageSize,
        triggers,
        opts: {
            sleep: options.sleep,
        },
    });
    log(services, options, 'debug', '[runner] Replay informations', {
        handler_id: handlerId,
        triggers,
    });
    const stats = { count: 0 };
    const processingStats = getProcessingStats({
        progress: options.progress,
    });
    await utils.walkMulti(new Map(Object.keys(services.datastores).map((k) => [k, services.datastores[k]])), triggers.map((trigger) => {
        const query = trigger.query;
        // When processing tracking is on, only replay entities still in the
        // initial state.
        if (trigger.processing?.field) {
            query[trigger.processing?.field] = trigger.processing?.states[0];
        }
        return {
            datastore: trigger.datastore,
            model: trigger.model,
            query,
            source: trigger.source,
            headers: trigger.headers,
        };
    }), pageSize, (input, query, queryIteration) => {
        // Each page item goes through the same pipeline as live streaming.
        return localEventHandler(services, options.debug !== true
            ? handler
            : debugHandler(stopHandler, handler, stats), handlerId, query.datastore, query.model, query.source, query.raw, processingStats, triggers[queryIteration?.query_index]?.processing)(input);
    }, {
        sleep: options.debug === true ? 1 : options.sleep,
        is_mutating: options.mutating,
    });
    await stopHandler();
}
exports.replayHandler = replayHandler;
/**
 * Builds the `replay` CLI action: loads each handler, replays its triggers
 * via replayHandler, then tears the process down with `destroy` (SIGTERM on
 * success, uncaughtException on error). Optionally exposes the heartbeat
 * endpoint while the replay runs.
 *
 * @returns Async commander action `(handlerIds, options, command, handlersForTest)`.
 */
function replay() {
    return async (handlerIds, options, command, handlersForTest) => {
        let services = undefined;
        try {
            metrics.incrementProcessStatus({ state: 'starting' });
            log(undefined, options, 'info', '[runner] Starting replay', {
                handler_ids: handlerIds,
                options,
            });
            /* istanbul ignore next */
            if (options.heartbeat === true) {
                // Best-effort: heartbeat failure is logged but never fatal.
                heartbeat().catch((err) => {
                    /* istanbul ignore next */
                    log(undefined, options, 'warn', 'Failed to expose heartbeat', {
                        err,
                    });
                });
            }
            for (const handlerId of handlerIds) {
                const { services: _services, triggers, stop: stopHandler, handler, } = await buildHandler(handlerId, options, handlersForTest);
                services = _services;
                await replayHandler(handlerId, options, services, triggers, stopHandler, handler);
            }
            await destroy(services, undefined, options, 'SIGTERM');
        }
        catch (err) {
            log(services, { verbose: true }, 'error', 'Replay error', {
                err,
                response: err?.response?.data,
            });
            await destroy(services, undefined, options, 'uncaughtException', err);
        }
    };
}
exports.replay = replay;
/* istanbul ignore next */
/**
 * Registers the CLI options shared by both the `start` and `replay` commands.
 * Note: `--exit-timeout` defaults to NaN on purpose — `destroy` uses
 * `options.exitTimeout || services?.config?.exitTimeout`, so NaN falls back
 * to the service configuration.
 *
 * @param command - Commander command to augment.
 * @returns The same command, for chaining.
 */
function commonOptions(command) {
    command
        .option('--datastore <datastore>', 'Datastore to listen on')
        .option('--model <model>', 'Model to listen on')
        .option('--source <source>', 'Source of messages')
        .option('--raw', 'If present, no JSON parsing is applied to messages', false)
        .option('--timeout <timeout>', 'Requests timeout in milliseconds', (v) => parseInt(v, 10), 30000)
        .option('--exit-timeout <timeout>', 'Graceful exit timeout', (v) => parseInt(v, 10), NaN)
        .option('--verbose', 'Display runner logs', false)
        .option('--safe', 'Executes all handlers even with errors', false)
        .option('--heartbeat', 'If present, an heartbeat route is exposed', false)
        .option('--cwd <cwd>', 'Current working directory', process.env.DATASTORE_RUNNER_CWD || '');
    return command;
}
/* istanbul ignore next */
/**
 * Builds the runner's commander program with its two subcommands:
 * `start <handlerIds...>` (live streaming) and `replay <handlerIds...>`
 * (paginated replay of stored data).
 *
 * @param name - Program name (defaults to 'runner').
 * @returns The configured commander program.
 */
function register(name = 'runner') {
    const program = new commander_1.Command(name);
    const startCommand = program.command('start <handlerIds...>');
    commonOptions(startCommand);
    startCommand
        .option('--query <query>', 'Forced query', (v) => !!v && JSON.parse(v), null)
        .option('-ptw, --processing-time-window <processing_time_window>', 'Event processing time window', (v) => parseInt(v, 10), Infinity)
        .option('-p, --progress <progress>', 'Progress feedback interval', (v) => parseInt(v, 10), 1000)
        .option('-ipr, --init-parallel-events <init_parallel_events>', 'Initial maximal parallel events for sampling', (v) => parseInt(v, 10), 10)
        .option('--max-reconnection-attempts <max_reconnection_attempts>', 'Max reconnection attempts in case of error or unexpected closing', (v) => parseInt(v, 10), 1000)
        .option('--reconnection-interval <reconnection_interval>', 'Interval in milliseconds between two reconnection attempts', (v) => parseInt(v, 10), 100)
        .option('--connection-max-life-span-in-seconds <connection_max_life_span_in_seconds>', 'Max life span in seconds of a Datastore HTTP streaming connection', (v) => parseInt(v, 10), 3600)
        .description('Start the queries handlers')
        .action(start());
    const replayCommand = program.command('replay <handlerIds...>');
    commonOptions(replayCommand);
    replayCommand
        .option('--date-field <dateField>', 'Date field', 'created_at')
        .option('--page-size <pageSize>', 'Page size', (v) => parseInt(v, 10), 100)
        .option('-s, --sleep <sleep>', 'Sleep period between each batch', (v) => parseInt(v, 10), 0)
        .option('--query <query>', 'Forced query', (v) => !!v && JSON.parse(v), null)
        .option('--mutating', 'Activates replay mode with possible mutations', false)
        .option('-p, --progress <progress>', 'Progress feedback interval', (v) => parseInt(v, 10), 1000)
        .option('--debug', 'Execute the handler only once', false)
        .description('Replay events in order')
        .action(replay());
    return program;
}
exports.default = register;
//# sourceMappingURL=runner.js.map