/**
 * @getanthill/datastore — Event-Sourced Datastore (compiled change-stream controllers)
 */
;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeJSON = writeJSON;
exports.serverSentEventKeepAlive = serverSentEventKeepAlive;
exports.initChangeStream = initChangeStream;
exports.serverSentEvents = serverSentEvents;
exports.stream = stream;
const mapKeys_1 = __importDefault(require("lodash/mapKeys"));
const isObject_1 = __importDefault(require("lodash/isObject"));
const utils_1 = require("../utils");
/**
* @credits https://stackoverflow.com/questions/56699849/how-do-i-know-when-a-stream-node-js-mongodb-is-ready-for-changes
*
* @param changeStream
* @returns
*/
async function streamReady(changeStream, timeout, step = 0) {
return new Promise((ok) => {
const i = setInterval(() => {
if (changeStream.cursor.id) {
clearInterval(i);
return ok();
}
/* istanbul ignore next */
step += 1;
/* istanbul ignore next */
if (step > timeout) {
clearInterval(i);
ok();
}
}, 1);
i.unref();
});
}
/**
 * Shapes a change-stream event for the HTTP response.
 *
 * - `output: raw` header: the full change event is returned untouched.
 * - `model === 'all'`: wraps the document with the originating model name
 *   (derived from the collection name, stripping the `_events` suffix).
 * - otherwise: only the `fullDocument` payload is returned.
 *
 * @param req - Express request (header + params drive the shape)
 * @param _res - unused, kept for a uniform handler signature
 * @param data - raw change-stream event
 * @returns the payload to serialize for the client
 */
function format(req, _res, data) {
    const wantsRaw = req.header('output') === 'raw';
    if (wantsRaw) {
        return data;
    }
    if (req.params.model !== 'all') {
        return data.fullDocument;
    }
    return {
        model: data.ns.coll.replace('_events', ''),
        entity: data.fullDocument,
    };
}
/**
 * Serializes `data` onto the response when it is an object (lodash
 * `isObject` semantics); non-object payloads are silently skipped.
 *
 * @param _req - unused, kept for a uniform handler signature
 * @param res - writable HTTP response
 * @param data - candidate payload
 */
function writeJSON(_req, res, data) {
    if ((0, isObject_1.default)(data)) {
        res.write(JSON.stringify(data));
    }
}
/**
 * Extracts the aggregation pipeline for the change stream from the request:
 * a JSON-encoded `?pipeline=` query parameter wins, then an array request
 * body, otherwise an empty pipeline.
 *
 * @param req - Express request
 * @returns the aggregation pipeline stages
 * @throws {SyntaxError} when `?pipeline=` is not valid JSON
 */
function getPipelineFromReq(req) {
    if ('pipeline' in req.query) {
        const raw = req.query == null ? undefined : req.query.pipeline;
        return JSON.parse(raw);
    }
    return Array.isArray(req.body) ? req.body : [];
}
/**
 * Builds a MongoDB change stream for the requested model/source.
 *
 * - `model === 'all'`: watches the whole database, restricted to the
 *   collections of the public models (optionally filtered by the
 *   `only-models` header).
 * - otherwise: watches the model's events or states collection.
 *
 * A `?q=` filter is translated into a leading `$match` on `fullDocument`,
 * prepended to the pipeline coming from `?pipeline=` or the request body.
 *
 * @param services - injected app services (models, telemetry, mongodb)
 * @param req - Express request carrying `params.model`, `params.source`,
 *              query filters and optional headers
 * @returns the opened change stream (with a default error logger attached)
 */
async function getChangeStream(services, req) {
    const modelName = req.params.model;
    const source = req.params.source;
    services.telemetry.logger.info('[api/stream] Creating a change stream');
    let pipeline = getPipelineFromReq(req);
    if (modelName !== 'all' && !!req.query.q) {
        // The raw pipeline parameter has already been consumed above; drop it
        // so it does not leak into the find-query mapping below.
        delete req.query.pipeline;
        const Model = services.models.getModel(req.params.model);
        // Map the query parameters to request services.MongoDb accordingly
        const { query: mappedQuery } = (0, utils_1.mapFindQuery)(Model, req.query);
        if (mappedQuery && Object.keys(mappedQuery).length > 0) {
            // Change events nest the document under `fullDocument`.
            pipeline.unshift({
                $match: (0, mapKeys_1.default)(mappedQuery, (_v, k) => `fullDocument.${k}`),
            });
        }
    }
    // Default watch target: the whole database (used by the 'all' branch).
    let changeStreamSource = services.models
        .getModel('internal_models')
        .db(services.mongodb);
    /* istanbul ignore else*/
    if (modelName === 'all') {
        let watchedModels = Array.from(services.models.MODELS.keys()).filter((name) => !services.models.isInternalModel(name));
        const onlyModelsHeader = req.header('only-models');
        if (onlyModelsHeader) {
            const allowList = onlyModelsHeader.split(',');
            watchedModels = watchedModels.filter((name) => allowList.includes(name));
        }
        const suffix = source === 'events' ? '_events' : '';
        pipeline = [
            {
                $match: {
                    'ns.coll': { $in: watchedModels.map((name) => name + suffix) },
                },
            },
            ...pipeline,
        ];
    }
    else if (modelName) {
        const model = services.models.getModel(modelName);
        const db = model.db(services.mongodb);
        changeStreamSource =
            source === 'events'
                ? model.getEventsCollection(db)
                : model.getStatesCollection(db);
    }
    services.telemetry.logger.info('[api/stream] Streaming', {
        model: modelName,
        source,
        pipeline,
    });
    const changeStream = await changeStreamSource.watch(pipeline);
    changeStream.on('error', (err) => {
        services.telemetry.logger.error('[api/stream] Stream error', { err });
    });
    return changeStream;
}
/**
 * Closes the given change stream, logging first so a hang inside `close()`
 * is still visible in the logs.
 *
 * @param services - provides telemetry.logger
 * @param changeStream - the stream to close
 */
async function closeChangeStream(services, changeStream) {
    services.telemetry.logger.info('[api/stream] Closing the change stream');
    await changeStream.close();
}
/**
 * Tears down the change stream first, then finishes the HTTP response.
 *
 * @param services - injected app services
 * @param _req - unused, kept for a uniform handler signature
 * @param res - HTTP response to end
 * @param changeStream - the stream backing the response
 */
async function close(services, _req, res, changeStream) {
    await closeChangeStream(services, changeStream);
    res.end();
}
/**
 * Emits an SSE comment frame (':') to keep proxies from timing out the
 * connection, then re-schedules itself using the configured keep-alive
 * interval. The timer is unref'd so it never keeps the process alive.
 *
 * FIX: the timer chain previously kept firing after the client disconnected,
 * writing ':' frames into an already-closed response; the chain now stops as
 * soon as the response has ended or been destroyed.
 *
 * @param services - provides config.features.api.sseKeepAliveTimeout (ms)
 * @param res - HTTP response backing the SSE connection
 */
function serverSentEventKeepAlive(services, res) {
    if (res.destroyed || res.writableEnded) {
        return;
    }
    res.write(':\n\n');
    const timeout = setTimeout(serverSentEventKeepAlive, services.config.features.api.sseKeepAliveTimeout, services, res);
    timeout.unref();
}
/**
 * Creates a change stream via `getChangeStream`, retrying on failure (with
 * the configured reconnect delay) until `maxWaitInMilliseconds` has elapsed.
 * Emits 'initializing' / 'created' / 'failed' SSE metrics along the way.
 *
 * FIX: creation failures were previously swallowed silently; they are now
 * logged so repeated reconnect attempts are observable. Also corrected the
 * garbled failure messages ("Failed (to) initialized" -> "Failed to
 * initialize change stream").
 *
 * @param services - injected app services (metrics, telemetry, config)
 * @param req - Express request (model/source used for metric labels)
 * @param onChange - listener attached to the stream's 'change' event
 * @param maxWaitInMilliseconds - total retry budget
 * @returns the created change stream
 * @throws {Error} when no stream could be created within the budget
 */
async function initChangeStream(services, req, onChange, maxWaitInMilliseconds) {
    services.metrics.incrementApiStreamSSE({
        state: 'initializing',
        model: req.params.model,
        source: req.params.source,
    });
    const tic = Date.now();
    while (Date.now() - tic < maxWaitInMilliseconds) {
        try {
            const changeStream = await getChangeStream(services, req);
            changeStream.on('change', onChange);
            services.metrics.incrementApiStreamSSE({
                state: 'created',
                model: req.params.model,
                source: req.params.source,
            });
            return changeStream;
        }
        catch (err) {
            // Surface the failure before backing off and retrying.
            services.telemetry.logger.info('[api/stream] Change stream creation failed. Retrying...', { err });
            await new Promise((resolve) => setTimeout(resolve, services.config.features.api.stream.reconnectDelayOnError));
        }
    }
    services.metrics.incrementApiStreamSSE({
        state: 'failed',
        model: req.params.model,
        source: req.params.source,
    });
    services.telemetry.logger.error('[api/stream] Failed to initialize change stream', {});
    throw new Error('Failed to initialize change stream');
}
/**
 * Builds an Express handler that exposes a MongoDB change stream over
 * Server-Sent Events. The stream is (re)created through `initChangeStream`
 * and reconnects automatically on stream errors; SSE metrics are emitted
 * for the 'requested', 'error' and 'closed' states.
 *
 * @param services - injected app services (metrics, telemetry, config, signals)
 * @returns an async Express middleware `(req, res, next)`
 */
function serverSentEvents(services) {
    return async (req, res, next) => {
        try {
            services.metrics.incrementApiStreamSSE({
                state: 'requested',
                model: req.params.model,
                source: req.params.source,
            });
            // Writes one change event as a single SSE "data:" frame.
            const onChange = (event) => {
                res.write('data: ');
                writeJSON(req, res, format(req, res, event));
                res.write('\n\n');
            };
            // On stream error: close the broken stream and build a fresh one.
            // NOTE: this closure reads `changeStream`, declared with `let`
            // below — safe, because it only runs after that assignment.
            const onError = async (err) => {
                try {
                    services.metrics.incrementApiStreamSSE({
                        state: 'error',
                        model: req.params.model,
                        source: req.params.source,
                    });
                    services.telemetry.logger.info('[api/stream] Error on stream. Reconnecting...', { err });
                    // Compiled form of `changeStream?.close()`.
                    await (changeStream === null || changeStream === void 0 ? void 0 : changeStream.close());
                    changeStream = await initChangeStream(services, req, onChange, services.config.features.api.stream
                        .maxWaitOnReconnectInMilliseconds);
                    changeStream.on('error', onError);
                }
                catch (err) {
                    // Reconnection failed for good: log and terminate the response.
                    services.telemetry.logger.error('[api/stream] Stream error', { err });
                    close(services, req, res, changeStream);
                }
            };
            let changeStream = await initChangeStream(services, req, onChange, services.config.features.api.stream.maxWaitOnReconnectInMilliseconds);
            changeStream.on('error', onError);
            // Standard SSE headers; 'Content-Encoding: none' presumably keeps
            // compression middleware from buffering the stream — TODO confirm.
            res.writeHead(200, {
                'Content-Type': 'text/event-stream',
                'Content-Encoding': 'none',
                Connection: 'keep-alive',
                'Cache-Control': 'no-cache',
            });
            // Runs once on client disconnect or process shutdown ('stop'):
            // detaches both listeners, closes the stream, ends the response.
            const onClose = async () => {
                services.signals.removeListener('stop', onClose);
                res.removeListener('close', onClose);
                await close(services, req, res, changeStream);
                services.metrics.incrementApiStreamSSE({
                    state: 'closed',
                    model: req.params.model,
                    source: req.params.source,
                });
            };
            res.once('close', onClose);
            services.signals.once('stop', onClose);
            // Wait (bounded polling) for the cursor to be live, then start
            // the periodic keep-alive comment frames.
            await streamReady(changeStream, 1000);
            serverSentEventKeepAlive(services, res);
        }
        catch (err) {
            next(err);
        }
    };
}
/**
 * @alpha
 *
 * Legacy endpoint: streams change events as one chunked JSON array
 * (`[e1,e2,...`), closed with `]` when the client disconnects.
 *
 * Horizontal Scaling of the stream reader (async process to publish in
 * a queue-like system such as RabbitMQ or Kafka)
 * @see https://stackoverflow.com/questions/54295043/what-is-a-good-horizontal-scaling-strategy-for-a-mongodb-change-stream-reader
 *
 * @param services - injected app services (metrics, telemetry, config)
 * @returns an async Express middleware `(req, res, next)`
 */
function stream(services) {
    return async (req, res, next) => {
        try {
            services.metrics.incrementApiStreamLegacy({
                state: 'requested',
                model: req.params.model,
                source: req.params.source,
            });
            // Tracks whether a separating comma is needed before the next
            // array element.
            let isFirstElement = true;
            const onChange = (event) => {
                if (isFirstElement !== true) {
                    res.write(',');
                }
                else {
                    isFirstElement = false;
                }
                writeJSON(req, res, format(req, res, event));
            };
            // On stream error: close the broken stream and build a fresh one.
            const onError = async (err) => {
                try {
                    // FIX: this legacy endpoint previously reported errors on
                    // the SSE metric (incrementApiStreamSSE), diverging from
                    // the 'requested'/'closed' states which use the legacy one.
                    services.metrics.incrementApiStreamLegacy({
                        state: 'error',
                        model: req.params.model,
                        source: req.params.source,
                    });
                    services.telemetry.logger.info('[api/stream] Error on stream. Reconnecting...', { err });
                    // Compiled form of `changeStream?.close()`.
                    await (changeStream === null || changeStream === void 0 ? void 0 : changeStream.close());
                    changeStream = await initChangeStream(services, req, onChange, services.config.features.api.stream
                        .maxWaitOnReconnectInMilliseconds);
                    changeStream.on('error', onError);
                }
                catch (err) {
                    // Reconnection failed for good: log and terminate.
                    services.telemetry.logger.error('[api/stream] Stream error', { err });
                    close(services, req, res, changeStream);
                }
            };
            let changeStream = await initChangeStream(services, req, onChange, services.config.features.api.stream.maxWaitOnReconnectInMilliseconds);
            changeStream.on('error', onError);
            res.writeHead(200, {
                'Content-Type': 'application/json',
                'Content-Encoding': 'none',
                'Transfer-Encoding': 'chunked',
                Connection: 'keep-alive',
                'Cache-Control': 'no-cache',
            });
            // Open the JSON array; elements are appended by onChange.
            res.write('[');
            res.on('close', async () => {
                res.write(']');
                await close(services, req, res, changeStream);
                services.metrics.incrementApiStreamLegacy({
                    state: 'closed',
                    model: req.params.model,
                    source: req.params.source,
                });
            });
            await streamReady(changeStream, 1000);
        }
        catch (err) {
            next(err);
        }
    };
}
//# sourceMappingURL=controllers.js.map