/**
 * @getanthill/datastore — Event-Sourced Datastore
 *
 * Change-stream streaming API: Server-Sent Events and legacy chunked-JSON
 * endpoints backed by MongoDB change streams.
 */
import type { NextFunction, Request, Response } from 'express';
import type { Services } from '../../typings';
import type { ChangeStreamDocument } from 'mongodb';
import mapKeys from 'lodash/mapKeys';
import isObject from 'lodash/isObject';
import { mapFindQuery } from '../utils';
/**
 * Polls the change stream's underlying cursor until it has an id (i.e. the
 * stream is live and ready to deliver events), or until roughly `timeout`
 * polling steps (1ms each) have elapsed.
 *
 * @credits https://stackoverflow.com/questions/56699849/how-do-i-know-when-a-stream-node-js-mongodb-is-ready-for-changes
 *
 * @param changeStream - MongoDB ChangeStream; only `.cursor.id` is read
 * @param timeout - maximum number of 1ms polling steps before giving up
 * @param step - internal step counter; callers omit it
 * @returns a promise that resolves once the cursor is ready or the timeout
 *          elapses — it never rejects.
 */
async function streamReady(changeStream: any, timeout: number, step = 0) {
  return new Promise<void>((ok) => {
    const i = setInterval(() => {
      // Fix: `cursor` may not be populated yet right after `watch()` —
      // a plain `changeStream.cursor.id` would throw inside this interval
      // callback (uncaught) and leave the promise forever pending.
      if (changeStream.cursor?.id) {
        clearInterval(i);
        return ok();
      }
      /* istanbul ignore next */
      step += 1;
      /* istanbul ignore next */
      if (step > timeout) {
        clearInterval(i);
        ok();
      }
    }, 1);
    // Never keep the process alive just for this readiness poller.
    i.unref();
  });
}
/**
 * Shapes a change-stream event for the HTTP response.
 *
 * - `output: raw` header → the untouched change event.
 * - model `all` → `{ model, entity }` envelope, with the `_events` suffix
 *   stripped from the collection name.
 * - otherwise → the event's `fullDocument` alone.
 */
function format(req: Request, _res: Response, data: any): any {
  const wantsRaw = req.header('output') === 'raw';
  if (wantsRaw) {
    return data;
  }
  const isAllModels = req.params.model === 'all';
  if (!isAllModels) {
    return data.fullDocument;
  }
  return {
    model: data.ns.coll.replace('_events', ''),
    entity: data.fullDocument,
  };
}
/**
 * Serializes `data` onto the response as JSON, silently skipping anything
 * that is not an object (strings, numbers, null, undefined, …).
 */
export function writeJSON(_req: Request, res: Response, data: any): void {
  if (isObject(data)) {
    res.write(JSON.stringify(data));
  }
}
/**
 * Extracts an aggregation pipeline from the request: the JSON-encoded
 * `pipeline` query parameter wins, then an array request body, then `[]`.
 */
function getPipelineFromReq(req: Request) {
  const hasQueryPipeline = 'pipeline' in req.query;
  if (hasQueryPipeline) {
    return JSON.parse(req.query?.pipeline as string);
  }
  return Array.isArray(req.body) ? req.body : [];
}
/**
 * Builds and starts a MongoDB change stream for the requested model/source.
 *
 * - `:model` of `all` watches the whole database, pre-filtered to the
 *   non-internal model collections (optionally narrowed by the
 *   `only-models` header, a comma-separated list).
 * - otherwise watches the model's events or states collection depending on
 *   `:source` (`events` vs anything else).
 *
 * An optional aggregation pipeline comes from the request (query/body); when
 * `?q=` is present it is additionally prefixed with a `$match` stage derived
 * from the find-query parameters.
 */
async function getChangeStream(services: Services, req: Request) {
  const modelName = req.params.model;
  const source = req.params.source;
  services.telemetry.logger.info('[api/stream] Creating a change stream');
  let pipeline = getPipelineFromReq(req);
  if (modelName !== 'all' && !!req.query.q) {
    // `pipeline` was already consumed above; drop it so it is not treated
    // as a find-query field by mapFindQuery below.
    delete req.query.pipeline;
    const Model = services.models.getModel(req.params.model);
    // Map the query parameters to request services.MongoDb accordingly
    const { query: mappedQuery } = mapFindQuery(Model, req.query);
    if (mappedQuery && Object.keys(mappedQuery).length > 0) {
      // Change events nest the entity under `fullDocument`, so re-key the
      // mapped query and prepend it as a `$match` stage.
      pipeline.unshift({
        $match: mapKeys(mappedQuery, (_v, k) => `fullDocument.${k}`),
      });
    }
  }
  // Default watch target: the whole database, via an internal model handle.
  let changeStreamSource = services.models
    .getModel('internal_models')
    .db(services.mongodb);
  /* istanbul ignore else*/
  if (modelName === 'all') {
    let models = Array.from(services.models.MODELS.keys()).filter(
      (m) => !services.models.isInternalModel(m),
    );
    if (req.header('only-models')) {
      const onlyModels: string[] = req.header('only-models')!.split(',');
      models = models.filter((m) => onlyModels.includes(m));
    }
    // Restrict the database-wide stream to the selected collections
    // (events collections carry an `_events` suffix).
    pipeline = [
      {
        $match: {
          'ns.coll': {
            $in: models.map(
              (name) => name + (source === 'events' ? '_events' : ''),
            ),
          },
        },
      },
      ...pipeline,
    ];
  } else if (modelName) {
    const model = services.models.getModel(modelName);
    if (source === 'events') {
      changeStreamSource = model.getEventsCollection(
        model.db(services.mongodb),
      );
    } else {
      changeStreamSource = model.getStatesCollection(
        model.db(services.mongodb),
      );
    }
  }
  services.telemetry.logger.info('[api/stream] Streaming', {
    model: modelName,
    source,
    pipeline,
  });
  const changeStream = await changeStreamSource.watch(pipeline);
  // Baseline error logging; callers attach their own reconnect handlers.
  changeStream.on('error', (err) => {
    services.telemetry.logger.error('[api/stream] Stream error', { err });
  });
  return changeStream;
}
/**
 * Logs and closes the given change stream.
 */
async function closeChangeStream(
  services: Services,
  changeStream: any,
): Promise<void> {
  services.telemetry.logger.info('[api/stream] Closing the change stream');
  await changeStream.close();
}
/**
 * Tears down a streaming request: closes the change stream first (so no
 * further events are written), then ends the HTTP response.
 * `req` is unused but kept for a uniform handler-helper signature.
 */
async function close(
  services: Services,
  req: Request,
  res: Response,
  changeStream: any,
): Promise<void> {
  // MongoDB side down first, then terminate the response.
  await closeChangeStream(services, changeStream);
  res.end();
}
export function serverSentEventKeepAlive(
services: Services,
res: Response,
): void {
res.write(':\n\n');
const timeout = setTimeout(
serverSentEventKeepAlive,
services.config.features.api.sseKeepAliveTimeout,
services,
res,
);
timeout.unref();
}
/**
 * Creates a change stream for the request, retrying on failure until
 * `maxWaitInMilliseconds` has elapsed.
 *
 * @param services - application services (metrics, telemetry, config, models)
 * @param req - the streaming request (`model`/`source` in `req.params`)
 * @param onChange - listener attached to the stream's 'change' event
 * @param maxWaitInMilliseconds - total time budget for connection attempts
 * @returns the connected change stream
 * @throws Error when no stream could be created within the time budget
 */
export async function initChangeStream(
  services: Services,
  req: Request,
  onChange: (change: ChangeStreamDocument<Document>) => void,
  maxWaitInMilliseconds: number,
) {
  services.metrics.incrementApiStreamSSE({
    state: 'initializing',
    model: req.params.model,
    source: req.params.source as 'events' | 'entities',
  });
  const tic = Date.now();
  while (Date.now() - tic < maxWaitInMilliseconds) {
    try {
      const changeStream = await getChangeStream(services, req);
      changeStream.on('change', onChange);
      services.metrics.incrementApiStreamSSE({
        state: 'created',
        model: req.params.model,
        source: req.params.source as 'events' | 'entities',
      });
      return changeStream;
    } catch (err) {
      // Fix: the retry cause was silently swallowed, making repeated
      // connection failures impossible to diagnose.
      services.telemetry.logger.info(
        '[api/stream] Change stream creation failed. Retrying...',
        { err },
      );
      // Back off before the next attempt.
      await new Promise((resolve) =>
        setTimeout(
          resolve,
          services.config.features.api.stream.reconnectDelayOnError,
        ),
      );
    }
  }
  services.metrics.incrementApiStreamSSE({
    state: 'failed',
    model: req.params.model,
    source: req.params.source as 'events' | 'entities',
  });
  // Fix: message grammar ("Failed initialized" → "Failed to initialize").
  services.telemetry.logger.error(
    '[api/stream] Failed to initialize change stream',
    {},
  );
  throw new Error('Failed to initialize change stream');
}
/**
 * Express handler factory: streams MongoDB change events to the client as
 * Server-Sent Events (SSE), transparently rebuilding the change stream on
 * errors and sending periodic keep-alive comments.
 */
export function serverSentEvents(services: Services) {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      services.metrics.incrementApiStreamSSE({
        state: 'requested',
        model: req.params.model,
        source: req.params.source as 'events' | 'entities',
      });
      // One SSE frame per change event: `data: <json>\n\n`.
      const onChange = (event: ChangeStreamDocument<Document>) => {
        res.write('data: ');
        writeJSON(req, res, format(req, res, event));
        res.write('\n\n');
      };
      // On stream error: close the broken stream and build a fresh one while
      // keeping the HTTP response open. `changeStream` is the `let` binding
      // declared below; it is assigned before this handler can fire.
      const onError = async (err: Error) => {
        try {
          services.metrics.incrementApiStreamSSE({
            state: 'error',
            model: req.params.model,
            source: req.params.source as 'events' | 'entities',
          });
          services.telemetry.logger.info(
            '[api/stream] Error on stream. Reconnecting...',
            { err },
          );
          await changeStream?.close();
          changeStream = await initChangeStream(
            services,
            req,
            onChange,
            services.config.features.api.stream
              .maxWaitOnReconnectInMilliseconds,
          );
          changeStream.on('error', onError);
        } catch (err) {
          // Reconnection failed for good: log and tear the response down.
          services.telemetry.logger.error('[api/stream] Stream error', { err });
          close(services, req, res, changeStream);
        }
      };
      let changeStream = await initChangeStream(
        services,
        req,
        onChange,
        services.config.features.api.stream.maxWaitOnReconnectInMilliseconds,
      );
      changeStream.on('error', onError);
      // Standard SSE headers; 'Content-Encoding: none' discourages proxy
      // buffering/compression of the event stream.
      res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Content-Encoding': 'none',
        Connection: 'keep-alive',
        'Cache-Control': 'no-cache',
      });
      // Clean up exactly once, on client disconnect or process 'stop':
      // detach both listeners first so the other trigger cannot re-enter.
      const onClose = async () => {
        services.signals.removeListener('stop', onClose);
        res.removeListener('close', onClose);
        await close(services, req, res, changeStream);
        services.metrics.incrementApiStreamSSE({
          state: 'closed',
          model: req.params.model,
          source: req.params.source as 'events' | 'entities',
        });
      };
      res.once('close', onClose);
      services.signals.once('stop', onClose);
      // Wait (bounded, ~1s of 1ms steps) for the cursor to go live, then
      // start the keep-alive heartbeat chain.
      await streamReady(changeStream, 1000);
      serverSentEventKeepAlive(services, res);
    } catch (err) {
      next(err);
    }
  };
}
/**
 * @alpha
 *
 * Legacy streaming endpoint: emits change events as one endless chunked JSON
 * array (`[e1,e2,…`), closed with `]` when the connection ends.
 *
 * Horizontal Scaling of the stream reader (async process to publish in
 * a queue-like system such as RabbitMQ or Kafka)
 * @see https://stackoverflow.com/questions/54295043/what-is-a-good-horizontal-scaling-strategy-for-a-mongodb-change-stream-reader
 *
 * @param services
 */
export function stream(services: Services) {
  return async (req: Request, res: Response, next: NextFunction) => {
    try {
      services.metrics.incrementApiStreamLegacy({
        state: 'requested',
        model: req.params.model,
        source: req.params.source as 'events' | 'entities',
      });
      // Emit a comma before every element but the first, keeping the output
      // a valid JSON array.
      let isFirstElement = true;
      const onChange = (event: ChangeStreamDocument<Document>) => {
        if (isFirstElement !== true) {
          res.write(',');
        } else {
          isFirstElement = false;
        }
        writeJSON(req, res, format(req, res, event));
      };
      // Transparent reconnection on stream errors (mirrors serverSentEvents).
      const onError = async (err: Error) => {
        try {
          // Fix: this is the legacy endpoint — report errors on the legacy
          // metric (was incrementApiStreamSSE, copy-pasted from the SSE
          // handler; every other call here uses the legacy counter).
          services.metrics.incrementApiStreamLegacy({
            state: 'error',
            model: req.params.model,
            source: req.params.source as 'events' | 'entities',
          });
          services.telemetry.logger.info(
            '[api/stream] Error on stream. Reconnecting...',
            { err },
          );
          await changeStream?.close();
          changeStream = await initChangeStream(
            services,
            req,
            onChange,
            services.config.features.api.stream
              .maxWaitOnReconnectInMilliseconds,
          );
          changeStream.on('error', onError);
        } catch (err) {
          services.telemetry.logger.error('[api/stream] Stream error', { err });
          close(services, req, res, changeStream);
        }
      };
      let changeStream = await initChangeStream(
        services,
        req,
        onChange,
        services.config.features.api.stream.maxWaitOnReconnectInMilliseconds,
      );
      changeStream.on('error', onError);
      res.writeHead(200, {
        'Content-Type': 'application/json',
        'Content-Encoding': 'none',
        'Transfer-Encoding': 'chunked',
        Connection: 'keep-alive',
        'Cache-Control': 'no-cache',
      });
      res.write('[');
      res.on('close', async () => {
        // Best-effort closing bracket; the socket may already be gone.
        res.write(']');
        await close(services, req, res, changeStream);
        services.metrics.incrementApiStreamLegacy({
          state: 'closed',
          model: req.params.model,
          source: req.params.source as 'events' | 'entities',
        });
      });
      // Bounded wait for the cursor to go live before returning control.
      await streamReady(changeStream, 1000);
    } catch (err) {
      next(err);
    }
  };
}