/**
 * @getanthill/datastore — Event-Sourced Datastore
 * (package source: 991 lines (839 loc) • 26.8 kB • text/typescript)
 */
import type { MongoDbConnector } from '@getanthill/mongodb-connector';
import type { FindCursor, Db } from 'mongodb';
import type {
GenericType,
ModelConfig,
ModelSchema,
Reducer,
Services,
Event,
ModelInstance,
EntityState,
AnyObject,
HandleOptions,
} from '../typings';
import _ from 'lodash';
import { MongoError } from 'mongodb';
import { EventSourcedFactory, Snapshots } from '@getanthill/event-source';
import { ObjectId } from '@getanthill/mongodb-connector';
import * as jsonpatch from 'fast-json-patch';
import crypto from 'crypto';
import * as c from '../constants';
import config from '../config';
import * as utils from '../utils';
// Local alias for the connector's model definition shape.
// tslint:disable-next-line
export type ModelDefinition = MongoDbConnector.ModelDefinition;
/**
 * Configuration injected into the model factory. The UPPER_CASE
 * members mirror the options consumed by `EventSourcedFactory`.
 */
export interface Options {
  // Name of the field correlating all events/states of one entity.
  CORRELATION_FIELD?: string;
  CREATED_AT_FIELD?: string;
  // Fields used by the blockchain-style hash chain of the event log.
  CURRENT_HASH_FIELD?: string;
  PREVIOUS_HASH_FIELD?: string;
  NONCE_FIELD?: string;
  EVENTS_COLLECTION_NAME?: string;
  SNAPSHOTS_COLLECTION_NAME?: string;
  // When true, versions are global to the collection, not per-entity.
  WITH_GLOBAL_VERSION?: boolean;
  WITH_BLOCKCHAIN_HASH?: boolean;
  BLOCKCHAIN_HASH_DIFFICULTY?: number;
  BLOCKCHAIN_HASH_GENESIS?: string;
  //
  // Schema currently applied to the model (possibly post-processed).
  SCHEMA: ModelSchema;
  // Schema as originally declared, before any processing.
  ORIGINAL_SCHEMA: ModelSchema;
  MODEL_CONFIG: ModelConfig;
  // Shared application services (config, telemetry, mongodb, ...).
  services: Services;
}
/**
 * Model factory: builds an event-sourced Model class bound to a Mongo
 * collection `definition`, a state `reducer` and the injected
 * `options` (schemas, model config, shared services).
 */
export default (
  definition: ModelDefinition,
  reducer: Reducer,
  options: Options,
): GenericType =>
  class Model
    extends EventSourcedFactory(definition.COLLECTION, reducer, options)
    implements GenericType
  {
    // IV size in bytes used by encryptValue (hex-encoded in payloads).
    static IV_LENGTH = 32;
    // Error-message substrings that trigger a retry in handleWithRetry.
    static RETRY_ERRORS = ['version_unicity', 'correlation_id_version_unicity'];
    // Memoized key material (see getEncryptionKeys); null until resolved.
    static encryptionKeys: any[] | null = null;
    static hashedEncryptionKeys: any[] | null = null;
    /** JSON schema currently applied to this model. */
    public static getSchema(): ModelSchema {
      return options.SCHEMA;
    }
    /** Model configuration (name, encrypted fields, retry policy, ...). */
    public static getModelConfig(): ModelConfig {
      return options.MODEL_CONFIG;
    }
    /** Schema as originally declared, before any processing. */
    public static getOriginalSchema(): ModelSchema {
      return options.ORIGINAL_SCHEMA;
    }
    /**
     * Field correlating all events/states of a single entity.
     * NOTE(review): read from `Model.options` (presumably set by
     * EventSourcedFactory), not the closure `options` — confirm equivalent.
     */
    public static getCorrelationField(): string {
      return Model.options.CORRELATION_FIELD;
    }
    /**
     * Whether writes must wait for state persistence; the model-level
     * setting takes precedence over the global feature flag.
     */
    public static mustWaitStatePersistence(): boolean {
      return (
        options.MODEL_CONFIG.must_wait_state_persistence ??
        options.services.config.features.mustWaitStatePersistence
      );
    }
    /** Mongo collection name backing this model. */
    public static getCollectionName(): string {
      return definition.COLLECTION;
    }
    /**
     * Database handle; reads and writes are routed to distinct
     * `<DATABASE>_read` / `<DATABASE>_write` logical databases.
     */
    public static db(mongodb: MongoDbConnector, isRead = false): Db {
      return mongodb.db(
        `${definition.DATABASE}_${isRead === true ? 'read' : 'write'}`,
      );
    }
public static explain(cursor: any, query: any): void {
if (options.services.config.features.mongodb.explain !== true) {
return;
}
return cursor
.explain()
.then((plan: any) => {
const processedPlan = utils.processExplanationPlan(plan);
options.services.telemetry.logger.debug(
'[Generic#find] Query explain',
processedPlan,
);
options.services.telemetry.logger.info(
'[Generic#find] Query explain',
{
model_name: Model.getModelConfig().name,
query,
stage: processedPlan.stage,
executation_time_ms: processedPlan.executation_time_ms,
index_name: processedPlan.index_name,
keys_examined: processedPlan.keys_examined,
docs_examined: processedPlan.docs_examined,
},
);
if (
processedPlan.executation_time_ms >
options.services.config.features.mongodb
.slowQueryThresholdInMilliseconds
) {
options.services.telemetry.logger.warn(
'[Generic#find] Slow query detected',
{
model_name: Model.getModelConfig().name,
query,
stage: processedPlan.stage,
executation_time_ms: processedPlan.executation_time_ms,
index_name: processedPlan.index_name,
keys_examined: processedPlan.keys_examined,
docs_examined: processedPlan.docs_examined,
},
);
}
})
.catch((err: any) => {
options.services.telemetry.logger.error(
'[Generic#find] Query explain error',
{ err, model_name: Model.getModelConfig().name, query },
);
});
}
    /**
     * Count non-archived states matching `query` (read replica).
     * The archived filter is spread first so the caller's query may
     * override it.
     * NOTE(review): `count()` is deprecated in recent MongoDB
     * drivers; consider `countDocuments()` — verify query-operator
     * compatibility before switching.
     */
    public static count(
      mongodb: MongoDbConnector,
      query: any,
    ): Promise<number> {
      return Model.getStatesCollection(Model.db(mongodb, true)).count({
        [Model.getIsArchivedProperty()]: {
          $in: [null, false],
        },
        ...query,
      });
    }
public static find(
mongodb: MongoDbConnector,
query: any,
opts: any = {},
): FindCursor<any> {
options.services.telemetry.logger.debug('[Generic#find] MaxTimeMS', {
maxTimeMS: options.services.config.features.mongodb.maxTimeMS,
});
const cursor = Model.getStatesCollection(
Model.db(mongodb, opts?.forcePrimary ? false : true),
)
.find(
{
[Model.getIsArchivedProperty()]: {
$in: [null, false],
},
...query,
},
opts,
)
.maxTimeMS(options.services.config.features.mongodb.maxTimeMS);
Model.explain(cursor, query);
return cursor;
}
    // Mongo connector shared by all instance-level operations.
    mongodb: MongoDbConnector;
    // Last known entity state (null until loaded or first event applied).
    state: any;
    // Per-instance behavior knobs resolved in the constructor.
    config: {
      retryDuration: number;
      forcePrimary: boolean;
    };
    /**
     * @param services Shared services (mongodb, telemetry, config).
     * @param correlationId Entity identifier; defaults to a fresh
     *        ObjectId string when creating a new entity.
     * @param opts Optional overrides: retry window (presumably ms —
     *        see handleWithRetry) and routing reads to the primary.
     */
    constructor(
      services: Services,
      correlationId: string = new ObjectId().toString(),
      opts?: Partial<{
        retryDuration: number;
        forcePrimary: boolean;
      }>,
    ) {
      // forcePrimary routes this instance to the write database.
      const db = Model.db(services.mongodb, opts?.forcePrimary ? false : true);
      super({ db, logger: services.telemetry.logger }, correlationId);
      this.mongodb = services.mongodb;
      this.state = null;
      this.config = {
        // Precedence: explicit option > model config > global default.
        retryDuration:
          opts?.retryDuration ??
          Model.getModelConfig().retry_duration ??
          config.features.retryDuration,
        forcePrimary: opts?.forcePrimary ?? false,
      };
    }
    /**
     * Roll the entity back after a failed event application.
     *
     * @warn Rollback a single event for now
     * @param events any[] — events that were being applied
     * @param updatedState {} — state reached before the failure; null
     *        means nothing was persisted, so there is nothing to undo
     */
    public async rollback(
      events: any[],
      updatedState: any,
    ): Promise<ModelInstance> {
      if (events.length === 0) {
        return this;
      }
      if (updatedState === null) {
        // Nothing to rollback
        return this;
      }
      // Never undo an undo: rollback/restore events are not rollbackable.
      if (
        events[0].type === c.EVENT_TYPE_ROLLBACKED ||
        events[0].type === c.EVENT_TYPE_RESTORED
      ) {
        throw new Error('Can not rollback a restoration event');
      }
      // Find the latest event strictly older than the failed version.
      const previousEvent = await Model.getEventsCollection(
        Model.db(this.mongodb),
      ).findOne(
        {
          [Model.getCorrelationField()]: this.correlationId,
          version: {
            $lt: updatedState.version,
          },
        },
        {
          projection: {
            version: 1,
          },
          sort: {
            version: -1,
          },
        },
      );
      if (previousEvent === null) {
        /**
         * @warn Super removal because this is a creation
         */
        await Model.getEventsCollection(Model.db(this.mongodb)).deleteOne({
          [Model.getCorrelationField()]:
            updatedState[Model.getCorrelationField()],
        });
        this.state = null;
        return this;
      }
      const previousState = await this.getStateAtVersion(previousEvent.version);
      /**
       * @alpha The JSON PATCH feature is extremely useful for patching
       * JSON documents with clear operations like "remove". This is for now
       * not exposed on the API.
       */
      // Record the rollback as a ROLLBACKED event whose json_patch
      // transforms the failed state back into the previous state;
      // bookkeeping fields are excluded from the diff.
      return this.apply(c.EVENT_TYPE_ROLLBACKED, {
        type: c.EVENT_TYPE_ROLLBACKED,
        json_patch: jsonpatch.compare(
          _.omit(JSON.parse(JSON.stringify(updatedState)), [
            'version',
            'created_at',
            'updated_at',
          ]),
          _.omit(JSON.parse(JSON.stringify(previousState)), [
            'version',
            'created_at',
            'updated_at',
          ]),
        ),
      });
    }
    /**
     * Apply `handler`'s events, retrying known optimistic-concurrency
     * failures until `retryDuration` elapses.
     *
     * @param handler Produces the events to apply.
     * @param retryDuration Retry window (presumably milliseconds,
     *        compared against `utils.getDateNow()` deltas); 0 disables
     *        retries entirely.
     * @param storeStateErrorHandler Called with the last error and the
     *        last known updated state once retries are exhausted.
     * @param handleOptions Forwarded to `handle`, merged over model
     *        defaults (readonly property, persistence waiting).
     */
    public async handleWithRetry(
      handler: () => Event[],
      retryDuration: number,
      storeStateErrorHandler: (...args: any) => void,
      handleOptions?: HandleOptions,
    ) {
      const _handleOptions: HandleOptions = {
        isReadonly: Model.getIsReadonlyProperty(),
        mustWaitStatePersistence: Model.mustWaitStatePersistence(),
        ...handleOptions,
      };
      if (retryDuration === 0) {
        return this.handle(handler, storeStateErrorHandler, _handleOptions);
      }
      const tic = utils.getDateNow();
      let lastError;
      let lastUpdatedState = null;
      // Busy retry loop (no backoff) bounded by the retry window.
      while (utils.getDateNow() - tic < retryDuration) {
        try {
          return await this.handle(
            handler,
            (err: any, updatedState: any) => {
              // Capture the state reached before failing, then rethrow
              // so the outer catch decides whether to retry.
              lastUpdatedState = updatedState;
              throw err;
            },
            _handleOptions,
          );
        } catch (err: any) {
          lastError = err;
          /**
           * Here, the insert in the state failed because we tried to
           * upsert a new state violating the unicity constraint on the
           * correlation_field. This is due to the fact that another
           * process succeed in updating the state in parallel increasing
           * the state version number;
           */
          if (err.message.includes('correlation_id_unicity')) {
            return this.getState();
          }
          // Retry only on the known optimistic-concurrency violations.
          const mustRetry: boolean = Model.RETRY_ERRORS.reduce((c, v) => {
            return c || err.message.includes(v);
          }, false);
          if (mustRetry === true) {
            continue;
          }
          break;
        }
      }
      // Window exhausted, or a non-retryable error broke the loop.
      return storeStateErrorHandler(lastError, lastUpdatedState);
    }
    /**
     * Append a single event of `eventType` built from `data` and
     * reduce it into the entity state. Configured fields are
     * encrypted unless `handleOptions.mustEncrypt === false`, and a
     * rollback is attempted on MongoDB-level failures.
     *
     * @param eventType Event type constant (created, updated, ...).
     * @param data Event payload; `version`/`type`/`v` are stripped.
     * @param handleOptions Per-call handling flags.
     * @param v Event payload schema version, defaults to '0_0_0'.
     */
    /* @ts-ignore */
    public async apply(
      eventType: string,
      data: AnyObject,
      handleOptions?: HandleOptions,
      v?: string,
    ): Promise<ModelInstance> {
      v = v ?? '0_0_0';
      let appliedData = _.omit(data, 'version', 'type', 'v');
      if (handleOptions?.mustEncrypt !== false) {
        appliedData = Model.encrypt(appliedData);
      }
      const events: Event[] = [
        {
          type: eventType,
          v,
          ...appliedData,
        },
      ];
      let updatedState = null;
      try {
        const _handleOptions = {
          isReadonly: Model.getIsReadonlyProperty(),
          ...handleOptions,
        };
        // When the payload itself sets the readonly field, drop the
        // readonly guard so the event is allowed to flip that flag.
        if (_handleOptions.isReadonly in appliedData) {
          _handleOptions.isReadonly = '';
        }
        this.state = await this.handleWithRetry(
          () => events,
          _handleOptions.retryDuration ?? this.config?.retryDuration,
          (err: any, _updatedState: any) => {
            // Keep the last state reached so rollback can target it.
            updatedState = _updatedState;
            throw err;
          },
          _handleOptions,
        );
      } catch (err) {
        // Only MongoDB-level failures leave partial writes to undo.
        if (err instanceof MongoError) {
          await this.rollback(events, updatedState);
        }
        throw err;
      }
      return this;
    }
public async getState(): Promise<EntityState> {
const { currentState: state } = await Model.getState(
this.db,
this.correlationId,
);
this.state = state;
return this.state;
}
    /**
     * Reconstruct this entity's state as it was at `version`.
     * @param mustThrow Forwarded to the static helper; presumably
     *        throws when the version is unreachable — confirm there.
     */
    public getStateAtVersion(
      version: number,
      mustThrow = true,
    ): Promise<EntityState> {
      return Model.getStateAtVersion(
        Model.db(this.mongodb, true),
        this.correlationId,
        version,
        mustThrow,
      );
    }
    /**
     * Cursor over this entity's events starting from `version`
     * (-1 means from the beginning; exact bound handled by the
     * static helper).
     */
    public getEvents(version = -1): FindCursor<any> {
      return Model.getEvents(
        Model.db(this.mongodb, true),
        this.correlationId,
        version,
      );
    }
    /** Apply a CREATED event carrying `data` as the initial payload. */
    public create(
      data: Event,
      handleOptions?: HandleOptions,
    ): Promise<ModelInstance> {
      return this.apply(c.EVENT_TYPE_CREATED, data, handleOptions);
    }
    /** Apply an UPDATED event carrying `data`. */
    public update(
      data: Event,
      handleOptions?: HandleOptions,
    ): Promise<ModelInstance> {
      return this.apply(c.EVENT_TYPE_UPDATED, data, handleOptions);
    }
    /** Apply a PATCHED event carrying `data`. */
    public patch(
      data: Event,
      handleOptions?: HandleOptions,
    ): Promise<ModelInstance> {
      return this.apply(c.EVENT_TYPE_PATCHED, data, handleOptions);
    }
    /**
     * Restore the entity to a past `version` by emitting a RESTORED
     * event whose json_patch morphs the current state into that
     * version (bookkeeping fields excluded from the diff).
     * NOTE(review): diffs against `this.state` — assumes the current
     * state was loaded beforehand (e.g. via getState); confirm callers.
     */
    public async restore(
      version: number,
      handleOptions?: HandleOptions,
    ): Promise<ModelInstance> {
      const state = await this.getStateAtVersion(version);
      return this.apply(
        c.EVENT_TYPE_RESTORED,
        {
          type: c.EVENT_TYPE_RESTORED,
          json_patch: jsonpatch.compare(
            _.omit(this.state, ['version', 'created_at', 'updated_at']),
            _.omit(state, ['version', 'created_at', 'updated_at']),
          ),
        },
        handleOptions,
      );
    }
public async upsert(
data: Event,
handleOptions?: HandleOptions,
): Promise<ModelInstance> {
try {
return await this.update(data, handleOptions);
} catch (err1: any) {
if (err1.message === 'Entity must be created first') {
try {
if (
handleOptions?.imperativeVersion &&
handleOptions.imperativeVersion > 0
) {
throw err1;
}
return await this.create(data);
} catch (err2: any) {
if (err2.message === 'Entity already created') {
return this.update(data, handleOptions);
}
throw err2;
}
}
throw err1;
}
}
public async getNextVersion(): Promise<number> {
let nextVersion = this.state.version + 1;
if (Model.options.WITH_GLOBAL_VERSION === true) {
const lastEvent = await Model.getLastEvent(
Model.db(this.mongodb, false),
);
/* istanbul ignore next */
nextVersion = lastEvent === null ? nextVersion : lastEvent.version + 1;
}
return nextVersion;
}
    /**
     * Archive the entity: its configured encrypted fields are
     * re-encrypted with the dedicated archive key ring (GCM) and the
     * state is flagged readonly + archived. No-op when already
     * archived.
     */
    public async archive(): Promise<ModelInstance> {
      await this.getState();
      const isArchivedField: string = Model.getIsArchivedProperty();
      if (this.state[isArchivedField] === true) {
        return this;
      }
      const isReadonlyField: string = Model.getIsReadonlyProperty();
      const isDeletedField: string = Model.getIsDeletedProperty();
      // Encrypt with the archive keys, not the regular model keys.
      const archivedData = await Model.encrypt(
        this.state,
        [],
        options.services.config.security.encryptionKeys.archive,
        options.services.config.security.encryptionKeys.archive?.map(
          (k: string) => Model.hashValue(k),
        ),
        'aes-256-gcm',
      );
      const nextVersion: number = await this.getNextVersion();
      return this.apply(
        c.EVENT_TYPE_ARCHIVED,
        {
          type: c.EVENT_TYPE_ARCHIVED,
          ..._.omit(archivedData, ['version', 'created_at', 'updated_at']),
          [isReadonlyField]: true,
          [isArchivedField]: true,
          [isDeletedField]: false,
        },
        {
          // Already encrypted above: bypass the default encryption,
          // and point the readonly guard at the archived flag.
          imperativeVersion: nextVersion,
          mustEncrypt: false,
          isReadonly: isArchivedField,
        },
      );
    }
    /**
     * Reverse an archive: decrypt with the archive key ring and emit
     * a RESTORED event clearing the archived flag. No-op when the
     * entity is not archived or was already deleted.
     */
    public async unarchive(): Promise<ModelInstance> {
      await this.getState();
      const isArchivedField: string = Model.getIsArchivedProperty();
      const isDeletedField: string = Model.getIsDeletedProperty();
      if (this.state[isArchivedField] !== true) {
        return this;
      }
      if (this.state[isDeletedField] === true) {
        return this;
      }
      const isReadonlyField: string = Model.getIsReadonlyProperty();
      const [unarchivedData, lastEvent] = await Promise.all([
        Model.decrypt(
          this.state,
          [],
          options.services.config.security.encryptionKeys.archive,
          options.services.config.security.encryptionKeys.archive?.map(
            (k: string) => Model.hashValue(k),
          ),
        ),
        // Event preceding the ARCHIVED one, to recover the readonly
        // flag the entity had before being archived.
        // NOTE(review): assumes such an event exists — a null
        // `lastEvent` would throw below; confirm invariants.
        this.getEvents(this.state.version - 2).next(),
      ]);
      const nextVersion: number = await this.getNextVersion();
      return this.apply(
        c.EVENT_TYPE_RESTORED,
        {
          type: c.EVENT_TYPE_RESTORED,
          ..._.omit(unarchivedData, ['version', 'created_at', 'updated_at']),
          // Last event must be the `is_readonly=true`
          [isReadonlyField]: lastEvent[isReadonlyField] ?? false,
          [isArchivedField]: false,
          [isDeletedField]: false,
        },
        {
          imperativeVersion: nextVersion,
          mustEncrypt: false,
          isReadonly: '',
        },
      );
    }
    /**
     * Soft-delete an archived entity, then crypto-shred it: every
     * encrypted field is $unset from the persisted state and from
     * all past events.
     *
     * @throws Error when the entity is not archived, or was archived
     *         more recently than the configured grace period.
     */
    public async delete(): Promise<void> {
      await this.getState();
      const isArchivedField: string = Model.getIsArchivedProperty();
      const isDeletedField: string = Model.getIsDeletedProperty();
      if (this.state[isArchivedField] !== true) {
        throw new Error('Entity must be archived first');
      }
      // Enforce the archive-to-delete grace period.
      const deleteAfterArchiveDurationInSeconds =
        options.services.config.features.deleteAfterArchiveDurationInSeconds;
      if (
        utils.getDateNow() - utils.getDate(this.state.updated_at).getTime() <
        deleteAfterArchiveDurationInSeconds * 1000
      ) {
        throw new Error('Entity archived too recently');
      }
      const nextVersion: number = await this.getNextVersion();
      await this.apply(
        c.EVENT_TYPE_DELETED,
        {
          type: c.EVENT_TYPE_DELETED,
          [isDeletedField]: true,
        },
        {
          imperativeVersion: nextVersion,
          mustEncrypt: false,
          isReadonly: isDeletedField,
        },
      );
      // Build the $unset document covering every encrypted field.
      const encryptedFields = Model.getEncryptedFields();
      const unset = {};
      /* @ts-ignore */
      encryptedFields.forEach((f) => (unset[f] = ''));
      await Model.getStatesCollection(Model.db(this.mongodb, false)).updateOne(
        {
          [Model.getCorrelationField()]: this.correlationId,
        },
        {
          $unset: unset,
        },
      );
      await Model.getEventsCollection(Model.db(this.mongodb, false)).updateMany(
        {
          [Model.getCorrelationField()]: this.correlationId,
        },
        {
          $unset: unset,
        },
      );
      // Refresh the cached state to reflect the shredded document.
      await this.getState();
    }
    /** Name of the readonly flag field (model override or global default). */
    public static getIsReadonlyProperty(modelConfig = Model.getModelConfig()) {
      return (
        modelConfig.is_readonly ??
        options?.services?.config?.features?.properties?.is_readonly
      );
    }
    /** Name of the archived flag field (model override or global default). */
    public static getIsArchivedProperty(modelConfig = Model.getModelConfig()) {
      return (
        modelConfig.is_archived ??
        options?.services?.config?.features?.properties?.is_archived
      );
    }
    /** Name of the deleted flag field (model override or global default). */
    public static getIsDeletedProperty(modelConfig = Model.getModelConfig()) {
      return (
        modelConfig.is_deleted ??
        options?.services?.config?.features?.properties?.is_deleted
      );
    }
    /**
     * Resolve and memoize the encryption keys for this model:
     * model-specific keys plus the shared `all` keys, deduplicated.
     * The hashed variants are refreshed at the same time so both
     * arrays stay index-aligned.
     */
    public static getEncryptionKeys(
      modelConfig = Model.getModelConfig(),
    ): string[] {
      if (Array.isArray(Model.encryptionKeys)) {
        return Model.encryptionKeys;
      }
      Model.encryptionKeys = [
        ...(options.services.config.security.encryptionKeys[modelConfig.name] ||
          []),
        ...(options.services.config.security.encryptionKeys.all || []),
      ].filter(utils.unique);
      Model.hashedEncryptionKeys = Model.encryptionKeys.map((k) =>
        Model.hashValue(k),
      );
      return Model.encryptionKeys;
    }
    /**
     * SHA-512 hashes of the encryption keys (memoized), index-aligned
     * with getEncryptionKeys().
     */
    public static getHashesEncryptionKeys(): string[] {
      if (Array.isArray(Model.hashedEncryptionKeys)) {
        return Model.hashedEncryptionKeys;
      }
      Model.hashedEncryptionKeys = Model.getEncryptionKeys().map((k) =>
        Model.hashValue(k),
      );
      return Model.hashedEncryptionKeys;
    }
public static getEligibleKeys(keys: string[]): string[] {
return keys.slice(
0,
options.services.config.security.activeNumberEncryptionKeys,
);
}
private static getEncryptionKeyIndex(keys: string[]): number {
return Math.floor(
utils.random() *
Math.min(
keys.length,
options.services.config.security.activeNumberEncryptionKeys,
),
);
}
private static getEncryptedFields() {
return Model.getModelConfig().encrypted_fields || [];
}
public static isEncryptedField(field: string): boolean {
return Model.getEncryptedFields().includes(field);
}
public static hashValue(data: any): string {
const hash = crypto.createHash('sha512');
hash.update(JSON.stringify(data));
return hash.digest('hex');
}
    /**
     * Encrypt a single value with a randomly chosen active key.
     * The result carries a SHA-512 hash of the clear value and a
     * ':'-joined payload:
     *   keyHashPrefix(6):ivHex:cipherHex:authTagHex:algorithm
     * Returns `data` untouched when no key is available.
     * NOTE(review): IV_LENGTH is 32 bytes while GCM conventionally
     * uses 12; Node accepts arbitrary GCM IV sizes, but confirm this
     * is intentional.
     */
    private static encryptValue(
      data: any,
      keys: string[],
      hashes: string[],
      algorithm = 'aes-256-gcm',
    ): { hash: string; encrypted: string } {
      if (keys.length === 0) {
        return data;
      }
      const value = JSON.stringify(data);
      const iv = crypto.randomBytes(Model.IV_LENGTH);
      const encryptionKeyIndex = Model.getEncryptionKeyIndex(keys);
      const encryptionKey = keys[encryptionKeyIndex];
      const hashedEncryptionKey = hashes[encryptionKeyIndex];
      const cipher = crypto.createCipheriv(algorithm, encryptionKey, iv);
      const enc1 = cipher.update(value, 'utf8');
      const enc2 = cipher.final();
      // @ts-ignore getAuthTag is missing from older cipher typings
      const tag = cipher.getAuthTag();
      const encrypted = Buffer.concat([enc1, enc2]);
      return {
        hash: Model.hashValue(data),
        encrypted: [
          // Only 6 chars of the hashed key are stored: enough to
          // identify the key again at decryption time.
          hashedEncryptionKey.slice(0, 6),
          iv.toString('hex'),
          encrypted.toString('hex'),
          tag.toString('hex'),
          algorithm,
        ].join(':'),
      };
    }
public static encrypt(
data: any,
additionalEncryptedFields: string[] = [],
keys: string[] = Model.getEncryptionKeys(),
hashes: string[] = Model.getHashesEncryptionKeys(),
algorithm?: string,
) {
const encryptedFields = [
...Model.getEncryptedFields(),
...additionalEncryptedFields,
];
if (encryptedFields.length === 0) {
return data;
}
const encryptedData = _.cloneDeep(data);
for (const encryptedField of encryptedFields) {
if (_.has(encryptedData, encryptedField)) {
const value = _.get(encryptedData, encryptedField);
_.set(
encryptedData,
encryptedField,
Model.encryptValue(value, keys, hashes, algorithm),
);
}
}
return encryptedData;
}
    /**
     * Decrypt legacy payloads (no algorithm segment, no auth tag)
     * produced with AES-256-CBC. Payload layout: keyPrefix:ivHex:cipherHex.
     *
     * @deprecated
     *
     * @warn fallback on lower secured encryption
     * algorithm `aes-256-cbc`
     */
    public static decryptValueLegacy(
      value: {
        hash?: string;
        encrypted: string;
      },
      key: string,
    ) {
      const parts = value.encrypted.split(':');
      const iv = Buffer.from(parts[1], 'hex');
      const encryptedText = Buffer.from(parts[2], 'hex');
      const decipher = crypto.createDecipheriv(
        'aes-256-cbc',
        Buffer.from(key),
        iv,
      );
      let decrypted = decipher.update(encryptedText);
      decrypted = Buffer.concat([decrypted, decipher.final()]);
      return JSON.parse(decrypted.toString());
    }
    /**
     * Decrypt a value produced by `encryptValue`. Returns the input
     * unchanged when no key is configured, the value carries no
     * `encrypted` payload, or no known key matches the embedded
     * key prefix.
     */
    private static decryptValue(
      value: { hash?: string; encrypted: string },
      keys: string[],
      hashedKeys: string[],
    ) {
      if (keys.length === 0) {
        return value;
      }
      if (!value.encrypted) {
        return value;
      }
      const parts = value.encrypted.split(':');
      // Match either the raw key (legacy payloads) or its hash
      // against the stored 6-char key prefix.
      const key = keys.find(
        (k, i) => k.startsWith(parts[0]) || hashedKeys[i].startsWith(parts[0]),
      );
      if (!key) {
        return value;
      }
      const algorithm = parts[4];
      // Payloads without an algorithm segment predate GCM: CBC fallback.
      if (!algorithm) {
        return Model.decryptValueLegacy(value, key);
      }
      const iv = Buffer.from(parts[1], 'hex');
      const encryptedText = Buffer.from(parts[2], 'hex');
      const tag = Buffer.from(parts[3], 'hex');
      // NOTE(review): 'aes-256-gcm' is hard-coded here; the embedded
      // `algorithm` segment only acts as a format marker — confirm.
      const decipher = crypto.createDecipheriv('aes-256-gcm', key, iv);
      decipher.setAuthTag(tag);
      let decrypted = decipher.update(encryptedText, undefined, 'utf8');
      decrypted += decipher.final('utf8');
      return JSON.parse(decrypted);
    }
public static decrypt(
data: AnyObject,
additionalEncryptedFields: string[] = [],
keys: string[] = Model.getEncryptionKeys(),
hashedKeys: string[] = Model.getHashesEncryptionKeys(),
) {
const encryptedFields = [
...Model.getEncryptedFields(),
...additionalEncryptedFields,
];
if (encryptedFields.length === 0) {
return data;
}
const decryptedData = _.cloneDeep(data);
for (const encryptedField of encryptedFields) {
if (_.has(decryptedData, encryptedField)) {
const encryptedValue = _.get(decryptedData, encryptedField);
_.set(
decryptedData,
encryptedField,
Model.decryptValue(encryptedValue, keys, hashedKeys),
);
}
}
return decryptedData;
}
    /**
     * Persist a snapshot of an entity's state (latest, or at a given
     * version) into the snapshots collection, optionally pruning the
     * events the snapshot supersedes.
     *
     * @param options.version Snapshot at this version instead of the
     *        latest persisted state.
     * @param options.removePastEvents When true, deletes every event
     *        up to (and including) the snapshot version.
     * @throws Error when no valid state can be snapshot.
     */
    static async snapshot(
      mongodb: MongoDbConnector,
      correlationId: string,
      options?: {
        version?: number;
        removePastEvents?: boolean;
      },
    ) {
      const db = Model.db(mongodb, false);
      const correlationField = Model.getCorrelationField();
      let snapshot;
      if (options?.version === undefined) {
        const { stateInDb } = await Model.getState(db, correlationId);
        snapshot = stateInDb;
      } else {
        snapshot = await Model.getStateAtVersion(
          db,
          correlationId,
          options?.version,
        );
      }
      if (snapshot === null) {
        throw new Error('Snapshot state is invalid');
      }
      await Snapshots.create(
        Model.getSnapshotsCollection(db),
        correlationField,
        snapshot,
      );
      if (options?.removePastEvents !== true) {
        return snapshot;
      }
      // Walk events in cursor order and delete every event whose
      // version does not exceed the snapshot's; stop at the first
      // newer event (assumes the cursor is version-ordered — TODO
      // confirm against Model.getEvents).
      const eventsCursor = await Model.getEvents(db, correlationId, -1);
      while (await eventsCursor.hasNext()) {
        const event = await eventsCursor.next();
        if (event.version > snapshot.version) {
          break;
        }
        await Model.getEventsCollection(Model.db(mongodb)).deleteOne({
          [correlationField]: correlationId,
          version: event.version,
        });
      }
      // NOTE(review): close() is intentionally not awaited here.
      eventsCursor.close();
      return snapshot;
    }
};