/**
 * @getanthill/datastore — Event-Sourced Datastore (compiled JavaScript output).
 */
"use strict";
// tslib-style helper: re-exports `m[k]` on `o` (optionally under the alias
// `k2`), preserving live-binding semantics via an accessor when possible.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (target, source, key, alias) {
        if (alias === undefined) alias = key;
        var descriptor = Object.getOwnPropertyDescriptor(source, key);
        // Replace plain data properties (or accessor-less ES-module slots)
        // with a getter so later writes to the source module stay visible.
        var needsProxy = !descriptor ||
            ("get" in descriptor ? !source.__esModule : descriptor.writable || descriptor.configurable);
        if (needsProxy) {
            descriptor = { enumerable: true, get: function () { return source[key]; } };
        }
        Object.defineProperty(target, alias, descriptor);
    }
    : function (target, source, key, alias) {
        if (alias === undefined) alias = key;
        target[alias] = source[key];
    });
// tslib-style helper: attaches `v` as the `default` export of namespace `o`.
var __setModuleDefault = (this && this.__setModuleDefault) ||
    (Object.create
        ? function (target, value) {
            // Enumerable but non-writable, mirroring ES namespace objects.
            Object.defineProperty(target, "default", { enumerable: true, value: value });
        }
        : function (target, value) {
            target["default"] = value;
        });
// tslib-style helper: emulates `import * as ns from 'mod'` for CommonJS
// modules. ES modules pass through unchanged; CommonJS exports are copied
// onto a fresh namespace object and the module itself becomes `default`.
var __importStar = (this && this.__importStar) || (function () {
    // `ownKeys` rebinds itself on first call so the feature detection
    // (Object.getOwnPropertyNames vs for-in fallback) runs only once.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        // Already an ES module namespace: nothing to adapt.
        if (mod && mod.__esModule) return mod;
        var result = {};
        // Copy every own key except 'default' as a live binding.
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        // The CommonJS module object itself becomes the default export.
        __setModuleDefault(result, mod);
        return result;
    };
})();
// tslib-style helper: emulates `import x from 'mod'` for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    // ES modules already expose `default`; CommonJS exports get wrapped.
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const lodash_1 = __importDefault(require("lodash"));
const mongodb_1 = require("mongodb");
const event_source_1 = require("@getanthill/event-source");
const mongodb_connector_1 = require("@getanthill/mongodb-connector");
const jsonpatch = __importStar(require("fast-json-patch"));
const crypto_1 = __importDefault(require("crypto"));
const c = __importStar(require("../constants"));
const config_1 = __importDefault(require("../config"));
const utils = __importStar(require("../utils"));
/**
 * Factory building an event-sourced Model class bound to one collection.
 *
 * @param definition collection/database definition ({ COLLECTION, DATABASE })
 * @param reducer event reducer folding events into state
 * @param options model options (SCHEMA, MODEL_CONFIG, ORIGINAL_SCHEMA, services, …)
 *
 * The compiled `_a = class Model …` pattern keeps `_a` as a self-reference
 * usable inside static members; static props are assigned after the class body.
 */
exports.default = (definition, reducer, options) => { var _a; return _a = class Model extends (0, event_source_1.EventSourcedFactory)(definition.COLLECTION, reducer, options) {
    /** Schema used for this model (from the factory options). */
    static getSchema() {
        return options.SCHEMA;
    }
    /** Model configuration (name, encrypted_fields, flag-field names, …). */
    static getModelConfig() {
        return options.MODEL_CONFIG;
    }
    /** The schema as originally supplied, before any transformation. */
    static getOriginalSchema() {
        return options.ORIGINAL_SCHEMA;
    }
    /** Field correlating events/states of one entity. */
    static getCorrelationField() {
        // NOTE(review): reads the static `_a.options` (inherited from
        // EventSourcedFactory?) rather than the closure `options` used by the
        // other getters — confirm both carry CORRELATION_FIELD.
        return _a.options.CORRELATION_FIELD;
    }
    /** Model-level override, else the global feature flag. */
    static mustWaitStatePersistence() {
        var _b;
        return ((_b = options.MODEL_CONFIG.must_wait_state_persistence) !== null && _b !== void 0 ? _b : options.services.config.features.mustWaitStatePersistence);
    }
    /** MongoDB collection name backing this model. */
    static getCollectionName() {
        return definition.COLLECTION;
    }
    /** Database handle: `<DATABASE>_read` when isRead, else `<DATABASE>_write`. */
    static db(mongodb, isRead = false) {
        return mongodb.db(`${definition.DATABASE}_${isRead === true ? 'read' : 'write'}`);
    }
    /**
     * Optionally explains a query's execution plan for telemetry.
     *
     * No-op unless `features.mongodb.explain` is enabled. Intended to run
     * fire-and-forget: callers do not await the returned promise, and any
     * explain failure is only logged, never thrown.
     *
     * @param cursor MongoDB cursor to explain
     * @param query the original filter, echoed into the logs
     */
    static explain(cursor, query) {
        if (options.services.config.features.mongodb.explain !== true) {
            return;
        }
        return cursor
            .explain()
            .then((plan) => {
            const processedPlan = utils.processExplanationPlan(plan);
            options.services.telemetry.logger.debug('[Generic#find] Query explain', processedPlan);
            // NOTE(review): the `executation_time_ms` spelling matches the key
            // produced by utils.processExplanationPlan — keep the two in sync.
            options.services.telemetry.logger.info('[Generic#find] Query explain', {
                model_name: _a.getModelConfig().name,
                query,
                stage: processedPlan.stage,
                executation_time_ms: processedPlan.executation_time_ms,
                index_name: processedPlan.index_name,
                keys_examined: processedPlan.keys_examined,
                docs_examined: processedPlan.docs_examined,
            });
            // Escalate to a warning above the configured slow-query threshold.
            if (processedPlan.executation_time_ms >
                options.services.config.features.mongodb
                    .slowQueryThresholdInMilliseconds) {
                options.services.telemetry.logger.warn('[Generic#find] Slow query detected', {
                    model_name: _a.getModelConfig().name,
                    query,
                    stage: processedPlan.stage,
                    executation_time_ms: processedPlan.executation_time_ms,
                    index_name: processedPlan.index_name,
                    keys_examined: processedPlan.keys_examined,
                    docs_examined: processedPlan.docs_examined,
                });
            }
        })
            .catch((err) => {
            options.services.telemetry.logger.error('[Generic#find] Query explain error', { err, model_name: _a.getModelConfig().name, query });
        });
    }
static count(mongodb, query) {
return _a.getStatesCollection(_a.db(mongodb, true)).count({
[_a.getIsArchivedProperty()]: {
$in: [null, false],
},
...query,
});
}
    /**
     * Finds non-archived states matching `query`.
     *
     * Reads from the read database unless `opts.forcePrimary` is set, and
     * bounds execution with the configured `maxTimeMS`.
     *
     * @param mongodb MongoDB client
     * @param query additional filter, spread after the archived guard
     * @param opts MongoDB find options (plus `forcePrimary`)
     * @returns a MongoDB cursor
     */
    static find(mongodb, query, opts = {}) {
        options.services.telemetry.logger.debug('[Generic#find] MaxTimeMS', {
            maxTimeMS: options.services.config.features.mongodb.maxTimeMS,
        });
        const cursor = _a.getStatesCollection(_a.db(mongodb, (opts === null || opts === void 0 ? void 0 : opts.forcePrimary) ? false : true))
            .find({
            [_a.getIsArchivedProperty()]: {
                $in: [null, false],
            },
            ...query,
        }, opts)
            .maxTimeMS(options.services.config.features.mongodb.maxTimeMS);
        // Fire-and-forget: deliberately not awaited; errors are logged in explain().
        _a.explain(cursor, query);
        return cursor;
    }
    /**
     * @param services injected services (mongodb client, telemetry, config, …)
     * @param correlationId entity correlation id; defaults to a fresh ObjectId string
     * @param opts optional { retryDuration, forcePrimary }
     */
    constructor(services, correlationId = new mongodb_connector_1.ObjectId().toString(), opts) {
        var _b, _c, _d;
        // Use the read database unless the caller forces the primary/write side.
        const db = _a.db(services.mongodb, (opts === null || opts === void 0 ? void 0 : opts.forcePrimary) ? false : true);
        super({ db, logger: services.telemetry.logger }, correlationId);
        this.mongodb = services.mongodb;
        this.state = null;
        // retryDuration precedence: explicit option > model config > global default.
        this.config = {
            retryDuration: (_c = (_b = opts === null || opts === void 0 ? void 0 : opts.retryDuration) !== null && _b !== void 0 ? _b : _a.getModelConfig().retry_duration) !== null && _c !== void 0 ? _c : config_1.default.features.retryDuration,
            forcePrimary: (_d = opts === null || opts === void 0 ? void 0 : opts.forcePrimary) !== null && _d !== void 0 ? _d : false,
        };
    }
    /**
     * Reverts the effect of the event(s) that produced `updatedState`.
     *
     * @warn Rollback a single event for now
     * @param events the events just applied (only events[0].type is inspected)
     * @param updatedState state those events produced, or null when nothing persisted
     */
    async rollback(events, updatedState) {
        if (events.length === 0) {
            return this;
        }
        if (updatedState === null) {
            // Nothing to rollback
            return this;
        }
        // Rollback/restore events must not themselves be rolled back.
        if (events[0].type === c.EVENT_TYPE_ROLLBACKED ||
            events[0].type === c.EVENT_TYPE_RESTORED) {
            throw new Error('Can not rollback a restoration event');
        }
        // Most recent event strictly older than the state being reverted.
        const previousEvent = await _a.getEventsCollection(_a.db(this.mongodb)).findOne({
            [_a.getCorrelationField()]: this.correlationId,
            version: {
                $lt: updatedState.version,
            },
        }, {
            projection: {
                version: 1,
            },
            sort: {
                version: -1,
            },
        });
        if (previousEvent === null) {
            /**
             * @warn Super removal because this is a creation
             */
            await _a.getEventsCollection(_a.db(this.mongodb)).deleteOne({
                [_a.getCorrelationField()]: updatedState[_a.getCorrelationField()],
            });
            this.state = null;
            return this;
        }
        const previousState = await this.getStateAtVersion(previousEvent.version);
        /**
         * @alpha The JSON PATCH feature is extremely useful for patching
         * JSON documents with clear operations like "remove". This is for now
         * not exposed on the API.
         */
        // JSON round-trip normalizes non-JSON values before diffing;
        // version/timestamps are excluded from the patch.
        return this.apply(c.EVENT_TYPE_ROLLBACKED, {
            type: c.EVENT_TYPE_ROLLBACKED,
            json_patch: jsonpatch.compare(lodash_1.default.omit(JSON.parse(JSON.stringify(updatedState)), [
                'version',
                'created_at',
                'updated_at',
            ]), lodash_1.default.omit(JSON.parse(JSON.stringify(previousState)), [
                'version',
                'created_at',
                'updated_at',
            ])),
        });
    }
    /**
     * Runs `handler` through the event-source `handle`, retrying on known
     * transient unicity violations until `retryDuration` (ms) is exhausted.
     *
     * @param handler produces the events to apply
     * @param retryDuration total retry budget in milliseconds (0 = single attempt)
     * @param storeStateErrorHandler called with the last error and last
     *        partially-updated state once the budget is spent
     * @param handleOptions forwarded to `handle` (isReadonly, persistence wait, …)
     */
    async handleWithRetry(handler, retryDuration, storeStateErrorHandler, handleOptions) {
        const _handleOptions = {
            isReadonly: _a.getIsReadonlyProperty(),
            mustWaitStatePersistence: _a.mustWaitStatePersistence(),
            ...handleOptions,
        };
        if (retryDuration === 0) {
            return this.handle(handler, storeStateErrorHandler, _handleOptions);
        }
        const tic = utils.getDateNow();
        let lastError;
        let lastUpdatedState = null;
        while (utils.getDateNow() - tic < retryDuration) {
            try {
                // The inner error handler captures the partial state, then
                // rethrows so the retry loop can classify the error.
                return await this.handle(handler, (err, updatedState) => {
                    lastUpdatedState = updatedState;
                    throw err;
                }, _handleOptions);
            }
            catch (err) {
                lastError = err;
                /**
                 * Here, the insert in the state failed because we tried to
                 * upsert a new state violating the unicity constraint on the
                 * correlation_field. This is due to the fact that another
                 * process succeed in updating the state in parallel increasing
                 * the state version number;
                 */
                if (err.message.includes('correlation_id_unicity')) {
                    return this.getState();
                }
                // Retry only on the known transient unicity errors.
                const mustRetry = _a.RETRY_ERRORS.reduce((c, v) => {
                    return c || err.message.includes(v);
                }, false);
                if (mustRetry === true) {
                    continue;
                }
                break;
            }
        }
        // Budget spent or non-retryable error: delegate to the caller's handler.
        return storeStateErrorHandler(lastError, lastUpdatedState);
    }
    /* @ts-ignore */
    /**
     * Applies one event of `eventType` carrying payload `data`.
     *
     * Encrypts configured fields (unless `handleOptions.mustEncrypt === false`),
     * delegates to handleWithRetry, and rolls back the written event when state
     * persistence fails with a MongoError.
     *
     * @param eventType event type constant (c.EVENT_TYPE_*)
     * @param data event payload ('version'/'type'/'v' keys are stripped)
     * @param handleOptions optional handling flags (mustEncrypt, retryDuration, isReadonly, …)
     * @param v payload schema version, defaults to '0_0_0'
     */
    async apply(eventType, data, handleOptions, v) {
        var _b, _c;
        v = v !== null && v !== void 0 ? v : '0_0_0';
        // version/type/v are managed by the event store, never taken from data.
        let appliedData = lodash_1.default.omit(data, 'version', 'type', 'v');
        if ((handleOptions === null || handleOptions === void 0 ? void 0 : handleOptions.mustEncrypt) !== false) {
            appliedData = _a.encrypt(appliedData);
        }
        const events = [
            {
                type: eventType,
                v,
                ...appliedData,
            },
        ];
        let updatedState = null;
        try {
            const _handleOptions = {
                isReadonly: _a.getIsReadonlyProperty(),
                ...handleOptions,
            };
            // When the payload itself writes the readonly field, clear the
            // readonly guard so that write can go through.
            // NOTE(review): confirm against handle()'s isReadonly semantics.
            if (_handleOptions.isReadonly in appliedData) {
                _handleOptions.isReadonly = '';
            }
            this.state = await this.handleWithRetry(() => events, (_b = _handleOptions.retryDuration) !== null && _b !== void 0 ? _b : (_c = this.config) === null || _c === void 0 ? void 0 : _c.retryDuration, (err, _updatedState) => {
                updatedState = _updatedState;
                throw err;
            }, _handleOptions);
        }
        catch (err) {
            // Only storage-level failures are rolled back; other errors
            // propagate without touching the event stream.
            if (err instanceof mongodb_1.MongoError) {
                await this.rollback(events, updatedState);
            }
            throw err;
        }
        return this;
    }
    /** Reloads the entity's current state from the store and caches it on `this.state`. */
    async getState() {
        const { currentState: state } = await _a.getState(this.db, this.correlationId);
        this.state = state;
        return this.state;
    }
    /**
     * State of this entity as of `version` (read database).
     * `mustThrow` is forwarded to the static resolver — presumably controls
     * whether a missing version throws; confirm in EventSourcedFactory.
     */
    getStateAtVersion(version, mustThrow = true) {
        return _a.getStateAtVersion(_a.db(this.mongodb, true), this.correlationId, version, mustThrow);
    }
    /** Cursor over this entity's events, starting after `version` (-1 = all). */
    getEvents(version = -1) {
        return _a.getEvents(_a.db(this.mongodb, true), this.correlationId, version);
    }
    /** Applies a CREATED event with `data`. */
    create(data, handleOptions) {
        return this.apply(c.EVENT_TYPE_CREATED, data, handleOptions);
    }
    /** Applies an UPDATED event with `data`. */
    update(data, handleOptions) {
        return this.apply(c.EVENT_TYPE_UPDATED, data, handleOptions);
    }
    /** Applies a PATCHED event with `data`. */
    patch(data, handleOptions) {
        return this.apply(c.EVENT_TYPE_PATCHED, data, handleOptions);
    }
    /**
     * Restores the entity to a past version by applying a RESTORED event
     * whose JSON patch transforms the current state into the state at
     * `version` (timestamps and version excluded from the diff).
     */
    async restore(version, handleOptions) {
        const state = await this.getStateAtVersion(version);
        return this.apply(c.EVENT_TYPE_RESTORED, {
            type: c.EVENT_TYPE_RESTORED,
            json_patch: jsonpatch.compare(lodash_1.default.omit(this.state, ['version', 'created_at', 'updated_at']), lodash_1.default.omit(state, ['version', 'created_at', 'updated_at'])),
        }, handleOptions);
    }
    /**
     * Update-or-create: tries update first; when the entity does not exist
     * yet, falls back to create; when creation loses a concurrent race
     * ('Entity already created'), retries the update.
     *
     * An imperativeVersion > 0 forbids the create fallback, since a pinned
     * version only makes sense on an existing entity.
     */
    async upsert(data, handleOptions) {
        try {
            return await this.update(data, handleOptions);
        }
        catch (err1) {
            if (err1.message === 'Entity must be created first') {
                try {
                    if ((handleOptions === null || handleOptions === void 0 ? void 0 : handleOptions.imperativeVersion) &&
                        handleOptions.imperativeVersion > 0) {
                        throw err1;
                    }
                    return await this.create(data);
                }
                catch (err2) {
                    if (err2.message === 'Entity already created') {
                        return this.update(data, handleOptions);
                    }
                    throw err2;
                }
            }
            throw err1;
        }
    }
    /**
     * Next event version for this entity: entity-local (state.version + 1)
     * by default, or global (last event across the collection + 1) when
     * WITH_GLOBAL_VERSION is enabled.
     */
    async getNextVersion() {
        let nextVersion = this.state.version + 1;
        if (_a.options.WITH_GLOBAL_VERSION === true) {
            const lastEvent = await _a.getLastEvent(_a.db(this.mongodb, false));
            /* istanbul ignore next */
            nextVersion = lastEvent === null ? nextVersion : lastEvent.version + 1;
        }
        return nextVersion;
    }
    /**
     * Archives the entity: re-encrypts the full state with the dedicated
     * archive keys (AES-256-GCM) and applies an ARCHIVED event marking it
     * readonly + archived. Idempotent: no-op when already archived.
     */
    async archive() {
        var _b;
        await this.getState();
        const isArchivedField = _a.getIsArchivedProperty();
        if (this.state[isArchivedField] === true) {
            return this;
        }
        const isReadonlyField = _a.getIsReadonlyProperty();
        const isDeletedField = _a.getIsDeletedProperty();
        // Whole state encrypted with archive keys (not the model keys).
        const archivedData = await _a.encrypt(this.state, [], options.services.config.security.encryptionKeys.archive, (_b = options.services.config.security.encryptionKeys.archive) === null || _b === void 0 ? void 0 : _b.map((k) => _a.hashValue(k)), 'aes-256-gcm');
        const nextVersion = await this.getNextVersion();
        return this.apply(c.EVENT_TYPE_ARCHIVED, {
            type: c.EVENT_TYPE_ARCHIVED,
            ...lodash_1.default.omit(archivedData, ['version', 'created_at', 'updated_at']),
            [isReadonlyField]: true,
            [isArchivedField]: true,
            [isDeletedField]: false,
        }, {
            imperativeVersion: nextVersion,
            mustEncrypt: false,
            isReadonly: isArchivedField,
        });
    }
    /**
     * Reverses archive(): decrypts the archived state with the archive keys
     * and applies a RESTORED event clearing the archived flag. No-op when not
     * archived; refuses (silently returns) when the entity was deleted.
     */
    async unarchive() {
        var _b, _c;
        await this.getState();
        const isArchivedField = _a.getIsArchivedProperty();
        const isDeletedField = _a.getIsDeletedProperty();
        if (this.state[isArchivedField] !== true) {
            return this;
        }
        if (this.state[isDeletedField] === true) {
            return this;
        }
        const isReadonlyField = _a.getIsReadonlyProperty();
        // Decrypt archived payload and fetch the event preceding the archive
        // pair (version - 2) in parallel, to recover the prior readonly flag.
        const [unarchivedData, lastEvent] = await Promise.all([
            _a.decrypt(this.state, [], options.services.config.security.encryptionKeys.archive, (_b = options.services.config.security.encryptionKeys.archive) === null || _b === void 0 ? void 0 : _b.map((k) => _a.hashValue(k))),
            this.getEvents(this.state.version - 2).next(),
        ]);
        const nextVersion = await this.getNextVersion();
        return this.apply(c.EVENT_TYPE_RESTORED, {
            type: c.EVENT_TYPE_RESTORED,
            ...lodash_1.default.omit(unarchivedData, ['version', 'created_at', 'updated_at']),
            // Last event must be the `is_readonly=true`
            [isReadonlyField]: (_c = lastEvent[isReadonlyField]) !== null && _c !== void 0 ? _c : false,
            [isArchivedField]: false,
            [isDeletedField]: false,
        }, {
            imperativeVersion: nextVersion,
            mustEncrypt: false,
            isReadonly: '',
        });
    }
    /**
     * Crypto-shredding delete. Requires the entity to be archived and past
     * the configured grace period; appends a DELETED event, then `$unset`s
     * every encrypted field from the state document and all events, making
     * the encrypted data unrecoverable.
     *
     * @throws Error when not archived, or archived too recently
     */
    async delete() {
        await this.getState();
        const isArchivedField = _a.getIsArchivedProperty();
        const isDeletedField = _a.getIsDeletedProperty();
        if (this.state[isArchivedField] !== true) {
            throw new Error('Entity must be archived first');
        }
        // Grace period measured from the last update (the archive itself).
        const deleteAfterArchiveDurationInSeconds = options.services.config.features.deleteAfterArchiveDurationInSeconds;
        if (utils.getDateNow() - utils.getDate(this.state.updated_at).getTime() <
            deleteAfterArchiveDurationInSeconds * 1000) {
            throw new Error('Entity archived too recently');
        }
        const nextVersion = await this.getNextVersion();
        await this.apply(c.EVENT_TYPE_DELETED, {
            type: c.EVENT_TYPE_DELETED,
            [isDeletedField]: true,
        }, {
            imperativeVersion: nextVersion,
            mustEncrypt: false,
            isReadonly: isDeletedField,
        });
        const encryptedFields = _a.getEncryptedFields();
        const unset = {};
        /* @ts-ignore */
        encryptedFields.forEach((f) => (unset[f] = ''));
        // Remove encrypted fields from the current state document…
        await _a.getStatesCollection(_a.db(this.mongodb, false)).updateOne({
            [_a.getCorrelationField()]: this.correlationId,
        }, {
            $unset: unset,
        });
        // …and from the full event history.
        await _a.getEventsCollection(_a.db(this.mongodb, false)).updateMany({
            [_a.getCorrelationField()]: this.correlationId,
        }, {
            $unset: unset,
        });
        await this.getState();
    }
    /** Field name flagging readonly entities: model override, else global default. */
    static getIsReadonlyProperty(modelConfig = _a.getModelConfig()) {
        var _b, _c, _d, _e, _f;
        return ((_b = modelConfig.is_readonly) !== null && _b !== void 0 ? _b : (_f = (_e = (_d = (_c = options === null || options === void 0 ? void 0 : options.services) === null || _c === void 0 ? void 0 : _c.config) === null || _d === void 0 ? void 0 : _d.features) === null || _e === void 0 ? void 0 : _e.properties) === null || _f === void 0 ? void 0 : _f.is_readonly);
    }
    /** Field name flagging archived entities: model override, else global default. */
    static getIsArchivedProperty(modelConfig = _a.getModelConfig()) {
        var _b, _c, _d, _e, _f;
        return ((_b = modelConfig.is_archived) !== null && _b !== void 0 ? _b : (_f = (_e = (_d = (_c = options === null || options === void 0 ? void 0 : options.services) === null || _c === void 0 ? void 0 : _c.config) === null || _d === void 0 ? void 0 : _d.features) === null || _e === void 0 ? void 0 : _e.properties) === null || _f === void 0 ? void 0 : _f.is_archived);
    }
    /** Field name flagging deleted entities: model override, else global default. */
    static getIsDeletedProperty(modelConfig = _a.getModelConfig()) {
        var _b, _c, _d, _e, _f;
        return ((_b = modelConfig.is_deleted) !== null && _b !== void 0 ? _b : (_f = (_e = (_d = (_c = options === null || options === void 0 ? void 0 : options.services) === null || _c === void 0 ? void 0 : _c.config) === null || _d === void 0 ? void 0 : _d.features) === null || _e === void 0 ? void 0 : _e.properties) === null || _f === void 0 ? void 0 : _f.is_deleted);
    }
    /**
     * Lazily resolves and caches this model's encryption keys:
     * model-specific keys plus the shared `all` keys, deduplicated.
     * Also primes the hashed-keys cache.
     */
    static getEncryptionKeys(modelConfig = _a.getModelConfig()) {
        if (Array.isArray(_a.encryptionKeys)) {
            return _a.encryptionKeys;
        }
        _a.encryptionKeys = [
            ...(options.services.config.security.encryptionKeys[modelConfig.name] ||
                []),
            ...(options.services.config.security.encryptionKeys.all || []),
        ].filter(utils.unique);
        _a.hashedEncryptionKeys = _a.encryptionKeys.map((k) => _a.hashValue(k));
        return _a.encryptionKeys;
    }
    /** Lazily computed SHA-512 hashes of the encryption keys (cached). */
    static getHashesEncryptionKeys() {
        if (Array.isArray(_a.hashedEncryptionKeys)) {
            return _a.hashedEncryptionKeys;
        }
        _a.hashedEncryptionKeys = _a.getEncryptionKeys().map((k) => _a.hashValue(k));
        return _a.hashedEncryptionKeys;
    }
    /** First N keys eligible for new encryptions (key-rotation window). */
    static getEligibleKeys(keys) {
        return keys.slice(0, options.services.config.security.activeNumberEncryptionKeys);
    }
    /** Random index over the active (rotation-eligible) key window. */
    static getEncryptionKeyIndex(keys) {
        return Math.floor(utils.random() *
            Math.min(keys.length, options.services.config.security.activeNumberEncryptionKeys));
    }
static getEncryptedFields() {
return _a.getModelConfig().encrypted_fields || [];
}
static isEncryptedField(field) {
return _a.getEncryptedFields().includes(field);
}
static hashValue(data) {
const hash = crypto_1.default.createHash('sha512');
hash.update(JSON.stringify(data));
return hash.digest('hex');
}
    /**
     * Encrypts a single value with a randomly selected active key.
     *
     * Output: { hash, encrypted: "<keyHashPrefix>:<iv>:<ciphertext>:<tag>:<algorithm>" }
     * where `hash` (SHA-512 of the plaintext) enables equality lookups
     * without decryption. Returns `data` untouched when no key is configured.
     */
    static encryptValue(data, keys, hashes, algorithm = 'aes-256-gcm') {
        if (keys.length === 0) {
            return data;
        }
        const value = JSON.stringify(data);
        // NOTE(review): IV_LENGTH is 32 bytes; GCM's conventional IV size is
        // 12 bytes, but Node accepts arbitrary IV lengths — confirm intended.
        const iv = crypto_1.default.randomBytes(_a.IV_LENGTH);
        const encryptionKeyIndex = _a.getEncryptionKeyIndex(keys);
        const encryptionKey = keys[encryptionKeyIndex];
        const hashedEncryptionKey = hashes[encryptionKeyIndex];
        const cipher = crypto_1.default.createCipheriv(algorithm, encryptionKey, iv);
        const enc1 = cipher.update(value, 'utf8');
        const enc2 = cipher.final();
        // @ts-ignore
        const tag = cipher.getAuthTag();
        const encrypted = Buffer.concat([enc1, enc2]);
        return {
            hash: _a.hashValue(data),
            encrypted: [
                // 6-char key-hash prefix lets decryptValue find the right key.
                hashedEncryptionKey.slice(0, 6),
                iv.toString('hex'),
                encrypted.toString('hex'),
                tag.toString('hex'),
                algorithm,
            ].join(':'),
        };
    }
static encrypt(data, additionalEncryptedFields = [], keys = _a.getEncryptionKeys(), hashes = _a.getHashesEncryptionKeys(), algorithm) {
const encryptedFields = [
..._a.getEncryptedFields(),
...additionalEncryptedFields,
];
if (encryptedFields.length === 0) {
return data;
}
const encryptedData = lodash_1.default.cloneDeep(data);
for (const encryptedField of encryptedFields) {
if (lodash_1.default.has(encryptedData, encryptedField)) {
const value = lodash_1.default.get(encryptedData, encryptedField);
lodash_1.default.set(encryptedData, encryptedField, _a.encryptValue(value, keys, hashes, algorithm));
}
}
return encryptedData;
}
    /**
     * @deprecated
     *
     * @warn fallback on lower secured encryption
     * algorithm `aes-256-cbc`
     *
     * Decrypts the legacy "<prefix>:<iv>:<ciphertext>" format (no auth tag,
     * no algorithm segment) produced before the GCM scheme.
     */
    static decryptValueLegacy(value, key) {
        const parts = value.encrypted.split(':');
        const iv = Buffer.from(parts[1], 'hex');
        const encryptedText = Buffer.from(parts[2], 'hex');
        const decipher = crypto_1.default.createDecipheriv('aes-256-cbc', Buffer.from(key), iv);
        let decrypted = decipher.update(encryptedText);
        decrypted = Buffer.concat([decrypted, decipher.final()]);
        return JSON.parse(decrypted.toString());
    }
    /**
     * Decrypts a value produced by encryptValue().
     *
     * Returns the value untouched when no keys are configured, when it is
     * not in encrypted form, or when no known key matches the stored prefix.
     * Falls back to the legacy CBC scheme when the algorithm segment is absent.
     */
    static decryptValue(value, keys, hashedKeys) {
        if (keys.length === 0) {
            return value;
        }
        if (!value.encrypted) {
            return value;
        }
        const parts = value.encrypted.split(':');
        // Match either the raw key or its hash against the stored prefix
        // (presumably covers older payloads that stored a raw-key prefix).
        const key = keys.find((k, i) => k.startsWith(parts[0]) || hashedKeys[i].startsWith(parts[0]));
        if (!key) {
            return value;
        }
        const algorithm = parts[4];
        if (!algorithm) {
            return _a.decryptValueLegacy(value, key);
        }
        const iv = Buffer.from(parts[1], 'hex');
        const encryptedText = Buffer.from(parts[2], 'hex');
        const tag = Buffer.from(parts[3], 'hex');
        // NOTE(review): always deciphers with aes-256-gcm regardless of the
        // stored algorithm segment — fine while GCM is the only modern scheme.
        const decipher = crypto_1.default.createDecipheriv('aes-256-gcm', key, iv);
        decipher.setAuthTag(tag);
        let decrypted = decipher.update(encryptedText, undefined, 'utf8');
        decrypted += decipher.final('utf8');
        return JSON.parse(decrypted);
    }
    /**
     * Mirror of encrypt(): returns a deep copy of `data` with every
     * configured (plus additional) encrypted field decrypted in place.
     * Returns `data` unchanged when no field is configured.
     */
    static decrypt(data, additionalEncryptedFields = [], keys = _a.getEncryptionKeys(), hashedKeys = _a.getHashesEncryptionKeys()) {
        const encryptedFields = [
            ..._a.getEncryptedFields(),
            ...additionalEncryptedFields,
        ];
        if (encryptedFields.length === 0) {
            return data;
        }
        const decryptedData = lodash_1.default.cloneDeep(data);
        for (const encryptedField of encryptedFields) {
            if (lodash_1.default.has(decryptedData, encryptedField)) {
                const encryptedValue = lodash_1.default.get(decryptedData, encryptedField);
                lodash_1.default.set(decryptedData, encryptedField, _a.decryptValue(encryptedValue, keys, hashedKeys));
            }
        }
        return decryptedData;
    }
static async snapshot(mongodb, correlationId, options) {
const db = _a.db(mongodb, false);
const correlationField = _a.getCorrelationField();
let snapshot;
if ((options === null || options === void 0 ? void 0 : options.version) === undefined) {
const { stateInDb } = await _a.getState(db, correlationId);
snapshot = stateInDb;
}
else {
snapshot = await _a.getStateAtVersion(db, correlationId, options === null || options === void 0 ? void 0 : options.version);
}
if (snapshot === null) {
throw new Error('Snapshot state is invalid');
}
await event_source_1.Snapshots.create(_a.getSnapshotsCollection(db), correlationField, snapshot);
if ((options === null || options === void 0 ? void 0 : options.removePastEvents) !== true) {
return snapshot;
}
const eventsCursor = await _a.getEvents(db, correlationId, -1);
while (await eventsCursor.hasNext()) {
const event = await eventsCursor.next();
if (event.version > snapshot.version) {
break;
}
await _a.getEventsCollection(_a.db(mongodb)).deleteOne({
[correlationField]: correlationId,
version: event.version,
});
}
eventsCursor.close();
return snapshot;
}
    },
    // Static configuration shared by every generated Model class:
    _a.IV_LENGTH = 32, // byte length of the random IV used by encryptValue
    _a.RETRY_ERRORS = ['version_unicity', 'correlation_id_version_unicity'], // transient error fragments retried by handleWithRetry
    _a.encryptionKeys = null, // lazy cache filled by getEncryptionKeys()
    _a.hashedEncryptionKeys = null, // lazy cache filled by getHashesEncryptionKeys()
    _a; };
//# sourceMappingURL=Generic.js.map