
@liqd-js/mongodb-model

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.AbstractModel = exports.Aggregator = void 0; const mongodb_1 = require("mongodb"); const helpers_1 = require("./helpers"); const helpers_2 = require("./helpers"); const cache_1 = __importDefault(require("@liqd-js/cache")); const fast_object_hash_1 = __importDefault(require("@liqd-js/fast-object-hash")); const mongodb_query_optimizer_1 = __importDefault(require("@liqd-js/mongodb-query-optimizer")); exports.Aggregator = require('@liqd-js/aggregator'); /** * Abstract model class * @template DBE - Database entity * @template DTO - Data transfer object * @template Extensions - Model parameters * @class */ class AbstractModel { collection; abstractFindAggregator; converters; smartFilters; computedProperties; #models; cache; constructor(models, collection, params) { this.collection = collection; this.converters = params.converters ?? { dbe: { converter: (dbe) => dbe } }; this.smartFilters = params.smartFilters; this.computedProperties = params.computedProperties; params.cache && (this.cache = new cache_1.default(params.cache)); this.#models = models; models[helpers_1.REGISTER_MODEL](this, collection.collectionName); this.abstractFindAggregator = new exports.Aggregator(async (ids, conversion, accessControl) => { try { //const filter = accessControl ? { $and: [ { _id: { $in: ids.map( id => this.dbeID( id ))}}, accessControl ]} : { _id: { $in: ids.map( id => this.dbeID( id ))}}; //const documents = await this.collection.find( filter, { projection: this.converters[conversion].projection, collation: { locale: 'en' } }).toArray(); // const documents = accessControl // ? 
                //     ? await this.collection.aggregate([{ $match: { $and: [ { _id: { $in: ids.map( id => this.dbeID( id ))}}, accessControl ]}}]).toArray()
                //     : await this.collection.find( { _id: { $in: ids.map( id => this.dbeID( id ))}}, { projection: this.converters[conversion].projection, collation: { locale: 'en' } }).toArray();
                const cacheKeys = ids.map(id => [id, this.cacheKey(id, 'dbe', accessControl)]);
                let documents = [];
                const missingIDs = cacheKeys.filter(([_, key]) => !this.cache?.get(key)).map(([id, _]) => id);
                const cachedIDs = ids.filter(id => !missingIDs.includes(id));
                if (cachedIDs.length) {
                    (0, helpers_1.LOG_FILE)(`FROM CACHE - Collection: ${this.collection.collectionName}`);
                    (0, helpers_1.LOG_FILE)(`Count: ${ids.length}, IDs: ${ids}`, true);
                }
                (0, helpers_1.flowGet)('benchmark') && this.cache && console.log(`${helpers_1.formatter.format(new Date())} ${this.constructor.name}::aggregator - cached(${ids.length - missingIDs.length}), fetched(${missingIDs.length})`);
                if (missingIDs.length !== ids.length) {
                    // TODO: cache - return a clone, not a reference
                    documents.push(...ids
                        .filter(id => !missingIDs.includes(id))
                        .map(id => this.cache?.get(this.cacheKey(id, 'dbe', accessControl))));
                }
                if (missingIDs.length) {
                    const pipeline = await this.pipeline({ filter: { $and: [{ _id: { $in: missingIDs.map(id => this.dbeID(id)) } }, accessControl || {}] }, projection: this.converters[conversion].projection }, conversion);
                    (0, helpers_1.flowGet)('log') && (0, helpers_1.DUMP)(pipeline);
                    const start = Date.now();
                    documents.push(...await this.collection.aggregate(pipeline, { collation: { locale: 'en' } }).toArray());
                    (0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}`);
                    (0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
                    (0, helpers_1.LOG_FILE)(pipeline, true);
                    if (this.cache) {
                        for (const doc of documents) {
                            this.cache.set(this.cacheKey(doc._id, 'dbe', accessControl), doc);
                        }
                    }
                }
                const index = documents.reduce((i, dbe) => (i.set(this.dtoID(dbe._id ?? dbe.id), dbe), i), new Map());
                return ids.map(id => index.get(this.dtoID(id)) ?? null);
            }
            catch (e) {
                if (e instanceof helpers_2.ModelError) {
                    throw e;
                }
                throw new helpers_2.ModelError(this, e?.toString());
            }
        });
    }
    id() { return new mongodb_1.ObjectId().toString(); }
    dbeID(id) { return id; }
    dtoID(dbeID) { return dbeID; }
    async pipeline(options, conversion) {
        const { computedProperties: converterComputedProperties } = this.converters[conversion ?? 'dto'];
        const { filter, sort, projection, computedProperties: optionsComputedProperties, smartFilter } = (0, helpers_2.resolveBSONObject)(options);
        const converterProperties = { '': await this.resolveComputedProperties(converterComputedProperties) };
        const optionsProperties = { '': await this.resolveComputedProperties(optionsComputedProperties) };
        const computedProperties = (0, helpers_1.mergeComputedProperties)(converterProperties, optionsProperties)[''];
        const params = {
            filter,
            projection,
            computedProperties,
            sort,
            smartFilter: smartFilter && await this.resolveSmartFilter(smartFilter) || undefined,
            accessFilter: await this.accessFilter() || undefined,
        };
        const queryBuilder = new helpers_2.QueryBuilder();
        return await queryBuilder.pipeline(params);
    }
    async create(dbe, id, options) {
        /*if( options?.converter ) { dbe = await options.converter( dbe as T ); }*/
        const _id = id ?? await this.id();
        try {
            await this.collection.insertOne({ ...dbe, _id: this.dbeID(_id) } /*, { collation: { locale: 'en' } }*/);
        }
        catch (e) {
            if (options?.duplicateIgnore === true && e.code === 11000) {
                return this.dtoID(await this.collection.findOne(e.keyValue, { projection: { _id: 1 }, collation: { locale: 'en' } }).then(r => r?._id));
            }
            throw e;
        }
        return _id;
    }
    async createFrom(data, id, options) {
        throw new Error('Method not implemented.');
    }
    async update(id, update, options) {
        let matchedCount = 0, modifiedCount = 0, documentBefore, documentAfter;
        // TODO: do properly
        const canUpdate = await this.get(id, 'dbe');
        if (!canUpdate) {
            return {
                matchedCount: 0,
                modifiedCount: 0,
            };
        }
        if (options?.documentAfter) {
            await this.#models.transaction(async () => {
                documentBefore = options?.documentBefore ? (await this.collection.findOne({ _id: this.dbeID(id) }, { collation: { locale: 'en' } })) || undefined : undefined; // TODO remove
                // documentAfter = (await this.collection.findOneAndUpdate({ _id: ( this.dbeID ? this.dbeID( id ) : id ) as WithId<DBE>['_id'] }, isUpdateOperator( update ) ? update : { $set: update } as UpdateFilter<DBE>/*, { collation: { locale: 'en' } }*/ )) as DBE || undefined;
                documentAfter = (await this.collection.findOneAndUpdate({ _id: (this.dbeID ? this.dbeID(id) : id) }, (0, helpers_1.toUpdateOperations)(update))) || undefined;
                matchedCount = documentAfter ? 1 : 0;
                modifiedCount = documentAfter ? 1 : 0;
            });
        }
        else {
            documentBefore = options?.documentBefore ? (await this.collection.findOne({ _id: this.dbeID(id) }, { collation: { locale: 'en' } })) || undefined : undefined; // TODO remove
            // const res = await this.collection.updateOne({ _id: ( this.dbeID ? this.dbeID( id ) : id ) as WithId<DBE>['_id'] }, isUpdateOperator( update ) ? update : { $set: update } as UpdateFilter<DBE>/*, { collation: { locale: 'en' } }*/ );
            const res = await this.collection.updateOne({ _id: (this.dbeID ? this.dbeID(id) : id) }, (0, helpers_1.toUpdateOperations)(update));
            matchedCount = res.matchedCount;
            modifiedCount = res.modifiedCount;
        }
        return { matchedCount, modifiedCount, documentBefore, documentAfter };
    }
    async updateMany(id, update) {
        // TODO: do properly
        const canUpdate = await this.get(id, 'dbe');
        if (!canUpdate || canUpdate.length !== id.length) {
            return {
                matchedCount: 0,
                modifiedCount: 0,
            };
        }
        //const res = await this.collection.updateMany({ _id: { $in: id.map( id => this.dbeID( id ))}}, isUpdateOperator( update ) ? update : { $set: update } as UpdateFilter<DBE> );
        const res = await this.collection.updateMany({ _id: { $in: id.map(id => this.dbeID(id)) } }, (0, helpers_1.toUpdateOperations)(update));
        return { matchedCount: res.matchedCount, modifiedCount: res.modifiedCount };
    }
    async updateOne(match, update, options) {
        throw new Error('Method not implemented.');
        return { matchedCount: 1, modifiedCount: 1 };
    }
    async get(id, conversion = 'dto', filtered = false) {
        const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_1.Benchmark(this.constructor.name + ':get(' + conversion + ')') : undefined;
        const documents = await this.abstractFindAggregator.call((0, helpers_1.Arr)(id), conversion, await this.accessFilter());
        benchmark?.step('QUERY');
        let entries = await Promise.all(documents.map(dbe => dbe ? (0, helpers_1.convert)(this, this.converters[conversion].converter, dbe, conversion) : null));
        benchmark?.step('CONVERTER');
        if (filtered) {
            entries = entries.filter(Boolean);
        }
        return Array.isArray(id) ? entries : entries[0] ?? null;
    }
    async find(options, conversion = 'dto', sort) {
        const { converter } = this.converters[conversion];
        const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_1.Benchmark(this.constructor.name + ':find(' + conversion + ')') : undefined;
        const dbe = await this.aggregate([{ $limit: 1 }], { ...options, sort }).then(r => r[0]);
        benchmark?.step('QUERY');
        const data = dbe ? await (0, helpers_1.convert)(this, converter, dbe, conversion) : null;
        benchmark?.step('CONVERTER');
        return data;
    }
    async list(options, conversion = 'dto') {
        const { converter, computedProperties: compProps, cache } = this.converters[conversion];
        let { filter = {}, sort = { _id: 1 }, cursor, limit, smartFilter: sFilter, countLimit, ...rest } = (0, helpers_2.resolveBSONObject)(options);
        const prev = cursor?.startsWith('prev:');
        if (!sort._id) {
            sort = { ...sort, _id: 1 };
        }
        const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_1.Benchmark(this.constructor.name + ':list(' + conversion + ')') : undefined;
        const accessFilter = await this.accessFilter() || undefined;
        const smartFilter = options.smartFilter && await this.resolveSmartFilter(options.smartFilter);
        const computedProperties = compProps && await this.resolveComputedProperties(Array.isArray(compProps) ? compProps : compProps()) || undefined;
        const params = { filter, sort, smartFilter, cursor, limit, countLimit, ...rest, accessFilter, pipeline: options.pipeline, computedProperties };
        const queryBuilder = new helpers_2.QueryBuilder();
        let [pipeline, countPipeline] = await Promise.all([
            queryBuilder.pipeline(params),
            options.count ? queryBuilder.count(params) : undefined
        ]);
        (0, helpers_1.flowGet)('log') && (0, helpers_1.DUMP)(pipeline);
        (0, helpers_1.flowGet)('log') && countPipeline && (0, helpers_1.DUMP)(countPipeline);
        if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
            let optimizer = new mongodb_query_optimizer_1.default();
            pipeline = optimizer.optimizePipeline(pipeline);
            countPipeline && (countPipeline = optimizer.optimizePipeline(countPipeline));
        }
        const start = Date.now();
        let [entries, total] = await Promise.all([
            this.collection.aggregate(pipeline, { collation: { locale: 'en' } }).toArray().then(r => {
                (0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}`);
                (0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
                (0, helpers_1.LOG_FILE)(pipeline, true);
                return r;
            }),
            options.count ? this.collection.aggregate(countPipeline, { collation: { locale: 'en' } }).toArray().then(r => {
                (0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}`);
                (0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
                (0, helpers_1.LOG_FILE)(countPipeline, true);
                return r[0]?.count ?? 0;
            }) : undefined
        ]);
        benchmark?.step('QUERY');
        const result = await Promise.all(entries.map(async (dbe, i) => {
            const dto = await (0, helpers_1.convert)(this, converter, dbe, conversion);
            if (this.cache && !options.projection) {
                this.cache.set(this.cacheKey(dbe._id, 'dbe', await this.accessFilter()), dbe);
            }
            dto.$cursor = (0, helpers_2.getCursor)(dbe, sort); // TODO: beware of clone wars
            return dto;
        }));
        benchmark?.step('CONVERTER');
        if (options.count) {
            Object.defineProperty(result, 'total', { value: total ?? 0, writable: false });
        }
        return prev ? result.reverse() : result;
    }
    async aggregate(pipeline, options) {
        let aggregationPipeline = (0, helpers_1.isSet)(options)
            ? [...await this.pipeline(options, 'dbe'), ...(0, helpers_2.resolveBSONObject)(pipeline)]
            : (0, helpers_2.resolveBSONObject)(pipeline);
        (0, helpers_1.flowGet)('log') && (0, helpers_1.DUMP)(aggregationPipeline);
        if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
            aggregationPipeline = new mongodb_query_optimizer_1.default().optimizePipeline(aggregationPipeline);
        }
        const start = Date.now();
        const res = await this.collection.aggregate(aggregationPipeline, { collation: { locale: 'en' } }).toArray();
        (0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}`);
        (0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
        (0, helpers_1.LOG_FILE)(aggregationPipeline, true);
        return res;
    }
    async count(pipeline, options) {
        let countPipeline = [...pipeline, { $count: 'count' }];
        if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
            countPipeline = new mongodb_query_optimizer_1.default().optimizePipeline(countPipeline);
        }
        return this.aggregate(countPipeline, options).then(r => r[0]?.count ?? 0);
    }
    // TODO: add support so that when this returns false, no query is made and an error is thrown instead
    async accessFilter() { }
    async resolveSmartFilter(smartFilter) {
        const pipeline = [];
        let filter = {};
        const extraFilters = {};
        for (const [key, value] of Object.entries(smartFilter)) {
            if ((0, helpers_1.hasPublicMethod)(this.smartFilters, key)) {
                const result = await this.smartFilters[key](value);
                result.pipeline && pipeline.push(...result.pipeline);
                result.filter && (filter = { $and: [{ ...filter }, result.filter].filter(f => Object.keys(f).length > 0) });
            }
            else {
                extraFilters[key] = value;
            }
        }
        if (Object.keys(extraFilters).length > 0) {
            throw new Error(`Custom filter contains unsupported filters - ${JSON.stringify(extraFilters, null, 2)}`);
        }
        return { filter, pipeline };
    }
    async resolveComputedProperties(properties) {
        const result = { fields: {}, pipeline: [] };
        if (Array.isArray(properties)) {
            properties = properties.reduce((acc, val) => { acc[val] = null; return acc; }, {});
        }
        for (const property in properties) {
            if ((0, helpers_1.hasPublicMethod)(this.computedProperties, property)) {
                const resolvedProperties = await this.computedProperties[property](properties[property]);
                result.fields = { ...result.fields, ...resolvedProperties.fields };
                result.pipeline?.push(...(resolvedProperties.pipeline || []));
            }
        }
        return {
            fields: result.fields && Object.keys(result.fields).length ? result.fields : null,
            pipeline: result.pipeline?.length ? result.pipeline : null
        };
    }
    async delete(id) {
        // TODO: do properly
        const canUpdate = await this.get(id, 'dbe');
        if (!canUpdate) {
            return false;
        }
        return (await this.collection.deleteOne({ _id: this.dbeID(id) } /*, { collation: { locale: 'en' } }*/)).deletedCount === 1;
    }
    cacheKey(id, conversion, accessControl) {
        return (0, fast_object_hash_1.default)({ id: this.dtoID(id), accessControl, projection: this.converters[conversion].projection });
    }
}
exports.AbstractModel = AbstractModel;
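
For orientation, the sketch below shows one way a concrete model might subclass AbstractModel, based only on the constructor and method signatures visible in this file. It is a minimal sketch, not the library's documented API: the names JobModel, models and jobsCollection are hypothetical placeholders, the cache options shape is assumed, and the models registry must be an object compatible with the REGISTER_MODEL hook and transaction() used above, which is expected to come from the package itself.

    // Hypothetical usage sketch - assumes AbstractModel is re-exported from the package root, as it is from this file.
    const { AbstractModel } = require('@liqd-js/mongodb-model');
    const { ObjectId } = require('mongodb');

    class JobModel extends AbstractModel {
        constructor(models, collection) {
            super(models, collection, {
                converters: {
                    dbe: { converter: (dbe) => dbe },
                    dto: {
                        converter: (dbe) => ({ id: dbe._id.toString(), title: dbe.title }),
                        projection: { title: 1 }
                    }
                },
                smartFilters: {
                    // Each smart filter returns extra { filter, pipeline } merged into the query by resolveSmartFilter().
                    active: () => ({ filter: { status: 'active' } })
                },
                cache: { ttl: 60000 } // assumed @liqd-js/cache options shape
            });
        }

        dbeID(id) { return new ObjectId(id); }
        dtoID(dbeID) { return dbeID.toString(); }

        // Restrict every query to non-deleted documents.
        async accessFilter() { return { deleted: { $ne: true } }; }
    }

    // Later, assuming `models` and `jobsCollection` exist:
    // const jobs = new JobModel(models, jobsCollection);
    // const id = await jobs.create({ title: 'Example', status: 'active' });
    // const dto = await jobs.get(id);                                                  // single DTO or null
    // const page = await jobs.list({ smartFilter: { active: true }, limit: 10, count: true });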