@liqd-js/mongodb-model
Version:
Mongo model class
472 lines (469 loc) • 26.7 kB
JavaScript
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AbstractPropertyModel = void 0;
const mongodb_1 = require("mongodb");
const helpers_1 = require("./helpers");
const helpers_2 = require("./helpers");
const model_1 = require("./model");
const cache_1 = __importDefault(require("@liqd-js/cache"));
const fast_object_hash_1 = __importDefault(require("@liqd-js/fast-object-hash"));
const mongodb_query_optimizer_1 = __importDefault(require("@liqd-js/mongodb-query-optimizer"));
/**
* Abstract class for property models
* @template RootDBE - Root Database entity
* @template DBE - Database entity
* @template DTO - Data transfer object
* @template Extensions - Model parameters
*/
class AbstractPropertyModel {
collection;
abstractFindAggregator;
paths;
prefix;
converters;
smartFilters;
computedProperties;
#models;
cache;
/**
* @param models {AbstractModels} - Models instance this property model registers itself with
* @param collection - MongoDB collection holding the root documents
* @param path - dot-separated path of the property inside the root document; '[]' marks array segments (e.g. 'foo[].bar')
* @param params - model parameters: converters, smartFilters, computedProperties and optional cache settings
*/
constructor(models, collection, path, params) {
this.collection = collection;
this.#models = models;
this.paths = [...path.matchAll(/[^\[\]]+(\[\])?/g)].map(m => ({ path: m[0].replace(/^\./, '').replace(/\[\]$/, ''), array: m[0].endsWith('[]') }));
this.prefix = this.paths.map(p => p.path).join('.');
this.converters = params.converters ?? { dbe: { converter: (dbe) => dbe } };
this.smartFilters = params.smartFilters;
this.computedProperties = params.computedProperties;
params.cache && (this.cache = new cache_1.default(params.cache));
this.abstractFindAggregator = new model_1.Aggregator(async (ids, conversion, accessControl) => {
try {
ids = ids.map(id => this.dtoID(id));
const cacheKeys = ids.map(id => [id, this.cacheKey(id, conversion, accessControl)]);
let documents = [];
const missingIDs = cacheKeys.filter(([_, key]) => !this.cache?.get(key)).map(([id, _]) => id);
const cachedIDs = ids.filter(id => !missingIDs.includes(id));
if (cachedIDs.length) {
(0, helpers_1.LOG_FILE)(`FROM CACHE - Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`Count: ${ids.length}, IDs: ${ids}`, true);
}
(0, helpers_1.flowGet)('benchmark') && this.cache && console.log(`${helpers_1.formatter.format(new Date())} ${this.constructor.name}::aggregator - cached(${ids.length - missingIDs.length}), fetched(${missingIDs.length})`);
if (missingIDs.length !== ids.length) {
// TODO: cache - return a clone, not a reference
documents.push(...ids
.filter(id => !missingIDs.includes(id))
.map(id => this.cache?.get(this.cacheKey(id, conversion, accessControl))));
}
if (missingIDs.length) {
let pipeline = await this.pipeline({ filter: { id: { $in: ids.map(id => this.dbeID(id)) } }, projection: this.converters[conversion].projection });
const start = Date.now();
documents.push(...await this.collection.aggregate(pipeline, { collation: { locale: 'en' } }).toArray());
(0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
(0, helpers_1.LOG_FILE)(pipeline, true);
if (this.cache) {
for (const doc of documents) {
this.cache.set(this.cacheKey(doc.id, conversion, accessControl), doc);
}
}
}
return (0, helpers_1.map)(ids.map(id => this.dtoID(id)), documents, (dbe) => this.dtoID(dbe._id ?? dbe.id));
}
catch (e) {
if (e instanceof helpers_2.ModelError) {
throw e;
}
throw new helpers_2.ModelError(this, e?.toString());
}
});
models[helpers_1.REGISTER_MODEL](this, collection.collectionName, this.prefix);
}
id() { return new mongodb_1.ObjectId().toString(); }
/**
* Converts DTO['id'] to DBE['id']
* @param id {DTO['id'] | DBE['id']} - DTO or DBE id
* @returns {DBE['id']} - DBE id
*/
dbeID(id) { return id; }
/**
* Converts DBE['id'] to DTO['id']
* @param dbeID {DBE['id']} - DBE id
* @returns {DTO['id']} - DTO id
*/
dtoID(dbeID) { return dbeID; }
//private pipeline( rootFilter: Filter<RootDBE>, filter: Filter<DBE>, projection?: Document ): Document[]
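/**
* Builds the aggregation pipeline for this property path: unwinds array segments,
* applies the access/cursor/smart filters and computed properties, replaces the root
* with the property entity (optionally keeping '_root'), then applies sort/skip/limit.
* @param options - filter, sort, projection, cursor, smartFilter, computedProperties, skip, limit, pipeline
* @param conversion - converter whose computed properties are used (defaults to 'dto')
* @returns {Promise<Document[]>} - aggregation pipeline stages
*/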
async pipeline(options = {}, conversion) {
const { computedProperties } = this.converters[conversion ?? 'dto'];
let { filter = {}, sort = { id: -1 }, ...rest } = (0, helpers_1.resolveBSONObject)(options);
const queryBuilder = new helpers_2.QueryBuilder();
let pipeline = [], prefix = '$';
const smartFilter = options.smartFilter ? await this.resolveSmartFilter(options.smartFilter) : undefined;
const converterProperties = await this.resolveComputedProperties(computedProperties);
const optionsProperties = await this.resolveComputedProperties(options.computedProperties);
const computed = (0, helpers_1.mergeComputedProperties)(converterProperties, optionsProperties);
const gatheredFields = Object.values(computed || { fields: null, pipeline: null })
.reduce((acc, val) => {
const fields = { ...acc?.fields, ...val?.fields };
const pipeline = [...acc?.pipeline, ...(val?.pipeline || [])];
return { fields, pipeline };
}, { fields: {}, pipeline: [] });
let computedAddedFields = Object.fromEntries(([
...(0, helpers_1.collectAddedFields)([{ $addFields: gatheredFields?.fields || {} }]),
...(0, helpers_1.collectAddedFields)(gatheredFields?.pipeline || []),
]).map(f => {
if (f.startsWith(this.prefix + '.')) {
return [f.replace(new RegExp('^' + this.prefix + '.'), ''), 1];
}
return [f.split('.').reverse()[0], '$_root.' + f];
}));
computedAddedFields = (0, helpers_1.projectionToReplace)(computedAddedFields, this.prefix);
const gatheredFilters = Object.values(smartFilter || {})
.reduce((acc, val) => {
const filter = { ...acc?.filter, ...val?.filter };
const pipeline = [...acc?.pipeline, ...(val?.pipeline || [])];
return { filter, pipeline };
}, { filter: {}, pipeline: [] });
const needRoot = (0, helpers_1.getUsedFields)(gatheredFilters?.pipeline ?? []).used.some(el => el.startsWith('_root.'));
const stages = await this.filterStages(options);
const subPaths = (0, helpers_1.getSubPaths)(this.paths);
for (let i = 0; i <= subPaths.length; i++) {
const last = i === subPaths.length;
if (i !== 0 && (!last || this.paths[this.paths.length - 1].array)) {
pipeline.push({ $unwind: prefix = (prefix === '$' ? prefix : prefix + '.') + subPaths[i - 1] });
}
const tmpPrefix = prefix.replace(/^\$/, '');
needRoot && last && pipeline.push({ $addFields: { _root: '$$ROOT' } });
pipeline.push(...await queryBuilder.pipeline({
filter: stages[i],
smartFilter: smartFilter?.[tmpPrefix] ? smartFilter?.[tmpPrefix] : undefined,
computedProperties: computed?.[tmpPrefix] ? computed[tmpPrefix] : undefined,
}));
needRoot && last && pipeline.push({ $unset: '_root' });
}
let $project = '$' + this.prefix, $rootProject;
const { rootProjection, propertyProjection } = this.splitProjection(rest.projection ?? {});
const unsetFieldsRoot = (0, helpers_1.isExclusionProjection)(rootProjection) && (0, helpers_1.getUsedFields)([{ $match: rootProjection }]).used.map(el => ('_root.' + el)) || [];
const unsetFieldsProperty = (0, helpers_1.isExclusionProjection)(propertyProjection) && (0, helpers_1.getUsedFields)([{ $match: propertyProjection }]).used || [];
if ((0, helpers_1.isSet)(propertyProjection)) {
$project = (0, helpers_1.projectionToReplace)({ id: 1, ...propertyProjection }, this.prefix);
}
if ((0, helpers_1.isSet)(rootProjection)) {
$rootProject = typeof rootProjection === 'object' && unsetFieldsRoot.length === 0 ? (0, helpers_1.projectionToReplace)(rootProjection) : '$$ROOT';
}
if ($rootProject) {
pipeline.push({ $replaceWith: { $mergeObjects: [$project, { '_root': $rootProject, ...computedAddedFields }] } });
}
else {
pipeline.push({ $replaceWith: { $mergeObjects: [$project, computedAddedFields] } });
}
const unsetFields = [...unsetFieldsProperty, ...unsetFieldsRoot];
if (unsetFields.length) {
pipeline.push({ $unset: unsetFields });
}
const prev = options.cursor?.startsWith('prev:');
pipeline.push(...await queryBuilder.pipeline({
sort: prev ? (0, helpers_1.reverseSort)(sort) : sort,
skip: rest.skip,
limit: rest.limit,
pipeline: rest.pipeline,
//projection: options.projection,
}));
return pipeline;
}
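/**
* Creates a new property entry inside the parent root document.
* Uses $push when the last path segment is an array, $set otherwise.
* @param parentID - id of the parent document, resolved via the registered parent model
* @param dbe - database entity to insert (an 'id' field is added automatically)
* @param id - optional explicit id; a new one is generated otherwise
* @returns {Promise<DTO['id']>} - id of the created entry
*/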
async create(parentID, dbe, id) {
const _id = id ?? await this.id();
const parentModel = this.#models[helpers_1.GET_PARENT](this.collection.collectionName, this.prefix);
const parent = parentModel ? await parentModel.get(parentID) : null;
if (!parent || !parentModel) {
throw new helpers_2.ModelError(this, `Parent document not found: ${parentID}`);
}
const { parentIDPath, updatePath, arrayFilters } = (0, helpers_1.propertyModelUpdateParams)(this.paths, parentModel.dbeID(parentID));
const operation = this.paths[this.paths.length - 1].array ? '$push' : '$set';
await this.collection.updateOne({ [parentIDPath]: parentModel?.dbeID(parentID) }, { [operation]: { [updatePath]: { id: this.dbeID(_id), ...dbe } } }, { arrayFilters });
return _id;
}
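/**
* Updates a single property entry matched by its id, using arrayFilters when the path
* traverses arrays. Optionally captures the document before/after the update.
* @returns {Promise<{ matchedRootCount, modifiedRootCount, documentBefore?, documentAfter? }>}
*/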
async update(id, update, options) {
let path = this.paths.map(p => p.path).join('.') + '.id';
let operations = {};
let updateOptions = {};
// TODO: do properly
const canUpdate = await this.get(id, 'dbe');
if (!canUpdate) {
return {
matchedRootCount: 0,
modifiedRootCount: 0,
};
}
if (this.paths.length === 1 && !this.paths[0].array) {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths[0].path);
}
// TODO - check whether this really should be an else if
else if (this.paths[this.paths.length - 1].array) {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths.map(p => p.path).join('.$[].') + '.$[entry]');
updateOptions = { ...updateOptions, arrayFilters: [{ 'entry.id': this.dbeID(id) }] };
}
else {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths.slice(0, this.paths.length - 1).map(p => p.path).join('.$[].') + '.$[entry].' + this.paths[this.paths.length - 1].path);
updateOptions = { ...updateOptions, arrayFilters: [{ ['entry.' + this.paths[this.paths.length - 1].path + '.id']: this.dbeID(id) }] };
}
(0, helpers_1.flowGet)('log') && (0, helpers_1.LOG)({ match: { [path]: this.dbeID(id) }, operations, options: updateOptions });
const documentBefore = options?.documentBefore ? await this.get(id, 'dbe') || undefined : undefined;
//TODO remove let res = await this.collection.updateOne({[ path ]: this.dbeID( id )} as Filter<RootDBE>, isUpdateOperator( operations ) ? operations : { $set: operations } as UpdateFilter<RootDBE>, updateOptions );
let res = await this.collection.updateOne({ [path]: this.dbeID(id) }, (0, helpers_1.toUpdateOperations)(operations), updateOptions);
const documentAfter = options?.documentAfter ? await this.get(id, 'dbe') || undefined : undefined;
(0, helpers_1.flowGet)('log') && (0, helpers_1.LOG)({ res });
return { matchedRootCount: res.matchedCount, modifiedRootCount: res.modifiedCount, documentBefore, documentAfter };
}
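/**
* Updates multiple property entries matched by their ids in a single updateMany call.
* Returns zero counts unless every requested entry can be read back first.
* @returns {Promise<{ matchedRootCount, modifiedRootCount }>}
*/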
async updateMany(ids, update) {
let path = this.paths.map(p => p.path).join('.') + '.id';
let operations = {};
let options = {};
// TODO: do properly
const canUpdate = await this.get(ids, 'dbe');
if (!canUpdate || canUpdate.length !== ids.length) {
return {
matchedRootCount: 0,
modifiedRootCount: 0,
};
}
if (this.paths.length === 1 && !this.paths[0].array) {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths[0].path);
}
else if (this.paths[this.paths.length - 1].array) {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths.map(p => p.path).join('.$[].') + '.$[entry]');
options = { ...options, arrayFilters: [{ 'entry.id': { $in: ids.map(id => this.dbeID(id)) } }] };
}
else {
operations = (0, helpers_1.addPrefixToUpdate)(update, this.paths.slice(0, this.paths.length - 1).map(p => p.path).join('.$[].') + '.$[entry].' + this.paths[this.paths.length - 1].path);
options = { ...options, arrayFilters: [{ ['entry.' + this.paths[this.paths.length - 1].path + '.id']: { $in: ids.map(id => this.dbeID(id)) } }] };
}
(0, helpers_1.flowGet)('log') && (0, helpers_1.LOG)({ match: { [path]: { $in: ids.map(id => this.dbeID(id)) } }, operations, options });
//TODO remove let res = await this.collection.updateMany({[ path ]: { $in: ids.map( id => this.dbeID( id )) }} as Filter<RootDBE>, isUpdateOperator( operations ) ? operations : { $set: operations } as UpdateFilter<RootDBE>, options );
let res = await this.collection.updateMany({ [path]: { $in: ids.map(id => this.dbeID(id)) } }, (0, helpers_1.toUpdateOperations)(operations), options);
(0, helpers_1.flowGet)('log') && (0, helpers_1.LOG)({ res });
return { matchedRootCount: res.matchedCount, modifiedRootCount: res.modifiedCount };
}
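/**
* Fetches one or more property entries by id through the caching aggregator and
* converts them with the selected converter.
* @param id - a single id or an array of ids
* @param conversion - converter key (defaults to 'dto')
* @param filtered - when true, drops null results from an array response
* @returns a single entry (or null) for a single id, an array of entries for an array of ids
*/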
async get(id, conversion = 'dto', filtered = false) {
const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_2.Benchmark(this.constructor.name + ':get(' + conversion + ')') : undefined;
const documents = await this.abstractFindAggregator.call((0, helpers_1.Arr)(id), conversion, await this.accessFilter());
benchmark?.step('QUERY');
let entries = await Promise.all(documents.map(dbe => dbe ? (0, helpers_1.convert)(this, this.converters[conversion].converter, dbe, conversion) : null));
benchmark?.step('CONVERTER');
if (filtered) {
entries = entries.filter(Boolean);
}
return Array.isArray(id) ? entries : entries[0] ?? null;
}
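/**
* Finds the first property entry matching the given filter/smartFilter and converts it.
* @returns {Promise<DTO | null>}
*/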
async find(options, conversion = 'dto', sort) {
const { converter, projection } = this.converters[conversion];
const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_2.Benchmark(this.constructor.name + ':get(' + conversion + ')') : undefined;
let pipeline = await this.pipeline({ filter: options.filter, smartFilter: options.smartFilter, projection, sort, limit: 1 });
if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
pipeline = new mongodb_query_optimizer_1.default().optimizePipeline(pipeline);
}
(0, helpers_1.flowGet)('log') && (console.log(this.constructor.name + '::find', options.filter), (0, helpers_1.DUMP)(pipeline));
const start = Date.now();
const dbe = (await this.collection.aggregate(pipeline).toArray())[0];
(0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
(0, helpers_1.LOG_FILE)(pipeline, true);
benchmark?.step('QUERY');
const data = dbe ? await (0, helpers_1.convert)(this, converter, dbe, conversion) : null;
benchmark?.step('CONVERTER');
return data;
}
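/**
* Lists property entries with cursor-based pagination; attaches a '$cursor' to each entry
* and, when 'count' is requested, a non-writable 'total' property on the result array.
* @returns {Promise<DTO[]>}
*/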
async list(options, conversion = 'dto') {
const { converter, projection } = this.converters[conversion];
const prev = options.cursor?.startsWith('prev:');
const queryBuilder = new helpers_2.QueryBuilder();
const benchmark = (0, helpers_1.flowGet)('benchmark') ? new helpers_2.Benchmark(this.constructor.name + ':list(' + conversion + ')') : undefined;
const resolvedList = (0, helpers_1.resolveBSONObject)(options);
let { cursor, sort = { id: 1 }, limit, skip, countLimit, ...countOptions } = resolvedList;
if (!sort.id) {
sort = { ...sort, id: 1 };
}
let pipeline = await this.pipeline({ ...resolvedList, sort, projection }, conversion);
let countPipeline = queryBuilder.buildCountPipeline(await this.pipeline({
...countOptions,
projection
}), countLimit);
if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
const optimizer = new mongodb_query_optimizer_1.default();
pipeline = optimizer.optimizePipeline(pipeline);
countPipeline = optimizer.optimizePipeline(countPipeline);
}
const start = Date.now();
const [entries, total] = await Promise.all([
this.collection.aggregate(pipeline, { collation: { locale: 'en' } }).toArray().then(r => {
(0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
(0, helpers_1.LOG_FILE)(pipeline, true);
return r;
}),
resolvedList.count
? this.collection.aggregate(countPipeline, { collation: { locale: 'en' } }).toArray().then(r => {
(0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
(0, helpers_1.LOG_FILE)(countPipeline, true);
return r[0]?.count ?? 0;
})
: 0
]);
benchmark?.step('QUERY');
(0, helpers_1.flowGet)('log') && (0, helpers_1.LOG)({
list: pipeline,
total: resolvedList.count ? countPipeline : undefined
});
const result = await Promise.all(entries.map(async (dbe, i) => {
const dto = await (0, helpers_1.convert)(this, converter, dbe, conversion);
if (this.cache && !options.projection) {
this.cache.set(this.cacheKey(dbe._id ?? dbe.id, 'dbe', await this.accessFilter()), dbe);
}
dto.$cursor = (0, helpers_1.getCursor)(dbe, sort); // TODO: beware of clone conflicts
return dto;
}));
benchmark?.step('CONVERTER');
if (resolvedList.count) {
Object.defineProperty(result, 'total', { value: total ?? 0, writable: false });
}
return prev ? result.reverse() : result;
}
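/**
* Runs a custom aggregation pipeline appended after the model pipeline built from 'options'.
* @returns {Promise<T[]>} - raw aggregation results
*/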
async aggregate(pipeline, options) {
let aggregationPipeline = [...await this.pipeline(options, 'dbe'), ...(0, helpers_1.resolveBSONObject)(pipeline)];
if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
aggregationPipeline = new mongodb_query_optimizer_1.default().optimizePipeline(aggregationPipeline);
}
(0, helpers_1.flowGet)('log') && (0, helpers_1.DUMP)(aggregationPipeline);
const start = Date.now();
const res = await this.collection.aggregate(aggregationPipeline, { collation: { locale: 'en' } }).toArray();
(0, helpers_1.LOG_FILE)(`Collection: ${this.collection.collectionName}.${this.paths.map(el => el.path).join('.')}`);
(0, helpers_1.LOG_FILE)(`TIME: ${Date.now() - start} ms`);
(0, helpers_1.LOG_FILE)(aggregationPipeline, true);
return res;
/* WHY THE HELL WAS IT LIKE THAT
flowGet( 'log' ) && DUMP( isSet( options ) ? [ ...await this.pipeline( options! ), ...pipeline ] : pipeline );
return this.collection.aggregate( isSet( options ) ? [ ...await this.pipeline( options! ), ...pipeline ] : pipeline ).toArray() as Promise<T[]>;*/
}
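/**
* Counts the documents produced by the given pipeline (appends a $count stage).
* @returns {Promise<number>}
*/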
async count(pipeline, options) {
let countPipeline = [...pipeline, { $count: 'count' }];
if ((0, helpers_1.flowGet)('experimentalFlags')?.['query-optimizer']) {
countPipeline = new mongodb_query_optimizer_1.default().optimizePipeline(countPipeline);
}
return await this.aggregate(countPipeline, options).then(r => r[0]?.count ?? 0);
}
// TODO add support so that when this returns false, no query is executed at all and an error is thrown instead
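/**
* Hook for subclasses to provide an access-control filter; the base implementation
* returns undefined (no restriction).
*/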
async accessFilter() { }
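/**
* Resolves smart filters registered on this model; unknown keys are delegated to the
* parent model, and an error is thrown for filters not supported anywhere in the chain.
* @returns map of path prefix to { filter, pipeline }
*/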
async resolveSmartFilter(smartFilter) {
const result = {};
const pipeline = [];
let filter = {};
const extraFilters = {};
for (const [key, value] of Object.entries(smartFilter)) {
if ((0, helpers_1.hasPublicMethod)(this.smartFilters, key)) {
const result = await this.smartFilters[key](value);
result.pipeline && pipeline.push(...(0, helpers_1.addPrefixToPipeline)(result.pipeline, this.prefix));
result.filter && (filter = { $and: [{ ...filter }, (0, helpers_1.addPrefixToFilter)(result.filter, this.prefix)].filter(f => Object.keys(f).length > 0) });
}
else {
extraFilters[key] = value;
}
}
if (pipeline.length > 0 || Object.keys(filter).length > 0) {
result[this.prefix] = { filter, pipeline };
}
const parentModel = this.#models[helpers_1.GET_PARENT](this.collection.collectionName, this.prefix);
if (parentModel) {
const resolved = await parentModel.resolveSmartFilter(extraFilters);
if ('filter' in resolved && 'pipeline' in resolved) {
result[''] = resolved;
}
else if (Object.keys(resolved).length > 0) {
for (const prefix in resolved) {
result[prefix] = resolved[prefix];
}
}
}
else if (Object.keys(extraFilters).length > 0) {
throw new Error(`Custom filter contains unsupported filters - ${JSON.stringify(extraFilters, null, 2)}`);
}
return result;
}
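/**
* Resolves computed properties registered on this model; unknown properties are delegated
* to the parent model, and an error is thrown for properties not supported anywhere in the chain.
* @returns map of path prefix to { fields, pipeline }
*/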
async resolveComputedProperties(properties) {
const result = {};
let pipeline = [];
let fields = {};
const extraProperties = {};
if (Array.isArray(properties)) {
properties = properties.reduce((acc, val) => { acc[val] = null; return acc; }, {});
}
for (const property in properties) {
if ((0, helpers_1.hasPublicMethod)(this.computedProperties, property)) {
const resolvedProperties = await (this.computedProperties[property])(properties[property]);
for (const field in resolvedProperties.fields) {
fields[this.prefix + '.' + field] = (0, helpers_1.addPrefixToFilter)(resolvedProperties.fields[field], this.prefix);
}
if (resolvedProperties.pipeline) {
pipeline.push(...(0, helpers_1.addPrefixToPipeline)(resolvedProperties.pipeline, this.prefix));
}
}
else {
extraProperties[property] = properties[property];
}
}
if (pipeline.length > 0 || Object.keys(fields).length > 0) {
result[this.prefix] = { fields, pipeline };
}
const parentModel = this.#models[helpers_1.GET_PARENT](this.collection.collectionName, this.prefix);
if (parentModel) {
let resolvedProperties = await parentModel.resolveComputedProperties(extraProperties);
if ('fields' in resolvedProperties && 'pipeline' in resolvedProperties) {
result[''] = resolvedProperties;
}
else if (Object.keys(resolvedProperties).length > 0) {
for (const prefix in resolvedProperties) {
result[prefix] = resolvedProperties[prefix];
}
}
}
else if (Object.keys(extraProperties).length > 0) {
throw new Error(`Custom computed properties contain unsupported properties - ${Object.keys(extraProperties).join(', ')}`);
}
return result;
}
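/**
* Combines the list filter, access filter and cursor condition into a single match prefixed
* with the property path, and splits it into per-unwind stages for the pipeline.
*/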
async filterStages(list) {
const sort = list.sort ?? { id: -1 };
const accessFilter = await this.accessFilter();
const cursorFilter = list.cursor ? (0, helpers_1.generateCursorCondition)(list.cursor, sort) : undefined;
const stageFilter = (0, helpers_1.optimizeMatch)({ $and: [(0, helpers_1.addPrefixToFilter)({ $and: [list.filter, accessFilter, cursorFilter] }, this.prefix)] });
return (0, helpers_1.splitFilterToStages)(stageFilter, this.paths);
}
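/**
* Splits a projection into the root-document part ('_root' / '_root.*' keys) and the
* property-entity part.
*/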
splitProjection(projection) {
if (!projection) {
return {};
}
let { _root: rootProjection, ...propertyProjection } = projection;
// move every property from propertyProjection whose key starts with '_root.' into rootProjection
for (let key in propertyProjection) {
if (key.startsWith('_root.')) {
if (!rootProjection) {
rootProjection = {};
}
rootProjection[key.replace('_root.', '')] = propertyProjection[key];
delete propertyProjection[key];
}
}
return { rootProjection, propertyProjection };
}
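/**
* Builds a cache key from the entry id, the access-control value and the converter projection.
*/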
cacheKey(id, conversion, accessControl) {
return (0, fast_object_hash_1.default)({ id: this.dtoID(id), accessControl, projection: this.converters[conversion].projection });
}
}
exports.AbstractPropertyModel = AbstractPropertyModel;
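/*
* Minimal usage sketch (illustrative only, not part of the package source): a hypothetical
* subclass modelling an 'items[]' array embedded in the documents of an 'orders' collection.
* The OrderItemModel name, the 'items' path and the models/collection arguments are assumptions;
* only the constructor signature, the converters shape and the overridable dbeID/dtoID hooks
* come from the class above.
*/
class OrderItemModel extends AbstractPropertyModel {
constructor(models, collection) {
super(models, collection, 'items[]', {
converters: {
dbe: { converter: (dbe) => dbe },
dto: { converter: (dbe) => ({ id: dbe.id.toString(), name: dbe.name }), projection: { name: 1 } }
}
});
}
dbeID(id) { return new mongodb_1.ObjectId(id); } // DTO string id -> ObjectId stored in the DB
dtoID(dbeID) { return dbeID.toString(); } // ObjectId -> DTO string id
}
// Hypothetical usage, assuming an AbstractModels instance and a MongoDB database handle:
// const orderItems = new OrderItemModel(models, db.collection('orders'));
// const item = await orderItems.get(someItemID); // resolves the embedded item as a DTO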