// @naturalcycles/db-lib
// Lowest Common Denominator API to supported Databases
// (header reconstructed from npm registry page residue: "Version:", "842 lines (841 loc) • 31.8 kB", "JavaScript")
import { _isTruthy } from '@naturalcycles/js-lib';
import { _uniqBy } from '@naturalcycles/js-lib/array/array.util.js';
import { localTime } from '@naturalcycles/js-lib/datetime/localTime.js';
import { _assert, ErrorMode } from '@naturalcycles/js-lib/error';
import { _deepJsonEquals } from '@naturalcycles/js-lib/object/deepEquals.js';
import { _filterUndefinedValues, _objectAssignExact, } from '@naturalcycles/js-lib/object/object.util.js';
import { pMap } from '@naturalcycles/js-lib/promise/pMap.js';
import { _passthroughPredicate, _stringMapEntries, _stringMapValues, _typeCast, } from '@naturalcycles/js-lib/types';
import { stringId } from '@naturalcycles/nodejs-lib';
import { DBLibError } from '../cnst.js';
import { RunnableDBQuery } from '../query/dbQuery.js';
import { CommonDaoTransaction } from './commonDaoTransaction.js';
/**
* Lowest common denominator API between supported Databases.
*
* DBM = Database model (how it's stored in DB)
* BM = Backend model (optimized for API access)
* TM = Transport model (optimized to be sent over the wire)
*/
export class CommonDao {
cfg;
constructor(cfg) {
this.cfg = cfg;
this.cfg = {
generateId: true,
assignGeneratedIds: false,
useCreatedProperty: true,
useUpdatedProperty: true,
validateOnLoad: true,
validateOnSave: true,
logger: console,
...cfg,
hooks: {
parseNaturalId: () => ({}),
beforeCreate: bm => bm,
onValidationError: err => err,
...cfg.hooks,
},
};
if (this.cfg.generateId) {
this.cfg.hooks.createRandomId ||= () => stringId();
}
else {
delete this.cfg.hooks.createRandomId;
}
}
// CREATE
create(part = {}, opt = {}) {
const bm = this.cfg.hooks.beforeCreate(part);
// First assignIdCreatedUpdated, then validate!
this.assignIdCreatedUpdated(bm, opt);
return this.validateAndConvert(bm, undefined, opt);
}
// GET
async requireById(id, opt = {}) {
const bm = await this.getById(id, opt);
return this.ensureRequired(bm, id, opt);
}
async requireByIdAsDBM(id, opt = {}) {
const dbm = await this.getByIdAsDBM(id, opt);
return this.ensureRequired(dbm, id, opt);
}
async getByIdOrEmpty(id, part = {}, opt) {
const bm = await this.getById(id, opt);
if (bm)
return bm;
return this.create({ ...part, id }, opt);
}
async getById(id, opt = {}) {
if (!id)
return null;
const [dbm] = await this.loadByIds([id], opt);
return await this.dbmToBM(dbm, opt);
}
async getByIdAsDBM(id, opt = {}) {
if (!id)
return null;
const [row] = await this.loadByIds([id], opt);
return this.anyToDBM(row, opt) || null;
}
async getByIds(ids, opt = {}) {
const dbms = await this.loadByIds(ids, opt);
return await this.dbmsToBM(dbms, opt);
}
async getByIdsAsDBM(ids, opt = {}) {
const rows = await this.loadByIds(ids, opt);
return this.anyToDBMs(rows);
}
// DRY private method
async loadByIds(ids, opt = {}) {
if (!ids.length)
return [];
const table = opt.table || this.cfg.table;
return await (opt.tx || this.cfg.db).getByIds(table, ids, opt);
}
async getBy(by, value, limit = 0, opt) {
return await this.query().filterEq(by, value).limit(limit).runQuery(opt);
}
async getOneBy(by, value, opt) {
const [bm] = await this.query().filterEq(by, value).limit(1).runQuery(opt);
return bm || null;
}
async getAll(opt) {
return await this.query().runQuery(opt);
}
// QUERY
/**
* Pass `table` to override table
*/
query(table) {
return new RunnableDBQuery(this, table);
}
async runQuery(q, opt) {
const { rows } = await this.runQueryExtended(q, opt);
return rows;
}
async runQuerySingleColumn(q, opt) {
_assert(q._selectedFieldNames?.length === 1, `runQuerySingleColumn requires exactly 1 column to be selected: ${q.pretty()}`);
const col = q._selectedFieldNames[0];
const { rows } = await this.runQueryExtended(q, opt);
return rows.map((r) => r[col]);
}
/**
* Convenience method that runs multiple queries in parallel and then merges their results together.
* Does deduplication by id.
* Order is not guaranteed, as queries run in parallel.
*/
async runUnionQueries(queries, opt) {
const results = (await pMap(queries, async (q) => (await this.runQueryExtended(q, opt)).rows)).flat();
return _uniqBy(results, r => r.id);
}
async runQueryExtended(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
const { rows, ...queryResult } = await this.cfg.db.runQuery(q, opt);
const isPartialQuery = !!q._selectedFieldNames;
const bms = isPartialQuery ? rows : await this.dbmsToBM(rows, opt);
return {
rows: bms,
...queryResult,
};
}
async runQueryAsDBM(q, opt) {
const { rows } = await this.runQueryExtendedAsDBM(q, opt);
return rows;
}
async runQueryExtendedAsDBM(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
const { rows, ...queryResult } = await this.cfg.db.runQuery(q, opt);
const isPartialQuery = !!q._selectedFieldNames;
const dbms = isPartialQuery ? rows : this.anyToDBMs(rows, opt);
return { rows: dbms, ...queryResult };
}
async runQueryCount(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
return await this.cfg.db.runQueryCount(q, opt);
}
streamQueryAsDBM(q, opt = {}) {
const pipeline = this.streamQueryRaw(q, opt);
const isPartialQuery = !!q._selectedFieldNames;
if (isPartialQuery)
return pipeline;
opt.skipValidation ??= true;
opt.errorMode ||= ErrorMode.SUPPRESS;
return pipeline.mapSync(dbm => this.anyToDBM(dbm, opt), { errorMode: opt.errorMode });
}
streamQuery(q, opt = {}) {
const pipeline = this.streamQueryRaw(q, opt);
const isPartialQuery = !!q._selectedFieldNames;
if (isPartialQuery)
return pipeline;
opt.skipValidation ??= true;
opt.errorMode ||= ErrorMode.SUPPRESS;
return pipeline.map(async (dbm) => await this.dbmToBM(dbm, opt), { errorMode: opt.errorMode });
}
streamQueryRaw(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
return this.cfg.db.streamQuery(q, opt);
}
async queryIds(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
const { rows } = await this.cfg.db.runQuery(q.select(['id']), opt);
return rows.map(r => r.id);
}
streamQueryIds(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
q.table = opt.table || q.table;
opt.errorMode ||= ErrorMode.SUPPRESS;
return this.cfg.db.streamQuery(q.select(['id']), opt).mapSync((r) => r.id);
}
/**
* Mutates!
*/
assignIdCreatedUpdated(obj, opt = {}) {
const now = localTime.nowUnix();
if (this.cfg.useCreatedProperty) {
obj.created ||= obj.updated || now;
}
if (this.cfg.useUpdatedProperty) {
obj.updated = opt.preserveUpdated && obj.updated ? obj.updated : now;
}
if (this.cfg.generateId) {
obj.id ||= (this.cfg.hooks.createNaturalId?.(obj) ||
this.cfg.hooks.createRandomId());
}
}
// SAVE
/**
* Convenience method to replace 3 operations (loading+patching+saving) with one:
*
* 1. Loads the row by id.
* 1.1 Creates the row (via this.create()) if it doesn't exist
* (this will cause a validation error if Patch has not enough data for the row to be valid).
* 2. Applies the patch on top of loaded data.
* 3. Saves (as fast as possible since the read) with the Patch applied, but only if the data has changed.
*/
async patchById(id, patch, opt = {}) {
if (this.cfg.patchInTransaction && !opt.tx) {
// patchInTransaction means that we should run this op in Transaction
// But if opt.tx is passed - means that we are already in a Transaction,
// and should just continue as-is
return await this.patchByIdInTransaction(id, patch, opt);
}
let patched;
const loaded = await this.getById(id, {
// Skipping validation here for performance reasons.
// Validation is going to happen on save anyway, just down below.
skipValidation: true,
...opt,
});
if (loaded) {
patched = { ...loaded, ...patch };
if (_deepJsonEquals(loaded, patched)) {
// Skipping the save operation, as data is the same
return patched;
}
}
else {
const table = opt.table || this.cfg.table;
_assert(opt.createIfMissing, `DB row required, but not found in ${table}`, {
id,
table,
});
patched = this.create({ ...patch, id }, opt);
}
return await this.save(patched, opt);
}
/**
* Like patchById, but runs all operations within a Transaction.
*/
async patchByIdInTransaction(id, patch, opt) {
return await this.runInTransaction(async (daoTx) => {
return await this.patchById(id, patch, { ...opt, tx: daoTx.tx });
});
}
/**
* Same as patchById, but takes the whole object as input.
* This "whole object" is mutated with the patch and returned.
* Otherwise, similar behavior as patchById.
* It still loads the row from the DB.
*/
async patch(bm, patch, opt = {}) {
if (this.cfg.patchInTransaction && !opt.tx) {
// patchInTransaction means that we should run this op in Transaction
// But if opt.tx is passed - means that we are already in a Transaction,
// and should just continue as-is
return await this.patchInTransaction(bm, patch, opt);
}
if (opt.skipDBRead) {
const patched = {
...bm,
...patch,
};
if (_deepJsonEquals(bm, patched)) {
// Skipping the save operation, as data is the same
return bm;
}
Object.assign(bm, patch);
}
else {
const loaded = await this.requireById(bm.id, {
// Skipping validation here for performance reasons.
// Validation is going to happen on save anyway, just down below.
skipValidation: true,
...opt,
});
const loadedWithPatch = {
...loaded,
...patch,
};
// Make `bm` exactly the same as `loadedWithPatch`
_objectAssignExact(bm, loadedWithPatch);
if (_deepJsonEquals(loaded, loadedWithPatch)) {
// Skipping the save operation, as data is the same
return bm;
}
}
return await this.save(bm, opt);
}
/**
* Like patch, but runs all operations within a Transaction.
*/
async patchInTransaction(bm, patch, opt) {
return await this.runInTransaction(async (daoTx) => {
return await this.patch(bm, patch, { ...opt, tx: daoTx.tx });
});
}
/**
* Mutates with id, created, updated
*/
async save(bm, opt = {}) {
this.requireWriteAccess();
if (opt.skipIfEquals) {
// We compare with convertedBM, to account for cases when some extra property is assigned to bm,
// which should be removed post-validation, but it breaks the "equality check"
// Post-validation the equality check should work as intended
const convertedBM = this.validateAndConvert(bm, 'save', opt);
if (_deepJsonEquals(convertedBM, opt.skipIfEquals)) {
// Skipping the save operation
return bm;
}
}
this.assignIdCreatedUpdated(bm, opt); // mutates
_typeCast(bm);
const dbm = await this.bmToDBM(bm, opt); // validates BM
this.cfg.hooks.beforeSave?.(dbm);
const table = opt.table || this.cfg.table;
const saveOptions = this.prepareSaveOptions(opt);
await (opt.tx || this.cfg.db).saveBatch(table, [dbm], saveOptions);
if (saveOptions.assignGeneratedIds) {
bm.id = dbm.id;
}
return bm;
}
async saveAsDBM(dbm, opt = {}) {
this.requireWriteAccess();
this.assignIdCreatedUpdated(dbm, opt); // mutates
const row = this.anyToDBM(dbm, opt);
this.cfg.hooks.beforeSave?.(row);
const table = opt.table || this.cfg.table;
const saveOptions = this.prepareSaveOptions(opt);
await (opt.tx || this.cfg.db).saveBatch(table, [row], saveOptions);
if (saveOptions.assignGeneratedIds) {
dbm.id = row.id;
}
return row;
}
async saveBatch(bms, opt = {}) {
if (!bms.length)
return [];
this.requireWriteAccess();
bms.forEach(bm => this.assignIdCreatedUpdated(bm, opt));
const dbms = await this.bmsToDBM(bms, opt);
if (this.cfg.hooks.beforeSave) {
dbms.forEach(dbm => this.cfg.hooks.beforeSave(dbm));
}
const table = opt.table || this.cfg.table;
const saveOptions = this.prepareSaveOptions(opt);
await (opt.tx || this.cfg.db).saveBatch(table, dbms, saveOptions);
if (saveOptions.assignGeneratedIds) {
dbms.forEach((dbm, i) => (bms[i].id = dbm.id));
}
return bms;
}
async saveBatchAsDBM(dbms, opt = {}) {
if (!dbms.length)
return [];
this.requireWriteAccess();
dbms.forEach(dbm => this.assignIdCreatedUpdated(dbm, opt));
const rows = this.anyToDBMs(dbms, opt);
if (this.cfg.hooks.beforeSave) {
rows.forEach(row => this.cfg.hooks.beforeSave(row));
}
const table = opt.table || this.cfg.table;
const saveOptions = this.prepareSaveOptions(opt);
await (opt.tx || this.cfg.db).saveBatch(table, rows, saveOptions);
if (saveOptions.assignGeneratedIds) {
rows.forEach((row, i) => (dbms[i].id = row.id));
}
return rows;
}
prepareSaveOptions(opt) {
let { saveMethod, assignGeneratedIds = this.cfg.assignGeneratedIds, excludeFromIndexes = this.cfg.excludeFromIndexes, } = opt;
if (this.cfg.immutable && !opt.allowMutability && !opt.saveMethod) {
saveMethod = 'insert';
}
return {
...opt,
excludeFromIndexes,
saveMethod,
assignGeneratedIds,
};
}
/**
* "Streaming" is implemented by buffering incoming rows into **batches**
* (of size opt.chunkSize, which defaults to 500),
* and then executing db.saveBatch(chunk) with the concurrency
* of opt.chunkConcurrency (which defaults to 32).
*
* It takes a Pipeline as input, appends necessary saving transforms to it,
* and calls .run() on it.
*/
async streamSave(p, opt = {}) {
this.requireWriteAccess();
const table = opt.table || this.cfg.table;
opt.skipValidation ??= true;
opt.errorMode ||= ErrorMode.SUPPRESS;
if (this.cfg.immutable && !opt.allowMutability && !opt.saveMethod) {
opt = { ...opt, saveMethod: 'insert' };
}
const excludeFromIndexes = opt.excludeFromIndexes || this.cfg.excludeFromIndexes;
const { beforeSave } = this.cfg.hooks;
const { chunkSize = 500, chunkConcurrency = 32, errorMode } = opt;
await p
.map(async (bm) => {
this.assignIdCreatedUpdated(bm, opt);
const dbm = await this.bmToDBM(bm, opt);
beforeSave?.(dbm);
return dbm;
}, { errorMode })
.chunk(chunkSize)
.map(async (batch) => {
await this.cfg.db.saveBatch(table, batch, {
...opt,
excludeFromIndexes,
});
return batch;
}, {
concurrency: chunkConcurrency,
errorMode,
})
.logProgress({
metric: 'saved',
...opt,
})
.run();
}
// DELETE
/**
* @returns number of deleted items
*/
async deleteById(id, opt = {}) {
if (!id)
return 0;
return await this.deleteByIds([id], opt);
}
async deleteByIds(ids, opt = {}) {
if (!ids.length)
return 0;
this.requireWriteAccess();
this.requireObjectMutability(opt);
const table = opt.table || this.cfg.table;
return await (opt.tx || this.cfg.db).deleteByIds(table, ids, opt);
}
/**
* Pass `chunkSize: number` (e.g 500) option to use Streaming: it will Stream the query, chunk by 500, and execute
* `deleteByIds` for each chunk concurrently (infinite concurrency).
* This is expected to be more memory-efficient way of deleting large number of rows.
*/
async deleteByQuery(q, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
this.requireWriteAccess();
this.requireObjectMutability(opt);
q.table = opt.table || q.table;
let deleted = 0;
if (opt.chunkSize) {
const { chunkSize, chunkConcurrency = 8 } = opt;
await this.cfg.db
.streamQuery(q.select(['id']), opt)
.mapSync(r => r.id)
.chunk(chunkSize)
.map(async (ids) => {
await this.cfg.db.deleteByIds(q.table, ids, opt);
deleted += ids.length;
}, {
predicate: _passthroughPredicate,
concurrency: chunkConcurrency,
errorMode: opt.errorMode || ErrorMode.THROW_IMMEDIATELY,
})
// LogProgress should be AFTER the mapper, to be able to report correct stats
.logProgress({
metric: q.table,
logEvery: 2, // 500 * 2 === 1000
chunkSize,
...opt,
})
.run();
}
else {
deleted = await this.cfg.db.deleteByQuery(q, opt);
}
return deleted;
}
async patchByIds(ids, patch, opt = {}) {
if (!ids.length)
return 0;
return await this.patchByQuery(this.query().filterIn('id', ids), patch, opt);
}
async patchByQuery(q, patch, opt = {}) {
this.validateQueryIndexes(q); // throws if query uses `excludeFromIndexes` property
this.requireWriteAccess();
this.requireObjectMutability(opt);
q.table = opt.table || q.table;
return await this.cfg.db.patchByQuery(q, patch, opt);
}
/**
* Caveat: it doesn't update created/updated props.
*
* @experimental
*/
async increment(prop, id, by = 1, opt = {}) {
this.requireWriteAccess();
this.requireObjectMutability(opt);
const { table } = this.cfg;
const result = await this.cfg.db.incrementBatch(table, prop, {
[id]: by,
});
return result[id];
}
/**
* Caveat: it doesn't update created/updated props.
*
* @experimental
*/
async incrementBatch(prop, incrementMap, opt = {}) {
this.requireWriteAccess();
this.requireObjectMutability(opt);
const { table } = this.cfg;
return await this.cfg.db.incrementBatch(table, prop, incrementMap);
}
async dbmToBM(_dbm, opt = {}) {
if (!_dbm)
return null;
// optimization: no need to run full joi DBM validation, cause BM validation will be run
// const dbm = this.anyToDBM(_dbm, opt)
const dbm = { ..._dbm, ...this.cfg.hooks.parseNaturalId(_dbm.id) };
// DBM > BM
const bm = ((await this.cfg.hooks.beforeDBMToBM?.(dbm)) || dbm);
// Validate/convert BM
return this.validateAndConvert(bm, 'load', opt);
}
async dbmsToBM(dbms, opt = {}) {
return await pMap(dbms, async (dbm) => await this.dbmToBM(dbm, opt));
}
async bmToDBM(bm, opt) {
if (bm === undefined)
return null;
// bm gets assigned to the new reference
bm = this.validateAndConvert(bm, 'save', opt);
// BM > DBM
return ((await this.cfg.hooks.beforeBMToDBM?.(bm)) || bm);
}
async bmsToDBM(bms, opt = {}) {
// try/catch?
return await pMap(bms, async (bm) => await this.bmToDBM(bm, opt));
}
anyToDBM(dbm, _opt = {}) {
if (!dbm)
return null;
// this shouldn't be happening on load! but should on save!
// this.assignIdCreatedUpdated(dbm, opt)
dbm = { ...dbm, ...this.cfg.hooks.parseNaturalId(dbm.id) };
// Validate/convert DBM
// return this.validateAndConvert(dbm, this.cfg.dbmSchema, DBModelType.DBM, opt)
return dbm;
}
anyToDBMs(rows, opt = {}) {
return rows.map(entity => this.anyToDBM(entity, opt));
}
/**
* Returns *converted value* (NOT the same reference).
* Does NOT mutate the object.
* Validates (unless `skipValidation=true` passed).
*/
validateAndConvert(input, op, // this is to skip validation if validateOnLoad/Save is false
opt = {}) {
// We still filter `undefined` values here, because `beforeDBMToBM` can return undefined values
// and they can be annoying with snapshot tests
input = _filterUndefinedValues(input);
// Return as is if no schema is passed or if `skipConversion` is set
if (!this.cfg.validateBM ||
opt.skipValidation ||
(op === 'load' && !this.cfg.validateOnLoad) ||
(op === 'save' && !this.cfg.validateOnSave)) {
return input;
}
const inputName = opt.table || this.cfg.table;
const [error, convertedValue] = this.cfg.validateBM(input, {
// Passing `mutateInput` through allows to opt-out of mutation
// for individual operations, e.g `someDao.save(myObj, { mutateInput: false })`
// Default is undefined (the validation function decides whether to mutate or not).
mutateInput: opt.mutateInput,
inputName,
});
if (error) {
const processedError = this.cfg.hooks.onValidationError(error);
if (processedError)
throw processedError;
}
return convertedValue;
}
async getTableSchema() {
return await this.cfg.db.getTableSchema(this.cfg.table);
}
async createTable(schema, opt) {
this.requireWriteAccess();
await this.cfg.db.createTable(this.cfg.table, schema, opt);
}
/**
* Proxy to this.cfg.db.ping
*/
async ping() {
await this.cfg.db.ping();
}
withId(id) {
return {
dao: this,
id,
};
}
withIds(ids) {
return {
dao: this,
ids,
};
}
withRowsToSave(rows) {
return {
dao: this,
rows: rows,
};
}
withRowToSave(row, opt) {
return {
dao: this,
row: row,
opt: opt,
};
}
/**
* Load rows (by their ids) from Multiple tables at once.
* An optimized way to load data, minimizing DB round-trips.
*
* @experimental
*/
static async multiGet(inputMap, opt = {}) {
const db = Object.values(inputMap)[0]?.dao.cfg.db;
if (!db) {
return {};
}
const idsByTable = CommonDao.prepareMultiGetIds(inputMap);
// todo: support tx
const dbmsByTable = await db.multiGet(idsByTable, opt);
const dbmByTableById = CommonDao.multiGetMapByTableById(dbmsByTable);
return (await CommonDao.prepareMultiGetOutput(inputMap, dbmByTableById, opt));
}
static prepareMultiGetIds(inputMap) {
const idSetByTable = {};
for (const input of _stringMapValues(inputMap)) {
const { table } = input.dao.cfg;
idSetByTable[table] ||= new Set();
if ('id' in input) {
// Singular
idSetByTable[table].add(input.id);
}
else {
// Plural
for (const id of input.ids) {
idSetByTable[table].add(id);
}
}
}
const idsByTable = {};
for (const [table, idSet] of _stringMapEntries(idSetByTable)) {
idsByTable[table] = [...idSet];
}
return idsByTable;
}
static multiGetMapByTableById(dbmsByTable) {
// We create this "map of maps", to be able to track the results back to the input props
// This is needed to support:
// - having multiple props from the same table
const dbmByTableById = {};
for (const [table, dbms] of _stringMapEntries(dbmsByTable)) {
dbmByTableById[table] ||= {};
for (const dbm of dbms) {
dbmByTableById[table][dbm.id] = dbm;
}
}
return dbmByTableById;
}
static async prepareMultiGetOutput(inputMap, dbmByTableById, opt = {}) {
const bmsByProp = {};
// Loop over input props again, to produce the output of the same shape as requested
await pMap(_stringMapEntries(inputMap), async ([prop, input]) => {
const { dao } = input;
const { table } = dao.cfg;
if ('id' in input) {
// Singular
const dbm = dbmByTableById[table][input.id];
bmsByProp[prop] = (await dao.dbmToBM(dbm, opt)) || null;
}
else {
// Plural
// We apply filtering, to be able to support multiple input props fetching from the same table.
// Without filtering - every prop will get ALL rows from that table.
const dbms = input.ids.map(id => dbmByTableById[table][id]).filter(_isTruthy);
bmsByProp[prop] = await dao.dbmsToBM(dbms, opt);
}
});
return bmsByProp;
}
/**
* @experimental
*/
static async multiDelete(inputs, opt = {}) {
if (!inputs.length)
return 0;
const { db } = inputs[0].dao.cfg;
const idsByTable = {};
for (const input of inputs) {
const { dao } = input;
const { table } = dao.cfg;
dao.requireWriteAccess();
dao.requireObjectMutability(opt);
idsByTable[table] ||= [];
if ('id' in input) {
idsByTable[table].push(input.id);
}
else {
idsByTable[table].push(...input.ids);
}
}
return await db.multiDelete(idsByTable, opt);
}
static async multiSave(inputs, opt = {}) {
if (!inputs.length)
return;
const { db } = inputs[0].dao.cfg;
const dbmsByTable = {};
await pMap(inputs, async (input) => {
const { dao } = input;
const { table } = dao.cfg;
dbmsByTable[table] ||= [];
if ('row' in input) {
// Singular
const { row } = input;
if (input.opt?.skipIfEquals) {
// We compare with convertedBM, to account for cases when some extra property is assigned to bm,
// which should be removed post-validation, but it breaks the "equality check"
// Post-validation the equality check should work as intended
const convertedBM = dao.validateAndConvert(row, 'save', opt);
if (_deepJsonEquals(convertedBM, input.opt.skipIfEquals)) {
// Skipping the save operation
return;
}
}
dao.assignIdCreatedUpdated(row, opt);
const dbm = await dao.bmToDBM(row, opt);
dao.cfg.hooks.beforeSave?.(dbm);
dbmsByTable[table].push(dbm);
}
else {
// Plural
input.rows.forEach(bm => dao.assignIdCreatedUpdated(bm, opt));
const dbms = await dao.bmsToDBM(input.rows, opt);
if (dao.cfg.hooks.beforeSave) {
dbms.forEach(dbm => dao.cfg.hooks.beforeSave(dbm));
}
dbmsByTable[table].push(...dbms);
}
});
await db.multiSave(dbmsByTable);
}
async createTransaction(opt) {
const tx = await this.cfg.db.createTransaction(opt);
return new CommonDaoTransaction(tx, this.cfg.logger);
}
async runInTransaction(fn, opt) {
let r;
await this.cfg.db.runInTransaction(async (tx) => {
const daoTx = new CommonDaoTransaction(tx, this.cfg.logger);
try {
r = await fn(daoTx);
}
catch (err) {
await daoTx.rollback(); // graceful rollback that "never throws"
throw err;
}
}, opt);
return r;
}
ensureRequired(row, id, opt) {
const table = opt.table || this.cfg.table;
_assert(row, `DB row required, but not found in ${table}`, {
table,
id,
});
return row; // pass-through
}
/**
* Throws if readOnly is true
*/
requireWriteAccess() {
_assert(!this.cfg.readOnly, DBLibError.DAO_IS_READ_ONLY, {
table: this.cfg.table,
});
}
/**
* Throws if readOnly is true
*/
requireObjectMutability(opt) {
_assert(!this.cfg.immutable || opt.allowMutability, DBLibError.OBJECT_IS_IMMUTABLE, {
table: this.cfg.table,
});
}
/**
* Throws if query uses a property that is in `excludeFromIndexes` list.
*/
validateQueryIndexes(q) {
const { excludeFromIndexes, indexes } = this.cfg;
if (excludeFromIndexes) {
for (const f of q._filters) {
_assert(!excludeFromIndexes.includes(f.name), `cannot query on non-indexed property: ${this.cfg.table}.${f.name}`, {
query: q.pretty(),
});
}
}
if (indexes) {
for (const f of q._filters) {
_assert(f.name === 'id' || indexes.includes(f.name), `cannot query on non-indexed property: ${this.cfg.table}.${f.name}`, {
query: q.pretty(),
});
}
}
}
}