@naturalcycles/db-lib

Lowest Common Denominator API to supported Databases

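For orientation, here is a minimal usage sketch of the InMemoryDB adapter implemented in the file below. The import path is an assumption (the published entry point may be the package root or a subpath; check the package's exports map); the calls themselves follow the methods defined in this file.

import { InMemoryDB } from '@naturalcycles/db-lib'; // assumed entry point

const db = new InMemoryDB({ tablesPrefix: 'test_' });

// Rows must carry their own `id` - saveBatch throws on rows without one.
// The stored row is a deep clone (JSON round-trip), so later mutations of
// the original object don't leak into the "database".
await db.saveBatch('users', [{ id: 'u1', name: 'Jane' }]);

const [user] = await db.getByIds('users', ['u1']);

console.log(db.getDataSnapshot()); // deterministic (deep-sorted) snapshot of all tables
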
import { _isEmptyObject } from '@naturalcycles/js-lib';
import { _assert } from '@naturalcycles/js-lib/error/assert.js';
import { _deepCopy, _sortObjectDeep } from '@naturalcycles/js-lib/object';
import { _stringMapEntries, _stringMapValues } from '@naturalcycles/js-lib/types';
import { generateJsonSchemaFromData } from '@naturalcycles/nodejs-lib/ajv';
import { Pipeline } from '@naturalcycles/nodejs-lib/stream';
import { bufferReviver } from '@naturalcycles/nodejs-lib/stream/ndjson/transformJsonParse.js';
import { commonDBFullSupport, CommonDBType } from '../commondb/common.db.js';
import { queryInMemory } from './queryInMemory.js';

export class InMemoryDB {
  dbType = CommonDBType.document;

  support = {
    ...commonDBFullSupport,
    timeMachine: false,
  };

  constructor(cfg) {
    this.cfg = {
      // defaults
      tablesPrefix: '',
      forbidTransactionReadAfterWrite: true,
      logger: console,
      ...cfg,
    };
  }

  cfg;

  // data[table][id] > {id: 'a', created: ... }
  data = {};

  /**
   * Returns internal "Data snapshot".
   * Deterministic - jsonSorted.
   */
  getDataSnapshot() {
    return _sortObjectDeep(this.data);
  }

  async ping() {}

  /**
   * Resets InMemory DB data
   */
  async resetCache(_table) {
    if (_table) {
      const table = this.cfg.tablesPrefix + _table;
      this.cfg.logger.log(`reset ${table}`);
      this.data[table] = {};
    } else {
      ;(await this.getTables()).forEach(table => {
        this.data[table] = {};
      });
      this.cfg.logger.log('reset');
    }
  }

  async getTables() {
    return Object.keys(this.data).filter(t => t.startsWith(this.cfg.tablesPrefix));
  }

  async getTableSchema(_table) {
    const table = this.cfg.tablesPrefix + _table;
    return {
      ...generateJsonSchemaFromData(_stringMapValues(this.data[table] || {})),
      $id: `${table}.schema.json`,
    };
  }

  async createTable(_table, _schema, opt = {}) {
    const table = this.cfg.tablesPrefix + _table;
    if (opt.dropIfExists) {
      this.data[table] = {};
    } else {
      this.data[table] ||= {};
    }
  }

  async getByIds(_table, ids, _opt) {
    const table = this.cfg.tablesPrefix + _table;
    this.data[table] ||= {};
    return ids.map(id => this.data[table][id]).filter(Boolean);
  }

  async multiGet(map, _opt = {}) {
    const result = {};
    for (const [tableName, ids] of _stringMapEntries(map)) {
      const table = this.cfg.tablesPrefix + tableName;
      result[table] = ids.map(id => this.data[table]?.[id]).filter(Boolean);
    }
    return result;
  }

  async saveBatch(_table, rows, opt = {}) {
    const table = this.cfg.tablesPrefix + _table;
    this.data[table] ||= {};
    const isInsert = opt.saveMethod === 'insert';
    const isUpdate = opt.saveMethod === 'update';
    for (const r of rows) {
      if (!r.id) {
        this.cfg.logger.warn({ rows });
        throw new Error(
          `InMemoryDB doesn't support id auto-generation in saveBatch, row without id was given`,
        );
      }
      if (isInsert && this.data[table][r.id]) {
        throw new Error(`InMemoryDB: INSERT failed, entity exists: ${table}.${r.id}`);
      }
      if (isUpdate && !this.data[table][r.id]) {
        throw new Error(`InMemoryDB: UPDATE failed, entity doesn't exist: ${table}.${r.id}`);
      }
      // JSON parse/stringify (deep clone) is to:
      // 1. Not store values "by reference" (avoid mutation bugs)
      // 2. Simulate real DB that would do something like that in a transport layer anyway
      this.data[table][r.id] = JSON.parse(JSON.stringify(r), bufferReviver);
    }
  }

  async multiSave(map, opt = {}) {
    for (const [table, rows] of _stringMapEntries(map)) {
      await this.saveBatch(table, rows, opt);
    }
  }

  async patchById(_table, id, patch, _opt) {
    const table = this.cfg.tablesPrefix + _table;
    _assert(
      this.data[table]?.[id],
      `InMemoryDB: patchById failed, entity doesn't exist: ${table}.${id}`,
    );
    Object.assign(this.data[table][id], patch);
  }

  async deleteByQuery(q, _opt) {
    const table = this.cfg.tablesPrefix + q.table;
    if (!this.data[table]) return 0;
    const ids = queryInMemory(q, Object.values(this.data[table])).map(r => r.id);
    return await this.deleteByIds(q.table, ids);
  }

  async deleteByIds(_table, ids, _opt) {
    const table = this.cfg.tablesPrefix + _table;
    if (!this.data[table]) return 0;
    let count = 0;
    for (const id of ids) {
      if (!this.data[table][id]) continue;
      delete this.data[table][id];
      count++;
    }
    return count;
  }

  async multiDelete(map, _opt) {
    let count = 0;
    for (const [table, ids] of _stringMapEntries(map)) {
      count += await this.deleteByIds(table, ids, _opt);
    }
    return count;
  }

  async patchByQuery(q, patch) {
    if (_isEmptyObject(patch)) return 0;
    const table = this.cfg.tablesPrefix + q.table;
    const rows = queryInMemory(q, Object.values(this.data[table] || {}));
    for (const row of rows) {
      Object.assign(row, patch);
    }
    return rows.length;
  }

  async runQuery(q, _opt) {
    const table = this.cfg.tablesPrefix + q.table;
    return { rows: queryInMemory(q, Object.values(this.data[table] || {})) };
  }

  async runQueryCount(q, _opt) {
    const table = this.cfg.tablesPrefix + q.table;
    return queryInMemory(q, Object.values(this.data[table] || {})).length;
  }

  streamQuery(q, _opt) {
    const table = this.cfg.tablesPrefix + q.table;
    return Pipeline.fromArray(queryInMemory(q, Object.values(this.data[table] || {})));
  }

  async runInTransaction(fn, opt = {}) {
    const tx = new InMemoryDBTransaction(this, {
      readOnly: false,
      ...opt,
    });
    try {
      await fn(tx);
      await tx.commit();
    } catch (err) {
      await tx.rollback();
      throw err;
    }
  }

  async createTransaction(opt = {}) {
    return new InMemoryDBTransaction(this, {
      readOnly: false,
      ...opt,
    });
  }

  async incrementBatch(table, prop, incrementMap, _opt) {
    const tbl = this.cfg.tablesPrefix + table;
    this.data[tbl] ||= {};
    const result = {};
    for (const [id, by] of _stringMapEntries(incrementMap)) {
      this.data[tbl][id] ||= { id };
      const newValue = (this.data[tbl][id][prop] || 0) + by;
      this.data[tbl][id][prop] = newValue;
      result[id] = newValue;
    }
    return result;
  }
}

export class InMemoryDBTransaction {
  db;
  opt;

  constructor(db, opt) {
    this.db = db;
    this.opt = opt;
  }

  ops = [];

  // used to enforce forbidReadAfterWrite setting
  writeOperationHappened = false;

  async getByIds(table, ids, opt) {
    if (this.db.cfg.forbidTransactionReadAfterWrite) {
      _assert(
        !this.writeOperationHappened,
        `InMemoryDBTransaction: read operation attempted after write operation`,
      );
    }
    return await this.db.getByIds(table, ids, opt);
  }

  async saveBatch(table, rows, opt) {
    _assert(
      !this.opt.readOnly,
      `InMemoryDBTransaction: saveBatch(${table}) called in readOnly mode`,
    );
    this.writeOperationHappened = true;
    this.ops.push({
      type: 'saveBatch',
      table,
      rows,
      opt,
    });
  }

  async deleteByIds(table, ids, opt) {
    _assert(
      !this.opt.readOnly,
      `InMemoryDBTransaction: deleteByIds(${table}) called in readOnly mode`,
    );
    this.writeOperationHappened = true;
    this.ops.push({
      type: 'deleteByIds',
      table,
      ids,
      opt,
    });
    return ids.length;
  }

  async commit() {
    const backup = _deepCopy(this.db.data);
    try {
      for (const op of this.ops) {
        if (op.type === 'saveBatch') {
          await this.db.saveBatch(op.table, op.rows, op.opt);
        } else if (op.type === 'deleteByIds') {
          await this.db.deleteByIds(op.table, op.ids, op.opt);
        } else {
          throw new Error(`DBOperation not supported: ${op.type}`);
        }
      }
      this.ops = [];
    } catch (err) {
      // rollback
      this.ops = [];
      this.db.data = backup;
      this.db.cfg.logger.log('InMemoryDB transaction rolled back');
      throw err;
    }
  }

  async rollback() {
    this.ops = [];
  }
}
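
The transaction sketch below is of the same kind (the import path is the same assumption as above): it illustrates the buffering semantics implemented by InMemoryDBTransaction, where writes are queued as ops and only applied to db.data on commit, reads delegate to the live data, and a read after a write throws while forbidTransactionReadAfterWrite is enabled (the default).

import { InMemoryDB } from '@naturalcycles/db-lib'; // assumed entry point, as above

const db = new InMemoryDB();
await db.saveBatch('users', [{ id: 'u1', name: 'Jane' }]);

await db.runInTransaction(async tx => {
  // Writes are only buffered here; db.data is untouched until commit()
  await tx.saveBatch('users', [{ id: 'u2', name: 'John' }]);
  await tx.deleteByIds('users', ['u1']);
  // tx.getByIds() delegates to db.getByIds(), so it would not see the buffered
  // writes - and since a write already happened in this transaction, it would
  // throw while forbidTransactionReadAfterWrite is enabled (the default).
});
// commit() applies the buffered ops; if any of them fails, db.data is restored
// from a deep-copied backup and the error is re-thrown.

// incrementBatch creates missing rows as `{ id }` and returns the new counter values
const counters = await db.incrementBatch('stats', 'visits', { page1: 3, page2: 1 });
console.log(counters); // { page1: 3, page2: 1 }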