y-leveldb

LevelDB database adapter for Yjs

'use strict';

var Y = require('yjs');
var encoding = require('lib0/dist/encoding.cjs');
var decoding = require('lib0/dist/decoding.cjs');
var binary = require('lib0/dist/binary.cjs');
var promise = require('lib0/dist/promise.cjs');
var buffer = require('lib0/dist/buffer.cjs');
var level = require('level');
var levelReadStream = require('level-read-stream');
var buffer$1 = require('buffer');
var t = require('lib0/dist/testing.cjs');
var environment_js = require('lib0/dist/environment.cjs');
var log = require('lib0/dist/logging.cjs');

function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  var n = Object.create(null);
  if (e) {
    Object.keys(e).forEach(function (k) {
      if (k !== 'default') {
        var d = Object.getOwnPropertyDescriptor(e, k);
        Object.defineProperty(n, k, d.get ? d : {
          enumerable: true,
          get: function () { return e[k]; }
        });
      }
    });
  }
  n["default"] = e;
  return Object.freeze(n);
}

var Y__namespace = /*#__PURE__*/_interopNamespace(Y);
var encoding__namespace = /*#__PURE__*/_interopNamespace(encoding);
var decoding__namespace = /*#__PURE__*/_interopNamespace(decoding);
var binary__namespace = /*#__PURE__*/_interopNamespace(binary);
var promise__namespace = /*#__PURE__*/_interopNamespace(promise);
var buffer__namespace = /*#__PURE__*/_interopNamespace(buffer);
var t__namespace = /*#__PURE__*/_interopNamespace(t);
var log__namespace = /*#__PURE__*/_interopNamespace(log);

const PREFERRED_TRIM_SIZE = 500;

const YEncodingString = 0;
const YEncodingUint32 = 1;

/**
 * @typedef {import('abstract-level').AbstractLevel<any, Array<String|number>, Uint8Array>} AbstractLevel
 */

/**
 * @typedef {['v1', string, 'update', number] | ['v1', string, 'meta', string] | ['v1_sv', number]} DocKey
 */

const valueEncoding = {
  buffer: true,
  type: 'y-value',
  encode: /** @param {any} data */ data => data,
  decode: /** @param {any} data */ data => data
};

/**
 * Write four bytes as an unsigned integer in big endian order.
 * (most significant byte first)
 *
 * @function
 * @param {encoding.Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeUint32BigEndian = (encoder, num) => {
  for (let i = 3; i >= 0; i--) {
    encoding__namespace.write(encoder, (num >>> (8 * i)) & binary__namespace.BITS8);
  }
};

/**
 * Read 4 bytes as unsigned integer in big endian order.
 * (most significant byte first)
 *
 * @todo use lib0/decoding instead
 *
 * @function
 * @param {decoding.Decoder} decoder
 * @return {number} An unsigned integer.
 */
const readUint32BigEndian = decoder => {
  const uint = (decoder.arr[decoder.pos + 3] +
    (decoder.arr[decoder.pos + 2] << 8) +
    (decoder.arr[decoder.pos + 1] << 16) +
    (decoder.arr[decoder.pos] << 24)) >>> 0;
  decoder.pos += 4;
  return uint
};
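// Illustrative sketch (not part of the original bundle): round-tripping a number through the
// big-endian helpers above using lib0 encoders/decoders. The function name is hypothetical and
// the function is defined but never called here.
const exampleUint32RoundTrip = () => {
  const encoder = encoding__namespace.createEncoder();
  writeUint32BigEndian(encoder, 0xdeadbeef); // writes bytes de ad be ef (most significant first)
  const decoder = decoding__namespace.createDecoder(encoding__namespace.toUint8Array(encoder));
  return readUint32BigEndian(decoder); // 0xdeadbeef, i.e. 3735928559
};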
const keyEncoding = {
  buffer: true,
  type: 'y-keys',
  /* istanbul ignore next */
  encode: /** @param {Array<string|number>} arr */ arr => {
    const encoder = encoding__namespace.createEncoder();
    for (let i = 0; i < arr.length; i++) {
      const v = arr[i];
      if (typeof v === 'string') {
        encoding__namespace.writeUint8(encoder, YEncodingString);
        encoding__namespace.writeVarString(encoder, v);
      } else /* istanbul ignore else */ if (typeof v === 'number') {
        encoding__namespace.writeUint8(encoder, YEncodingUint32);
        writeUint32BigEndian(encoder, v);
      } else {
        throw new Error('Unexpected key value')
      }
    }
    return buffer$1.Buffer.from(encoding__namespace.toUint8Array(encoder))
  },
  decode: /** @param {Uint8Array} buf */ buf => {
    const decoder = decoding__namespace.createDecoder(buf);
    const key = [];
    while (decoding__namespace.hasContent(decoder)) {
      switch (decoding__namespace.readUint8(decoder)) {
        case YEncodingString:
          key.push(decoding__namespace.readVarString(decoder));
          break
        case YEncodingUint32:
          key.push(readUint32BigEndian(decoder));
          break
      }
    }
    return key
  }
};

/**
 * level returns an error if a value is not found.
 *
 * This helper method for level returns `undefined` instead if the key is not found.
 *
 * @param {AbstractLevel} db
 * @param {any} key
 * @return {Promise<Uint8Array | undefined>}
 */
const levelGet = async (db, key) => {
  let res;
  try {
    res = await db.get(key);
  } catch (err) {
    /* istanbul ignore else */
    if (/** @type {any} */ (err).notFound) {
      return
    } else {
      throw err
    }
  }
  return res
};

/**
 * Level expects a Buffer, but in Yjs we typically work with Uint8Arrays.
 *
 * Since Level thinks that these are two entirely different things,
 * we transform the Uint8Array to a Buffer before storing it.
 *
 * @param {any} db
 * @param {any} key
 * @param {Uint8Array} val
 */
const levelPut = async (db, key, val) => db.put(key, buffer$1.Buffer.from(val));

/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * @param {AbstractLevel} db
 * @param {import('abstract-level').AbstractIteratorOptions<any, Uint8Array>} opts
 * @return {Promise<Array<{ key: DocKey, value: Uint8Array }>>}
 */
const getLevelBulkEntries = (db, opts) => promise__namespace.create((resolve, reject) => {
  /**
   * @type {Array<any>} result
   */
  const result = [];
  new levelReadStream.EntryStream(db, opts).on('data', data => {
    result.push(data);
  }).on('end', () => {
    resolve(result);
  }).on('error', reject);
});

/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * @param {AbstractLevel} db
 * @param {import('abstract-level').AbstractIteratorOptions<any, Uint8Array>} opts
 * @return {Promise<Array<DocKey>>}
 */
const getLevelBulkKeys = (db, opts) => promise__namespace.create((resolve, reject) => {
  /**
   * @type {Array<any>} result
   */
  const result = [];
  new levelReadStream.KeyStream(db, opts).on('data', data => {
    result.push(data);
  }).on('end', () => {
    resolve(result);
  }).on('error', reject);
});

/**
 * A "bulkier" implementation of level streams. Returns the result in one flush.
 *
 * @param {AbstractLevel} db
 * @param {import('abstract-level').AbstractIteratorOptions<DocKey, Uint8Array>} opts
 * @return {Promise<Array<Uint8Array>>}
 */
const getLevelBulkValues = (db, opts) => promise__namespace.create((resolve, reject) => {
  /**
   * @type {Array<any>} result
   */
  const result = [];
  new levelReadStream.ValueStream(db, opts).on('data', data => {
    result.push(data);
  }).on('end', () => {
    resolve(result);
  }).on('error', reject);
});
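// Illustrative sketch (not part of the original bundle): how the key codec above round-trips a
// document update key. `exampleKeyRoundTrip` is a hypothetical name and is never called here.
const exampleKeyRoundTrip = () => {
  // strings are tagged with YEncodingString, numbers with YEncodingUint32 (4 bytes, big endian)
  const encoded = keyEncoding.encode(['v1', 'my-doc', 'update', 7]); // Buffer
  return keyEncoding.decode(encoded); // ['v1', 'my-doc', 'update', 7]
};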
/**
 * Get all document updates for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} [opts]
 * @return {Promise<Array<Uint8Array>>}
 */
const getLevelUpdates = (db, docName, opts = {}) => getLevelBulkValues(db, {
  gte: createDocumentUpdateKey(docName, 0),
  lt: createDocumentUpdateKey(docName, binary__namespace.BITS32),
  ...opts
});

/**
 * Get all document update entries (key and value) for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} [opts]
 * @return {Promise<Array<{key: DocKey, value: Uint8Array }>>}
 */
const getLevelUpdatesEntries = (db, docName, opts = {}) => getLevelBulkEntries(db, {
  gte: createDocumentUpdateKey(docName, 0),
  lt: createDocumentUpdateKey(docName, binary__namespace.BITS32),
  ...opts
});

/**
 * Get all document update keys for a specific document.
 *
 * @param {any} db
 * @param {string} docName
 * @param {any} opts
 * @return {Promise<Array<DocKey>>}
 */
/* istanbul ignore next */
const getLevelUpdatesKeys = (db, docName, opts = {}) => getLevelBulkKeys(db, {
  gte: createDocumentUpdateKey(docName, 0),
  lt: createDocumentUpdateKey(docName, binary__namespace.BITS32),
  ...opts
});

/**
 * Get the state-vector keys of all stored documents.
 *
 * @param {AbstractLevel} db
 */
const getAllDocsKeys = (db) => getLevelBulkKeys(db, { gte: ['v1_sv'], lt: ['v1_sw'] });

/**
 * Get the state-vector entries of all stored documents.
 *
 * @param {AbstractLevel} db
 */
const getAllDocs = (db) => getLevelBulkEntries(db, { gte: ['v1_sv'], lt: ['v1_sw'] });

/**
 * @param {any} db
 * @param {string} docName
 * @return {Promise<number>} Returns -1 if this document doesn't exist yet
 */
const getCurrentUpdateClock = (db, docName) => getLevelUpdatesKeys(db, docName, {
  reverse: true,
  limit: 1
}).then(entries => {
  if (entries.length === 0) {
    return -1
  } else {
    return /** @type {number} */ (entries[0][3])
  }
});

/**
 * @param {any} db
 * @param {Array<string|number>} gte Greater than or equal
 * @param {Array<string|number>} lt lower than (not equal)
 * @return {Promise<void>}
 */
const clearRange = async (db, gte, lt) => {
  /* istanbul ignore else */
  if (db.supports.clear) {
    await db.clear({ gte, lt });
  } else {
    const keys = await getLevelBulkKeys(db, { gte, lt });
    const ops = keys.map(key => ({ type: 'del', key }));
    await db.batch(ops);
  }
};

/**
 * @param {any} db
 * @param {string} docName
 * @param {number} from Greater than or equal
 * @param {number} to lower than (not equal)
 * @return {Promise<void>}
 */
const clearUpdatesRange = async (db, docName, from, to) => clearRange(
  db,
  createDocumentUpdateKey(docName, from),
  createDocumentUpdateKey(docName, to)
);
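// Illustrative sketch (not part of the original bundle): reading the highest stored clock and the
// raw updates for one document via the range helpers above. The function name is hypothetical and
// the function is never called here.
const exampleReadStoredUpdates = async (db, docName) => {
  const clock = await getCurrentUpdateClock(db, docName); // -1 if the document doesn't exist yet
  const updates = await getLevelUpdates(db, docName); // Array<Uint8Array>, ordered by clock
  return { clock, updates };
};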
/**
 * Create a unique key for an update message.
 * We encode the result using `keyEncoding` which expects an array.
 *
 * @param {string} docName
 * @param {number} clock must be unique
 * @return {DocKey}
 */
const createDocumentUpdateKey = (docName, clock) => ['v1', docName, 'update', clock];

/**
 * @param {string} docName
 * @param {string} metaKey
 * @return {any}
 */
const createDocumentMetaKey = (docName, metaKey) => ['v1', docName, 'meta', metaKey];

/**
 * @param {string} docName
 * @return {any}
 */
const createDocumentMetaEndKey = (docName) => ['v1', docName, 'metb']; // simple trick

/**
 * We have a separate state vector key so we can iterate efficiently over all documents
 * @param {string} docName
 */
const createDocumentStateVectorKey = (docName) => ['v1_sv', docName];

/**
 * @param {string} docName
 */
const createDocumentFirstKey = (docName) => ['v1', docName];

/**
 * We use this key as the upper limit of all keys that can be written.
 * Make sure that all document keys are smaller! Strings are encoded using varLength string encoding,
 * so we need to make sure that this key has the biggest size!
 *
 * @param {string} docName
 */
const createDocumentLastKey = (docName) => ['v1', docName, 'zzzzzzz'];

// const emptyStateVector = (() => Y.encodeStateVector(new Y.Doc()))()

/**
 * For now this is a helper method that creates a Y.Doc and then re-encodes a document update.
 * In the future this will be handled by Yjs without creating a Y.Doc (constant memory consumption).
 *
 * @param {Array<Uint8Array>} updates
 * @return {{update:Uint8Array, sv: Uint8Array}}
 */
const mergeUpdates = (updates) => {
  const ydoc = new Y__namespace.Doc();
  ydoc.transact(() => {
    for (let i = 0; i < updates.length; i++) {
      Y__namespace.applyUpdate(ydoc, updates[i]);
    }
  });
  return { update: Y__namespace.encodeStateAsUpdate(ydoc), sv: Y__namespace.encodeStateVector(ydoc) }
};

/**
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} sv state vector
 * @param {number} clock current clock of the document so we can determine when this state vector was created
 */
const writeStateVector = async (db, docName, sv, clock) => {
  const encoder = encoding__namespace.createEncoder();
  encoding__namespace.writeVarUint(encoder, clock);
  encoding__namespace.writeVarUint8Array(encoder, sv);
  await levelPut(db, createDocumentStateVectorKey(docName), encoding__namespace.toUint8Array(encoder));
};

/**
 * @param {Uint8Array} buf
 * @return {{ sv: Uint8Array, clock: number }}
 */
const decodeLeveldbStateVector = buf => {
  const decoder = decoding__namespace.createDecoder(buf);
  const clock = decoding__namespace.readVarUint(decoder);
  const sv = decoding__namespace.readVarUint8Array(decoder);
  return { sv, clock }
};

/**
 * @param {any} db
 * @param {string} docName
 */
const readStateVector$1 = async (db, docName) => {
  const buf = await levelGet(db, createDocumentStateVectorKey(docName));
  if (buf == null) {
    // no state vector created yet or no document exists
    return { sv: null, clock: -1 }
  }
  return decodeLeveldbStateVector(buf)
};

/**
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} stateAsUpdate
 * @param {Uint8Array} stateVector
 * @return {Promise<number>} returns the clock of the flushed doc
 */
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
  const clock = await storeUpdate(db, docName, stateAsUpdate);
  await writeStateVector(db, docName, stateVector, clock);
  await clearUpdatesRange(db, docName, 0, clock); // intentionally not waiting for the promise to resolve!
  return clock
};
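// Illustrative sketch (not part of the original bundle): one manual flush cycle using the helpers
// above. This mirrors what LeveldbPersistence#flushDocument does below: merge all stored updates,
// store the merged result as the newest update, write a fresh state vector, and clear the older
// update entries. The function name is hypothetical and the function is never called here.
const exampleManualFlush = async (db, docName) => {
  const updates = await getLevelUpdates(db, docName);
  const { update, sv } = mergeUpdates(updates);
  return flushDocument(db, docName, update, sv); // resolves to the clock of the merged update
};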
/**
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} update
 * @return {Promise<number>} Returns the clock of the stored update
 */
const storeUpdate = async (db, docName, update) => {
  const clock = await getCurrentUpdateClock(db, docName);
  if (clock === -1) {
    // make sure that a state vector is always written, so we can search for available documents
    const ydoc = new Y__namespace.Doc();
    Y__namespace.applyUpdate(ydoc, update);
    const sv = Y__namespace.encodeStateVector(ydoc);
    await writeStateVector(db, docName, sv, 0);
  }
  await levelPut(db, createDocumentUpdateKey(docName, clock + 1), update);
  return clock + 1
};
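// Illustrative sketch (not part of the original bundle): clocks assigned by storeUpdate. For a
// fresh document the first stored update gets clock 0 (and an initial ['v1_sv', docName] entry is
// written); each further update increments the clock. The function name is hypothetical and the
// function is never called here.
const exampleStoreUpdateClocks = async (db, docName, updateA, updateB) => {
  const first = await storeUpdate(db, docName, updateA); // 0 for a previously unknown document
  const second = await storeUpdate(db, docName, updateB); // 1
  return [first, second];
};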
class LeveldbPersistence {
  /**
   * @param {string} location
   * @param {object} opts
   * @param {any} [opts.Level] Level-compatible adapter. E.g. leveldown, level-rem, level-indexeddb. Defaults to `level`
   * @param {object} [opts.levelOptions] Options that are passed down to the level instance
   */
  constructor (location, /* istanbul ignore next */ { Level = level.Level, levelOptions = {} } = {}) {
    /**
     * @type {import('abstract-level').AbstractLevel<any>}
     */
    const db = new Level(location, { ...levelOptions, valueEncoding, keyEncoding });
    this.tr = promise__namespace.resolve();
    /**
     * Execute a transaction on the database. This ensures that other processes are not writing at the same time.
     *
     * This is a private method and might change in the future.
     *
     * @todo only transact on the same room-name. Allow for concurrency of different rooms.
     *
     * @template T
     *
     * @param {function(any):Promise<T>} f A transaction that receives the db object
     * @return {Promise<T>}
     */
    this._transact = f => {
      const currTr = this.tr;
      this.tr = (async () => {
        await currTr;
        let res = /** @type {any} */ (null);
        try {
          res = await f(db);
        } catch (err) {
          /* istanbul ignore next */
          console.warn('Error during y-leveldb transaction', err);
        }
        return res
      })();
      return this.tr
    };
  }

  /**
   * @param {string} docName
   */
  flushDocument (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const { update, sv } = mergeUpdates(updates);
      await flushDocument(db, docName, update, sv);
    })
  }

  /**
   * @param {string} docName
   * @return {Promise<Y.Doc>}
   */
  getYDoc (docName) {
    return this._transact(async db => {
      const updates = await getLevelUpdates(db, docName);
      const ydoc = new Y__namespace.Doc();
      ydoc.transact(() => {
        for (let i = 0; i < updates.length; i++) {
          Y__namespace.applyUpdate(ydoc, updates[i]);
        }
      });
      if (updates.length > PREFERRED_TRIM_SIZE) {
        await flushDocument(db, docName, Y__namespace.encodeStateAsUpdate(ydoc), Y__namespace.encodeStateVector(ydoc));
      }
      return ydoc
    })
  }

  /**
   * @param {string} docName
   * @return {Promise<Uint8Array>}
   */
  getStateVector (docName) {
    return this._transact(async db => {
      const { clock, sv } = await readStateVector$1(db, docName);
      let curClock = -1;
      if (sv !== null) {
        curClock = await getCurrentUpdateClock(db, docName);
      }
      if (sv !== null && clock === curClock) {
        return sv
      } else {
        // current state vector is outdated
        const updates = await getLevelUpdates(db, docName);
        const { update, sv } = mergeUpdates(updates);
        await flushDocument(db, docName, update, sv);
        return sv
      }
    })
  }

  /**
   * @param {string} docName
   * @param {Uint8Array} update
   * @return {Promise<number>} Returns the clock of the stored update
   */
  storeUpdate (docName, update) {
    return this._transact(db => storeUpdate(db, docName, update))
  }

  /**
   * @param {string} docName
   * @param {Uint8Array} stateVector
   */
  async getDiff (docName, stateVector) {
    const ydoc = await this.getYDoc(docName);
    return Y__namespace.encodeStateAsUpdate(ydoc, stateVector)
  }

  /**
   * @param {string} docName
   * @return {Promise<void>}
   */
  clearDocument (docName) {
    return this._transact(async db => {
      await db.del(createDocumentStateVectorKey(docName));
      await clearRange(db, createDocumentFirstKey(docName), createDocumentLastKey(docName));
    })
  }

  /**
   * @param {string} docName
   * @param {string} metaKey
   * @param {any} value
   * @return {Promise<void>}
   */
  setMeta (docName, metaKey, value) {
    return this._transact(db => levelPut(db, createDocumentMetaKey(docName, metaKey), buffer__namespace.encodeAny(value)))
  }

  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  delMeta (docName, metaKey) {
    return this._transact(db => db.del(createDocumentMetaKey(docName, metaKey)))
  }

  /**
   * @param {string} docName
   * @param {string} metaKey
   * @return {Promise<any>}
   */
  getMeta (docName, metaKey) {
    return this._transact(async db => {
      const res = await levelGet(db, createDocumentMetaKey(docName, metaKey));
      if (res == null) {
        return
      }
      return buffer__namespace.decodeAny(res)
    })
  }

  /**
   * @return {Promise<Array<string>>}
   */
  getAllDocNames () {
    return this._transact(async db => {
      const docKeys = await getAllDocsKeys(db);
      return docKeys.map(key => /** @type {string} */ (key[1]))
    })
  }

  /**
   * @return {Promise<Array<{ name: string, sv: Uint8Array, clock: number }>>}
   */
  getAllDocStateVectors () {
    return this._transact(async db => {
      const docs = await getAllDocs(db);
      return docs.map(doc => {
        const { sv, clock } = decodeLeveldbStateVector(doc.value);
        return { name: /** @type {string} */ (doc.key[1]), sv, clock }
      })
    })
  }

  /**
   * @param {string} docName
   * @return {Promise<Map<string, any>>}
   */
  getMetas (docName) {
    return this._transact(async db => {
      const data = await getLevelBulkEntries(db, {
        gte: createDocumentMetaKey(docName, ''),
        lt: createDocumentMetaEndKey(docName),
        keys: true,
        values: true
      });
      const metas = new Map();
      data.forEach(v => {
        metas.set(v.key[3], buffer__namespace.decodeAny(v.value));
      });
      return metas
    })
  }

  /**
   * Close connection to a leveldb database and discard all state and bindings
   *
   * @return {Promise<void>}
   */
  destroy () {
    return this._transact(db => db.close())
  }

  /**
   * Delete all data in database.
   */
  clearAll () {
    return this._transact(async db => db.clear())
  }
}
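// Illustrative usage sketch (not part of the original bundle): persisting and loading a Yjs
// document with the class above. It assumes a writable './storage-location' directory; the
// function name is hypothetical and the function is defined but never called here.
const examplePersistenceUsage = async () => {
  const persistence = new LeveldbPersistence('./storage-location');
  const ydoc = new Y__namespace.Doc();
  ydoc.getArray('arr').insert(0, [1, 2, 3]);
  // store the full document state as a single update
  await persistence.storeUpdate('my-doc', Y__namespace.encodeStateAsUpdate(ydoc));
  // later: load the persisted document and read its contents
  const persistedYdoc = await persistence.getYDoc('my-doc');
  console.log(persistedYdoc.getArray('arr').toArray()); // [1, 2, 3]
  // attach arbitrary metadata to the document and read it back
  await persistence.setMeta('my-doc', 'lastModified', Date.now());
  const lastModified = await persistence.getMeta('my-doc', 'lastModified');
  await persistence.destroy();
  return lastModified;
};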
// When changing this, also make sure to change the file in gitignore
const storageName = 'tmp-leveldb-storage';

/**
 * Read state vector from Decoder and return as Map. This is a helper method that will be exported by Yjs directly.
 *
 * @param {decoding.Decoder} decoder
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const readStateVector = decoder => {
  const ss = new Map();
  const ssLength = decoding__namespace.readVarUint(decoder);
  for (let i = 0; i < ssLength; i++) {
    const client = decoding__namespace.readVarUint(decoder);
    const clock = decoding__namespace.readVarUint(decoder);
    ss.set(client, clock);
  }
  return ss
};

/**
 * Read decodedState and return State as Map.
 *
 * @param {Uint8Array} decodedState
 * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
 *
 * @function
 */
const decodeStateVector = decodedState => readStateVector(decoding__namespace.createDecoder(decodedState));

/**
 * Flushes all updates to ldb and deletes items from the updates array.
 *
 * @param {LeveldbPersistence} ldb
 * @param {string} docName
 * @param {Array<Uint8Array>} updates
 */
const flushUpdatesHelper = (ldb, docName, updates) =>
  Promise.all(updates.splice(0).map(update => ldb.storeUpdate(docName, update)));

/**
 * @param {t.TestCase} tc
 */
const testLeveldbUpdateStorage = async tc => {
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  // clear all data, so we can check allData later
  await leveldbPersistence._transact(async db => db.clear());
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  /**
   * @type {Array<Uint8Array>}
   */
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  ydoc1.getArray('arr').insert(0, [1]);
  ydoc1.getArray('arr').insert(0, [2]);
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  const encodedSv = await leveldbPersistence.getStateVector(docName);
  const sv = decodeStateVector(encodedSv);
  t__namespace.assert(sv.size === 1);
  t__namespace.assert(sv.get(0) === 2);
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  t__namespace.compareArrays(ydoc2.getArray('arr').toArray(), [2, 1]);
  const allData = await leveldbPersistence._transact(async db => getLevelBulkEntries(db, { gte: ['v1'], lt: ['v2'] }));
  t__namespace.assert(allData.length > 0, 'some data exists');
  t__namespace.compareArrays([docName], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.clearDocument(docName);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  const allData2 = await leveldbPersistence._transact(async db => getLevelBulkEntries(db, { gte: ['v1'], lt: ['v2'] }));
  console.log(allData2);
  t__namespace.assert(allData2.length === 0, 'really deleted all data');
  await leveldbPersistence.destroy();
};
/**
 * @param {t.TestCase} tc
 */
const testEncodeManyUpdates = async tc => {
  const N = PREFERRED_TRIM_SIZE * 7;
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  /**
   * @type {Array<Uint8Array>}
   */
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  const keys = await leveldbPersistence._transact(db => getLevelUpdatesEntries(db, docName));
  for (let i = 0; i < keys.length; i++) {
    t__namespace.assert(keys[i].key[3] === i);
  }
  const yarray = ydoc1.getArray('arr');
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  t__namespace.assert(ydoc2.getArray('arr').length === N);
  await leveldbPersistence.flushDocument(docName);
  const mergedKeys = await leveldbPersistence._transact(db => getLevelUpdatesEntries(db, docName));
  t__namespace.assert(mergedKeys.length === 1);
  // getYDoc still works after flush/merge
  const ydoc3 = await leveldbPersistence.getYDoc(docName);
  t__namespace.assert(ydoc3.getArray('arr').length === N);
  // test if state vector is properly generated
  t__namespace.compare(Y__namespace.encodeStateVector(ydoc1), await leveldbPersistence.getStateVector(docName));
  // add new update so that sv needs to be updated
  ydoc1.getArray('arr').insert(0, ['new']);
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  t__namespace.compare(Y__namespace.encodeStateVector(ydoc1), await leveldbPersistence.getStateVector(docName));
  await leveldbPersistence.destroy();
};

/**
 * @param {t.TestCase} tc
 */
const testDiff = async tc => {
  const N = PREFERRED_TRIM_SIZE * 2; // primes are awesome - ensure that the document is at least flushed once
  const docName = tc.testName;
  const ydoc1 = new Y__namespace.Doc();
  ydoc1.clientID = 0; // so we can check the state vector
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  /**
   * @type {Array<Uint8Array>}
   */
  const updates = [];
  ydoc1.on('update', update => {
    updates.push(update);
  });
  const yarray = ydoc1.getArray('arr');
  // create N changes
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  // create partially merged doc
  const ydoc2 = await leveldbPersistence.getYDoc(docName);
  // another N updates
  for (let i = 0; i < N; i++) {
    yarray.insert(0, [i]);
  }
  await flushUpdatesHelper(leveldbPersistence, docName, updates);
  // apply diff to doc
  const diffUpdate = await leveldbPersistence.getDiff(docName, Y__namespace.encodeStateVector(ydoc2));
  Y__namespace.applyUpdate(ydoc2, diffUpdate);
  t__namespace.assert(ydoc2.getArray('arr').length === ydoc1.getArray('arr').length);
  t__namespace.assert(ydoc2.getArray('arr').length === N * 2);
  await leveldbPersistence.destroy();
};

/**
 * @param {t.TestCase} tc
 */
const testMetas = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  await leveldbPersistence.setMeta(docName, 'a', 4);
  await leveldbPersistence.setMeta(docName, 'a', 5);
  await leveldbPersistence.setMeta(docName, 'b', 4);
  const a = await leveldbPersistence.getMeta(docName, 'a');
  const b = await leveldbPersistence.getMeta(docName, 'b');
  t__namespace.assert(a === 5);
  t__namespace.assert(b === 4);
  const metas = await leveldbPersistence.getMetas(docName);
  t__namespace.assert(metas.size === 2);
  t__namespace.assert(metas.get('a') === 5);
  t__namespace.assert(metas.get('b') === 4);
  await leveldbPersistence.delMeta(docName, 'a');
  const c = await leveldbPersistence.getMeta(docName, 'a');
  t__namespace.assert(c === undefined);
  await leveldbPersistence.clearDocument(docName);
  const metasEmpty = await leveldbPersistence.getMetas(docName);
  t__namespace.assert(metasEmpty.size === 0);
  await leveldbPersistence.destroy();
};

/**
 * @param {t.TestCase} tc
 */
const testDeleteEmptySv = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearAll();
  const ydoc = new Y__namespace.Doc();
  ydoc.clientID = 0;
  ydoc.getArray('arr').insert(0, [1]);
  const singleUpdate = Y__namespace.encodeStateAsUpdate(ydoc);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.storeUpdate(docName, singleUpdate);
  t__namespace.compareArrays([docName], await leveldbPersistence.getAllDocNames());
  const docSvs = await leveldbPersistence.getAllDocStateVectors();
  t__namespace.assert(docSvs.length === 1);
  t__namespace.compare([{ name: docName, clock: 0, sv: Y__namespace.encodeStateVector(ydoc) }], docSvs);
  await leveldbPersistence.clearDocument(docName);
  t__namespace.compareArrays([], await leveldbPersistence.getAllDocNames());
  await leveldbPersistence.destroy();
};
/**
 * @param {t.TestCase} tc
 */
const testMisc = async tc => {
  const docName = tc.testName;
  const leveldbPersistence = new LeveldbPersistence(storageName);
  await leveldbPersistence.clearDocument(docName);
  const sv = await leveldbPersistence.getStateVector('does not exist');
  t__namespace.assert(sv.byteLength === 1);
  await leveldbPersistence.destroy();
};

var leveldb = /*#__PURE__*/Object.freeze({
  __proto__: null,
  testLeveldbUpdateStorage: testLeveldbUpdateStorage,
  testEncodeManyUpdates: testEncodeManyUpdates,
  testDiff: testDiff,
  testMetas: testMetas,
  testDeleteEmptySv: testDeleteEmptySv,
  testMisc: testMisc
});

if (environment_js.isBrowser) {
  log__namespace.createVConsole(document.body);
}
t.runTests({
  leveldb
}).then(success => {
  /* istanbul ignore next */
  if (environment_js.isNode) {
    process.exit(success ? 0 : 1);
  }
});
//# sourceMappingURL=test.cjs.map