y-mongodb-provider
MongoDB database adapter for Yjs
import * as Y from 'yjs';
import * as binary from 'lib0/binary';
import * as encoding from 'lib0/encoding';
import * as decoding from 'lib0/decoding';
import { Buffer } from 'buffer';
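// PREFERRED_TRIM_SIZE: update count after which a document is typically flushed
// (merged); see the flushSize parameter of getYDocFromDb below.
// MAX_DOCUMENT_SIZE: maximum byte size of a single stored update; larger updates
// are split into parts so each stored document stays below MongoDB's 16 MB BSON limit.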
const PREFERRED_TRIM_SIZE = 400;
const MAX_DOCUMENT_SIZE = 15000000;
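// Delete all stored updates for docName whose clock is in the range [from, to).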
const clearUpdatesRange = async (db, docName, from, to) => db.del({
docName,
clock: {
$gte: from,
$lt: to,
},
});
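// Build the key/query for a stored update; when clock is omitted the key
// matches every update of the document.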
const createDocumentUpdateKey = (docName, clock) => {
if (clock !== undefined) {
return {
version: 'v1',
action: 'update',
docName,
clock,
};
}
else {
return {
version: 'v1',
action: 'update',
docName,
};
}
};
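// Key under which the encoded state vector of a document is stored ('v1_sv').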
const createDocumentStateVectorKey = (docName) => ({
docName,
version: 'v1_sv',
});
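// Key for per-document metadata, namespaced with a 'meta_' prefix.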
const createDocumentMetaKey = (docName, metaKey) => ({
version: 'v1',
docName,
metaKey: `meta_${metaKey}`,
});
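// Delegate to the underlying adapter's flush().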
const flushDB = (db) => db.flush();
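// Return the highest clock stored for docName, or -1 if no update exists yet.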
const getCurrentUpdateClock = async (db, docName) => {
const cursor = db.readAsCursor({
...createDocumentUpdateKey(docName, 0),
clock: {
$gte: 0,
$lt: binary.BITS32,
},
}, { reverse: true, limit: 1 });
const update = await cursor.next();
return update ? update.clock : -1;
};
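// Encode the clock followed by the state vector and persist it under the
// document's state-vector key.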
const writeStateVector = async (db, docName, sv, clock) => {
const encoder = encoding.createEncoder();
encoding.writeVarUint(encoder, clock);
encoding.writeVarUint8Array(encoder, sv);
await db.put(createDocumentStateVectorKey(docName), {
value: encoding.toUint8Array(encoder),
});
};
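// Store an update under the next clock. For the first update of a document a
// state vector is written at clock 0. Updates larger than MAX_DOCUMENT_SIZE are
// split into numbered parts. Returns the clock the update was stored under.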
const storeUpdate = async (db, docName, update) => {
const clock = await getCurrentUpdateClock(db, docName);
if (clock === -1) {
const ydoc = new Y.Doc();
Y.applyUpdate(ydoc, update);
const sv = Y.encodeStateVector(ydoc);
await writeStateVector(db, docName, sv, 0);
}
if (update.length <= MAX_DOCUMENT_SIZE) {
await db.put(createDocumentUpdateKey(docName, clock + 1), {
value: update,
});
}
else {
const totalChunks = Math.ceil(update.length / MAX_DOCUMENT_SIZE);
const putPromises = [];
for (let i = 0; i < totalChunks; i++) {
const start = i * MAX_DOCUMENT_SIZE;
const end = Math.min(start + MAX_DOCUMENT_SIZE, update.length);
const chunk = update.subarray(start, end);
putPromises.push(db.put({ ...createDocumentUpdateKey(docName, clock + 1), part: i + 1 }, { value: chunk }));
}
await Promise.all(putPromises);
}
return clock + 1;
};
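// Decode a stored state vector (a Buffer, or an object exposing a Buffer via
// .buffer such as a BSON Binary) into { sv, clock }.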
const decodeMongodbStateVector = (buf) => {
let decoder;
if (Buffer.isBuffer(buf)) {
decoder = decoding.createDecoder(buf);
}
else if (Buffer.isBuffer(buf?.buffer)) {
decoder = decoding.createDecoder(buf.buffer);
}
else {
throw new Error('No buffer provided at decodeMongodbStateVector()');
}
const clock = decoding.readVarUint(decoder);
const sv = decoding.readVarUint8Array(decoder);
return { sv, clock };
};
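// Read and decode the stored state vector for docName; returns
// { sv: null, clock: -1 } when none has been written.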
const readStateVector = async (db, docName) => {
const doc = await db.get({ ...createDocumentStateVectorKey(docName) });
if (!doc?.value) {
return { sv: null, clock: -1 };
}
return decodeMongodbStateVector(doc.value);
};
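// Return every stored state-vector document across all docs.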
const getAllSVDocs = async (db) => db.readAsCursor({ version: 'v1_sv' }).toArray();
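// Persist the merged document state as a single update plus a fresh state
// vector, then delete all older updates. Returns the new clock.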
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
const clock = await storeUpdate(db, docName, stateAsUpdate);
await writeStateVector(db, docName, stateVector, clock);
await clearUpdatesRange(db, docName, 0, clock);
return clock;
};
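// Rebuild a Y.Doc from all stored updates, re-concatenating multi-part
// (chunked) updates by clock and part order. If more than flushSize updates
// were read, the document is flushed (merged) afterwards.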
const getYDocFromDb = async (db, docName, flushSize) => {
const ydoc = new Y.Doc();
let updatesCount = 0;
const cursor = await db.readAsCursor(createDocumentUpdateKey(docName));
let currentUpdate = (await cursor.next());
let parts = [];
while (currentUpdate) {
if (parts.length && parts[0].clock !== currentUpdate.clock) {
Y.applyUpdate(ydoc, Buffer.concat(parts.map((part) => part.value.buffer)));
parts = [];
}
if ((parts.length === 0 && currentUpdate.part === 1) ||
(parts.length > 0 && currentUpdate.part === parts[parts.length - 1].part + 1)) {
parts.push(currentUpdate);
}
else {
Y.applyUpdate(ydoc, currentUpdate.value.buffer);
}
updatesCount += 1;
currentUpdate = (await cursor.next());
}
if (parts.length) {
Y.applyUpdate(ydoc, Buffer.concat(parts.map((part) => part.value.buffer)));
}
if (updatesCount > flushSize) {
await flushDocument(db, docName, Y.encodeStateAsUpdate(ydoc), Y.encodeStateVector(ydoc));
}
return ydoc;
};
export { PREFERRED_TRIM_SIZE, clearUpdatesRange, createDocumentMetaKey, createDocumentStateVectorKey, createDocumentUpdateKey, decodeMongodbStateVector, flushDB, flushDocument, getAllSVDocs, getCurrentUpdateClock, getYDocFromDb, readStateVector, storeUpdate, writeStateVector };
//# sourceMappingURL=utils.mjs.map