/*
 * y-mongodb-provider — MongoDB database adapter for Yjs.
 * Compiled CommonJS build (utils.cjs), JavaScript.
 */
var Y = require('yjs');
var binary = require('lib0/dist/binary.cjs');
var encoding = require('lib0/dist/encoding.cjs');
var decoding = require('lib0/dist/decoding.cjs');
var buffer = require('buffer');
/**
 * Wrap a CommonJS module object so it can be consumed like an ES-module
 * namespace: every own enumerable property except `default` is re-exposed on
 * a fresh prototype-less object, and the original module itself is attached
 * as `default`. The resulting namespace object is frozen.
 *
 * @param {object|null|undefined} e - The CommonJS module export to wrap.
 * @returns {object} A frozen namespace object with `default` set to `e`.
 */
function _interopNamespaceDefault(e) {
    const ns = Object.create(null);
    if (e) {
        for (const key of Object.keys(e)) {
            if (key === 'default') continue;
            const desc = Object.getOwnPropertyDescriptor(e, key);
            // Preserve live accessor properties; otherwise expose a lazy getter
            // so later mutations of the source module stay visible.
            if (desc.get) {
                Object.defineProperty(ns, key, desc);
            } else {
                Object.defineProperty(ns, key, {
                    enumerable: true,
                    get: function () { return e[key]; },
                });
            }
        }
    }
    ns.default = e;
    return Object.freeze(ns);
}
// ES-module-style namespace views over the CommonJS requires above, so the
// rest of this file can use `ns.member` access uniformly.
var Y__namespace = /*#__PURE__*/_interopNamespaceDefault(Y);
var binary__namespace = /*#__PURE__*/_interopNamespaceDefault(binary);
var encoding__namespace = /*#__PURE__*/_interopNamespaceDefault(encoding);
var decoding__namespace = /*#__PURE__*/_interopNamespaceDefault(decoding);
// Row-count threshold: once a document has accumulated more stored updates
// than this, getYDocFromDb() compacts it via flushDocument().
const PREFERRED_TRIM_SIZE = 400;
// Maximum bytes stored per update row; larger updates are split into
// numbered parts in storeUpdate(). Presumably chosen to stay under MongoDB's
// 16 MB BSON document limit — TODO confirm against the adapter's docs.
const MAX_DOCUMENT_SIZE = 15000000;
/**
 * Delete every stored update of `docName` whose clock lies in [from, to).
 *
 * @param {any} db adapter exposing del()
 * @param {string} docName
 * @param {number} from inclusive lower clock bound
 * @param {number} to exclusive upper clock bound
 * @returns {Promise<any>} whatever db.del() resolves to
 */
const clearUpdatesRange = async (db, docName, from, to) => {
    const selector = {
        docName,
        clock: { $gte: from, $lt: to },
    };
    return db.del(selector);
};
/**
 * Build the storage/query key for an update row of a document.
 * When `clock` is omitted the key matches every update of the document.
 *
 * @param {string} docName
 * @param {number} [clock] position of the update in the document's history
 * @returns {{version: string, action: string, docName: string, clock?: number}}
 */
const createDocumentUpdateKey = (docName, clock) => {
    const base = { version: 'v1', action: 'update', docName };
    return clock === undefined ? base : { ...base, clock };
};
/**
 * Key under which the encoded state vector of `docName` is stored.
 * @param {string} docName
 * @returns {{docName: string, version: string}}
 */
const createDocumentStateVectorKey = (docName) => {
    return { docName, version: 'v1_sv' };
};
/**
 * Key for an auxiliary metadata entry of `docName`; the caller-supplied
 * metaKey is namespaced with a `meta_` prefix.
 * @param {string} docName
 * @param {string} metaKey
 * @returns {{version: string, docName: string, metaKey: string}}
 */
const createDocumentMetaKey = (docName, metaKey) => {
    const prefixed = `meta_${metaKey}`;
    return { version: 'v1', docName, metaKey: prefixed };
};
/** Delegate to the adapter's flush(), persisting any buffered writes. */
const flushDB = (db) => {
    return db.flush();
};
/**
 * Look up the highest clock stored for `docName` by scanning its update keys
 * in reverse order, limited to a single row.
 *
 * @param {any} db adapter exposing readAsCursor()
 * @param {string} docName
 * @returns {Promise<number>} the newest stored clock, or -1 when the
 *   document has no updates yet.
 */
const getCurrentUpdateClock = async (db, docName) => {
    const query = {
        ...createDocumentUpdateKey(docName, 0),
        // Override the clock with a full-range filter; BITS32 caps the scan.
        clock: {
            $gte: 0,
            $lt: binary__namespace.BITS32,
        },
    };
    const newest = await db.readAsCursor(query, { reverse: true, limit: 1 }).next();
    return newest ? newest.clock : -1;
};
/**
 * Persist the state vector of `docName` together with the clock it was
 * computed at. Encoded payload layout: varUint clock, then varUint8Array
 * state vector (the order decodeMongodbStateVector() expects).
 *
 * @param {any} db adapter exposing put()
 * @param {string} docName
 * @param {Uint8Array} sv encoded Yjs state vector
 * @param {number} clock clock the state vector corresponds to
 */
const writeStateVector = async (db, docName, sv, clock) => {
    const enc = encoding__namespace.createEncoder();
    encoding__namespace.writeVarUint(enc, clock);
    encoding__namespace.writeVarUint8Array(enc, sv);
    const value = encoding__namespace.toUint8Array(enc);
    await db.put(createDocumentStateVectorKey(docName), { value });
};
/**
 * Store one encoded Yjs update for `docName`, assigning it the next clock.
 * On the very first update of a document an initial state vector (clock 0)
 * is written as well. Updates larger than MAX_DOCUMENT_SIZE are split into
 * consecutively numbered parts (`part: 1..n`) stored under the same clock.
 *
 * @param {any} db adapter exposing put()
 * @param {string} docName
 * @param {Uint8Array} update encoded Yjs update
 * @returns {Promise<number>} the clock assigned to this update
 */
const storeUpdate = async (db, docName, update) => {
    const currentClock = await getCurrentUpdateClock(db, docName);
    if (currentClock === -1) {
        // First update for this document: seed the stored state vector.
        const freshDoc = new Y__namespace.Doc();
        Y__namespace.applyUpdate(freshDoc, update);
        await writeStateVector(db, docName, Y__namespace.encodeStateVector(freshDoc), 0);
    }
    const nextClock = currentClock + 1;
    if (update.length <= MAX_DOCUMENT_SIZE) {
        await db.put(createDocumentUpdateKey(docName, nextClock), { value: update });
        return nextClock;
    }
    // Oversized update: write it as numbered chunks, in parallel.
    const chunkCount = Math.ceil(update.length / MAX_DOCUMENT_SIZE);
    const writes = [];
    for (let i = 0; i < chunkCount; i += 1) {
        const begin = i * MAX_DOCUMENT_SIZE;
        const end = Math.min(begin + MAX_DOCUMENT_SIZE, update.length);
        writes.push(db.put({ ...createDocumentUpdateKey(docName, nextClock), part: i + 1 }, { value: update.subarray(begin, end) }));
    }
    await Promise.all(writes);
    return nextClock;
};
/**
 * Decode a stored state-vector payload: a varUint clock followed by a
 * varUint8Array state vector. Accepts either a Buffer directly or an object
 * whose `.buffer` property is a Buffer (presumably a BSON Binary — TODO
 * confirm against the adapter's read path).
 *
 * @param {Buffer|{buffer: Buffer}} buf stored payload
 * @returns {{sv: Uint8Array, clock: number}}
 * @throws {Error} when neither form is provided.
 */
const decodeMongodbStateVector = (buf) => {
    let raw;
    if (buffer.Buffer.isBuffer(buf)) {
        raw = buf;
    } else if (buffer.Buffer.isBuffer(buf?.buffer)) {
        raw = buf.buffer;
    } else {
        throw new Error('No buffer provided at decodeMongodbStateVector()');
    }
    const decoder = decoding__namespace.createDecoder(raw);
    const clock = decoding__namespace.readVarUint(decoder);
    const sv = decoding__namespace.readVarUint8Array(decoder);
    return { sv, clock };
};
/**
 * Read and decode the persisted state vector of `docName`.
 *
 * @param {any} db adapter exposing get()
 * @param {string} docName
 * @returns {Promise<{sv: Uint8Array|null, clock: number}>}
 *   `{ sv: null, clock: -1 }` when nothing is stored yet.
 */
const readStateVector = async (db, docName) => {
    const stored = await db.get({ ...createDocumentStateVectorKey(docName) });
    return stored?.value
        ? decodeMongodbStateVector(stored.value)
        : { sv: null, clock: -1 };
};
/** Fetch every stored state-vector row (the `v1_sv` entries of all docs). */
const getAllSVDocs = async (db) => {
    const cursor = db.readAsCursor({ version: 'v1_sv' });
    return cursor.toArray();
};
/**
 * Compact a document: persist its merged state as one update, store the
 * matching state vector at the new clock, then delete all older updates.
 *
 * @param {any} db
 * @param {string} docName
 * @param {Uint8Array} stateAsUpdate merged document state as a Yjs update
 * @param {Uint8Array} stateVector matching encoded state vector
 * @returns {Promise<number>} the clock of the merged update
 */
const flushDocument = async (db, docName, stateAsUpdate, stateVector) => {
    const mergedClock = await storeUpdate(db, docName, stateAsUpdate);
    await writeStateVector(db, docName, stateVector, mergedClock);
    await clearUpdatesRange(db, docName, 0, mergedClock);
    return mergedClock;
};
/**
 * Rebuild a Y.Doc from every update stored for `docName`, re-assembling
 * multi-part (chunked) updates before applying them. If more rows were read
 * than `flushSize`, the document is compacted via flushDocument() afterwards.
 *
 * Relies on the cursor yielding part rows of one clock consecutively and in
 * ascending `part` order — presumably guaranteed by the adapter's sort; TODO
 * confirm against the db implementation.
 *
 * @param {any} db adapter exposing readAsCursor()
 * @param {string} docName
 * @param {number} flushSize row-count threshold that triggers compaction
 * @returns {Promise<Y.Doc>} the reconstructed document
 */
const getYDocFromDb = async (db, docName, flushSize) => {
const ydoc = new Y__namespace.Doc();
let updatesCount = 0;
const cursor = await db.readAsCursor(createDocumentUpdateKey(docName));
let currentUpdate = (await cursor.next());
// Accumulates consecutive chunk rows (same clock, parts 1..n) until complete.
let parts = [];
while (currentUpdate) {
// A new clock means the buffered chunk run is complete: apply it merged.
if (parts.length && parts[0].clock !== currentUpdate.clock) {
Y__namespace.applyUpdate(ydoc, buffer.Buffer.concat(parts.map((part) => part.value.buffer)));
parts = [];
}
// Buffer the row if it starts a chunk run (part === 1) or continues the
// current one with the next consecutive part number.
if ((parts.length === 0 && currentUpdate.part === 1) ||
(parts.length > 0 && currentUpdate.part === parts[parts.length - 1].part + 1)) {
parts.push(currentUpdate);
}
else {
// Plain single-row update (no `part` field): apply it directly.
Y__namespace.applyUpdate(ydoc, currentUpdate.value.buffer);
}
updatesCount += 1;
currentUpdate = (await cursor.next());
}
// Apply a chunk run that ran to the end of the cursor.
if (parts.length) {
Y__namespace.applyUpdate(ydoc, buffer.Buffer.concat(parts.map((part) => part.value.buffer)));
}
// Too many rows read: compact the document so future loads are cheaper.
if (updatesCount > flushSize) {
await flushDocument(db, docName, Y__namespace.encodeStateAsUpdate(ydoc), Y__namespace.encodeStateVector(ydoc));
}
return ydoc;
};
// Public API of this utils module (CommonJS exports).
exports.PREFERRED_TRIM_SIZE = PREFERRED_TRIM_SIZE;
exports.clearUpdatesRange = clearUpdatesRange;
exports.createDocumentMetaKey = createDocumentMetaKey;
exports.createDocumentStateVectorKey = createDocumentStateVectorKey;
exports.createDocumentUpdateKey = createDocumentUpdateKey;
exports.decodeMongodbStateVector = decodeMongodbStateVector;
exports.flushDB = flushDB;
exports.flushDocument = flushDocument;
exports.getAllSVDocs = getAllSVDocs;
exports.getCurrentUpdateClock = getCurrentUpdateClock;
exports.getYDocFromDb = getYDocFromDb;
exports.readStateVector = readStateVector;
exports.storeUpdate = storeUpdate;
exports.writeStateVector = writeStateVector;
//# sourceMappingURL=utils.cjs.map
;