json-joy

Collection of libraries for building collaborative editing apps.
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.Log = void 0; const AvlMap_1 = require("sonic-forest/lib/avl/AvlMap"); const util_1 = require("sonic-forest/lib/util"); const printTree_1 = require("tree-dump/lib/printTree"); const concat_1 = require("@jsonjoy.com/util/lib/buffers/concat"); const model_1 = require("../model"); const toSchema_1 = require("../schema/toSchema"); const json_crdt_patch_1 = require("../../json-crdt-patch"); const nodes_1 = require("../nodes"); /** * The `Log` represents a history of patches applied to a JSON CRDT model. It * consists of: (1) a starting {@link Model} instance, (2) a list of {@link Patch} instances, * that can be applied to the starting model to reach the current state of the * document, and (3) the current state of the document, the `end` {@link Model}. * * The log can be used to replay the history of patches to any point in time, * from the "start" to the "end" of the log, and return the resulting {@link Model} * state. * * @todo Make this implement UILifecycle (start, stop) interface. */ class Log { /** * Creates a `PatchLog` instance from a newly JSON CRDT model. Checks if * the model API buffer has any initial operations applied, if yes, it * uses them to create the initial state of the log. * * @param model A new JSON CRDT model, just created with * `Model.withLogicalClock()` or `Model.withServerClock()`. * @returns A new `PatchLog` instance. */ static fromNewModel(model) { const sid = model.clock.sid; const log = new Log(() => model_1.Model.create(undefined, sid)); /** @todo Maybe provide second arg to `new Log(...)` */ const api = model.api; if (api.builder.patch.ops.length) log.end.applyPatch(api.flush()); return log; } static from(model) { const frozen = model.toBinary(); const beginning = () => model_1.Model.fromBinary(frozen); return new Log(beginning, model); } constructor( /** * Model factory function that creates a new JSON CRDT model instance, which * is used as the starting point of the log. It is called every time a new * model is needed to replay the log. * * @readonly Internally this function may be updated, but externally it is * read-only. * * @todo Rename to something else to give way to a `start()` in UILifecycle. * Call "snapshot". Maybe introduce `type Snapshot<N> = () => Model<N>;`. */ start, /** * The end of the log, the current state of the document. It is the model * instance that is used to apply new patches to the log. * * @readonly */ end = start()) { this.start = start; this.end = end; /** * The collection of patches which are applied to the `start()` model to reach * the `end` model. The patches in the log, stored in an AVL tree for * efficient replaying. The patches are sorted by their logical timestamps * and applied in causal order. * * @readonly */ this.patches = new AvlMap_1.AvlMap(json_crdt_patch_1.compare); const onPatch = (patch) => { const id = patch.getId(); if (!id) return; this.patches.set(id, patch); }; const api = end.api; this.__onPatch = api.onPatch.listen(onPatch); this.__onFlush = api.onFlush.listen(onPatch); } /** * Call this method to destroy the {@link Log} instance. It unsubscribes patch * and flush listeners from the `end` model and clears the patch log. */ destroy() { this.__onPatch(); this.__onFlush(); this.patches.clear(); } /** * Creates a new model instance using the `start()` factory function and * replays all patches in the log to reach the current state of the document. * * @returns A new model instance with all patches replayed. 
*/ replayToEnd() { const clone = this.start().clone(); for (let node = (0, util_1.first)(this.patches.root); node; node = (0, util_1.next)(node)) clone.applyPatch(node.v); return clone; } /** * Replays the patch log until a specified timestamp, including the patch * at the given timestamp. The model returned is a new instance of `start()` * with patches replayed up to the given timestamp. * * @param ts Timestamp ID of the patch to replay to. * @param inclusive If `true`, the patch at the given timestamp `ts` is included, * otherwise replays up to the patch before the given timestamp. Default is `true`. * @returns A new model instance with patches replayed up to the given timestamp. */ replayTo(ts, inclusive = true) { // TODO: PERF: Make `.clone()` implicit in `.start()`. const clone = this.start().clone(); let cmp = 0; for (let node = (0, util_1.first)(this.patches.root); node && (cmp = (0, json_crdt_patch_1.compare)(ts, node.k)) >= 0; node = (0, util_1.next)(node)) { if (cmp === 0 && !inclusive) break; clone.applyPatch(node.v); } return clone; } /** * Advance the start of the log to a specified timestamp, excluding the patch * at the given timestamp. This method removes all patches from the log that * are older than the given timestamp and updates the `start()` factory * function to replay the log from the new start. * * @param ts Timestamp ID of the patch to advance to. */ advanceTo(ts) { const newStartPatches = []; let node = (0, util_1.first)(this.patches.root); for (; node && (0, json_crdt_patch_1.compare)(ts, node.k) >= 0; node = (0, util_1.next)(node)) newStartPatches.push(node.v); for (const patch of newStartPatches) this.patches.del(patch.getId()); const oldStart = this.start; this.start = () => { const model = oldStart(); for (const patch of newStartPatches) model.applyPatch(patch); /** @todo Freeze the old model here, by `model.toBinary()`, it needs to be cloned on .start() anyways. */ return model; }; } /** * Creates a patch which reverts the given patch. The RGA insertion operations * are reversed just by deleting the inserted values. All other operations * require time travel to the state just before the patch was applied, so that * a copy of a mutated object can be created and inserted back into the model. * * @param patch The patch to undo * @returns A new patch that undoes the given patch */ undo(patch) { const ops = patch.ops; const length = ops.length; if (!length) throw new Error('EMPTY_PATCH'); const id = patch.getId(); let __model; const getModel = () => __model || (__model = this.replayTo(id, false)); const builder = this.end.api.builder; for (let i = length - 1; i >= 0; i--) { const op = ops[i]; const opId = op.id; if (op instanceof json_crdt_patch_1.InsStrOp || op instanceof json_crdt_patch_1.InsArrOp || op instanceof json_crdt_patch_1.InsBinOp) { builder.del(op.obj, [new json_crdt_patch_1.Timespan(opId.sid, opId.time, op.span())]); continue; } const model = getModel(); // TODO: Do not overwrite already deleted values? Or needed for concurrency? Orphaned nodes. 
if (op instanceof json_crdt_patch_1.InsValOp) { const val = model.index.get(op.obj); if (val instanceof nodes_1.ValNode) { const schema = (0, toSchema_1.toSchema)(val.node()); const newId = schema.build(builder); builder.setVal(op.obj, newId); } } else if (op instanceof json_crdt_patch_1.InsObjOp || op instanceof json_crdt_patch_1.InsVecOp) { const data = []; const container = model.index.get(op.obj); for (const [key] of op.data) { let value; if (container instanceof nodes_1.ObjNode) value = container.get(key + ''); else if (container instanceof nodes_1.VecNode) value = container.get(+key); if (value) { const schema = (0, toSchema_1.toSchema)(value); const newId = schema.build(builder); data.push([key, newId]); } else { data.push([key, builder.const(undefined)]); } } if (data.length) { if (op instanceof json_crdt_patch_1.InsObjOp) builder.insObj(op.obj, data); else if (op instanceof json_crdt_patch_1.InsVecOp) builder.insVec(op.obj, data); } } else if (op instanceof json_crdt_patch_1.DelOp) { const node = model.index.find(op.obj); if (node) { const rga = node.v; if (rga instanceof nodes_1.StrNode) { let str = ''; for (const span of op.what) str += rga.spanView(span).join(''); let after = op.obj; const firstDelSpan = op.what[0]; if (firstDelSpan) { const after2 = rga.prevId(firstDelSpan); if (after2) after = after2; } builder.insStr(op.obj, after, str); } else if (rga instanceof nodes_1.BinNode) { const buffers = []; for (const span of op.what) buffers.push(...rga.spanView(span)); let after = op.obj; const firstDelSpan = op.what[0]; if (firstDelSpan) { const after2 = rga.prevId(firstDelSpan); if (after2) after = after2; } const blob = (0, concat_1.listToUint8)(buffers); builder.insBin(op.obj, after, blob); } else if (rga instanceof nodes_1.ArrNode) { const copies = []; for (const span of op.what) { const ids2 = rga.spanView(span); for (const ids of ids2) { for (const id of ids) { const node = model.index.get(id); if (node) { const schema = (0, toSchema_1.toSchema)(node); const newId = schema.build(builder); copies.push(newId); } } } } let after = op.obj; const firstDelSpan = op.what[0]; if (firstDelSpan) { const after2 = rga.prevId(firstDelSpan); if (after2) after = after2; } builder.insArr(op.obj, after, copies); } } } } return builder.flush(); } // ---------------------------------------------------------------- Printable toString(tab) { const patches = []; // biome-ignore lint: patches are not iterable this.patches.forEach(({ v }) => patches.push(v)); return ('log' + (0, printTree_1.printTree)(tab, [ (tab) => 'start' + (0, printTree_1.printTree)(tab, [(tab) => this.start().toString(tab)]), () => '', (tab) => 'history' + (0, printTree_1.printTree)(tab, patches.map((patch, i) => (tab) => `${i}: ${patch.toString(tab)}`)), () => '', (tab) => 'end' + (0, printTree_1.printTree)(tab, [(tab) => this.end.toString(tab)]), ])); } } exports.Log = Log;
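
Below is a minimal usage sketch of the `Log` API defined in this file. It is not part of the file itself. The `Log` methods (`fromNewModel`, `replayToEnd`, `replayTo`, `advanceTo`, `undo`, `destroy`) and `patch.getId()` come from the code above; the import paths and the editing helpers (`api.root()`, `api.str().ins()`, `model.view()`) are assumptions based on json-joy's documented public API and may differ between versions.

// Usage sketch (assumed import paths; verify against your json-joy version).
const { Model } = require('json-joy/lib/json-crdt');
const { Log } = require('json-joy/lib/json-crdt/log/Log');

// Start a fresh document and wrap it in a Log, so that every flushed
// patch is recorded into `log.patches` via the onPatch/onFlush listeners.
const model = Model.create();
const log = Log.fromNewModel(model);

// Edit through the `end` model's API; `flush()` packages the buffered
// operations into a Patch, which the Log picks up and stores.
log.end.api.root({ text: 'hello' });
const patch1 = log.end.api.flush();

log.end.api.str(['text']).ins(5, ', world');
const patch2 = log.end.api.flush();

// Replay the full history into a fresh model instance.
const replica = log.replayToEnd();
console.log(replica.view()); // { text: 'hello, world' }

// Replay only up to (and including) the first patch.
const past = log.replayTo(patch1.getId());
console.log(past.view()); // { text: 'hello' }

// Build an inverse patch for the last change and apply it to revert it.
const inverse = log.undo(patch2);
log.end.applyPatch(inverse);

// Advance the start of the log so older patches no longer need replaying.
log.advanceTo(patch1.getId());

// Unsubscribe the patch and flush listeners when done.
log.destroy();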