import { AvlMap } from 'sonic-forest/lib/avl/AvlMap';
import { first, next, prev } from 'sonic-forest/lib/util';
import { printTree } from 'tree-dump/lib/printTree';
import { listToUint8 } from '@jsonjoy.com/buffers/lib/concat';
import { cloneBinary } from '@jsonjoy.com/util/lib/json-clone/cloneBinary';
import { Model } from '../model';
import { toSchema } from '../schema/toSchema';
import { DelOp, InsArrOp, InsBinOp, InsObjOp, InsStrOp, InsValOp, InsVecOp, Timespan, compare } from '../../json-crdt-patch';
import { ArrNode, BinNode, ObjNode, StrNode, ValNode, VecNode } from '../nodes';
/**
* The `Log` represents a history of patches applied to a JSON CRDT model. It
* consists of: (1) a starting {@link Model} instance, (2) a list of {@link Patch} instances
* that can be applied to the starting model to reach the current state of the
* document, and (3) the current state of the document, the `end` {@link Model}.
*
* The log can be used to replay the history of patches to any point in time,
* from the "start" to the "end" of the log, and return the resulting {@link Model}
* state.
*
* @todo Make this implement UILifecycle (start, stop) interface.
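*
* @example
* A minimal usage sketch, using only APIs referenced elsewhere in this file
* (how the document is edited through `log.end` is up to the application):
* ```ts
* const model = Model.create();
* const log = Log.fromNewModel(model);
* // ...edit the document through `log.end`; emitted patches are recorded...
* const replayed = log.replayToEnd(); // a fresh Model built by replaying the patches
* ```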
*/
export class Log {
start;
end;
/**
* Creates a `Log` instance from a newly created JSON CRDT model. If the
* model's API buffer has any initial operations applied, they are used to
* create the initial state of the log.
*
* @param model A new JSON CRDT model, just created with
* `Model.create()` or `Model.withServerClock()`.
* @returns A new `Log` instance.
*/
static fromNewModel(model) {
const sid = model.clock.sid;
const log = new Log(() => Model.create(undefined, sid)); /** @todo Maybe provide second arg to `new Log(...)` */
const api = model.api;
if (api.builder.patch.ops.length)
log.end.applyPatch(api.flush());
return log;
}
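/**
* Creates a `Log` instance from an existing model. The model is serialized
* with `toBinary()` and that frozen snapshot becomes the `start()` factory
* (deserialized on each call), while the model itself becomes the `end`.
*
* @param model A JSON CRDT model to wrap in a log.
* @returns A new `Log` instance.
*/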
static from(model) {
const frozen = model.toBinary();
const beginning = () => Model.fromBinary(frozen);
return new Log(beginning, model);
}
/**
* Custom metadata associated with the log. It is stored in the log's header
* when serialized with {@link LogEncoder} and can be used to store additional
* information about the log.
*/
metadata;
/**
* The collection of patches which are applied to the `start()` model to reach
* the `end` model. The patches are stored in an AVL tree for efficient
* replaying; they are sorted by their logical timestamps and applied in
* causal order.
*
* @readonly
*/
patches = new AvlMap(compare);
__onPatch;
__onFlush;
constructor(
/**
* Model factory function that creates a new JSON CRDT model instance, which
* is used as the starting point of the log. It is called every time a new
* model is needed to replay the log.
*
* @readonly Internally this function may be updated, but externally it is
* read-only.
*
* @todo Rename to something else to give way to a `start()` in UILifecycle.
* Call "snapshot". Maybe introduce `type Snapshot<N> = () => Model<N>;`.
*/
start,
/**
* The end of the log: the current state of the document. New patches are
* applied to this model instance and recorded in the log.
*
* @readonly
*/
end = start(), metadata) {
this.start = start;
this.end = end;
const onPatch = (patch) => {
const id = patch.getId();
if (!id)
return;
this.patches.set(id, patch);
};
const api = end.api;
this.__onPatch = api.onPatch.listen(onPatch);
this.__onFlush = api.onFlush.listen(onPatch);
this.metadata = metadata ?? {};
}
/**
* Call this method to destroy the {@link Log} instance. It unsubscribes the
* patch and flush listeners from the `end` model.
*/
destroy() {
this.__onPatch();
this.__onFlush();
}
/**
* Creates a new model instance using the `start()` factory function and
* replays all patches in the log to reach the current state of the document.
*
* @returns A new model instance with all patches replayed.
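*
* @example
* A minimal sketch (the variable `log` is assumed to be a `Log` instance):
* ```ts
* const model = log.replayToEnd();
* console.log(model.view()); // same document view, rebuilt from scratch
* ```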
*/
replayToEnd() {
const clone = this.start().clone();
for (let node = first(this.patches.root); node; node = next(node))
clone.applyPatch(node.v);
return clone;
}
/**
* Replays the patch log up to a specified timestamp, by default including
* the patch at the given timestamp. The model returned is a new instance of
* `start()` with patches replayed up to the given timestamp.
*
* @param ts Timestamp ID of the patch to replay to.
* @param inclusive If `true`, the patch at the given timestamp `ts` is included,
* otherwise replays up to the patch before the given timestamp. Default is `true`.
* @returns A new model instance with patches replayed up to the given timestamp.
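*
* @example
* A minimal sketch; `first` is the helper imported from 'sonic-forest/lib/util'
* at the top of this file, and the log is assumed to hold at least one patch:
* ```ts
* const node = first(log.patches.root);
* if (node) {
*   const model = log.replayTo(node.k); // state right after the earliest patch
* }
* ```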
*/
replayTo(ts, inclusive = true) {
// TODO: PERF: Make `.clone()` implicit in `.start()`.
const clone = this.start().clone();
let cmp = 0;
for (let node = first(this.patches.root); node && (cmp = compare(ts, node.k)) >= 0; node = next(node)) {
if (cmp === 0 && !inclusive)
break;
clone.applyPatch(node.v);
}
return clone;
}
/**
* Advances the start of the log to a specified timestamp, including the patch
* at the given timestamp. This method removes from the log all patches up to
* and including the given timestamp and updates the `start()` factory
* function to replay them as part of the new start.
*
* @param ts Timestamp ID of the patch to advance to.
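*
* @example
* A minimal sketch: fold the earliest patch into the start model to compact
* the log. Replaying the log afterwards still yields the same end state.
* ```ts
* const node = first(log.patches.root);
* if (node) log.advanceTo(node.k);
* ```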
*/
advanceTo(ts) {
const newStartPatches = [];
let node = first(this.patches.root);
for (; node && compare(ts, node.k) >= 0; node = next(node))
newStartPatches.push(node.v);
for (const patch of newStartPatches)
this.patches.del(patch.getId());
const oldStart = this.start;
this.start = () => {
const model = oldStart();
for (const patch of newStartPatches)
model.applyPatch(patch);
/** @todo Freeze the old model here, by `model.toBinary()`, it needs to be cloned on .start() anyways. */
return model;
};
}
/**
* Finds the latest patch for a given session ID.
*
* @param sid Session ID to find the latest patch for.
* @return The latest patch for the given session ID, or `undefined` if no
* such patch exists.
*/
findMax(sid) {
let curr = this.patches.max;
while (curr) {
if (curr.k.sid === sid)
return curr.v;
curr = prev(curr);
}
return;
}
/**
* @returns A deep clone of the log, including the start function, metadata,
* patches, and the end model.
*/
clone() {
const start = this.start;
const metadata = cloneBinary(this.metadata);
const end = this.end.clone();
const log = new Log(start, end, metadata);
for (const { v } of this.patches.entries()) {
const patch = v.clone();
const id = patch.getId();
if (!id)
continue;
log.patches.set(id, patch);
}
return log;
}
// /**
// * Adds a batch of patches to the log, without applying them to the `end`
// * model. It is assumed that the patches are already applied to the `end`
// * model, this method only adds them to the internal patch collection.
// *
// * If you need to apply patches to the `end` model, use `end.applyBatch(batch)`,
// * it will apply them to the model and add them to the log automatically.
// *
// * @param batch Array of patches to add to the log.
// */
// public add(batch: Patch[]): void {
// const patches = this.patches;
// for (const patch of batch) {
// const id = patch.getId();
// if (id) patches.set(id, patch);
// }
// }
/**
* Rebase a batch of patches on top of the current end of the log, or on top
* of the latest patch for a given session ID.
*
* @param batch A batch of patches to rebase.
* @param sid Session ID to find the latest patch for rebasing. If not provided,
* the latest patch in the log is used.
* @returns The rebased patches.
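*
* @example
* A minimal sketch, assuming `incoming` is a `Patch[]` received from a peer;
* the batch is re-stamped so its logical times follow the latest patch
* recorded in this log:
* ```ts
* const rebased = log.rebaseBatch(incoming);
* ```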
*/
rebaseBatch(batch, sid) {
const rebasePatch = sid ? this.findMax(sid) : this.patches.max?.v;
if (!rebasePatch)
return batch;
const rebaseId = rebasePatch.getId();
if (!rebaseId)
return batch;
let nextTime = rebaseId.time + rebasePatch.span();
const rebased = [];
const length = batch.length;
for (let i = 0; i < length; i++) {
const patch = batch[i].rebase(nextTime);
nextTime += patch.span();
rebased.push(patch);
}
return rebased;
}
/**
* Resets the log to the state of another log. Consumes all state from the `to`
* log. The `to` log will be destroyed and should not be used after calling
* this method.
*
* If you want to preserve the `to` log, use `.clone()` method first.
*
* ```ts
* const log1 = Log.fromNewModel(Model.create());
* const log2 = Log.fromNewModel(Model.create());
* log1.reset(log2.clone());
* ```
*
* @param to The log to consume the state from.
*/
reset(to) {
this.start = to.start;
this.metadata = to.metadata;
this.patches = to.patches;
this.end.reset(to.end);
to.destroy();
}
/**
* Creates a patch which reverts the given patch. The RGA insertion operations
* are reversed just by deleting the inserted values. All other operations
* require time travel to the state just before the patch was applied, so that
* a copy of a mutated object can be created and inserted back into the model.
*
* @param patch The patch to undo
* @returns A new patch that undoes the given patch
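*
* @example
* A minimal sketch of a one-step undo; assumes the log contains at least one
* patch (`log.patches.max` is the most recently recorded patch):
* ```ts
* const last = log.patches.max;
* if (last) {
*   const inverse = log.undo(last.v);
*   log.end.applyPatch(inverse);
* }
* ```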
*/
undo(patch) {
const ops = patch.ops;
const length = ops.length;
if (!length)
throw new Error('EMPTY_PATCH');
const id = patch.getId();
let __model;
const getModel = () => __model || (__model = this.replayTo(id, false));
const builder = this.end.api.builder;
for (let i = length - 1; i >= 0; i--) {
const op = ops[i];
const opId = op.id;
if (op instanceof InsStrOp || op instanceof InsArrOp || op instanceof InsBinOp) {
builder.del(op.obj, [new Timespan(opId.sid, opId.time, op.span())]);
continue;
}
const model = getModel();
// TODO: Do not overwrite already deleted values? Or needed for concurrency? Orphaned nodes.
if (op instanceof InsValOp) {
const val = model.index.get(op.obj);
if (val instanceof ValNode) {
const schema = toSchema(val.node());
const newId = schema.build(builder);
builder.setVal(op.obj, newId);
}
}
else if (op instanceof InsObjOp || op instanceof InsVecOp) {
const data = [];
const container = model.index.get(op.obj);
for (const [key] of op.data) {
let value;
if (container instanceof ObjNode)
value = container.get(key + '');
else if (container instanceof VecNode)
value = container.get(+key);
if (value) {
const schema = toSchema(value);
const newId = schema.build(builder);
data.push([key, newId]);
}
else {
data.push([key, builder.con(undefined)]);
}
}
if (data.length) {
if (op instanceof InsObjOp)
builder.insObj(op.obj, data);
else if (op instanceof InsVecOp)
builder.insVec(op.obj, data);
}
}
else if (op instanceof DelOp) {
const node = model.index.find(op.obj);
if (node) {
const rga = node.v;
if (rga instanceof StrNode) {
let str = '';
for (const span of op.what)
str += rga.spanView(span).join('');
let after = op.obj;
const firstDelSpan = op.what[0];
if (firstDelSpan) {
const after2 = rga.prevId(firstDelSpan);
if (after2)
after = after2;
}
builder.insStr(op.obj, after, str);
}
else if (rga instanceof BinNode) {
const buffers = [];
for (const span of op.what)
buffers.push(...rga.spanView(span));
let after = op.obj;
const firstDelSpan = op.what[0];
if (firstDelSpan) {
const after2 = rga.prevId(firstDelSpan);
if (after2)
after = after2;
}
const blob = listToUint8(buffers);
builder.insBin(op.obj, after, blob);
}
else if (rga instanceof ArrNode) {
const copies = [];
for (const span of op.what) {
const ids2 = rga.spanView(span);
for (const ids of ids2) {
for (const id of ids) {
const node = model.index.get(id);
if (node) {
const schema = toSchema(node);
const newId = schema.build(builder);
copies.push(newId);
}
}
}
}
let after = op.obj;
const firstDelSpan = op.what[0];
if (firstDelSpan) {
const after2 = rga.prevId(firstDelSpan);
if (after2)
after = after2;
}
builder.insArr(op.obj, after, copies);
}
}
}
}
return builder.flush();
}
// ---------------------------------------------------------------- Printable
toString(tab) {
const patches = [];
this.patches.forEach(({ v }) => patches.push(v));
return ('log' +
printTree(tab, [
(tab) => 'start' + printTree(tab, [(tab) => this.start().toString(tab)]),
() => '',
(tab) => 'history' +
printTree(tab, patches.map((patch, i) => (tab) => `${i}: ${patch.toString(tab)}`)),
() => '',
(tab) => 'end' + printTree(tab, [(tab) => this.end.toString(tab)]),
]));
}
}
//# sourceMappingURL=Log.js.map