@tldraw/store
Version:
tldraw infinite canvas SDK (store).
738 lines (737 loc) • 23 kB
JavaScript
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var Store_exports = {};
__export(Store_exports, {
Store: () => Store,
createComputedCache: () => createComputedCache
});
module.exports = __toCommonJS(Store_exports);
var import_state = require("@tldraw/state");
var import_utils = require("@tldraw/utils");
var import_AtomMap = require("./AtomMap");
var import_RecordsDiff = require("./RecordsDiff");
var import_StoreQueries = require("./StoreQueries");
var import_StoreSideEffects = require("./StoreSideEffects");
var import_devFreeze = require("./devFreeze");
/**
 * A reactive record store. Records live in an `AtomMap` keyed by record id;
 * every mutation bumps the `history` atom, accumulates a diff for listeners
 * (flushed at most once per frame via `historyReactor`), and runs registered
 * side effects inside an atomic operation.
 */
class Store {
  /**
   * The random id of the store.
   */
  id;
  /**
   * An AtomMap containing the store's records.
   *
   * @internal
   * @readonly
   */
  records;
  /**
   * An atom containing the store's history (a counter incremented on every
   * change, carrying the diff as its change payload).
   *
   * @public
   * @readonly
   */
  history = (0, import_state.atom)("history", 0, {
    historyLength: 1e3
  });
  /**
   * A StoreQueries instance for this store.
   *
   * @public
   * @readonly
   */
  query;
  /**
   * A set containing listeners that have been added to this store.
   *
   * @internal
   */
  listeners = /* @__PURE__ */ new Set();
  /**
   * An array of history entries that have not yet been flushed.
   *
   * @internal
   */
  historyAccumulator = new HistoryAccumulator();
  /**
   * A reactor that responds to changes to the history by squashing the accumulated history and
   * notifying listeners of the changes.
   *
   * @internal
   */
  historyReactor;
  /**
   * Function to dispose of any in-flight timeouts. Replaced each time the
   * history reactor schedules an effect; the default is a no-op.
   *
   * @internal
   */
  cancelHistoryReactor() {
  }
  // The schema describing record types, validation, and migrations.
  schema;
  // Arbitrary per-store props supplied via the config.
  props;
  // For each record scope ('document' | 'session' | 'presence'), the set of
  // typeNames belonging to that scope. Computed once in the constructor.
  scopedTypes;
  // Manages before/after create/change/delete callbacks for this store.
  sideEffects = new import_StoreSideEffects.StoreSideEffects(this);
  /**
   * @param config - Contains the schema, optional `initialData`, optional
   * `id`, and `props`. Initial records are validated with phase "initialize"
   * and frozen in dev builds before being stored.
   */
  constructor(config) {
    const { initialData, schema, id } = config;
    this.id = id ?? (0, import_utils.uniqueId)();
    this.schema = schema;
    this.props = config.props;
    if (initialData) {
      this.records = new import_AtomMap.AtomMap(
        "store",
        (0, import_utils.objectMapEntries)(initialData).map(([id2, record]) => [
          id2,
          (0, import_devFreeze.devFreeze)(this.schema.validateRecord(this, record, "initialize", null))
        ])
      );
    } else {
      this.records = new import_AtomMap.AtomMap("store");
    }
    this.query = new import_StoreQueries.StoreQueries(this.records, this.history);
    // Flush accumulated history to listeners at most once per frame.
    this.historyReactor = (0, import_state.reactor)(
      "Store.historyReactor",
      () => {
        this.history.get();
        this._flushHistory();
      },
      { scheduleEffect: (cb) => this.cancelHistoryReactor = (0, import_utils.throttleToNextFrame)(cb) }
    );
    this.scopedTypes = {
      document: new Set(
        (0, import_utils.objectMapValues)(this.schema.types).filter((t) => t.scope === "document").map((t) => t.typeName)
      ),
      session: new Set(
        (0, import_utils.objectMapValues)(this.schema.types).filter((t) => t.scope === "session").map((t) => t.typeName)
      ),
      presence: new Set(
        (0, import_utils.objectMapValues)(this.schema.types).filter((t) => t.scope === "presence").map((t) => t.typeName)
      )
    };
  }
  /**
   * Deliver all accumulated history entries to listeners, honoring each
   * listener's source ('user'/'remote'/'all') and scope filters. The
   * scope-filtered diffs are computed lazily and shared across listeners.
   *
   * @internal
   */
  _flushHistory() {
    if (this.historyAccumulator.hasChanges()) {
      const entries = this.historyAccumulator.flush();
      for (const { changes, source } of entries) {
        let instanceChanges = null;
        let documentChanges = null;
        let presenceChanges = null;
        for (const { onHistory, filters } of this.listeners) {
          if (filters.source !== "all" && filters.source !== source) {
            continue;
          }
          if (filters.scope !== "all") {
            if (filters.scope === "document") {
              documentChanges ??= this.filterChangesByScope(changes, "document");
              if (!documentChanges) continue;
              onHistory({ changes: documentChanges, source });
            } else if (filters.scope === "session") {
              instanceChanges ??= this.filterChangesByScope(changes, "session");
              if (!instanceChanges) continue;
              onHistory({ changes: instanceChanges, source });
            } else {
              presenceChanges ??= this.filterChangesByScope(changes, "presence");
              if (!presenceChanges) continue;
              onHistory({ changes: presenceChanges, source });
            }
          } else {
            onHistory({ changes, source });
          }
        }
      }
    }
  }
  /** Cancel any pending history flush scheduled by the history reactor. */
  dispose() {
    this.cancelHistoryReactor();
  }
  /**
   * Filters a diff down to records belonging to the given scope. Returns null
   * if no added, updated, or removed record matches the scope.
   * @param change - the records diff
   * @param scope - the records scope ('document' | 'session' | 'presence')
   * @returns the filtered diff, or null if it would be empty
   */
  filterChangesByScope(change, scope) {
    const result = {
      added: (0, import_utils.filterEntries)(change.added, (_, r) => this.scopedTypes[scope].has(r.typeName)),
      updated: (0, import_utils.filterEntries)(change.updated, (_, r) => this.scopedTypes[scope].has(r[1].typeName)),
      removed: (0, import_utils.filterEntries)(change.removed, (_, r) => this.scopedTypes[scope].has(r.typeName))
    };
    if (Object.keys(result.added).length === 0 && Object.keys(result.updated).length === 0 && Object.keys(result.removed).length === 0) {
      return null;
    }
    return result;
  }
  /**
   * Update the history with a diff of changes.
   *
   * @param changes - The changes to add to the history.
   */
  updateHistory(changes) {
    this.historyAccumulator.add({
      changes,
      source: this.isMergingRemoteChanges ? "remote" : "user"
    });
    // With no listeners there is no one to flush to; drop the buffered
    // entries (interceptors were already notified by `add` above).
    if (this.listeners.size === 0) {
      this.historyAccumulator.clear();
    }
    this.history.set(this.history.get() + 1, changes);
  }
  /** Re-validate every record in the store for the given validation phase. */
  validate(phase) {
    this.allRecords().forEach((record) => this.schema.validateRecord(this, record, phase, null));
  }
  /**
   * Add some records to the store, updating (and re-validating) any records
   * that already exist under the same id.
   *
   * @param records - The records to add.
   * @param phaseOverride - Optional validation phase override.
   * @public
   */
  put(records, phaseOverride) {
    this.atomic(() => {
      const updates = {};
      const additions = {};
      let record;
      let didChange = false;
      const source = this.isMergingRemoteChanges ? "remote" : "user";
      for (let i = 0, n = records.length; i < n; i++) {
        record = records[i];
        const initialValue = this.records.__unsafe__getWithoutCapture(record.id);
        if (initialValue) {
          // Update path: run before-change side effects, validate, and skip
          // entirely if validation returns the identical object (no change).
          record = this.sideEffects.handleBeforeChange(initialValue, record, source);
          const validated = this.schema.validateRecord(
            this,
            record,
            phaseOverride ?? "updateRecord",
            initialValue
          );
          if (validated === initialValue) continue;
          record = (0, import_devFreeze.devFreeze)(record);
          this.records.set(record.id, record);
          didChange = true;
          updates[record.id] = [initialValue, record];
          this.addDiffForAfterEvent(initialValue, record);
        } else {
          // Create path: before-create side effects, then validate and store.
          record = this.sideEffects.handleBeforeCreate(record, source);
          didChange = true;
          record = this.schema.validateRecord(
            this,
            record,
            phaseOverride ?? "createRecord",
            null
          );
          record = (0, import_devFreeze.devFreeze)(record);
          additions[record.id] = record;
          this.addDiffForAfterEvent(null, record);
          this.records.set(record.id, record);
        }
      }
      if (!didChange) return;
      this.updateHistory({
        added: additions,
        updated: updates,
        removed: {}
      });
    });
  }
  /**
   * Remove some records from the store via their ids. A before-delete side
   * effect can veto an individual deletion by returning false.
   *
   * @param ids - The ids of the records to remove.
   * @public
   */
  remove(ids) {
    this.atomic(() => {
      const toDelete = new Set(ids);
      const source = this.isMergingRemoteChanges ? "remote" : "user";
      if (this.sideEffects.isEnabled()) {
        for (const id of ids) {
          const record = this.records.__unsafe__getWithoutCapture(id);
          if (!record) continue;
          if (this.sideEffects.handleBeforeDelete(record, source) === false) {
            toDelete.delete(id);
          }
        }
      }
      const actuallyDeleted = this.records.deleteMany(toDelete);
      if (actuallyDeleted.length === 0) return;
      const removed = {};
      for (const [id, record] of actuallyDeleted) {
        removed[id] = record;
        this.addDiffForAfterEvent(record, null);
      }
      this.updateHistory({ added: {}, updated: {}, removed });
    });
  }
  /**
   * Get the value of a store record by its id.
   *
   * @param id - The id of the record to get.
   * @public
   */
  get(id) {
    return this.records.get(id);
  }
  /**
   * Get the value of a store record by its id without updating its epoch
   * (i.e. without registering a reactive dependency).
   *
   * @param id - The id of the record to get.
   * @public
   */
  unsafeGetWithoutCapture(id) {
    return this.records.__unsafe__getWithoutCapture(id);
  }
  /**
   * Creates a JSON payload from the record store.
   *
   * @param scope - The scope of records to serialize ('all' includes every
   * scope). Defaults to 'document'.
   * @returns The record store snapshot as a JSON payload.
   */
  serialize(scope = "document") {
    const result = {};
    for (const [id, record] of this.records) {
      if (scope === "all" || this.scopedTypes[scope].has(record.typeName)) {
        result[id] = record;
      }
    }
    return result;
  }
  /**
   * Get a serialized snapshot of the store and its schema.
   *
   * ```ts
   * const snapshot = store.getStoreSnapshot()
   * store.loadStoreSnapshot(snapshot)
   * ```
   *
   * @param scope - The scope of records to serialize. Defaults to 'document'.
   *
   * @public
   */
  getStoreSnapshot(scope = "document") {
    return {
      store: this.serialize(scope),
      schema: this.schema.serialize()
    };
  }
  /**
   * @deprecated use `getSnapshot` from the 'tldraw' package instead.
   */
  getSnapshot(scope = "document") {
    console.warn(
      "[tldraw] `Store.getSnapshot` is deprecated and will be removed in a future release. Use `getSnapshot` from the `tldraw` package instead."
    );
    return this.getStoreSnapshot(scope);
  }
  /**
   * Migrate a serialized snapshot of the store and its schema.
   *
   * ```ts
   * const snapshot = store.getSnapshot()
   * store.migrateSnapshot(snapshot)
   * ```
   *
   * @param snapshot - The snapshot to migrate.
   * @throws if the schema cannot migrate the snapshot.
   * @public
   */
  migrateSnapshot(snapshot) {
    const migrationResult = this.schema.migrateStoreSnapshot(snapshot);
    if (migrationResult.type === "error") {
      throw new Error(`Failed to migrate snapshot: ${migrationResult.reason}`);
    }
    return {
      store: migrationResult.value,
      schema: this.schema.serialize()
    };
  }
  /**
   * Load a serialized snapshot, replacing the store's current contents.
   * Side effects are disabled for the duration of the load.
   *
   * ```ts
   * const snapshot = store.getStoreSnapshot()
   * store.loadStoreSnapshot(snapshot)
   * ```
   *
   * @param snapshot - The snapshot to load.
   * @throws if the schema cannot migrate the snapshot.
   * @public
   */
  loadStoreSnapshot(snapshot) {
    const migrationResult = this.schema.migrateStoreSnapshot(snapshot);
    if (migrationResult.type === "error") {
      throw new Error(`Failed to migrate snapshot: ${migrationResult.reason}`);
    }
    const prevSideEffectsEnabled = this.sideEffects.isEnabled();
    try {
      this.sideEffects.setIsEnabled(false);
      this.atomic(() => {
        this.clear();
        this.put(Object.values(migrationResult.value));
        this.ensureStoreIsUsable();
      });
    } finally {
      // Restore whatever side-effect state was active before the load.
      this.sideEffects.setIsEnabled(prevSideEffectsEnabled);
    }
  }
  /**
   * @public
   * @deprecated use `loadSnapshot` from the 'tldraw' package instead.
   */
  loadSnapshot(snapshot) {
    console.warn(
      "[tldraw] `Store.loadSnapshot` is deprecated and will be removed in a future release. Use `loadSnapshot` from the 'tldraw' package instead."
    );
    this.loadStoreSnapshot(snapshot);
  }
  /**
   * Get an array of all values in the store.
   *
   * @returns An array of all values in the store.
   * @public
   */
  allRecords() {
    return Array.from(this.records.values());
  }
  /**
   * Removes all records from the store.
   *
   * @public
   */
  clear() {
    this.remove(Array.from(this.records.keys()));
  }
  /**
   * Update a record. To update multiple records at once, use the `update` method of the
   * `TypedStore` class. Logs an error and returns if the record does not exist.
   *
   * @param id - The id of the record to update.
   * @param updater - A function that updates the record.
   */
  update(id, updater) {
    const existing = this.unsafeGetWithoutCapture(id);
    if (!existing) {
      console.error(`Record ${id} not found. This is probably an error`);
      return;
    }
    this.put([updater(existing)]);
  }
  /**
   * Get whether the record store has a id.
   *
   * @param id - The id of the record to check.
   * @public
   */
  has(id) {
    return this.records.has(id);
  }
  /**
   * Add a new listener to the store. Starts the history reactor on the first
   * listener and stops it when the last listener is removed.
   *
   * @param onHistory - The listener to call when the store updates.
   * @param filters - Filters (source/scope) to apply to the listener.
   * @returns A function to remove the listener.
   */
  listen(onHistory, filters) {
    // Flush any pending entries first so the new listener only receives
    // changes made after it subscribed.
    this._flushHistory();
    const listener = {
      onHistory,
      filters: {
        source: filters?.source ?? "all",
        scope: filters?.scope ?? "all"
      }
    };
    if (!this.historyReactor.scheduler.isActivelyListening) {
      this.historyReactor.start();
      this.historyReactor.scheduler.execute();
    }
    this.listeners.add(listener);
    return () => {
      this.listeners.delete(listener);
      if (this.listeners.size === 0) {
        this.historyReactor.stop();
      }
    };
  }
  // True while changes are being applied inside `mergeRemoteChanges`; used to
  // tag history entries and side effects with source 'remote'.
  isMergingRemoteChanges = false;
  /**
   * Merge changes from a remote source.
   *
   * @param fn - A function that merges the external changes.
   * @throws if called while already inside an atomic operation.
   * @public
   */
  mergeRemoteChanges(fn) {
    // Re-entrant calls just run the function in the existing merge context.
    if (this.isMergingRemoteChanges) {
      return fn();
    }
    if (this._isInAtomicOp) {
      throw new Error("Cannot merge remote changes while in atomic operation");
    }
    try {
      this.atomic(fn, true, true);
    } finally {
      this.ensureStoreIsUsable();
    }
  }
  /**
   * Run `fn` and return a {@link RecordsDiff} of the changes that occurred as a result.
   */
  extractingChanges(fn) {
    const changes = [];
    // Capture every history entry produced while `fn` runs, then squash them
    // into a single diff.
    const dispose = this.historyAccumulator.addInterceptor((entry) => changes.push(entry.changes));
    try {
      (0, import_state.transact)(fn);
      return (0, import_RecordsDiff.squashRecordDiffs)(changes);
    } finally {
      dispose();
    }
  }
  /**
   * Apply a RecordsDiff to the store.
   *
   * @param diff - The diff to apply.
   * @param options - `runCallbacks` (default true) controls whether side
   * effects run; `ignoreEphemeralKeys` (default false) preserves the existing
   * values of a type's ephemeral keys when applying updates.
   */
  applyDiff(diff, {
    runCallbacks = true,
    ignoreEphemeralKeys = false
  } = {}) {
    this.atomic(() => {
      const toPut = (0, import_utils.objectMapValues)(diff.added);
      for (const [_from, to] of (0, import_utils.objectMapValues)(diff.updated)) {
        const type = this.schema.getType(to.typeName);
        if (ignoreEphemeralKeys && type.ephemeralKeySet.size) {
          const existing = this.get(to.id);
          if (!existing) {
            toPut.push(to);
            continue;
          }
          // Copy over only non-ephemeral keys whose values actually changed.
          let changed = null;
          for (const [key, value] of Object.entries(to)) {
            if (type.ephemeralKeySet.has(key) || Object.is(value, (0, import_utils.getOwnProperty)(existing, key))) {
              continue;
            }
            if (!changed) changed = { ...existing };
            changed[key] = value;
          }
          if (changed) toPut.push(changed);
        } else {
          toPut.push(to);
        }
      }
      const toRemove = (0, import_utils.objectMapKeys)(diff.removed);
      if (toPut.length) {
        this.put(toPut);
      }
      if (toRemove.length) {
        this.remove(toRemove);
      }
    }, runCallbacks);
  }
  /**
   * Create a cache based on values in the store. Pass in a function that takes and ID and a
   * signal for the underlying record. Return a signal (usually a computed) for the cached value.
   * For simple derivations, use {@link Store.createComputedCache}. This function is useful if you
   * need more precise control over intermediate values.
   */
  createCache(create) {
    // Keyed weakly by the record's atom, so entries die with their records.
    const cache = new import_utils.WeakCache();
    return {
      get: (id) => {
        const atom2 = this.records.getAtom(id);
        if (!atom2) return void 0;
        return cache.get(atom2, () => create(id, atom2)).get();
      }
    };
  }
  /**
   * Create a computed cache.
   *
   * @param name - The name of the derivation cache.
   * @param derive - A function used to derive the value of the cache.
   * @param opts - Options for the computed cache (custom equality for records
   * and/or results).
   * @public
   */
  createComputedCache(name, derive, opts) {
    return this.createCache((id, record) => {
      // Optionally wrap the record signal so `derive` only re-runs when the
      // record changes according to `areRecordsEqual`.
      const recordSignal = opts?.areRecordsEqual ? (0, import_state.computed)(`${name}:${id}:isEqual`, () => record.get(), { isEqual: opts.areRecordsEqual }) : record;
      return (0, import_state.computed)(
        name + ":" + id,
        () => {
          return derive(recordSignal.get());
        },
        {
          isEqual: opts?.areResultsEqual
        }
      );
    });
  }
  // Lazily-created integrity checker supplied by the schema; see
  // `ensureStoreIsUsable`.
  _integrityChecker;
  /** @internal */
  ensureStoreIsUsable() {
    this.atomic(() => {
      this._integrityChecker ??= this.schema.createIntegrityChecker(this);
      this._integrityChecker?.();
    });
  }
  _isPossiblyCorrupted = false;
  /** @internal */
  markAsPossiblyCorrupted() {
    this._isPossiblyCorrupted = true;
  }
  /** @internal */
  isPossiblyCorrupted() {
    return this._isPossiblyCorrupted;
  }
  // Map of record id -> { before, after } accumulated during an atomic
  // operation; null when no atomic operation is in progress.
  pendingAfterEvents = null;
  /**
   * Record a before/after pair for the current atomic operation so the
   * matching after-create/change/delete side effects fire on flush. Multiple
   * changes to the same id are coalesced (first `before`, latest `after`).
   *
   * @internal
   */
  addDiffForAfterEvent(before, after) {
    (0, import_utils.assert)(this.pendingAfterEvents, "must be in event operation");
    if (before === after) return;
    if (before && after) (0, import_utils.assert)(before.id === after.id);
    if (!before && !after) return;
    const id = (before || after).id;
    const existing = this.pendingAfterEvents.get(id);
    if (existing) {
      existing.after = after;
    } else {
      this.pendingAfterEvents.set(id, { before, after });
    }
  }
  /**
   * Run after-event side effects until no new events are produced. Side
   * effects may themselves mutate the store, repopulating
   * `pendingAfterEvents`; a depth guard bails out after 100 rounds. Changes
   * made by side effects are attributed to source 'user'.
   *
   * @internal
   */
  flushAtomicCallbacks(isMergingRemoteChanges) {
    let updateDepth = 0;
    let source = isMergingRemoteChanges ? "remote" : "user";
    while (this.pendingAfterEvents) {
      const events = this.pendingAfterEvents;
      this.pendingAfterEvents = null;
      if (!this.sideEffects.isEnabled()) continue;
      updateDepth++;
      if (updateDepth > 100) {
        throw new Error("Maximum store update depth exceeded, bailing out");
      }
      for (const { before, after } of events.values()) {
        if (before && after && before !== after && !(0, import_utils.isEqual)(before, after)) {
          this.sideEffects.handleAfterChange(before, after, source);
        } else if (before && !after) {
          this.sideEffects.handleAfterDelete(before, source);
        } else if (!before && after) {
          this.sideEffects.handleAfterCreate(after, source);
        }
      }
      if (!this.pendingAfterEvents) {
        this.sideEffects.handleOperationComplete(source);
      } else {
        // Subsequent rounds were produced by side effects, i.e. local code.
        source = "user";
      }
    }
  }
  _isInAtomicOp = false;
  /**
   * Run `fn` as an atomic operation inside a transaction. Nested calls join
   * the outer operation (and may temporarily disable side effects when
   * `runCallbacks` is false); the outermost call flushes after-event side
   * effects before returning.
   *
   * @internal
   */
  atomic(fn, runCallbacks = true, isMergingRemoteChanges = false) {
    return (0, import_state.transact)(() => {
      if (this._isInAtomicOp) {
        if (!this.pendingAfterEvents) this.pendingAfterEvents = /* @__PURE__ */ new Map();
        const prevSideEffectsEnabled2 = this.sideEffects.isEnabled();
        (0, import_utils.assert)(!isMergingRemoteChanges, "cannot call mergeRemoteChanges while in atomic operation");
        try {
          if (prevSideEffectsEnabled2 && !runCallbacks) {
            this.sideEffects.setIsEnabled(false);
          }
          return fn();
        } finally {
          this.sideEffects.setIsEnabled(prevSideEffectsEnabled2);
        }
      }
      this.pendingAfterEvents = /* @__PURE__ */ new Map();
      const prevSideEffectsEnabled = this.sideEffects.isEnabled();
      this.sideEffects.setIsEnabled(runCallbacks ?? prevSideEffectsEnabled);
      this._isInAtomicOp = true;
      if (isMergingRemoteChanges) {
        this.isMergingRemoteChanges = true;
      }
      try {
        const result = fn();
        // After-event callbacks run outside the 'remote' attribution window.
        this.isMergingRemoteChanges = false;
        this.flushAtomicCallbacks(isMergingRemoteChanges);
        return result;
      } finally {
        this.pendingAfterEvents = null;
        this.sideEffects.setIsEnabled(prevSideEffectsEnabled);
        this._isInAtomicOp = false;
        this.isMergingRemoteChanges = false;
      }
    });
  }
  /** @internal */
  addHistoryInterceptor(fn) {
    return this.historyAccumulator.addInterceptor(
      (entry) => fn(entry, this.isMergingRemoteChanges ? "remote" : "user")
    );
  }
}
/**
 * Collapse a list of history entries by grouping consecutive entries that
 * share the same source, then squashing each group's diffs into one entry.
 * The result is dev-frozen before being returned.
 */
function squashHistoryEntries(entries) {
  if (entries.length === 0) return [];
  // Build runs of consecutive same-source entries.
  const groups = [];
  for (const entry of entries) {
    const current = groups.length > 0 ? groups[groups.length - 1] : void 0;
    if (current && current[0].source === entry.source) {
      current.push(entry);
    } else {
      groups.push([entry]);
    }
  }
  // One squashed entry per run, tagged with the run's source.
  const squashed = groups.map((group) => ({
    source: group[0].source,
    changes: (0, import_RecordsDiff.squashRecordDiffs)(group.map((e) => e.changes))
  }));
  return (0, import_devFreeze.devFreeze)(squashed);
}
/**
 * A buffer of history entries awaiting delivery to store listeners.
 * Interceptors observe entries synchronously as they are added, independent
 * of flushing.
 */
class HistoryAccumulator {
  // Entries accumulated since the last flush/clear.
  _history = [];
  // Callbacks invoked for every entry passed to `add`.
  _interceptors = /* @__PURE__ */ new Set();
  /**
   * Register a callback to observe each added entry.
   * @returns a disposer that unregisters the callback.
   */
  addInterceptor(fn) {
    this._interceptors.add(fn);
    return () => void this._interceptors.delete(fn);
  }
  /** Append an entry to the buffer and notify all interceptors. */
  add(entry) {
    this._history.push(entry);
    for (const interceptor of this._interceptors) interceptor(entry);
  }
  /** Squash and return everything buffered so far, emptying the buffer. */
  flush() {
    const squashed = squashHistoryEntries(this._history);
    this._history = [];
    return squashed;
  }
  /** Discard all buffered entries without notifying anyone. */
  clear() {
    this._history = [];
  }
  /** Whether any entries are waiting to be flushed. */
  hasChanges() {
    return this._history.length > 0;
  }
}
/**
 * Create a computed cache shared across stores/contexts. The context may be a
 * `Store` or any object with a `store` property; one per-store computed cache
 * is created lazily for each context and held weakly, so contexts can be
 * garbage-collected.
 *
 * @param name - The name of the derivation cache.
 * @param derive - Derives the cached value from the context and a record.
 * @param opts - Options forwarded to `Store.createComputedCache`.
 */
function createComputedCache(name, derive, opts) {
  const cachePerContext = new import_utils.WeakCache();
  const buildCacheFor = (context) => {
    const store = context instanceof Store ? context : context.store;
    return store.createComputedCache(name, (record) => derive(context, record), opts);
  };
  return {
    get(context, id) {
      return cachePerContext.get(context, () => buildCacheFor(context)).get(id);
    }
  };
}
//# sourceMappingURL=Store.js.map