/**
 * @durable-streams/state
 * State change event protocol for Durable Streams
 */
;
//#region rolldown:runtime
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
/**
 * Copy every own property of `from` onto `to` as a lazy getter, skipping
 * `except` and any key `to` already owns. Enumerability mirrors the source
 * property's descriptor. Returns `to`.
 */
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (key === except || __hasOwnProp.call(to, key)) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        // `key` is block-scoped per iteration, so a plain closure suffices.
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};
/**
 * Wrap a CommonJS module so it looks like an ES module namespace:
 * non-ESM modules get the whole module under `default`, and all own
 * properties are exposed as getters on the result.
 */
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const base = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(base, mod);
};
//#endregion
const __tanstack_db = __toESM(require("@tanstack/db"));
const __durable_streams_client = __toESM(require("@durable-streams/client"));
//#region src/types.ts
/**
 * Type guard: a change event is a non-null event whose headers carry an
 * `operation` field.
 */
function isChangeEvent(event) {
  if (event == null) return false;
  return `operation` in event.headers;
}
/**
 * Type guard: a control event is a non-null event whose headers carry a
 * `control` field.
 */
function isControlEvent(event) {
  if (event == null) return false;
  return `control` in event.headers;
}
//#endregion
//#region src/materialized-state.ts
/**
 * MaterializedState maintains an in-memory view of state from change events.
 *
 * Rows are partitioned per entity type: each type owns a key -> value map,
 * which lets several entity types coexist on a single stream.
 */
var MaterializedState = class {
  data;
  constructor() {
    // type name -> (key -> value)
    this.data = new Map();
  }
  /**
   * Apply a single change event to update the materialized state.
   * Unknown operations are ignored.
   */
  apply(event) {
    const { type, key, value, headers } = event;
    let entries = this.data.get(type);
    if (entries === undefined) {
      entries = new Map();
      this.data.set(type, entries);
    }
    const op = headers.operation;
    if (op === `delete`) {
      entries.delete(key);
    } else if (op === `insert` || op === `update` || op === `upsert`) {
      // All three write operations materialize to the same "set" action.
      entries.set(key, value);
    }
  }
  /**
   * Apply a batch of change events in order.
   */
  applyBatch(events) {
    events.forEach((event) => this.apply(event));
  }
  /**
   * Get a specific value by type and key; `undefined` when absent.
   */
  get(type, key) {
    return this.data.get(type)?.get(key);
  }
  /**
   * Get all entries for a specific type (an empty Map for unknown types).
   */
  getType(type) {
    return this.data.get(type) ?? new Map();
  }
  /**
   * Clear all state.
   */
  clear() {
    this.data.clear();
  }
  /**
   * Number of distinct types currently materialized.
   */
  get typeCount() {
    return this.data.size;
  }
  /**
   * All type names currently materialized.
   */
  get types() {
    return [...this.data.keys()];
  }
};
//#endregion
//#region src/stream-db.ts
/**
 * Internal event dispatcher that routes stream events to collection handlers.
 *
 * Change events are buffered into per-collection transactions (begin/write)
 * and committed when the stream signals up-to-date via markUpToDate().
 */
var EventDispatcher = class {
  /** Map from event type to collection handler */
  handlers = new Map();
  /** Handlers that have pending writes (need commit) */
  pendingHandlers = new Set();
  /** Whether we've received the initial up-to-date signal */
  isUpToDate = false;
  /** Resolvers and rejecters for preload promises */
  preloadResolvers = [];
  preloadRejecters = [];
  /** Set of all txids that have been seen and committed */
  seenTxids = new Set();
  /** Txids collected during current batch (before commit) */
  pendingTxids = new Set();
  /** Resolvers waiting for specific txids */
  txidResolvers = new Map();
  /** Track existing keys per collection for upsert logic */
  existingKeys = new Map();
  /**
   * Register a handler for a specific event type.
   */
  registerHandler(eventType, handler) {
    this.handlers.set(eventType, handler);
    if (!this.existingKeys.has(eventType)) this.existingKeys.set(eventType, new Set());
  }
  /**
   * Dispatch a change event to the appropriate collection.
   * Writes are buffered until commit() is called via markUpToDate().
   *
   * @throws when a non-delete event carries a non-object value (rows must be
   *         objects so the primary key can be attached).
   */
  dispatchChange(event) {
    if (!isChangeEvent(event)) return;
    // Record the txid so awaitTxId() callers can be resolved after commit.
    if (event.headers.txid && typeof event.headers.txid === `string`) this.pendingTxids.add(event.headers.txid);
    const handler = this.handlers.get(event.type);
    if (!handler) return;
    let operation = event.headers.operation;
    if (operation !== `delete`) {
      if (typeof event.value !== `object` || event.value === null) throw new Error(`StreamDB collections require object values; got ${typeof event.value} for type=${event.type}, key=${event.key}`);
    }
    // Copy the value and attach the event key as the row's primary key
    // (never mutate the incoming event).
    const originalValue = event.value ?? {};
    const value = { ...originalValue };
    value[handler.primaryKey] = event.key;
    // Open a transaction for this collection on its first write of the batch.
    if (!this.pendingHandlers.has(handler)) {
      handler.begin();
      this.pendingHandlers.add(handler);
    }
    // Resolve upsert into insert/update based on whether the key is known.
    if (operation === `upsert`) {
      const keys$1 = this.existingKeys.get(event.type);
      const existing = keys$1?.has(event.key);
      operation = existing ? `update` : `insert`;
    }
    const keys = this.existingKeys.get(event.type);
    if (operation === `insert` || operation === `update`) keys?.add(event.key);
    else keys?.delete(event.key);
    try {
      handler.write(value, operation);
    } catch (error) {
      console.error(`[StreamDB] Error in handler.write():`, error);
      console.error(`[StreamDB] Event that caused error:`, {
        type: event.type,
        key: event.key,
        operation
      });
      throw error;
    }
  }
  /**
   * Handle control events from the stream JSON items.
   */
  dispatchControl(event) {
    if (!isControlEvent(event)) return;
    switch (event.headers.control) {
      case `reset`:
        // Drop all materialized rows and wait for a fresh snapshot.
        for (const handler of this.handlers.values()) handler.truncate();
        for (const keys of this.existingKeys.values()) keys.clear();
        this.pendingHandlers.clear();
        this.isUpToDate = false;
        break;
      case `snapshot-start`:
      case `snapshot-end`:
        break;
    }
  }
  /**
   * Commit all pending writes and handle up-to-date signal.
   */
  markUpToDate() {
    for (const handler of this.pendingHandlers) {
      try {
        handler.commit();
      } catch (error) {
        console.error(`[StreamDB] Error in handler.commit():`, error);
        if (error instanceof Error && error.message.includes(`already exists in the collection`) && error.message.includes(`live-query`)) {
          console.warn(`[StreamDB] Known TanStack DB groupBy bug detected - continuing despite error`);
          console.warn(`[StreamDB] Queries with groupBy may show stale data until fixed`);
          continue;
        }
        throw error;
      }
    }
    this.pendingHandlers.clear();
    // Promote batch txids to "seen" and wake any awaitTxId() waiters.
    for (const txid of this.pendingTxids) {
      this.seenTxids.add(txid);
      const resolvers = this.txidResolvers.get(txid);
      if (resolvers) {
        for (const { resolve, timeoutId } of resolvers) {
          clearTimeout(timeoutId);
          resolve();
        }
        this.txidResolvers.delete(txid);
      }
    }
    this.pendingTxids.clear();
    if (!this.isUpToDate) {
      this.isUpToDate = true;
      for (const handler of this.handlers.values()) handler.markReady();
      for (const resolve of this.preloadResolvers) resolve();
      this.preloadResolvers = [];
      // Fix: drain the matching rejecters too. They belong to promises that
      // just resolved; keeping them leaked closures and made rejectAll()
      // fire no-op rejections at already-settled promises.
      this.preloadRejecters = [];
    }
  }
  /**
   * Wait for the stream to reach up-to-date state.
   */
  waitForUpToDate() {
    if (this.isUpToDate) return Promise.resolve();
    return new Promise((resolve, reject) => {
      this.preloadResolvers.push(resolve);
      this.preloadRejecters.push(reject);
    });
  }
  /**
   * Reject all waiting preload promises and txid waiters with an error.
   */
  rejectAll(error) {
    for (const reject of this.preloadRejecters) reject(error);
    this.preloadResolvers = [];
    this.preloadRejecters = [];
    for (const resolvers of this.txidResolvers.values()) {
      for (const { reject, timeoutId } of resolvers) {
        clearTimeout(timeoutId);
        reject(error);
      }
    }
    this.txidResolvers.clear();
  }
  /**
   * Check if we've received up-to-date.
   */
  get ready() {
    return this.isUpToDate;
  }
  /**
   * Wait for a specific txid to be seen (committed) in the stream.
   *
   * @param txid    transaction id carried in change-event headers
   * @param timeout milliseconds before the wait rejects (default 5000)
   */
  awaitTxId(txid, timeout = 5e3) {
    if (this.seenTxids.has(txid)) return Promise.resolve();
    return new Promise((resolve, reject) => {
      const timeoutId = setTimeout(() => {
        // Remove this waiter so a late commit doesn't call stale callbacks.
        const resolvers = this.txidResolvers.get(txid);
        if (resolvers) {
          const index = resolvers.findIndex((r) => r.timeoutId === timeoutId);
          if (index !== -1) resolvers.splice(index, 1);
          if (resolvers.length === 0) this.txidResolvers.delete(txid);
        }
        reject(new Error(`Timeout waiting for txid: ${txid}`));
      }, timeout);
      if (!this.txidResolvers.has(txid)) this.txidResolvers.set(txid, []);
      this.txidResolvers.get(txid).push({
        resolve,
        reject,
        timeoutId
      });
    });
  }
};
/**
 * Build a TanStack DB sync config whose writes are driven by the shared
 * stream dispatcher rather than by the collection itself.
 */
function createStreamSyncConfig(eventType, dispatcher, primaryKey) {
  const sync = ({ begin, write, commit, markReady, truncate }) => {
    dispatcher.registerHandler(eventType, {
      begin,
      commit,
      markReady,
      truncate,
      primaryKey,
      // Adapt the dispatcher's (value, operation) call to TanStack's
      // single-message write API.
      write: (value, type) => write({ value, type })
    });
    // If the stream already reached up-to-date, this collection is ready now.
    if (dispatcher.ready) markReady();
    // No per-collection teardown: the stream connection is owned by StreamDB.
    return () => {};
  };
  return { sync };
}
/**
 * Collection names that would collide with StreamDB's own properties.
 * Collections are namespaced under `collections` nowadays, but internal
 * names are still rejected to avoid ambiguity.
 */
const RESERVED_COLLECTION_NAMES = new Set([`collections`, `preload`, `close`, `utils`, `actions`]);
/**
 * Create event-builder helpers (insert/update/delete/upsert) for one
 * collection.
 *
 * Every helper validates payloads against the collection's Standard Schema
 * (via `schema["~standard"].validate`), derives the event key from the
 * explicit `key` or the payload's primary-key field, and returns a change
 * event ready to append to the stream.
 */
function createCollectionHelpers(eventType, primaryKey, schema) {
  /** Validate with the Standard Schema; throw a descriptive error listing issues. */
  const validate = (value, context) => {
    const result = schema[`~standard`].validate(value);
    if (`issues` in result) throw new Error(`Validation failed for ${eventType} ${context}: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`);
  };
  /** Derive the event key: explicit `key` wins, else the non-empty primary-key field. */
  const deriveKey = (key, source) => {
    const derived = source?.[primaryKey];
    return key ?? (derived != null && derived !== `` ? String(derived) : void 0);
  };
  return {
    insert: ({ key, value, headers }) => {
      validate(value, `insert`);
      const finalKey = deriveKey(key, value);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} insert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: {
          ...headers,
          operation: `insert`
        }
      };
    },
    update: ({ key, value, oldValue, headers }) => {
      validate(value, `update`);
      if (oldValue !== void 0) validate(oldValue, `update (oldValue)`);
      const finalKey = deriveKey(key, value);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} update event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        old_value: oldValue,
        headers: {
          ...headers,
          operation: `update`
        }
      };
    },
    delete: ({ key, oldValue, headers }) => {
      if (oldValue !== void 0) validate(oldValue, `delete (oldValue)`);
      // Fix: previously an oldValue missing the primary-key field produced
      // the literal key "undefined"; now it throws like the other operations.
      const finalKey = deriveKey(key, oldValue);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} delete event: must provide either 'key' or 'oldValue' with a ${primaryKey} field`);
      return {
        type: eventType,
        key: finalKey,
        old_value: oldValue,
        headers: {
          ...headers,
          operation: `delete`
        }
      };
    },
    upsert: ({ key, value, headers }) => {
      validate(value, `upsert`);
      const finalKey = deriveKey(key, value);
      if (finalKey == null || finalKey === ``) throw new Error(`Cannot create ${eventType} upsert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`);
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: {
          ...headers,
          operation: `upsert`
        }
      };
    }
  };
}
/**
 * Create a state schema definition with typed collections and event helpers.
 *
 * Rejects reserved collection names, enforces that each stream event type
 * belongs to at most one collection, then augments every definition with
 * insert/update/delete/upsert event builders.
 */
function createStateSchema(collections) {
  const entries = Object.entries(collections);
  for (const [name] of entries) {
    if (RESERVED_COLLECTION_NAMES.has(name)) {
      throw new Error(`Reserved collection name "${name}" - this would collide with StreamDB properties (${Array.from(RESERVED_COLLECTION_NAMES).join(`, `)})`);
    }
  }
  // Each event type may be claimed by exactly one collection.
  const typeToCollection = new Map();
  for (const [collectionName, def] of entries) {
    const existing = typeToCollection.get(def.type);
    if (existing) {
      throw new Error(`Duplicate event type "${def.type}" - used by both "${existing}" and "${collectionName}" collections`);
    }
    typeToCollection.set(def.type, collectionName);
  }
  const enhancedCollections = {};
  for (const [name, def] of entries) {
    enhancedCollections[name] = {
      ...def,
      ...createCollectionHelpers(def.type, def.primaryKey, def.schema)
    };
  }
  return enhancedCollections;
}
/**
 * Create a stream-backed database with TanStack DB collections
 *
 * This function is synchronous - it creates the stream handle and collections
 * but does not start the stream connection. Call `db.preload()` to connect
 * and sync initial data.
 *
 * @example
 * ```typescript
 * const stateSchema = createStateSchema({
 *   users: { schema: userSchema, type: "user", primaryKey: "id" },
 *   messages: { schema: messageSchema, type: "message", primaryKey: "id" },
 * })
 *
 * // Create a stream DB (synchronous - stream is created lazily on preload)
 * const db = createStreamDB({
 *   streamOptions: {
 *     url: "https://api.example.com/streams/my-stream",
 *     contentType: "application/json",
 *   },
 *   state: stateSchema,
 * })
 *
 * // preload() creates the stream and loads initial data
 * await db.preload()
 * const user = await db.collections.users.get("123")
 * ```
 */
function createStreamDB(options) {
  const { streamOptions, state, actions: actionsFactory } = options;
  const stream = new __durable_streams_client.DurableStream(streamOptions);
  const dispatcher = new EventDispatcher();
  // One TanStack DB collection per schema entry, all fed by the shared dispatcher.
  const collectionInstances = {};
  for (const [name, definition] of Object.entries(state)) {
    collectionInstances[name] = (0, __tanstack_db.createCollection)({
      id: `stream-db:${name}`,
      schema: definition.schema,
      getKey: (item) => String(item[definition.primaryKey]),
      sync: createStreamSyncConfig(definition.type, dispatcher, definition.primaryKey),
      startSync: true,
      gcTime: 0
    });
  }
  let streamResponse = null;
  const abortController = new AbortController();
  let consumerStarted = false;
  /**
   * Start the stream consumer (called lazily on first preload).
   * If connecting fails, waiting preloads are rejected and the guard is
   * reset so a later preload() can retry.
   */
  const startConsumer = async () => {
    if (consumerStarted) return;
    consumerStarted = true;
    try {
      streamResponse = await stream.stream({
        live: true,
        signal: abortController.signal
      });
    } catch (error) {
      // Fix: previously a failed connect left consumerStarted=true, so any
      // concurrent or subsequent preload() hung forever on waitForUpToDate().
      consumerStarted = false;
      dispatcher.rejectAll(error instanceof Error ? error : new Error(String(error)));
      throw error;
    }
    streamResponse.subscribeJson((batch) => {
      try {
        for (const event of batch.items) {
          if (isChangeEvent(event)) dispatcher.dispatchChange(event);
          else if (isControlEvent(event)) dispatcher.dispatchControl(event);
        }
        if (batch.upToDate) dispatcher.markUpToDate();
      } catch (error) {
        console.error(`[StreamDB] Error processing batch:`, error);
        console.error(`[StreamDB] Failed batch:`, batch);
        dispatcher.rejectAll(error);
        abortController.abort();
      }
      return Promise.resolve();
    });
  };
  const dbMethods = {
    stream,
    // Connect (lazily) and resolve once the stream reports up-to-date.
    preload: async () => {
      await startConsumer();
      await dispatcher.waitForUpToDate();
    },
    // Reject outstanding waiters and tear down the stream connection.
    close: () => {
      dispatcher.rejectAll(new Error(`StreamDB closed`));
      abortController.abort();
    },
    utils: { awaitTxId: (txid, timeout) => dispatcher.awaitTxId(txid, timeout) }
  };
  const db = {
    collections: collectionInstances,
    ...dbMethods
  };
  if (!actionsFactory) return db;
  // Wrap user-defined actions as TanStack optimistic actions.
  const actionDefs = actionsFactory({
    db,
    stream
  });
  const wrappedActions = {};
  for (const [name, def] of Object.entries(actionDefs)) {
    wrappedActions[name] = (0, __tanstack_db.createOptimisticAction)({
      onMutate: def.onMutate,
      mutationFn: def.mutationFn
    });
  }
  return {
    ...db,
    actions: wrappedActions
  };
}
//#endregion
exports.MaterializedState = MaterializedState;
/**
 * Re-export TanStack DB query builders as live getters so consumers can
 * import them from this package without a direct @tanstack/db dependency.
 * Calls are interleaved with the direct exports to preserve the original
 * property insertion order on `exports`.
 */
const defineReExports = (names) => {
  for (const name of names) {
    Object.defineProperty(exports, name, {
      enumerable: true,
      get: () => __tanstack_db[name]
    });
  }
};
defineReExports([`and`, `avg`, `count`, `createCollection`, `createOptimisticAction`]);
exports.createStateSchema = createStateSchema;
exports.createStreamDB = createStreamDB;
defineReExports([`eq`, `gt`, `gte`, `ilike`, `inArray`]);
exports.isChangeEvent = isChangeEvent;
exports.isControlEvent = isControlEvent;
defineReExports([`isNull`, `isUndefined`, `like`, `lt`, `lte`, `max`, `min`, `not`, `or`, `sum`]);