/**
 * @durable-streams/state — bundled distribution (served via UNPKG).
 * Version: (unspecified)
 * 504 lines (500 loc), 16.2 kB
 */
import {
  and,
  avg,
  count,
  createCollection,
  createOptimisticAction,
  eq,
  gt,
  gte,
  ilike,
  inArray,
  isNull,
  isUndefined,
  like,
  lt,
  lte,
  max,
  min,
  not,
  or,
  sum,
} from "@tanstack/db";
import { DurableStream } from "@durable-streams/client";

//#region src/types.ts

/**
 * Type guard to check if an event is a change event.
 * Change events are identified by the presence of an `operation` header.
 */
function isChangeEvent(event) {
  return event != null && "operation" in event.headers;
}

/**
 * Type guard to check if an event is a control event.
 * Control events are identified by the presence of a `control` header.
 */
function isControlEvent(event) {
  return event != null && "control" in event.headers;
}

//#endregion
//#region src/materialized-state.ts

/**
 * MaterializedState maintains an in-memory view of state from change events.
 *
 * It organizes data by type, where each type contains a map of key -> value.
 * This supports multi-type streams where different entity types can coexist.
 */
var MaterializedState = class {
  // Map<type, Map<key, value>>
  data;

  constructor() {
    this.data = new Map();
  }

  /**
   * Apply a single change event to update the materialized state.
   * insert/update/upsert all overwrite the key; delete removes it.
   */
  apply(event) {
    const { type, key, value, headers } = event;
    let typeMap = this.data.get(type);
    if (!typeMap) {
      typeMap = new Map();
      this.data.set(type, typeMap);
    }
    switch (headers.operation) {
      case "insert":
      case "update":
      case "upsert":
        typeMap.set(key, value);
        break;
      case "delete":
        typeMap.delete(key);
        break;
    }
  }

  /** Apply a batch of change events in order. */
  applyBatch(events) {
    for (const event of events) this.apply(event);
  }

  /** Get a specific value by type and key, or undefined if absent. */
  get(type, key) {
    const typeMap = this.data.get(type);
    if (!typeMap) return void 0;
    return typeMap.get(key);
  }

  /**
   * Get all entries for a specific type.
   * NOTE: returns a fresh empty Map (not stored) for unknown types.
   */
  getType(type) {
    return this.data.get(type) || new Map();
  }

  /** Clear all state. */
  clear() {
    this.data.clear();
  }

  /** Get the number of types in the state. */
  get typeCount() {
    return this.data.size;
  }

  /** Get all type names. */
  get types() {
    return Array.from(this.data.keys());
  }
};

//#endregion
//#region src/stream-db.ts

/**
 * Internal event dispatcher that routes stream events to collection handlers.
 *
 * Writes are buffered per-handler between begin() and commit(); commit happens
 * when the stream signals up-to-date (see markUpToDate()).
 */
var EventDispatcher = class {
  /** Map from event type to collection handler */
  handlers = new Map();
  /** Handlers that have pending writes (need commit) */
  pendingHandlers = new Set();
  /** Whether we've received the initial up-to-date signal */
  isUpToDate = false;
  /** Resolvers and rejecters for preload promises */
  preloadResolvers = [];
  preloadRejecters = [];
  /** Set of all txids that have been seen and committed */
  seenTxids = new Set();
  /** Txids collected during current batch (before commit) */
  pendingTxids = new Set();
  /** Resolvers waiting for specific txids */
  txidResolvers = new Map();
  /** Track existing keys per collection for upsert insert/update resolution */
  existingKeys = new Map();

  /**
   * Register a handler for a specific event type.
   */
  registerHandler(eventType, handler) {
    this.handlers.set(eventType, handler);
    if (!this.existingKeys.has(eventType)) this.existingKeys.set(eventType, new Set());
  }

  /**
   * Dispatch a change event to the appropriate collection.
   * Writes are buffered until commit() is called via markUpToDate().
   *
   * @throws {Error} if a non-delete event carries a non-object value.
   */
  dispatchChange(event) {
    if (!isChangeEvent(event)) return;
    // Collect txids even for events with no registered handler, so
    // awaitTxId() works for types the app does not materialize.
    if (event.headers.txid && typeof event.headers.txid === "string")
      this.pendingTxids.add(event.headers.txid);
    const handler = this.handlers.get(event.type);
    if (!handler) return;

    let operation = event.headers.operation;
    if (operation !== "delete") {
      if (typeof event.value !== "object" || event.value === null)
        throw new Error(
          `StreamDB collections require object values; got ${typeof event.value} for type=${event.type}, key=${event.key}`
        );
    }

    // Copy the value and stamp the stream key onto the primary-key field so
    // the collection's getKey() always agrees with the stream key.
    const value = { ...(event.value ?? {}) };
    value[handler.primaryKey] = event.key;

    if (!this.pendingHandlers.has(handler)) {
      handler.begin();
      this.pendingHandlers.add(handler);
    }

    // Resolve upsert into insert/update based on keys we've seen so far,
    // then keep the key set in sync with the effective operation.
    const keys = this.existingKeys.get(event.type);
    if (operation === "upsert") operation = keys?.has(event.key) ? "update" : "insert";
    if (operation === "insert" || operation === "update") keys?.add(event.key);
    else keys?.delete(event.key);

    try {
      handler.write(value, operation);
    } catch (error) {
      console.error(`[StreamDB] Error in handler.write():`, error);
      console.error(`[StreamDB] Event that caused error:`, {
        type: event.type,
        key: event.key,
        operation,
      });
      throw error;
    }
  }

  /**
   * Handle control events from the stream JSON items.
   * `reset` truncates every collection and re-arms the up-to-date latch.
   */
  dispatchControl(event) {
    if (!isControlEvent(event)) return;
    switch (event.headers.control) {
      case "reset":
        for (const handler of this.handlers.values()) handler.truncate();
        for (const keys of this.existingKeys.values()) keys.clear();
        this.pendingHandlers.clear();
        this.isUpToDate = false;
        break;
      case "snapshot-start":
      case "snapshot-end":
        // Informational markers; no state change required.
        break;
    }
  }

  /**
   * Commit all pending writes and handle the up-to-date signal:
   * resolves txid waiters for committed txids, and (first time only)
   * marks collections ready and resolves preload waiters.
   */
  markUpToDate() {
    for (const handler of this.pendingHandlers) {
      try {
        handler.commit();
      } catch (error) {
        console.error(`[StreamDB] Error in handler.commit():`, error);
        // Deliberate best-effort: tolerate a known upstream TanStack DB
        // groupBy bug rather than wedging the whole stream.
        if (
          error instanceof Error &&
          error.message.includes(`already exists in the collection`) &&
          error.message.includes(`live-query`)
        ) {
          console.warn(`[StreamDB] Known TanStack DB groupBy bug detected - continuing despite error`);
          console.warn(`[StreamDB] Queries with groupBy may show stale data until fixed`);
          continue;
        }
        throw error;
      }
    }
    this.pendingHandlers.clear();

    // Txids are only "seen" once their batch is committed.
    for (const txid of this.pendingTxids) {
      this.seenTxids.add(txid);
      const resolvers = this.txidResolvers.get(txid);
      if (resolvers) {
        for (const { resolve, timeoutId } of resolvers) {
          clearTimeout(timeoutId);
          resolve();
        }
        this.txidResolvers.delete(txid);
      }
    }
    this.pendingTxids.clear();

    if (!this.isUpToDate) {
      this.isUpToDate = true;
      for (const handler of this.handlers.values()) handler.markReady();
      for (const resolve of this.preloadResolvers) resolve();
      // FIX: clear the rejecters alongside the resolvers. Previously only
      // preloadResolvers was reset, leaving stale rejecters that a later
      // rejectAll() would still invoke on already-settled promises (a leak).
      this.preloadResolvers = [];
      this.preloadRejecters = [];
    }
  }

  /**
   * Wait for the stream to reach up-to-date state.
   */
  waitForUpToDate() {
    if (this.isUpToDate) return Promise.resolve();
    return new Promise((resolve, reject) => {
      this.preloadResolvers.push(resolve);
      this.preloadRejecters.push(reject);
    });
  }

  /**
   * Reject all waiting preload promises and txid waiters with an error.
   */
  rejectAll(error) {
    for (const reject of this.preloadRejecters) reject(error);
    this.preloadResolvers = [];
    this.preloadRejecters = [];
    for (const resolvers of this.txidResolvers.values())
      for (const { reject, timeoutId } of resolvers) {
        clearTimeout(timeoutId);
        reject(error);
      }
    this.txidResolvers.clear();
  }

  /** Check if we've received up-to-date. */
  get ready() {
    return this.isUpToDate;
  }

  /**
   * Wait for a specific txid to be seen (committed) in the stream.
   *
   * @param txid - transaction id to wait for
   * @param timeout - milliseconds before rejecting (default 5000)
   */
  awaitTxId(txid, timeout = 5e3) {
    if (this.seenTxids.has(txid)) return Promise.resolve();
    return new Promise((resolve, reject) => {
      const timeoutId = setTimeout(() => {
        // Remove this waiter so markUpToDate() won't resolve a dead promise.
        const resolvers = this.txidResolvers.get(txid);
        if (resolvers) {
          const index = resolvers.findIndex((r) => r.timeoutId === timeoutId);
          if (index !== -1) resolvers.splice(index, 1);
          if (resolvers.length === 0) this.txidResolvers.delete(txid);
        }
        reject(new Error(`Timeout waiting for txid: ${txid}`));
      }, timeout);
      if (!this.txidResolvers.has(txid)) this.txidResolvers.set(txid, []);
      this.txidResolvers.get(txid).push({ resolve, reject, timeoutId });
    });
  }
};

/**
 * Create a sync config for a stream-backed collection.
 * Registers the collection's write hooks with the dispatcher; if the
 * dispatcher is already up-to-date, the collection is marked ready at once.
 */
function createStreamSyncConfig(eventType, dispatcher, primaryKey) {
  return {
    sync: ({ begin, write, commit, markReady, truncate }) => {
      dispatcher.registerHandler(eventType, {
        begin,
        write: (value, type) => {
          write({ value, type });
        },
        commit,
        markReady,
        truncate,
        primaryKey,
      });
      if (dispatcher.ready) markReady();
      // No per-collection teardown: the dispatcher lives as long as the DB.
      return () => {};
    },
  };
}

/**
 * Reserved collection names that would collide with StreamDB properties
 * (collections are now namespaced, but we still prevent internal name collisions)
 */
const RESERVED_COLLECTION_NAMES = new Set([
  `collections`,
  `preload`,
  `close`,
  `utils`,
  `actions`,
]);

/**
 * Create helper functions (insert/update/delete/upsert event builders) for a
 * collection. Each helper validates the payload against the collection's
 * Standard Schema and derives the event key from the primary-key field when
 * no explicit key is given.
 *
 * NOTE(review): `schema["~standard"].validate` is assumed to be synchronous
 * here; an async validator would return a Promise and skip the issues check.
 */
function createCollectionHelpers(eventType, primaryKey, schema) {
  return {
    insert: ({ key, value, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result)
        throw new Error(
          `Validation failed for ${eventType} insert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`
        );
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``)
        throw new Error(
          `Cannot create ${eventType} insert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`
        );
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: { ...headers, operation: `insert` },
      };
    },
    update: ({ key, value, oldValue, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result)
        throw new Error(
          `Validation failed for ${eventType} update: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`
        );
      if (oldValue !== void 0) {
        const oldResult = schema[`~standard`].validate(oldValue);
        if (`issues` in oldResult)
          throw new Error(
            `Validation failed for ${eventType} update (oldValue): ${oldResult.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`
          );
      }
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``)
        throw new Error(
          `Cannot create ${eventType} update event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`
        );
      return {
        type: eventType,
        key: finalKey,
        value,
        old_value: oldValue,
        headers: { ...headers, operation: `update` },
      };
    },
    delete: ({ key, oldValue, headers }) => {
      if (oldValue !== void 0) {
        const result = schema[`~standard`].validate(oldValue);
        if (`issues` in result)
          throw new Error(
            `Validation failed for ${eventType} delete (oldValue): ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`
          );
      }
      // FIX: previously this used `String(oldValue[primaryKey])`, which turns
      // a missing/null primary-key field into the literal string "undefined"
      // or "null" and slips past the guard. Derive the key the same way as
      // insert/update/upsert so a missing field is rejected.
      const derived = oldValue?.[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``)
        throw new Error(
          `Cannot create ${eventType} delete event: must provide either 'key' or 'oldValue' with a ${primaryKey} field`
        );
      return {
        type: eventType,
        key: finalKey,
        old_value: oldValue,
        headers: { ...headers, operation: `delete` },
      };
    },
    upsert: ({ key, value, headers }) => {
      const result = schema[`~standard`].validate(value);
      if (`issues` in result)
        throw new Error(
          `Validation failed for ${eventType} upsert: ${result.issues?.map((i) => i.message).join(`, `) ?? `Unknown validation error`}`
        );
      const derived = value[primaryKey];
      const finalKey = key ?? (derived != null && derived !== `` ? String(derived) : void 0);
      if (finalKey == null || finalKey === ``)
        throw new Error(
          `Cannot create ${eventType} upsert event: must provide either 'key' or a value with a non-empty '${primaryKey}' field`
        );
      return {
        type: eventType,
        key: finalKey,
        value,
        headers: { ...headers, operation: `upsert` },
      };
    },
  };
}

/**
 * Create a state schema definition with typed collections and event helpers.
 *
 * @throws {Error} on reserved collection names or duplicate event types.
 */
function createStateSchema(collections) {
  for (const name of Object.keys(collections))
    if (RESERVED_COLLECTION_NAMES.has(name))
      throw new Error(
        `Reserved collection name "${name}" - this would collide with StreamDB properties (${Array.from(RESERVED_COLLECTION_NAMES).join(`, `)})`
      );

  // Each stream event type may back at most one collection.
  const typeToCollection = new Map();
  for (const [collectionName, def] of Object.entries(collections)) {
    const existing = typeToCollection.get(def.type);
    if (existing)
      throw new Error(
        `Duplicate event type "${def.type}" - used by both "${existing}" and "${collectionName}" collections`
      );
    typeToCollection.set(def.type, collectionName);
  }

  const enhancedCollections = {};
  for (const [name, collectionDef] of Object.entries(collections))
    enhancedCollections[name] = {
      ...collectionDef,
      ...createCollectionHelpers(collectionDef.type, collectionDef.primaryKey, collectionDef.schema),
    };
  return enhancedCollections;
}

/**
 * Create a stream-backed database with TanStack DB collections
 *
 * This function is synchronous - it creates the stream handle and collections
 * but does not start the stream connection. Call `db.preload()` to connect
 * and sync initial data.
 *
 * @example
 * ```typescript
 * const stateSchema = createStateSchema({
 *   users: { schema: userSchema, type: "user", primaryKey: "id" },
 *   messages: { schema: messageSchema, type: "message", primaryKey: "id" },
 * })
 *
 * // Create a stream DB (synchronous - stream is created lazily on preload)
 * const db = createStreamDB({
 *   streamOptions: {
 *     url: "https://api.example.com/streams/my-stream",
 *     contentType: "application/json",
 *   },
 *   state: stateSchema,
 * })
 *
 * // preload() creates the stream and loads initial data
 * await db.preload()
 * const user = await db.collections.users.get("123")
 * ```
 */
function createStreamDB(options) {
  const { streamOptions, state, actions: actionsFactory } = options;
  const stream = new DurableStream(streamOptions);
  const dispatcher = new EventDispatcher();

  const collectionInstances = {};
  for (const [name, definition] of Object.entries(state)) {
    const collection = createCollection({
      id: `stream-db:${name}`,
      schema: definition.schema,
      getKey: (item) => String(item[definition.primaryKey]),
      sync: createStreamSyncConfig(definition.type, dispatcher, definition.primaryKey),
      startSync: true,
      gcTime: 0,
    });
    collectionInstances[name] = collection;
  }

  const abortController = new AbortController();
  let consumerStarted = false;

  /**
   * Start the stream consumer (called lazily on first preload).
   * Idempotent once the connection succeeds.
   */
  const startConsumer = async () => {
    if (consumerStarted) return;
    consumerStarted = true;
    try {
      const response = await stream.stream({
        live: true,
        signal: abortController.signal,
      });
      response.subscribeJson((batch) => {
        try {
          for (const event of batch.items)
            if (isChangeEvent(event)) dispatcher.dispatchChange(event);
            else if (isControlEvent(event)) dispatcher.dispatchControl(event);
          if (batch.upToDate) dispatcher.markUpToDate();
        } catch (error) {
          console.error(`[StreamDB] Error processing batch:`, error);
          console.error(`[StreamDB] Failed batch:`, batch);
          dispatcher.rejectAll(error);
          abortController.abort();
        }
        return Promise.resolve();
      });
    } catch (error) {
      // FIX: reset the flag when the initial connection fails; previously
      // consumerStarted stayed true, so a later preload() could never retry
      // and would hang forever on waitForUpToDate().
      consumerStarted = false;
      throw error;
    }
  };

  const dbMethods = {
    stream,
    preload: async () => {
      await startConsumer();
      await dispatcher.waitForUpToDate();
    },
    close: () => {
      dispatcher.rejectAll(new Error(`StreamDB closed`));
      abortController.abort();
    },
    utils: {
      awaitTxId: (txid, timeout) => dispatcher.awaitTxId(txid, timeout),
    },
  };
  const db = { collections: collectionInstances, ...dbMethods };

  if (actionsFactory) {
    const actionDefs = actionsFactory({ db, stream });
    const wrappedActions = {};
    for (const [name, def] of Object.entries(actionDefs))
      wrappedActions[name] = createOptimisticAction({
        onMutate: def.onMutate,
        mutationFn: def.mutationFn,
      });
    return { ...db, actions: wrappedActions };
  }
  return db;
}

//#endregion

export {
  MaterializedState,
  and,
  avg,
  count,
  createCollection,
  createOptimisticAction,
  createStateSchema,
  createStreamDB,
  eq,
  gt,
  gte,
  ilike,
  inArray,
  isChangeEvent,
  isControlEvent,
  isNull,
  isUndefined,
  like,
  lt,
  lte,
  max,
  min,
  not,
  or,
  sum,
};