// NOTE(review): UNPKG viewer header (page-copy residue, not part of the module).
// Commented out so the file parses as JavaScript; original text preserved below.
// UNPKG
// @instantdb/core
// Version:
// 708 lines • 23.6 kB
"use strict";
// Transpiled CommonJS module. Manages Instant write/read streams over the
// reactor connection: write streams buffer unflushed chunks and replay them
// after reconnects; read streams backfill from fetched files and live appends.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.InstantStream = void 0;
const id_ts_1 = __importDefault(require("./utils/id.js"));
const Reactor_js_1 = require("./Reactor.js");
const InstantError_ts_1 = require("./InstantError.js");
/**
 * Builds a writable stream (an instance of the provided `WStream` class) that
 * forwards written string chunks to the server via `appendStream`.
 *
 * Chunks are kept in a local buffer until the server reports them flushed
 * (`onFlush`), so they can be replayed after a disconnect/reconnect cycle.
 * Offsets are tracked in encoded UTF-8 bytes.
 *
 * @param {Object} deps
 * @param {Function} deps.WStream - WritableStream-compatible constructor.
 * @param {Object} deps.opts - caller options; `opts.clientId` identifies the stream.
 * @param {Function} deps.startStream - async; negotiates a stream id with the server.
 * @param {Function} deps.appendStream - sends chunks (and done/abort flags) to the server.
 * @param {Function} deps.registerStream - registers lifecycle callbacks keyed by stream id.
 * @returns {{stream: Object, addCloseCb: Function, closed: Function}}
 */
function createWriteStream({ WStream, opts, startStream, appendStream, registerStream, }) {
    const clientId = opts.clientId;
    let streamId_ = null;
    let controller_ = null;
    // Token the server uses to recognize us across reconnects.
    const reconnectToken = (0, id_ts_1.default)();
    let isDone = false;
    let closed = false;
    const closeCbs = [];
    const streamIdCbs = [];
    let disconnected = false;
    // Chunks that we haven't been notified are flushed to disk.
    // `bufferOffset` is the byte offset of the first buffered chunk;
    // `bufferByteSize` is the total byte length of the buffered chunks.
    let bufferOffset = 0;
    let bufferByteSize = 0;
    const buffer = [];
    const encoder = new TextEncoder();

    // Marks the stream closed and notifies subscribers.
    function markClosed() {
        closed = true;
        for (const cb of closeCbs) {
            cb();
        }
    }

    // Subscribes to close; returns an unsubscribe function.
    function addCloseCb(cb) {
        closeCbs.push(cb);
        return () => {
            const i = closeCbs.indexOf(cb);
            if (i !== -1) {
                closeCbs.splice(i, 1);
            }
        };
    }

    // Subscribes to stream-id assignment; returns an unsubscribe function.
    function addStreamIdCb(cb) {
        streamIdCbs.push(cb);
        return () => {
            const i = streamIdCbs.indexOf(cb);
            if (i !== -1) {
                streamIdCbs.splice(i, 1);
            }
        };
    }

    // Records the server-assigned stream id and notifies waiters.
    function setStreamId(streamId) {
        streamId_ = streamId;
        for (const cb of streamIdCbs) {
            cb(streamId_);
        }
    }

    function onDisconnect() {
        disconnected = true;
    }

    // Clears data from our buffer after it has been flushed to a file.
    // Only whole chunks at or below `offset` are dropped; a chunk that
    // straddles `offset` is kept in full.
    function discardFlushed(offset) {
        let chunkOffset = bufferOffset;
        let segmentsToDrop = 0;
        let droppedSegmentsByteLen = 0;
        for (const { byteLen } of buffer) {
            const nextChunkOffset = chunkOffset + byteLen;
            if (nextChunkOffset > offset) {
                break;
            }
            chunkOffset = nextChunkOffset;
            segmentsToDrop++;
            droppedSegmentsByteLen += byteLen;
        }
        if (segmentsToDrop > 0) {
            bufferOffset += droppedSegmentsByteLen;
            bufferByteSize -= droppedSegmentsByteLen;
            buffer.splice(0, segmentsToDrop);
        }
    }

    // Re-negotiates the stream after a reconnect, replaying any chunks the
    // server hasn't confirmed as flushed. Never rejects: unexpected failures
    // are routed to the stream controller (FIX: previously a rejection from
    // `startStream` escaped as an unhandled promise rejection when this was
    // fired from `onAppendFailed` / connection-status changes).
    async function onConnectionReconnect() {
        try {
            const result = await startStream({ clientId, reconnectToken, });
            switch (result.type) {
                case 'ok': {
                    const { streamId, offset } = result;
                    streamId_ = streamId;
                    discardFlushed(offset);
                    if (buffer.length) {
                        appendStream({
                            streamId: streamId,
                            chunks: buffer.map((b) => b.chunk),
                            offset: bufferOffset,
                        });
                    }
                    disconnected = false;
                    break;
                }
                case 'disconnect': {
                    onDisconnect();
                    break;
                }
                case 'error': {
                    if (controller_) {
                        controller_.error(result.error);
                        markClosed();
                    }
                    break;
                }
            }
        }
        catch (e) {
            if (controller_) {
                controller_.error(e);
                markClosed();
            }
        }
    }

    // When the append fails, we'll just try to reconnect and start again.
    function onAppendFailed() {
        onDisconnect();
        // Safe to fire-and-forget: onConnectionReconnect handles its own errors.
        void onConnectionReconnect();
    }

    // Server acknowledged bytes up to `offset`; `done` means the stream is
    // fully persisted and no further writes are possible.
    function onFlush({ offset, done }) {
        discardFlushed(offset);
        if (done) {
            isDone = true;
        }
    }

    // Errors the controller and marks the stream closed.
    function error(controller, e) {
        markClosed();
        controller.error(e);
    }

    // Validates that the stream is live and initialized before a write.
    // Returns the stream id, or null after signaling an error.
    // FIX: previously, when `isDone` was set but a stream id existed, this
    // errored the controller yet still returned the (truthy) id, so write()
    // kept buffering/appending to a closed stream.
    function ensureSetup(controller) {
        if (isDone) {
            error(controller, new InstantError_ts_1.InstantError('Stream has been closed.'));
            return null;
        }
        if (!streamId_) {
            error(controller, new InstantError_ts_1.InstantError('Stream has not been initialized.'));
            return null;
        }
        return streamId_;
    }

    // Underlying-sink start: negotiates a stream id, retrying with backoff
    // while the connection reports 'disconnect'.
    async function start(controller) {
        controller_ = controller;
        let tryAgain = true;
        let attempts = 0;
        while (tryAgain) {
            // rate-limit after the first few failed connects
            // (attempts 0 and 1 produce a non-positive delay, i.e. immediate)
            const nextAttempt = Date.now() + Math.min(15000, 500 * (attempts - 1));
            tryAgain = false;
            const result = await startStream({ clientId: opts.clientId, reconnectToken, });
            switch (result.type) {
                case 'ok': {
                    const { streamId, offset } = result;
                    // A fresh write stream must begin at byte 0.
                    if (offset !== 0) {
                        const e = new InstantError_ts_1.InstantError('Write stream is corrupted');
                        error(controller, e);
                        return;
                    }
                    setStreamId(streamId);
                    registerStream(streamId, {
                        onDisconnect,
                        onFlush,
                        onConnectionReconnect,
                        onAppendFailed,
                    });
                    disconnected = false;
                    return;
                }
                case 'disconnect': {
                    tryAgain = true;
                    onDisconnect();
                    attempts++;
                    await new Promise((resolve) => {
                        // Try again immediately for the first two attempts, then back off
                        setTimeout(resolve, nextAttempt - Date.now());
                    });
                    break;
                }
                case 'error': {
                    error(controller, result.error);
                    return;
                }
            }
        }
    }

    // Adds a `streamId()` accessor on top of the caller-provided WStream class.
    class WStreamEnhanced extends WStream {
        constructor(sink, strategy) {
            super(sink, strategy);
        }
        /**
         * Resolves with the server-assigned stream id once available;
         * rejects if the stream closes first.
         */
        async streamId() {
            if (streamId_) {
                return streamId_;
            }
            return new Promise((resolve, reject) => {
                const cleanupFns = [];
                const cleanup = () => {
                    for (const f of cleanupFns) {
                        f();
                    }
                };
                const resolveCb = (streamId) => {
                    resolve(streamId);
                    cleanup();
                };
                const rejectCb = () => {
                    reject(new InstantError_ts_1.InstantError('Stream is closed.'));
                    cleanup();
                };
                cleanupFns.push(addStreamIdCb(resolveCb));
                cleanupFns.push(addCloseCb(rejectCb));
            });
        }
    }

    const stream = new WStreamEnhanced({
        // TODO(dww): accept a storage so that write streams can survive across
        // browser restarts
        async start(controller) {
            try {
                await start(controller);
            }
            catch (e) {
                error(controller, e);
            }
        },
        write(chunk, controller) {
            const streamId = ensureSetup(controller);
            if (streamId) {
                const byteLen = encoder.encode(chunk).length;
                buffer.push({ chunk, byteLen });
                const offset = bufferOffset + bufferByteSize;
                bufferByteSize += byteLen;
                // While disconnected we only buffer; replay happens on reconnect.
                if (!disconnected) {
                    appendStream({ streamId, chunks: [chunk], offset });
                }
            }
        },
        close() {
            if (streamId_) {
                appendStream({
                    streamId: streamId_,
                    chunks: [],
                    offset: bufferOffset + bufferByteSize,
                    isDone: true,
                });
            }
            markClosed();
        },
        abort(reason) {
            if (streamId_) {
                appendStream({
                    streamId: streamId_,
                    chunks: [],
                    offset: bufferOffset + bufferByteSize,
                    isDone: true,
                    abortReason: reason,
                });
            }
            markClosed();
        },
    });

    return {
        stream,
        addCloseCb,
        closed() {
            return closed;
        },
    };
}
/**
 * Unbounded push-based async iterator: `push` hands items to waiting
 * consumers (or queues them), `close` terminates all pending/future reads.
 * Pushes after close are silently dropped.
 */
class StreamIterator {
    items = [];
    resolvers = [];
    isClosed = false;
    constructor() { }
    push(item) {
        if (this.isClosed)
            return;
        const resolve = this.resolvers.shift();
        if (resolve) {
            resolve({ value: item, done: false });
        }
        else {
            this.items.push(item);
        }
    }
    close() {
        this.isClosed = true;
        // Release every consumer still waiting on a value.
        while (this.resolvers.length > 0) {
            const resolve = this.resolvers.shift();
            resolve({ value: undefined, done: true });
        }
    }
    async *[Symbol.asyncIterator]() {
        while (true) {
            if (this.items.length > 0) {
                yield this.items.shift();
            }
            else if (this.isClosed) {
                return;
            }
            else {
                const { value, done } = await new Promise((resolve) => {
                    this.resolvers.push(resolve);
                });
                if (done || !value) {
                    return;
                }
                yield value;
            }
        }
    }
}
/**
 * Builds a readable stream (an instance of the provided `RStream` class)
 * that emits decoded string chunks. Historical data arrives as file URLs
 * (fetched and streamed) and/or inline `content`; bytes already seen
 * (below `seenOffset`) are discarded so reconnects don't duplicate output.
 *
 * @param {Object} deps
 * @param {Function} deps.RStream - ReadableStream-compatible constructor.
 * @param {Object} deps.opts - caller options; `opts.byteOffset` resumes mid-stream.
 * @param {Function} deps.startStream - returns an async iterable of append items.
 * @param {Function} deps.cancelStream - unsubscribes by event id.
 * @returns {{stream: Object, addCloseCb: Function, closed: Function}}
 */
function createReadStream({ RStream, opts, startStream, cancelStream, }) {
    let seenOffset = opts.byteOffset || 0;
    let canceled = false;
    const decoder = new TextDecoder('utf-8');
    const encoder = new TextEncoder();
    let eventId;
    let closed = false;
    const closeCbs = [];

    function markClosed() {
        closed = true;
        for (const cb of closeCbs) {
            cb();
        }
    }

    // Subscribes to close; returns an unsubscribe function.
    function addCloseCb(cb) {
        closeCbs.push(cb);
        return () => {
            const i = closeCbs.indexOf(cb);
            if (i !== -1) {
                closeCbs.splice(i, 1);
            }
        };
    }

    function error(controller, e) {
        controller.error(e);
        markClosed();
    }

    // Consecutive non-ok fetch responses; reset after a fully-processed batch.
    let fetchFailures = 0;

    // Consumes one subscription. Returns `{retry: true}` when the caller
    // should resubscribe, undefined otherwise.
    async function runStartStream(runOpts, controller) {
        eventId = (0, id_ts_1.default)();
        const streamOpts = { ...(runOpts || {}), eventId };
        for await (const item of startStream(streamOpts)) {
            if (canceled) {
                return;
            }
            if (item.type === 'reconnect') {
                return { retry: true };
            }
            if (item.type === 'error') {
                error(controller, item.error);
                return;
            }
            // The server must never start an append past what we've consumed.
            if (item.offset > seenOffset) {
                error(controller, new InstantError_ts_1.InstantError('Stream is corrupted.'));
                canceled = true;
                return;
            }
            // Bytes at the front of this item that we've already emitted.
            let discardLen = seenOffset - item.offset;
            if (item.files && item.files.length) {
                const fetchAbort = new AbortController();
                // Prefetch pipeline: the next file downloads while the current
                // one is being consumed.
                let nextFetch = fetch(item.files[0].url, {
                    signal: fetchAbort.signal,
                });
                for (let i = 0; i < item.files.length; i++) {
                    const nextFile = item.files[i + 1];
                    const thisFetch = nextFetch;
                    const res = await thisFetch;
                    if (nextFile) {
                        nextFetch = fetch(nextFile.url, { signal: fetchAbort.signal });
                    }
                    if (!res.ok) {
                        fetchFailures++;
                        if (fetchFailures > 10) {
                            error(controller, new InstantError_ts_1.InstantError('Unable to process stream.'));
                            return;
                        }
                        return { retry: true };
                    }
                    if (res.body) {
                        for await (const bodyChunk of res.body) {
                            if (canceled) {
                                fetchAbort.abort();
                                return;
                            }
                            let chunk = bodyChunk;
                            if (discardLen > 0) {
                                chunk = bodyChunk.subarray(discardLen);
                                discardLen -= bodyChunk.length - chunk.length;
                            }
                            if (!chunk.length) {
                                continue;
                            }
                            seenOffset += chunk.length;
                            const s = decoder.decode(chunk);
                            controller.enqueue(s);
                        }
                    }
                    else {
                        // RN doesn't support request.body
                        const bodyChunk = await res.arrayBuffer();
                        let chunk = bodyChunk;
                        if (canceled) {
                            fetchAbort.abort();
                            return;
                        }
                        if (discardLen > 0) {
                            chunk = new Uint8Array(bodyChunk).subarray(discardLen);
                            discardLen -= bodyChunk.byteLength - chunk.length;
                        }
                        if (!chunk.byteLength) {
                            continue;
                        }
                        seenOffset += chunk.byteLength;
                        const s = decoder.decode(chunk);
                        controller.enqueue(s);
                    }
                }
            }
            fetchFailures = 0;
            if (item.content) {
                let content = item.content;
                let encoded = encoder.encode(item.content);
                if (discardLen > 0) {
                    const remaining = encoded.subarray(discardLen);
                    discardLen -= encoded.length - remaining.length;
                    if (!remaining.length) {
                        continue;
                    }
                    encoded = remaining;
                    content = decoder.decode(remaining);
                }
                seenOffset += encoded.length;
                controller.enqueue(content);
            }
        }
    }

    // Resubscribes with backoff until the stream completes or errors.
    async function start(controller) {
        let retry = true;
        let attempts = 0;
        while (retry) {
            retry = false;
            const nextAttempt = Date.now() + Math.min(15000, 500 * (attempts - 1));
            const res = await runStartStream({ ...opts, offset: seenOffset }, controller);
            if (res?.retry) {
                retry = true;
                attempts++;
                if (nextAttempt < Date.now() - 300000) {
                    // reset attempts if we last tried 5 minutes ago
                    // (i.e. the subscription itself ran for over 5 minutes)
                    attempts = 0;
                }
                await new Promise((resolve) => {
                    setTimeout(resolve, nextAttempt - Date.now());
                });
            }
        }
        if (!canceled && !closed) {
            controller.close();
            markClosed();
        }
    }

    const stream = new RStream({
        start(controller) {
            // FIX: previously fire-and-forget — a rejected fetch (network
            // failure) or any throw inside the loop became an unhandled
            // promise rejection and never errored the stream.
            start(controller).catch((e) => {
                error(controller, e);
            });
        },
        cancel(_reason) {
            canceled = true;
            if (eventId) {
                cancelStream({ eventId });
            }
            markClosed();
        },
    });

    return {
        stream,
        addCloseCb,
        closed() {
            return closed;
        },
    };
}
/**
 * Wires write/read streams to the reactor transport.
 *
 * Outbound messages go through `trySend(eventId, msg)`; inbound server
 * messages are routed to the matching stream via the `on*` handlers
 * (`onStartStreamOk`, `onStreamFlushed`, `onStreamAppend`,
 * `onAppendFailed`, `onRecieveError`, `onConnectionStatusChange`).
 */
class InstantStream {
    trySend;
    WStream;
    RStream;
    // Lifecycle callbacks per write stream, keyed by stream id.
    writeStreams = {};
    // Pending start-stream resolvers, keyed by client event id.
    startWriteStreamCbs = {};
    // Active read subscriptions, keyed by subscribe event id.
    readStreamIterators = {};
    log;
    activeStreams = new Set();
    constructor({ WStream, RStream, trySend, log, }) {
        this.WStream = WStream;
        this.RStream = RStream;
        this.trySend = trySend;
        this.log = log;
    }
    /** Creates a write stream and tracks it until it closes. */
    createWriteStream(opts) {
        const { stream, addCloseCb } = createWriteStream({
            WStream: this.WStream,
            startStream: this.startWriteStream.bind(this),
            appendStream: this.appendStream.bind(this),
            registerStream: this.registerWriteStream.bind(this),
            opts,
        });
        this.activeStreams.add(stream);
        addCloseCb(() => {
            this.activeStreams.delete(stream);
        });
        return stream;
    }
    /** Creates a read stream and tracks it until it closes. */
    createReadStream(opts) {
        const { stream, addCloseCb } = createReadStream({
            RStream: this.RStream,
            opts,
            startStream: this.startReadStream.bind(this),
            cancelStream: this.cancelReadStream.bind(this),
        });
        this.activeStreams.add(stream);
        addCloseCb(() => {
            this.activeStreams.delete(stream);
        });
        return stream;
    }
    /**
     * Sends a start-stream request; resolves when the server answers
     * (via onStartStreamOk / onRecieveError / onConnectionStatusChange).
     */
    startWriteStream(opts) {
        const eventId = (0, id_ts_1.default)();
        let resolve = null;
        const promise = new Promise((r) => {
            resolve = r;
        });
        this.startWriteStreamCbs[eventId] = resolve;
        const msg = {
            op: 'start-stream',
            'client-id': opts.clientId,
            'reconnect-token': opts.reconnectToken,
        };
        this.trySend(eventId, msg);
        return promise;
    }
    registerWriteStream(streamId, cbs) {
        this.writeStreams[streamId] = cbs;
    }
    /** Sends chunks (plus optional done/abort flags) for a write stream. */
    appendStream({ streamId, chunks, isDone, offset, abortReason, }) {
        const msg = {
            op: 'append-stream',
            'stream-id': streamId,
            chunks,
            offset,
            done: !!isDone,
        };
        if (abortReason) {
            msg['abort-reason'] = abortReason;
        }
        this.trySend((0, id_ts_1.default)(), msg);
    }
    onAppendFailed(msg) {
        const cbs = this.writeStreams[msg['stream-id']];
        if (cbs) {
            cbs.onAppendFailed();
        }
    }
    onStartStreamOk(msg) {
        const cb = this.startWriteStreamCbs[msg['client-event-id']];
        if (!cb) {
            this.log.info('No stream for start-stream-ok', msg);
            return;
        }
        cb({ type: 'ok', streamId: msg['stream-id'], offset: msg.offset });
        delete this.startWriteStreamCbs[msg['client-event-id']];
    }
    onStreamFlushed(msg) {
        const streamId = msg['stream-id'];
        const cbs = this.writeStreams[streamId];
        if (!cbs) {
            this.log.info('No stream cbs for stream-flushed', msg);
            return;
        }
        cbs.onFlush({ offset: msg.offset, done: msg.done });
        if (msg.done) {
            delete this.writeStreams[streamId];
        }
    }
    /**
     * Subscribes to a stream by streamId or clientId (one is required);
     * returns a StreamIterator of append items.
     * @throws {Error} when neither streamId nor clientId is given.
     */
    startReadStream({ eventId, clientId, streamId, offset, }) {
        const msg = { op: 'subscribe-stream' };
        if (!streamId && !clientId) {
            throw new Error('Must provide one of streamId or clientId to subscribe to the stream.');
        }
        if (streamId) {
            msg['stream-id'] = streamId;
        }
        if (clientId) {
            msg['client-id'] = clientId;
        }
        if (offset) {
            msg['offset'] = offset;
        }
        const iterator = new StreamIterator();
        this.readStreamIterators[eventId] = iterator;
        this.trySend(eventId, msg);
        return iterator;
    }
    cancelReadStream({ eventId }) {
        const msg = {
            op: 'unsubscribe-stream',
            'subscribe-event-id': eventId,
        };
        this.trySend((0, id_ts_1.default)(), msg);
        delete this.readStreamIterators[eventId];
    }
    /** Routes a server append message to the matching read iterator. */
    onStreamAppend(msg) {
        const eventId = msg['client-event-id'];
        const iterator = this.readStreamIterators[eventId];
        if (!iterator) {
            this.log.info('No iterator for read stream', msg);
            return;
        }
        if (msg.error) {
            if (msg.retry) {
                iterator.push({ type: 'reconnect' });
            }
            else {
                iterator.push({
                    type: 'error',
                    error: new InstantError_ts_1.InstantError(msg.error),
                });
            }
            iterator.close();
            delete this.readStreamIterators[eventId];
            return;
        }
        if (msg.files?.length || msg.content) {
            iterator.push({
                type: 'append',
                offset: msg.offset,
                files: msg.files,
                content: msg.content,
            });
        }
        if (msg.done) {
            iterator.close();
            delete this.readStreamIterators[eventId];
        }
    }
    onConnectionStatusChange(status) {
        // Tell the writers to retry:
        for (const cb of Object.values(this.startWriteStreamCbs)) {
            cb({ type: 'disconnect' });
        }
        this.startWriteStreamCbs = {};
        if (status !== Reactor_js_1.STATUS.AUTHENTICATED) {
            // Notify the writers that they've been disconnected
            for (const { onDisconnect } of Object.values(this.writeStreams)) {
                onDisconnect();
            }
        }
        else {
            // Notify the writers that they need to reconnect
            // (onConnectionReconnect handles its own rejections)
            for (const { onConnectionReconnect } of Object.values(this.writeStreams)) {
                onConnectionReconnect();
            }
            // Notify the readers that they need to reconnect
            for (const iterator of Object.values(this.readStreamIterators)) {
                iterator.push({ type: 'reconnect' });
                iterator.close();
            }
            this.readStreamIterators = {};
        }
    }
    // NOTE: method name typo ("Recieve") is part of the public interface —
    // kept for compatibility with existing callers.
    onRecieveError(msg) {
        const ev = msg['original-event'];
        switch (ev.op) {
            case 'append-stream': {
                const streamId = ev['stream-id'];
                const cbs = this.writeStreams[streamId];
                cbs?.onAppendFailed();
                break;
            }
            case 'start-stream': {
                const eventId = msg['client-event-id'];
                const cb = this.startWriteStreamCbs[eventId];
                if (cb) {
                    cb({
                        type: 'error',
                        error: new InstantError_ts_1.InstantError(msg.message || 'Unknown error', msg.hint),
                    });
                    delete this.startWriteStreamCbs[eventId];
                }
                break;
            }
            case 'subscribe-stream': {
                const eventId = msg['client-event-id'];
                const iterator = this.readStreamIterators[eventId];
                if (iterator) {
                    iterator.push({
                        type: 'error',
                        error: new InstantError_ts_1.InstantError(msg.message || 'Unknown error', msg.hint),
                    });
                    iterator.close();
                    delete this.readStreamIterators[eventId];
                }
                break;
            }
            case 'unsubscribe-stream': {
                break;
            }
        }
    }
    hasActiveStreams() {
        return this.activeStreams.size > 0;
    }
}
exports.InstantStream = InstantStream;
//# sourceMappingURL=Stream.js.map