// @durable-streams/client
// TypeScript client for the Durable Streams protocol (bundled JavaScript output)
import fastq from "fastq";
//#region src/constants.ts
/**
 * Durable Streams Protocol Constants
 *
 * Header and query parameter names following the Electric Durable Stream Protocol.
 * Headers use `Header-Case`; HTTP header names are case-insensitive on the wire.
 */
/**
 * Response header containing the next offset to read from.
 * Offsets are opaque tokens - clients MUST NOT interpret the format.
 */
const STREAM_OFFSET_HEADER = `Stream-Next-Offset`;
/**
 * Response header for cursor (used for CDN collapsing).
 * Echo this value in subsequent long-poll requests.
 */
const STREAM_CURSOR_HEADER = `Stream-Cursor`;
/**
 * Presence header indicating response ends at current end of stream.
 * When present (any value), indicates up-to-date.
 */
const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
/**
 * Response/request header indicating stream is closed (EOF).
 * When present with value "true", the stream is permanently closed.
 */
const STREAM_CLOSED_HEADER = `Stream-Closed`;
/**
 * Request header for writer coordination sequence.
 * Monotonic, lexicographic. If lower than last appended seq -> 409 Conflict.
 */
const STREAM_SEQ_HEADER = `Stream-Seq`;
/**
 * Request header for stream TTL in seconds (on create).
 */
const STREAM_TTL_HEADER = `Stream-TTL`;
/**
 * Request header for absolute stream expiry time (RFC3339, on create).
 */
const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
/**
 * Request header for producer ID (client-supplied stable identifier).
 */
const PRODUCER_ID_HEADER = `Producer-Id`;
/**
 * Request/response header for producer epoch.
 * Client-declared, server-validated monotonically increasing.
 */
const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
/**
 * Request header for producer sequence number.
 * Monotonically increasing per epoch, per-batch (not per-message).
 */
const PRODUCER_SEQ_HEADER = `Producer-Seq`;
/**
 * Response header indicating expected sequence number on 409 Conflict.
 */
const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
/**
 * Response header indicating received sequence number on 409 Conflict.
 */
const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
/**
 * Query parameter for starting offset.
 */
const OFFSET_QUERY_PARAM = `offset`;
/**
 * Query parameter for live mode.
 * Values: "long-poll", "sse"
 */
const LIVE_QUERY_PARAM = `live`;
/**
 * Query parameter for echoing cursor (CDN collapsing).
 */
const CURSOR_QUERY_PARAM = `cursor`;
/**
 * Response header indicating SSE data encoding (e.g., base64 for binary streams).
 * Note: written lowercase, unlike the other Header-Case names above; header
 * name matching is case-insensitive regardless.
 */
const STREAM_SSE_DATA_ENCODING_HEADER = `stream-sse-data-encoding`;
/**
 * SSE control event field for stream closed state.
 * Note: Different from HTTP header name (camelCase vs Header-Case).
 */
const SSE_CLOSED_FIELD = `streamClosed`;
/**
 * Content types that are natively compatible with SSE (UTF-8 text).
 * Binary content types are also supported via automatic base64 encoding.
 */
const SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
/**
 * Protocol query parameters that should not be set by users.
 * These are reserved for protocol control (see the individual params above).
 */
const DURABLE_STREAM_PROTOCOL_QUERY_PARAMS = [
OFFSET_QUERY_PARAM,
LIVE_QUERY_PARAM,
CURSOR_QUERY_PARAM
];
//#endregion
//#region src/error.ts
/**
 * Error thrown for transport/network errors.
 * Following the @electric-sql/client FetchError pattern.
 */
var FetchError = class FetchError extends Error {
	status;
	text;
	json;
	headers;
	url;
	constructor(status, text, json, headers, url, message) {
		super(message || `HTTP Error ${status} at ${url}: ${text ?? JSON.stringify(json)}`);
		this.url = url;
		this.name = `FetchError`;
		this.status = status;
		this.text = text;
		this.json = json;
		this.headers = headers;
	}
	/**
	 * Build a FetchError from an HTTP response, capturing status, headers and body.
	 *
	 * The body is read at most once (as text) and then JSON-parsed when the
	 * content type indicates JSON. The previous implementation called
	 * `response.json()` and fell back to `response.text()` on parse failure,
	 * but by then the body was already consumed, so the fallback itself threw.
	 */
	static async fromResponse(response, url) {
		const status = response.status;
		const headers = Object.fromEntries([...response.headers.entries()]);
		let text = void 0;
		let json = void 0;
		if (!response.bodyUsed && response.body !== null) {
			// Read the body exactly once; a second read would throw.
			const raw = await response.text();
			const contentType = response.headers.get(`content-type`);
			if (contentType && contentType.includes(`application/json`)) {
				try {
					json = JSON.parse(raw);
				} catch {
					// Invalid JSON despite the content type: keep the raw text.
					text = raw;
				}
			} else {
				text = raw;
			}
		}
		return new FetchError(status, text, json, headers, url);
	}
};
/**
 * Error raised when a retrying fetch is cancelled via its abort signal,
 * either mid-request or while waiting out a backoff delay.
 */
var FetchBackoffAbortError = class extends Error {
	name = `FetchBackoffAbortError`;
	constructor() {
		super(`Fetch with backoff aborted`);
	}
};
/**
 * Protocol-level error for Durable Streams operations.
 * Provides structured error handling with error codes.
 */
var DurableStreamError = class DurableStreamError extends Error {
	/**
	 * HTTP status code, if applicable.
	 */
	status;
	/**
	 * Structured error code for programmatic handling.
	 */
	code;
	/**
	 * Additional error details (e.g., raw response body).
	 */
	details;
	constructor(message, code, status, details) {
		super(message);
		this.name = `DurableStreamError`;
		this.code = code;
		this.status = status;
		this.details = details;
	}
	/**
	 * Create a DurableStreamError from an HTTP response.
	 *
	 * The body is read at most once (as text) and then JSON-parsed when the
	 * content type indicates JSON. The previous implementation called
	 * `response.json()` and fell back to `response.text()` on parse failure,
	 * but by then the body was already consumed, so the fallback itself threw.
	 */
	static async fromResponse(response, url) {
		const status = response.status;
		let details;
		if (!response.bodyUsed && response.body !== null) {
			// Read the body exactly once; a second read would throw.
			const raw = await response.text();
			const contentType = response.headers.get(`content-type`);
			if (contentType && contentType.includes(`application/json`)) {
				try {
					details = JSON.parse(raw);
				} catch {
					// Invalid JSON despite the content type: keep the raw text.
					details = raw;
				}
			} else {
				details = raw;
			}
		}
		const code = statusToCode(status);
		const message = `Durable stream error at ${url}: ${response.statusText || status}`;
		return new DurableStreamError(message, code, status, details);
	}
	/**
	 * Create a DurableStreamError from a FetchError, preserving its message
	 * and status. Parsed JSON is preferred over raw text for details.
	 */
	static fromFetchError(error) {
		const code = statusToCode(error.status);
		return new DurableStreamError(error.message, code, error.status, error.json ?? error.text);
	}
};
/**
 * Map an HTTP status code to its structured DurableStreamErrorCode.
 * Any status not listed maps to `UNKNOWN`.
 */
function statusToCode(status) {
	const codeByStatus = {
		400: `BAD_REQUEST`,
		401: `UNAUTHORIZED`,
		403: `FORBIDDEN`,
		404: `NOT_FOUND`,
		409: `CONFLICT_SEQ`,
		429: `RATE_LIMITED`,
		503: `BUSY`
	};
	return codeByStatus[status] ?? `UNKNOWN`;
}
/**
 * Error thrown when the required stream URL option is absent.
 */
var MissingStreamUrlError = class extends Error {
	name = `MissingStreamUrlError`;
	constructor() {
		super(`Invalid stream options: missing required url parameter`);
	}
};
/**
 * Error thrown when attempting to append to a closed stream.
 * Always carries code `STREAM_CLOSED` and HTTP status 409.
 */
var StreamClosedError = class extends DurableStreamError {
	// Fixed discriminants (these shadow the values passed to super()).
	code = `STREAM_CLOSED`;
	status = 409;
	// Marker flag so callers can test for closure without instanceof.
	streamClosed = true;
	/**
	 * The final offset of the stream, if available from the response.
	 */
	finalOffset;
	constructor(url, finalOffset) {
		// The stream URL is recorded as the error details.
		super(`Cannot append to closed stream`, `STREAM_CLOSED`, 409, url);
		this.name = `StreamClosedError`;
		this.finalOffset = finalOffset;
	}
};
/**
 * Error thrown when the provided `signal` option is not an AbortSignal.
 */
var InvalidSignalError = class extends Error {
	name = `InvalidSignalError`;
	constructor() {
		super(`Invalid signal option. It must be an instance of AbortSignal.`);
	}
};
//#endregion
//#region src/fetch.ts
/**
 * HTTP status codes that should be retried.
 * 429 (Too Many Requests) and 503 (Service Unavailable) are transient;
 * all other 4xx statuses are treated as permanent failures.
 */
const HTTP_RETRY_STATUS_CODES = [429, 503];
/**
 * Default backoff options. Delays are in milliseconds; retries continue
 * indefinitely unless `maxRetries` is overridden.
 */
const BackoffDefaults = {
initialDelay: 100, // first backoff window (ms)
maxDelay: 6e4, // cap on the backoff window (60s)
multiplier: 1.3, // growth factor per attempt
maxRetries: Infinity // retry forever by default
};
/**
 * Parse a Retry-After header value into a delay in milliseconds.
 *
 * Accepts both forms allowed by HTTP: delta-seconds and an HTTP-date.
 * Date-based delays are clamped to at most one hour and never negative.
 * Returns 0 when the header is absent or unparseable.
 */
function parseRetryAfterHeader(retryAfter) {
	if (!retryAfter) return 0;
	// Delta-seconds form, e.g. "120".
	const seconds = Number(retryAfter);
	if (Number.isFinite(seconds) && seconds > 0) return seconds * 1e3;
	// HTTP-date form, e.g. "Wed, 21 Oct 2015 07:28:00 GMT".
	const parsedDate = Date.parse(retryAfter);
	if (Number.isNaN(parsedDate)) return 0;
	const untilDate = parsedDate - Date.now();
	return Math.max(0, Math.min(untilDate, 36e5));
}
/**
 * Creates a fetch client that retries failed requests with exponential backoff.
 *
 * Retry policy:
 * - Non-ok responses become FetchError; 4xx statuses other than 429 are
 *   permanent and are rethrown immediately.
 * - 429/503 responses and network-level errors are retried up to maxRetries.
 * - Each wait is the larger of the server's Retry-After minimum (if any) and
 *   a full-jitter client backoff drawn uniformly from [0, delay).
 * - An aborted signal always wins and surfaces as FetchBackoffAbortError.
 *
 * @param fetchClient - The base fetch client to wrap
 * @param backoffOptions - Options for retry behavior
 * @returns A fetch function with automatic retry
 */
function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
const { initialDelay, maxDelay, multiplier, debug = false, onFailedAttempt, maxRetries = Infinity } = backoffOptions;
return async (...args) => {
const url = args[0];
const options = args[1];
let delay = initialDelay;
let attempt = 0;
while (true) try {
const result = await fetchClient(...args);
if (result.ok) return result;
// Convert non-ok responses to FetchError so the catch below can
// inspect the status and decide whether it is retryable.
const err = await FetchError.fromResponse(result, url.toString());
throw err;
} catch (e) {
onFailedAttempt?.();
// Abort takes precedence over every other outcome.
if (options?.signal?.aborted) throw new FetchBackoffAbortError();
else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) throw e;
else {
attempt++;
if (attempt > maxRetries) {
if (debug) console.log(`Max retries reached (${attempt}/${maxRetries}), giving up`);
throw e;
}
// Server-imposed minimum wait from Retry-After; 0 when absent.
const serverMinimumMs = e instanceof FetchError ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
// Full jitter: random wait in [0, delay), capped at maxDelay.
const jitter = Math.random() * delay;
const clientBackoffMs = Math.min(jitter, maxDelay);
const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
if (debug) {
const source = serverMinimumMs > 0 ? `server+client` : `client`;
console.log(`Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`);
}
await new Promise((resolve) => setTimeout(resolve, waitMs));
// Grow the backoff window for the next attempt.
delay = Math.min(delay * multiplier, maxDelay);
}
}
};
}
/**
 * Status codes where we shouldn't try to read the body.
 *
 * Includes 304 Not Modified: 204, 205 and 304 are null-body statuses, and
 * constructing a `new Response(body, { status })` with one of them throws,
 * so createFetchWithConsumedBody must pass such responses through untouched.
 * (Statuses below 200 are skipped by a separate range check there.)
 */
const NO_BODY_STATUS_CODES = [
	201,
	204,
	205,
	304
];
/**
 * Creates a fetch client that ensures the response body is fully consumed.
 * This prevents issues with connection pooling when bodies aren't read.
 *
 * Uses arrayBuffer() instead of text() to preserve binary data integrity.
 *
 * @param fetchClient - The base fetch client to wrap
 * @returns A fetch function that consumes response bodies
 */
function createFetchWithConsumedBody(fetchClient) {
	return async (...args) => {
		const url = args[0];
		const res = await fetchClient(...args);
		try {
			// Informational and body-less statuses are returned untouched.
			const skipBody = res.status < 200 || NO_BODY_STATUS_CODES.includes(res.status);
			if (skipBody) return res;
			const bodyBuffer = await res.arrayBuffer();
			return new Response(bodyBuffer, {
				status: res.status,
				statusText: res.statusText,
				headers: res.headers
			});
		} catch (err) {
			// An abort mid-read surfaces as the dedicated backoff-abort error.
			if (args[1]?.signal?.aborted) throw new FetchBackoffAbortError();
			let message;
			if (err instanceof Error) message = err.message;
			else if (typeof err === `string`) message = err;
			else message = `failed to read body`;
			throw new FetchError(res.status, void 0, void 0, Object.fromEntries([...res.headers.entries()]), url.toString(), message);
		}
	};
}
//#endregion
//#region src/asyncIterableReadableStream.ts
/**
 * Report whether a value exposes a callable [Symbol.asyncIterator],
 * guarding against environments without symbol support.
 */
function hasAsyncIterator(stream$1) {
	if (typeof Symbol === `undefined`) return false;
	if (typeof Symbol.asyncIterator !== `symbol`) return false;
	return typeof stream$1[Symbol.asyncIterator] === `function`;
}
/**
 * Define [Symbol.asyncIterator] and .values() on a ReadableStream instance.
 *
 * Uses getReader().read() to implement spec-consistent iteration.
 * On completion or early exit (break/return/throw), releases lock and cancels as appropriate.
 *
 * **Iterator behavior notes:**
 * - `return(value?)` accepts an optional cancellation reason passed to `reader.cancel()`
 * - `return()` always resolves with `{ done: true, value: undefined }` regardless of the
 *   input value. This matches `for await...of` semantics where the return value is ignored.
 *   Manual iteration users should be aware of this behavior.
 */
function defineAsyncIterator(stream$1) {
// No symbol support in this environment - nothing can be installed.
if (typeof Symbol === `undefined` || typeof Symbol.asyncIterator !== `symbol`) return;
// Already async-iterable (natively or via another polyfill) - leave as-is.
if (typeof stream$1[Symbol.asyncIterator] === `function`) return;
const createIterator = function() {
// `this` is the stream the iterator was requested from; acquiring the
// reader locks the stream for the lifetime of the iteration.
const reader = this.getReader();
let finished = false;
// Tracks in-flight read()s so return()/throw() refuse to close the
// reader mid-read, mirroring native ReadableStream reader semantics.
let pendingReads = 0;
const iterator = {
async next() {
if (finished) return {
done: true,
value: void 0
};
pendingReads++;
try {
const { value, done } = await reader.read();
if (done) {
finished = true;
// Stream exhausted: release so another reader can be acquired.
reader.releaseLock();
return {
done: true,
value: void 0
};
}
return {
done: false,
value
};
} catch (err) {
finished = true;
// Best-effort unlock; releaseLock can itself throw if state is odd.
try {
reader.releaseLock();
} catch {}
throw err;
} finally {
pendingReads--;
}
},
async return(value) {
if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
finished = true;
// Cancel with the caller-supplied reason, then free the lock before
// awaiting so the stream is immediately reusable.
const cancelPromise = reader.cancel(value);
reader.releaseLock();
await cancelPromise;
return {
done: true,
value: void 0
};
},
async throw(err) {
if (pendingReads > 0) throw new TypeError(`Cannot close a readable stream reader when it has pending read requests`);
finished = true;
const cancelPromise = reader.cancel(err);
reader.releaseLock();
await cancelPromise;
throw err;
},
[Symbol.asyncIterator]() {
return this;
}
};
return iterator;
};
try {
Object.defineProperty(stream$1, Symbol.asyncIterator, {
configurable: true,
writable: true,
value: createIterator
});
} catch {
// Frozen/sealed instance: cannot install, and .values() alone would be
// inconsistent, so bail out entirely.
return;
}
try {
Object.defineProperty(stream$1, `values`, {
configurable: true,
writable: true,
value: createIterator
});
} catch {}
}
/**
 * Ensure a ReadableStream is async-iterable.
 *
 * Returns the SAME stream instance: when it already exposes
 * [Symbol.asyncIterator] (native or polyfilled) it is untouched; otherwise
 * the iterator is installed directly on the instance (not the prototype).
 * Because the instance itself is returned, `instanceof ReadableStream` and
 * any native branding/internal slots are preserved.
 *
 * @example
 * ```typescript
 * const stream = someApiReturningReadableStream();
 * const iterableStream = asAsyncIterableReadableStream(stream);
 *
 * // Now works on Safari/iOS:
 * for await (const chunk of iterableStream) {
 *   console.log(chunk);
 * }
 * ```
 */
function asAsyncIterableReadableStream(stream$1) {
	const alreadyIterable = hasAsyncIterator(stream$1);
	if (!alreadyIterable) defineAsyncIterator(stream$1);
	return stream$1;
}
//#endregion
//#region src/sse.ts
/**
 * Parse SSE events from a ReadableStream<Uint8Array>.
 * Yields parsed `data` and `control` events as they arrive.
 *
 * Handles CR, LF and CRLF line endings, including a CRLF pair split across
 * two chunks: a trailing CR is held back until the next chunk arrives.
 * (Previously the lone CR was normalized to LF immediately, so a following
 * chunk starting with LF produced a spurious blank line that dispatched the
 * event early and dropped subsequent data lines.)
 */
async function* parseSSEStream(stream$1, signal) {
	const reader = stream$1.getReader();
	const decoder = new TextDecoder();
	let buffer = ``;
	let currentEvent = { data: [] };
	// Build the event object for the accumulated block, or null when the
	// block has no type/data and should be ignored. Throws on bad control JSON.
	const finishEvent = () => {
		if (!currentEvent.type || currentEvent.data.length === 0) return null;
		const dataStr = currentEvent.data.join(`\n`);
		if (currentEvent.type === `data`) return {
			type: `data`,
			data: dataStr
		};
		if (currentEvent.type === `control`) {
			try {
				const control = JSON.parse(dataStr);
				return {
					type: `control`,
					streamNextOffset: control.streamNextOffset,
					streamCursor: control.streamCursor,
					upToDate: control.upToDate,
					streamClosed: control.streamClosed
				};
			} catch (err) {
				const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
				throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
			}
		}
		// Unknown event types are ignored, matching the wire protocol.
		return null;
	};
	// Apply one complete line to the in-progress event. A blank line ends the
	// event; returns the finished event (to be yielded) or null.
	const handleLine = (line) => {
		if (line === ``) {
			const event = finishEvent();
			currentEvent = { data: [] };
			return event;
		}
		if (line.startsWith(`event:`)) {
			const eventType = line.slice(6);
			currentEvent.type = eventType.startsWith(` `) ? eventType.slice(1) : eventType;
		} else if (line.startsWith(`data:`)) {
			const content = line.slice(5);
			currentEvent.data.push(content.startsWith(` `) ? content.slice(1) : content);
		}
		return null;
	};
	try {
		while (true) {
			if (signal?.aborted) break;
			const { done, value } = await reader.read();
			if (done) break;
			buffer += decoder.decode(value, { stream: true });
			// Hold back a trailing CR - it may be half of a CRLF pair whose LF
			// arrives in the next chunk.
			let pendingCR = ``;
			if (buffer.endsWith(`\r`)) {
				pendingCR = `\r`;
				buffer = buffer.slice(0, -1);
			}
			buffer = buffer.replace(/\r\n/g, `\n`).replace(/\r/g, `\n`);
			const lines = buffer.split(`\n`);
			buffer = (lines.pop() ?? ``) + pendingCR;
			for (const line of lines) {
				const event = handleLine(line);
				if (event) yield event;
			}
		}
		// Flush any bytes still buffered in the decoder.
		const remaining = decoder.decode();
		if (remaining) buffer += remaining;
		// Protocol extension: an event pending at EOF without its terminating
		// blank line is still delivered (the SSE spec would discard it).
		if (buffer && currentEvent.type && currentEvent.data.length > 0) {
			const event = finishEvent();
			if (event) yield event;
		}
	} finally {
		reader.releaseLock();
	}
}
//#endregion
//#region src/stream-response-state.ts
/**
 * Abstract base class for stream response state.
 * All state transitions return new immutable state objects.
 */
var StreamResponseState = class {
	/**
	 * Whether live consumption should keep going. Stops when the stream is
	 * closed, when live mode is explicitly disabled (`false`), or when the
	 * caller asked to stop once up-to-date and we are.
	 */
	shouldContinueLive(stopAfterUpToDate, liveMode) {
		const stopRequested = stopAfterUpToDate && this.upToDate;
		const liveDisabled = liveMode === false;
		return !stopRequested && !liveDisabled && !this.streamClosed;
	}
};
/**
 * State for long-poll mode. shouldUseSse() returns false.
 */
var LongPollState = class LongPollState extends StreamResponseState {
	offset;
	cursor;
	upToDate;
	streamClosed;
	constructor(fields) {
		super();
		this.offset = fields.offset;
		this.cursor = fields.cursor;
		this.upToDate = fields.upToDate;
		this.streamClosed = fields.streamClosed;
	}
	shouldUseSse() {
		return false;
	}
	/**
	 * Fold response-header metadata into a new state. `upToDate` always
	 * reflects the latest response; `streamClosed` latches once true.
	 */
	withResponseMetadata(update) {
		const next = {
			offset: update.offset ?? this.offset,
			cursor: update.cursor ?? this.cursor,
			upToDate: update.upToDate,
			streamClosed: this.streamClosed || update.streamClosed
		};
		return new LongPollState(next);
	}
	/**
	 * Fold an SSE control event into a new state. A closed stream is always
	 * considered up-to-date; `streamClosed` latches once true.
	 */
	withSSEControl(event) {
		const closedNow = event.streamClosed ?? false;
		const upToDate = closedNow ? true : event.upToDate ?? this.upToDate;
		return new LongPollState({
			offset: event.streamNextOffset,
			cursor: event.streamCursor || this.cursor,
			upToDate,
			streamClosed: this.streamClosed || closedNow
		});
	}
	pause() {
		return new PausedState(this);
	}
};
/**
 * State for SSE mode. shouldUseSse() returns true.
 * Tracks SSE connection resilience (short connection detection): repeated
 * connections that die quickly trigger reconnect backoff and eventually a
 * fallback to long polling (see handleConnectionEnd).
 */
var SSEState = class SSEState extends StreamResponseState {
offset;
cursor;
upToDate;
streamClosed;
// Count of back-to-back connections that ended before
// config.minConnectionDuration; reset to 0 by a healthy connection.
consecutiveShortConnections;
// Timestamp (ms) recorded by startConnection(); undefined until then.
connectionStartTime;
constructor(fields) {
super();
this.offset = fields.offset;
this.cursor = fields.cursor;
this.upToDate = fields.upToDate;
this.streamClosed = fields.streamClosed;
this.consecutiveShortConnections = fields.consecutiveShortConnections ?? 0;
this.connectionStartTime = fields.connectionStartTime;
}
shouldUseSse() {
return true;
}
// Fold response-header metadata into a new state; resilience counters are
// carried over unchanged, and streamClosed latches once true.
withResponseMetadata(update) {
return new SSEState({
offset: update.offset ?? this.offset,
cursor: update.cursor ?? this.cursor,
upToDate: update.upToDate,
streamClosed: this.streamClosed || update.streamClosed,
consecutiveShortConnections: this.consecutiveShortConnections,
connectionStartTime: this.connectionStartTime
});
}
// Fold an SSE control event into a new state. A closed stream is treated as
// up-to-date; the ternary parses as (closed ?? false) ? true : (upToDate ?? prev).
withSSEControl(event) {
const streamClosed = this.streamClosed || (event.streamClosed ?? false);
return new SSEState({
offset: event.streamNextOffset,
cursor: event.streamCursor || this.cursor,
upToDate: event.streamClosed ?? false ? true : event.upToDate ?? this.upToDate,
streamClosed,
consecutiveShortConnections: this.consecutiveShortConnections,
connectionStartTime: this.connectionStartTime
});
}
// Record when the SSE connection opened so its lifetime can be judged later.
startConnection(now) {
return new SSEState({
offset: this.offset,
cursor: this.cursor,
upToDate: this.upToDate,
streamClosed: this.streamClosed,
consecutiveShortConnections: this.consecutiveShortConnections,
connectionStartTime: now
});
}
/**
 * Judge a finished SSE connection and decide what to do next:
 * - `fallback`: too many consecutive short-lived connections - switch to
 *   long polling (state becomes a LongPollState).
 * - `reconnect`: short-lived but under the limit - retry SSE; the returned
 *   `backoffAttempt` scales the caller's reconnect delay.
 * - `healthy`: connection lived long enough (resets the short counter), was
 *   deliberately aborted, or had no recorded start time to judge by.
 */
handleConnectionEnd(now, wasAborted, config) {
if (this.connectionStartTime === void 0) return {
action: `healthy`,
state: this
};
const duration = now - this.connectionStartTime;
if (duration < config.minConnectionDuration && !wasAborted) {
const newCount = this.consecutiveShortConnections + 1;
if (newCount >= config.maxShortConnections) return {
action: `fallback`,
state: new LongPollState({
offset: this.offset,
cursor: this.cursor,
upToDate: this.upToDate,
streamClosed: this.streamClosed
})
};
return {
action: `reconnect`,
state: new SSEState({
offset: this.offset,
cursor: this.cursor,
upToDate: this.upToDate,
streamClosed: this.streamClosed,
consecutiveShortConnections: newCount,
connectionStartTime: this.connectionStartTime
}),
backoffAttempt: newCount
};
}
// Long enough to count as healthy: reset the short-connection counter.
if (duration >= config.minConnectionDuration) return {
action: `healthy`,
state: new SSEState({
offset: this.offset,
cursor: this.cursor,
upToDate: this.upToDate,
streamClosed: this.streamClosed,
consecutiveShortConnections: 0,
connectionStartTime: this.connectionStartTime
})
};
// Short-lived but aborted on purpose: not evidence of an unhealthy link.
return {
action: `healthy`,
state: this
};
}
pause() {
return new PausedState(this);
}
};
/**
 * Paused state wrapper. Delegates all sync field access to the inner state.
 * resume() returns the wrapped state unchanged (identity preserved), so
 * pausing and resuming never loses offset/cursor progress.
 */
var PausedState = class PausedState extends StreamResponseState {
	#wrapped;
	constructor(inner) {
		super();
		this.#wrapped = inner;
	}
	get offset() {
		return this.#wrapped.offset;
	}
	get cursor() {
		return this.#wrapped.cursor;
	}
	get upToDate() {
		return this.#wrapped.upToDate;
	}
	get streamClosed() {
		return this.#wrapped.streamClosed;
	}
	shouldUseSse() {
		return this.#wrapped.shouldUseSse();
	}
	// Transitions are applied to the wrapped state and re-wrapped so the
	// paused marker survives metadata updates.
	withResponseMetadata(update) {
		return new PausedState(this.#wrapped.withResponseMetadata(update));
	}
	withSSEControl(event) {
		return new PausedState(this.#wrapped.withSSEControl(event));
	}
	// Already paused: pausing again is a no-op.
	pause() {
		return this;
	}
	resume() {
		return {
			state: this.#wrapped,
			justResumed: true
		};
	}
};
//#endregion
//#region src/response.ts
/**
 * Constant used as abort reason when pausing the stream due to visibility change.
 * Passed to AbortController.abort() when pausing so the abort reason identifies
 * a pause rather than a caller-initiated cancellation.
 */
const PAUSE_STREAM = `PAUSE_STREAM`;
/**
* Implementation of the StreamResponse interface.
*/
var StreamResponseImpl = class {
url;
contentType;
live;
startOffset;
#headers;
#status;
#statusText;
#ok;
#isLoading;
#syncState;
#isJsonMode;
#abortController;
#fetchNext;
#startSSE;
#closedResolve;
#closedReject;
#closed;
#stopAfterUpToDate = false;
#consumptionMethod = null;
#state = `active`;
#requestAbortController;
#unsubscribeFromVisibilityChanges;
#pausePromise;
#pauseResolve;
#sseResilience;
#encoding;
#responseStream;
constructor(config) {
this.url = config.url;
this.contentType = config.contentType;
this.live = config.live;
this.startOffset = config.startOffset;
const syncFields = {
offset: config.initialOffset,
cursor: config.initialCursor,
upToDate: config.initialUpToDate,
streamClosed: config.initialStreamClosed
};
this.#syncState = config.startSSE ? new SSEState(syncFields) : new LongPollState(syncFields);
this.#headers = config.firstResponse.headers;
this.#status = config.firstResponse.status;
this.#statusText = config.firstResponse.statusText;
this.#ok = config.firstResponse.ok;
this.#isLoading = false;
this.#isJsonMode = config.isJsonMode;
this.#abortController = config.abortController;
this.#fetchNext = config.fetchNext;
this.#startSSE = config.startSSE;
this.#sseResilience = {
minConnectionDuration: config.sseResilience?.minConnectionDuration ?? 1e3,
maxShortConnections: config.sseResilience?.maxShortConnections ?? 3,
backoffBaseDelay: config.sseResilience?.backoffBaseDelay ?? 100,
backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5e3,
logWarnings: config.sseResilience?.logWarnings ?? true
};
this.#encoding = config.encoding;
this.#closed = new Promise((resolve, reject) => {
this.#closedResolve = resolve;
this.#closedReject = reject;
});
this.#responseStream = this.#createResponseStream(config.firstResponse);
this.#abortController.signal.addEventListener(`abort`, () => {
this.#requestAbortController?.abort(this.#abortController.signal.reason);
this.#pauseResolve?.();
this.#pausePromise = void 0;
this.#pauseResolve = void 0;
}, { once: true });
this.#subscribeToVisibilityChanges();
}
/**
* Subscribe to document visibility changes to pause/resume syncing.
* When the page is hidden, we pause to save battery and bandwidth.
* When visible again, we resume syncing.
*/
#subscribeToVisibilityChanges() {
if (typeof document === `object` && typeof document.hidden === `boolean` && typeof document.addEventListener === `function`) {
const visibilityHandler = () => {
if (document.hidden) this.#pause();
else this.#resume();
};
document.addEventListener(`visibilitychange`, visibilityHandler);
this.#unsubscribeFromVisibilityChanges = () => {
if (typeof document === `object`) document.removeEventListener(`visibilitychange`, visibilityHandler);
};
if (document.hidden) this.#pause();
}
}
/**
* Pause the stream when page becomes hidden.
* Aborts any in-flight request to free resources.
* Creates a promise that pull() will await while paused.
*/
#pause() {
if (this.#state === `active`) {
this.#state = `pause-requested`;
this.#syncState = this.#syncState.pause();
this.#pausePromise = new Promise((resolve) => {
this.#pauseResolve = resolve;
});
this.#requestAbortController?.abort(PAUSE_STREAM);
}
}
/**
* Resume the stream when page becomes visible.
* Resolves the pause promise to unblock pull().
*/
#resume() {
if (this.#state === `paused` || this.#state === `pause-requested`) {
if (this.#abortController.signal.aborted) return;
if (this.#syncState instanceof PausedState) this.#syncState = this.#syncState.resume().state;
this.#state = `active`;
this.#pauseResolve?.();
this.#pausePromise = void 0;
this.#pauseResolve = void 0;
}
}
get headers() {
return this.#headers;
}
get status() {
return this.#status;
}
get statusText() {
return this.#statusText;
}
get ok() {
return this.#ok;
}
get isLoading() {
return this.#isLoading;
}
get offset() {
return this.#syncState.offset;
}
get cursor() {
return this.#syncState.cursor;
}
get upToDate() {
return this.#syncState.upToDate;
}
get streamClosed() {
return this.#syncState.streamClosed;
}
#ensureJsonMode() {
if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
}
#markClosed() {
this.#unsubscribeFromVisibilityChanges?.();
this.#closedResolve();
}
#markError(err) {
this.#unsubscribeFromVisibilityChanges?.();
this.#closedReject(err);
}
/**
* Ensure only one consumption method is used per StreamResponse.
* Throws if any consumption method was already called.
*/
#ensureNoConsumption(method) {
if (this.#consumptionMethod !== null) throw new DurableStreamError(`Cannot call ${method}() - this StreamResponse is already being consumed via ${this.#consumptionMethod}()`, `ALREADY_CONSUMED`);
this.#consumptionMethod = method;
}
/**
* Determine if we should continue with live updates based on live mode
* and whether we've received upToDate or streamClosed.
*/
#shouldContinueLive() {
return this.#syncState.shouldContinueLive(this.#stopAfterUpToDate, this.live);
}
/**
* Update state from response headers.
*/
#updateStateFromResponse(response) {
this.#syncState = this.#syncState.withResponseMetadata({
offset: response.headers.get(STREAM_OFFSET_HEADER) || void 0,
cursor: response.headers.get(STREAM_CURSOR_HEADER) || void 0,
upToDate: response.headers.has(STREAM_UP_TO_DATE_HEADER),
streamClosed: response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`
});
this.#headers = response.headers;
this.#status = response.status;
this.#statusText = response.statusText;
this.#ok = response.ok;
}
/**
* Update instance state from an SSE control event.
*/
#updateStateFromSSEControl(controlEvent) {
this.#syncState = this.#syncState.withSSEControl(controlEvent);
}
/**
* Mark the start of an SSE connection for duration tracking.
* If the state is not SSEState (e.g., auto-detected SSE from content-type),
* transitions to SSEState first.
*/
#markSSEConnectionStart() {
if (!(this.#syncState instanceof SSEState)) this.#syncState = new SSEState({
offset: this.#syncState.offset,
cursor: this.#syncState.cursor,
upToDate: this.#syncState.upToDate,
streamClosed: this.#syncState.streamClosed
});
this.#syncState = this.#syncState.startConnection(Date.now());
}
/**
* Try to reconnect SSE and return the new iterator, or null if reconnection
* is not possible or fails.
*/
async #trySSEReconnect() {
if (!this.#syncState.shouldUseSse()) return null;
if (!this.#shouldContinueLive() || !this.#startSSE) return null;
const result = this.#syncState.handleConnectionEnd(Date.now(), this.#abortController.signal.aborted, this.#sseResilience);
this.#syncState = result.state;
if (result.action === `fallback`) {
if (this.#sseResilience.logWarnings) console.warn("[Durable Streams] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). Falling back to long polling. Your proxy must support streaming SSE responses (not buffer the complete response). Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy.");
return null;
}
if (result.action === `reconnect`) {
const maxDelay = Math.min(this.#sseResilience.backoffMaxDelay, this.#sseResilience.backoffBaseDelay * Math.pow(2, result.backoffAttempt));
const delayMs = Math.floor(Math.random() * maxDelay);
await new Promise((resolve) => setTimeout(resolve, delayMs));
}
this.#markSSEConnectionStart();
this.#requestAbortController = new AbortController();
const newSSEResponse = await this.#startSSE(this.offset, this.cursor, this.#requestAbortController.signal);
if (newSSEResponse.body) return parseSSEStream(newSSEResponse.body, this.#requestAbortController.signal);
return null;
}
/**
* Process SSE events from the iterator.
* Returns an object indicating the result:
* - { type: 'response', response, newIterator? } - yield this response
* - { type: 'closed' } - stream should be closed
* - { type: 'error', error } - an error occurred
* - { type: 'continue', newIterator? } - continue processing (control-only event)
*/
async #processSSEEvents(sseEventIterator) {
const { done, value: event } = await sseEventIterator.next();
if (done) {
try {
const newIterator = await this.#trySSEReconnect();
if (newIterator) return {
type: `continue`,
newIterator
};
} catch (err) {
return {
type: `error`,
error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
};
}
return { type: `closed` };
}
if (event.type === `data`) return this.#processSSEDataEvent(event.data, sseEventIterator);
this.#updateStateFromSSEControl(event);
if (event.upToDate) {
const response = createSSESyntheticResponse(``, event.streamNextOffset, event.streamCursor, true, event.streamClosed ?? false, this.contentType, this.#encoding);
return {
type: `response`,
response
};
}
return { type: `continue` };
}
/**
* Process an SSE data event by waiting for its corresponding control event.
* In SSE protocol, control events come AFTER data events.
* Multiple data events may arrive before a single control event - we buffer them.
*
* For base64 mode, each data event is independently base64 encoded, so we
* collect them as an array and decode each separately.
*/
async #processSSEDataEvent(pendingData, sseEventIterator) {
const bufferedDataParts = [pendingData];
while (true) {
const { done: controlDone, value: controlEvent } = await sseEventIterator.next();
if (controlDone) {
const response = createSSESyntheticResponseFromParts(bufferedDataParts, this.offset, this.cursor, this.upToDate, this.streamClosed, this.contentType, this.#encoding, this.#isJsonMode);
try {
const newIterator = await this.#trySSEReconnect();
return {
type: `response`,
response,
newIterator: newIterator ?? void 0
};
} catch (err) {
return {
type: `error`,
error: err instanceof Error ? err : new Error(`SSE reconnection failed`)
};
}
}
if (controlEvent.type === `control`) {
this.#updateStateFromSSEControl(controlEvent);
const response = createSSESyntheticResponseFromParts(bufferedDataParts, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false, controlEvent.streamClosed ?? false, this.contentType, this.#encoding, this.#isJsonMode);
return {
type: `response`,
response
};
}
bufferedDataParts.push(controlEvent.data);
}
}
/**
 * Create the core ReadableStream<Response> that yields responses.
 * This is consumed once - all consumption methods use this same stream.
 *
 * For long-poll mode: yields actual Response objects.
 * For SSE mode: yields synthetic Response objects created from SSE data events.
 *
 * The pull callback is re-entered by the stream machinery each time the
 * consumer needs another response; it handles pause/resume, cancellation,
 * SSE reconnection, and long-poll fetching.
 */
#createResponseStream(firstResponse) {
// The initial response was already fetched by the caller; it is emitted on
// the first pull, and subsequent pulls fetch or parse further data.
let firstResponseYielded = false;
// Non-null only while operating in SSE mode.
let sseEventIterator = null;
return new ReadableStream({
pull: async (controller) => {
try {
if (!firstResponseYielded) {
firstResponseYielded = true;
// SSE mode is detected from the first response's content type.
const isSSE = firstResponse.headers.get(`content-type`)?.includes(`text/event-stream`) ?? false;
if (isSSE && firstResponse.body) {
this.#markSSEConnectionStart();
this.#requestAbortController = new AbortController();
sseEventIterator = parseSSEStream(firstResponse.body, this.#requestAbortController.signal);
// Fall through: the SSE branch below consumes the iterator.
} else {
// Long-poll mode: emit the first response as-is.
controller.enqueue(firstResponse);
// Stop cleanly if up to date and not configured to keep live-tailing.
if (this.upToDate && !this.#shouldContinueLive()) {
this.#markClosed();
controller.close();
return;
}
return;
}
}
if (sseEventIterator) {
// Honor a pending pause before touching the SSE connection.
if (this.#state === `pause-requested` || this.#state === `paused`) {
this.#state = `paused`;
if (this.#pausePromise) await this.#pausePromise;
// The stream may have been cancelled while paused.
if (this.#abortController.signal.aborted) {
this.#markClosed();
controller.close();
return;
}
// Resuming requires a fresh SSE connection.
const newIterator = await this.#trySSEReconnect();
if (newIterator) sseEventIterator = newIterator;
else {
this.#markClosed();
controller.close();
return;
}
}
// Pump SSE events until one produces a response or terminates the stream.
while (true) {
const result = await this.#processSSEEvents(sseEventIterator);
switch (result.type) {
case `response`:
// A mid-processing reconnect may supply a replacement iterator.
if (result.newIterator) sseEventIterator = result.newIterator;
controller.enqueue(result.response);
return;
case `closed`:
this.#markClosed();
controller.close();
return;
case `error`:
this.#markError(result.error);
controller.error(result.error);
return;
case `continue`:
if (result.newIterator) sseEventIterator = result.newIterator;
continue;
}
}
}
// Long-poll mode: keep fetching while live tailing should continue.
if (this.#shouldContinueLive()) {
let resumingFromPause = false;
// Same pause handling as the SSE branch above.
if (this.#state === `pause-requested` || this.#state === `paused`) {
this.#state = `paused`;
if (this.#pausePromise) await this.#pausePromise;
if (this.#abortController.signal.aborted) {
this.#markClosed();
controller.close();
return;
}
resumingFromPause = true;
}
if (this.#abortController.signal.aborted) {
this.#markClosed();
controller.close();
return;
}
// Per-request abort controller so pause/cancel can abort in-flight fetches.
this.#requestAbortController = new AbortController();
const response = await this.#fetchNext(this.offset, this.cursor, this.#requestAbortController.signal, resumingFromPause);
this.#updateStateFromResponse(response);
controller.enqueue(response);
return;
}
// Not live and nothing more to read: end the stream.
this.#markClosed();
controller.close();
} catch (err) {
// A request aborted specifically to pause is not an error; settle into the
// paused state and wait for the next pull after resume.
if (this.#requestAbortController?.signal.aborted && this.#requestAbortController.signal.reason === PAUSE_STREAM) {
if (this.#state === `pause-requested`) this.#state = `paused`;
return;
}
if (this.#abortController.signal.aborted) {
// Cancellation surfaces as a clean close, not an error.
this.#markClosed();
controller.close();
} else {
this.#markError(err instanceof Error ? err : new Error(String(err)));
controller.error(err);
}
}
},
cancel: () => {
// Consumer cancelled the stream: abort in-flight work and clean up.
this.#abortController.abort();
this.#unsubscribeFromVisibilityChanges?.();
this.#markClosed();
}
});
}
/**
* Get the response stream reader. Can only be called once.
*/
#getResponseReader() {
return this.#responseStream.getReader();
}
async body() {
this.#ensureNoConsumption(`body`);
this.#stopAfterUpToDate = true;
const reader = this.#getResponseReader();
const blobs = [];
try {
let result = await reader.read();
while (!result.done) {
const wasUpToDate = this.upToDate;
const blob = await result.value.blob();
if (blob.size > 0) blobs.push(blob);
if (wasUpToDate) break;
result = await reader.read();
}
} finally {
reader.releaseLock();
}
this.#markClosed();
if (blobs.length === 0) return new Uint8Array(0);
if (blobs.length === 1) return new Uint8Array(await blobs[0].arrayBuffer());
const combined = new Blob(blobs);
return new Uint8Array(await combined.arrayBuffer());
}
async json() {
this.#ensureNoConsumption(`json`);
this.#ensureJsonMode();
this.#stopAfterUpToDate = true;
const reader = this.#getResponseReader();
const items = [];
try {
let result = await reader.read();
while (!result.done) {
const wasUpToDate = this.upToDate;
const text = await result.value.text();
const content = text.trim() || `[]`;
let parsed;
try {
parsed = JSON.parse(content);
} catch (err) {
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
}
if (Array.isArray(parsed)) items.push(...parsed);
else items.push(parsed);
if (wasUpToDate) break;
result = await reader.read();
}
} finally {
reader.releaseLock();
}
this.#markClosed();
return items;
}
async text() {
this.#ensureNoConsumption(`text`);
this.#stopAfterUpToDate = true;
const reader = this.#getResponseReader();
const parts = [];
try {
let result = await reader.read();
while (!result.done) {
const wasUpToDate = this.upToDate;
const text = await result.value.text();
if (text) parts.push(text);
if (wasUpToDate) break;
result = await reader.read();
}
} finally {
reader.releaseLock();
}
this.#markClosed();
return parts.join(``);
}
/**
* Internal helper to create the body stream without consumption check.
* Used by both bodyStream() and textStream().
*/
#createBodyStreamInternal() {
const { readable, writable } = new TransformStream();
const reader = this.#getResponseReader();
const pipeBodyStream = async () => {
try {
let result = await reader.read();
while (!result.done) {
const wasUpToDate = this.upToDate;
const body = result.value.body;
if (body) await body.pipeTo(writable, {
preventClose: true,
preventAbort: true,
preventCancel: true
});
if (wasUpToDate && !this.#shouldContinueLive()) break;
result = await reader.read();
}
await writable.close();
this.#markClosed();
} catch (err) {
if (this.#abortController.signal.aborted) {
try {
await writable.close();
} catch {}
this.#markClosed();
} else {
try {
await writable.abort(err);
} catch {}
this.#markError(err instanceof Error ? err : new Error(String(err)));
}
} finally {
reader.releaseLock();
}
};
pipeBodyStream();
return readable;
}
bodyStream() {
this.#ensureNoConsumption(`bodyStream`);
return asAsyncIterableReadableStream(this.#createBodyStreamInternal());
}
jsonStream() {
this.#ensureNoConsumption(`jsonStream`);
this.#ensureJsonMode();
const reader = this.#getResponseReader();
let pendingItems = [];
const stream$1 = new ReadableStream({
pull: async (controller) => {
if (pendingItems.length > 0) {
controller.enqueue(pendingItems.shift());
return;
}
let result = await reader.read();
while (!result.done) {
const response = result.value;
const text = await response.text();
const content = text.trim() || `[]`;
let parsed;
try {
parsed = JSON.parse(content);
} catch (err) {
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
}
pendingItems = Array.isArray(parsed) ? parsed : [parsed];
if (pendingItems.length > 0) {
controller.enqueue(pendingItems.shift());
return;
}
result = await reader.read();
}
this.#markClosed();
controller.close();
return;
},
cancel: () => {
reader.releaseLock();
this.cancel();
}
});
return asAsyncIterableReadableStream(stream$1);
}
textStream() {
this.#ensureNoConsumption(`textStream`);
const decoder = new TextDecoder();
const stream$1 = this.#createBodyStreamInternal().pipeThrough(new TransformStream({
transform(chunk, controller) {
controller.enqueue(decoder.decode(chunk, { stream: true }));
},
flush(controller) {
const remaining = decoder.decode();
if (remaining) controller.enqueue(remaining);
}
}));
return asAsyncIterableReadableStream(stream$1);
}
subscribeJson(subscriber) {
this.#ensureNoConsumption(`subscribeJson`);
this.#ensureJsonMode();
const abortController = new AbortController();
const reader = this.#getResponseReader();
const consumeJsonSubscription = async () => {
try {
let result = await reader.read();
while (!result.done) {
if (abortController.signal.aborted) break;
const response = result.value;
const { offset, cursor, upToDate, streamClosed } = getMetadataFromResponse(response, this.offset, this.cursor, this.streamClosed);
const text = await response.text();
const content = text.trim() || `[]`;
let parsed;
try {
parsed = JSON.parse(content);
} catch (err) {
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
}
const items = Array.isArray(parsed) ? parsed : [parsed];
await subscriber({
items,
offset,
cursor,
upToDate,
streamClosed
});
result = await reader.read();
}
this.#markClosed();
} catch (e) {
const isAborted = abortController.signal.aborted;
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
else this.#markClosed();
} finally {
reader.releaseLock();
}
};
consumeJsonSubscription();
return () => {
abortController.abort();
this.cancel();
};
}
subscribeBytes(subscriber) {
this.#ensureNoConsumption(`subscribeBytes`);
const abortController = new AbortController();
const reader = this.#getResponseReader();
const consumeBytesSubscription = async () => {
try {
let result = await reader.read();
while (!result.done) {
if (abortController.signal.aborted) break;
const response = result.value;
const { offset, cursor, upToDate, streamClosed } = getMetadataFromResponse(response, this.offset, this.cursor, this.streamClosed);
const buffer = await response.arrayBuffer();
await subscriber({
data: new Uint8Array(buffer),
offset,
cursor,
upToDate,
streamClosed
});
result = await reader.read();
}
this.#markClosed();
} catch (e) {
const isAborted = abortController.signal.aborted;
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
else this.#markClosed();
} finally {
reader.releaseLock();
}
};
consumeBytesSubscription();
return () => {
abortController.abort();
this.cancel();
};
}
subscribeText(subscriber) {
this.#ensureNoConsumption(`subscribeText`);
const abortController = new AbortController();
const reader = this.#getResponseReader();
const consumeTextSubscription = async () => {
try {
let result = await reader.read();
while (!result.done) {
if (abortController.signal.aborted) break;
const response = result.value;
const { offset, cursor, upToDate, streamClosed } = getMetadataFromResponse(response, this.offset, this.cursor, this.streamClosed);
const text = await response.text();
await subscriber({
text,
offset,
cursor,
upToDate,
streamClosed
});
result = await reader.read();
}
this.#markClosed();
} catch (e) {
const isAborted = abortController.signal.aborted;
const isBodyError = e instanceof TypeError && String(e).includes(`Body`);
if (!isAborted && !isBodyError) this.#markError(e instanceof Error ? e : new Error(String(e)));
else this.#markClosed();
} finally {
reader.releaseLock();
}
};
consumeTextSubscription();
return () => {
abortController.abort();
this.cancel();
};
}
cancel(reason) {
this.#abortController.abort(reason);
this.#unsubscribeFromVisibilityChanges?.();
this.#markClosed();
}
/**
 * Read-only accessor for the internal #closed state, which #markClosed()
 * updates when the stream finishes, errors, or is cancelled.
 * NOTE(review): #closed is declared outside this view — confirm whether it
 * is a boolean flag or a promise before documenting the exact type.
 */
get closed() {
return this.#closed;
}
};
/**
* Extract stream metadata from Response headers.
* Falls back to the provided defaults when headers are absent.
*/
function getMetadataFromResponse(response, fallbackOffset, fallbackCursor, fallbackStreamClosed) {
const offset = response.headers.get(STREAM_OFFSET_HEADER);
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
const streamClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
return {
offset: offset ?? fallbackOffset,
cursor: cursor ?? fallbackCursor,
upToDate,
streamClosed: streamClosed || fallbackStreamClosed
};
}
/**
* Decode base64 string to Uint8Array.
* Per protocol: concatenate data lines, remove \n and \r, then decode.
*/
function decodeBase64(base64Str) {
const cleaned = base64Str.replace(/[\n\r]/g, ``);
if (cleaned.length === 0) return new Uint8Array(0);
if (cleaned.length % 4 !== 0) throw new DurableStreamError(`Invalid base64 data: length ${cleaned.length} is not a multiple of 4`, `PARSE_ERROR`);
try {
if (typeof Buffer !== `undefined`) return new Uint8Array(Buffer.from(cleaned, `base64`));
else {
const binaryStr = atob(cleaned);
const bytes = new Uint8Array(binaryStr.length);
for (let i = 0; i < binaryStr.length; i++) bytes[i] = binaryStr.charCodeAt(i);
return bytes;
}
} catch (err) {
throw new DurableStreamError(`Failed to decode base64 data: ${err instanceof Error ? err.message : String(err)}`, `PARSE_ERROR`);
}
}
/**
 * Create a synthetic Response from a single SSE data payload with proper
 * protocol headers (offset/cursor/up-to-date/closed) so subscribers can
 * read stream metadata off the Response.
 *
 * Delegates to createSSESyntheticResponseFromParts with a one-element parts
 * array. The optional `isJsonMode` flag is now forwarded to the delegate
 * (it was previously dropped, so single-part callers could never opt into
 * the JSON part-merging path); omitting it preserves the old behavior.
 */
function createSSESyntheticResponse(data, offset, cursor, upToDate, streamClosed, contentType, encoding, isJsonMode) {
  return createSSESyntheticResponseFromParts([data], offset, cursor, upToDate, streamClosed, contentType, encoding, isJsonMode);
}
/**
* Create a synthetic Response from multiple SSE data parts.
* For base64 mode, each part is independently encoded, so we decode each
* separately and concatenate the binary results.
* For text mode, parts are simply concatenated as strings.
*/
function createSSESyntheticResponseFromParts(dataParts, offset, cursor, upToDate, streamClosed, contentType, encoding, isJsonMode) {
const headers = {
"content-type": contentType ?? `application/json`,
[STREAM_OFFSET_HEADER]: String(offset)
};
if (cursor) headers[STREAM_CURSOR_HEADER] = cursor;
if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
if (streamClosed) headers[STREAM_CLOSED_HEADER] = `true`;
let body;
if (encoding === `base64`) {
const decodedParts = dataParts.filter((part) => part.length > 0).map((part) => decodeBase64(part));
if (decodedParts.length === 0) body = new ArrayBuffer(0);
else if (decodedParts.length === 1) {
const decoded = decodedParts[0];
body = decoded.buffer.slice(decoded.byteOffset, decoded.byteOffset + decoded.byteLength);
} else {
const totalLength = decodedParts.reduce((sum, part) => sum + part.length, 0);
const combined = new Uint8Array(totalLength);
let offset$1 = 0;
for (const part of decodedParts) {
combined.set(part, offset$1);
offset$1 += part.length;
}
body = combined.buffer;
}
} else if (isJsonMode) {
const mergedParts = [];
for (const part of dataParts) {
const trimmed = p