/*
 * @electric-sql/client
 * Version:
 * Postgres everywhere - your data, in sync, wherever you need it.
 * 1,458 lines (1,308 loc) • 75.2 kB
 * text/typescript
 */
import {
Message,
Offset,
Schema,
Row,
MaybePromise,
GetExtensions,
ChangeMessage,
SnapshotMetadata,
SubsetParams,
} from './types'
import { MessageParser, Parser, TransformFunction } from './parser'
import {
ColumnMapper,
encodeWhereClause,
quoteIdentifier,
} from './column-mapper'
import {
getOffset,
isUpToDateMessage,
isChangeMessage,
bigintSafeStringify,
} from './helpers'
import {
FetchError,
FetchBackoffAbortError,
MissingShapeUrlError,
InvalidSignalError,
MissingShapeHandleError,
ReservedParamError,
MissingHeadersError,
StaleCacheError,
} from './error'
import {
BackoffDefaults,
BackoffOptions,
createFetchWithBackoff,
createFetchWithChunkBuffer,
createFetchWithConsumedMessages,
createFetchWithResponseHeadersCheck,
} from './fetch'
import {
CHUNK_LAST_OFFSET_HEADER,
LIVE_CACHE_BUSTER_HEADER,
LIVE_CACHE_BUSTER_QUERY_PARAM,
EXPIRED_HANDLE_QUERY_PARAM,
COLUMNS_QUERY_PARAM,
LIVE_QUERY_PARAM,
OFFSET_QUERY_PARAM,
SHAPE_HANDLE_HEADER,
SHAPE_HANDLE_QUERY_PARAM,
SHAPE_SCHEMA_HEADER,
WHERE_QUERY_PARAM,
WHERE_PARAMS_PARAM,
TABLE_QUERY_PARAM,
REPLICA_PARAM,
FORCE_DISCONNECT_AND_REFRESH,
PAUSE_STREAM,
SYSTEM_WAKE,
EXPERIMENTAL_LIVE_SSE_QUERY_PARAM,
LIVE_SSE_QUERY_PARAM,
ELECTRIC_PROTOCOL_QUERY_PARAMS,
LOG_MODE_QUERY_PARAM,
SUBSET_PARAM_WHERE,
SUBSET_PARAM_WHERE_PARAMS,
SUBSET_PARAM_LIMIT,
SUBSET_PARAM_OFFSET,
SUBSET_PARAM_ORDER_BY,
SUBSET_PARAM_WHERE_EXPR,
SUBSET_PARAM_ORDER_BY_EXPR,
CACHE_BUSTER_QUERY_PARAM,
} from './constants'
import { compileExpression, compileOrderBy } from './expression-compiler'
import {
EventSourceMessage,
fetchEventSource,
} from '@microsoft/fetch-event-source'
import { expiredShapesCache } from './expired-shapes-cache'
import { upToDateTracker } from './up-to-date-tracker'
import { SnapshotTracker } from './snapshot-tracker'
import {
createInitialState,
ErrorState,
PausedState,
ShapeStreamState,
} from './shape-stream-state'
import { PauseLock } from './pause-lock'
// Query-parameter names managed by the Electric sync protocol itself.
// User-supplied `params` must not use these names (see ReservedParamError).
const RESERVED_PARAMS: Set<ReservedParamKeys> = new Set([
  LIVE_CACHE_BUSTER_QUERY_PARAM,
  SHAPE_HANDLE_QUERY_PARAM,
  LIVE_QUERY_PARAM,
  OFFSET_QUERY_PARAM,
  CACHE_BUSTER_QUERY_PARAM,
])
// Linked from warnings/errors so users can self-diagnose proxy/CDN issues.
const TROUBLESHOOTING_URL = `https://electric-sql.com/docs/guides/troubleshooting`
/**
 * Builds a unique cache-busting token: the current timestamp joined with a
 * short random base-36 suffix. Used to make retried request URLs unique so
 * intermediaries (CDNs, proxies) cannot serve a stale cached response.
 */
function createCacheBuster(): string {
  const randomSuffix = Math.random().toString(36).substring(2, 9)
  return [Date.now(), randomSuffix].join(`-`)
}
/**
 * Update replication mode: `default` sends only changed columns in updates,
 * `full` sends complete rows (see PostgresParams.replica for details).
 */
type Replica = `full` | `default`
/** Log mode for a shape stream; defaults to `full` when options.log is unset. */
export type LogMode = `changes_only` | `full`
/**
 * PostgreSQL-specific shape parameters that can be provided externally
 */
export interface PostgresParams<T extends Row<unknown> = Row> {
  /** The root table for the shape. Not required if you set the table in your proxy. */
  table?: string
  /**
   * The columns to include in the shape.
   * Defaults to all columns of the type `T`. If provided, must include
   * primary keys, and can only include valid columns.
   */
  columns?: (keyof T)[]
  /** The where clauses for the shape */
  where?: string
  /**
   * Positional where clause parameter values. These will be passed to the server
   * and will substitute `$i` parameters in the where clause.
   *
   * It can be an array (note that positional arguments start at 1, the array will be mapped
   * accordingly), or an object with keys matching the used positional parameters in the where clause.
   *
   * If where clause is `id = $1 or id = $2`, params must have keys `"1"` and `"2"`, or be an array with length 2.
   */
  params?: Record<`${number}`, string> | string[]
  /**
   * If `replica` is `default` (the default) then Electric will only send the
   * changed columns in an update.
   *
   * If it's `full` Electric will send the entire row with both changed and
   * unchanged values. `old_value` will also be present on update messages,
   * containing the previous value for changed columns.
   *
   * Setting `replica` to `full` will result in higher bandwidth
   * usage and so is not generally recommended.
   */
  replica?: Replica
}
/** Param values that can be serialized directly onto a request URL. */
type SerializableParamValue = string | string[] | Record<string, string>
/**
 * A serializable param value, or a (sync or async) function producing one.
 * Function values are resolved when each request is constructed.
 */
type ParamValue =
  | SerializableParamValue
  | (() => SerializableParamValue | Promise<SerializableParamValue>)
/**
 * External params type - what users provide.
 * Excludes reserved parameters to prevent dynamic variations that could cause stream shape changes.
 */
export type ExternalParamsRecord<T extends Row<unknown> = Row> = {
  [K in string]: ParamValue | undefined
} & Partial<PostgresParams<T>> & { [K in ReservedParamKeys]?: never }
/**
 * Parameter names reserved by the client protocol (see RESERVED_PARAMS),
 * plus any name prefixed with `subset__`.
 */
type ReservedParamKeys =
  | typeof LIVE_CACHE_BUSTER_QUERY_PARAM
  | typeof SHAPE_HANDLE_QUERY_PARAM
  | typeof LIVE_QUERY_PARAM
  | typeof OFFSET_QUERY_PARAM
  | typeof CACHE_BUSTER_QUERY_PARAM
  | `subset__${string}`
/**
 * External headers type - what users provide.
 * Allows string or function values for any header.
 */
export type ExternalHeadersRecord = {
  [key: string]: string | (() => string | Promise<string>)
}
/**
 * Internal params type - used within the library.
 * All values are resolved to strings (or kept as string records).
 */
type InternalParamsRecord = {
  [K in string as K extends ReservedParamKeys ? never : K]:
    | string
    | Record<string, string>
}
/**
 * Resolves a value that may be supplied directly or via a (sync or async)
 * factory function.
 *
 * @param value - the value itself, or a function producing it
 * @returns a promise for the final value
 */
export async function resolveValue<T>(
  value: T | (() => T | Promise<T>)
): Promise<T> {
  return typeof value === `function`
    ? (value as () => T | Promise<T>)()
    : value
}
/**
* Helper function to convert external params to internal format
*/
async function toInternalParams(
params: ExternalParamsRecord<Row>
): Promise<InternalParamsRecord> {
const entries = Object.entries(params)
const resolvedEntries = await Promise.all(
entries.map(async ([key, value]) => {
if (value === undefined) return [key, undefined]
const resolvedValue = await resolveValue(value)
return [
key,
Array.isArray(resolvedValue) ? resolvedValue.join(`,`) : resolvedValue,
]
})
)
return Object.fromEntries(
resolvedEntries.filter(([_, value]) => value !== undefined)
)
}
/**
 * Helper function to resolve headers.
 * Invokes (and awaits) any function-valued header entries in parallel;
 * returns an empty object when no headers were provided.
 */
async function resolveHeaders(
  headers?: ExternalHeadersRecord
): Promise<Record<string, string>> {
  if (!headers) return {}
  const resolvedEntries = await Promise.all(
    Object.entries(headers).map(
      async ([name, value]): Promise<[string, string]> => [
        name,
        await resolveValue(value),
      ]
    )
  )
  const resolved: Record<string, string> = {}
  for (const [name, value] of resolvedEntries) {
    resolved[name] = value
  }
  return resolved
}
/** Options an onError handler may return to retry with updated params/headers. */
type RetryOpts = {
  params?: ExternalParamsRecord
  headers?: ExternalHeadersRecord
}
/**
 * Error handler for the stream. Returning a RetryOpts object (even an empty
 * `{}`) retries syncing; returning void stops the stream. May be async.
 * See ShapeStreamOptions.onError for full semantics.
 */
type ShapeStreamErrorHandler = (
  error: Error
) => void | RetryOpts | Promise<void | RetryOpts>
/**
 * Options for constructing a ShapeStream.
 */
export interface ShapeStreamOptions<T = never> {
  /**
   * The full URL to where the Shape is served. This can either be the Electric server
   * directly or a proxy. E.g. for a local Electric instance, you might set `http://localhost:3000/v1/shape`
   */
  url: string
  /**
   * The "offset" on the shape log. This is typically not set as the ShapeStream
   * will handle this automatically. A common scenario where you might pass an offset
   * is if you're maintaining a local cache of the log. If you've gone offline
   * and are re-starting a ShapeStream to catch-up to the latest state of the Shape,
   * you'd pass in the last offset and shapeHandle you'd seen from the Electric server
   * so it knows at what point in the shape to catch you up from.
   */
  offset?: Offset
  /**
   * Similar to `offset`, this isn't typically used unless you're maintaining
   * a cache of the shape log.
   */
  handle?: string
  /**
   * HTTP headers to attach to requests made by the client.
   * Values can be strings or functions (sync or async) that return strings.
   * Function values are resolved in parallel when needed, making this useful
   * for authentication tokens or other dynamic headers.
   */
  headers?: ExternalHeadersRecord
  /**
   * Additional request parameters to attach to the URL.
   * Values can be strings, string arrays, or functions (sync or async) that return these types.
   * Function values are resolved in parallel when needed, making this useful
   * for user-specific parameters or dynamic filters.
   *
   * These will be merged with Electric's standard parameters.
   * Note: You cannot use Electric's reserved parameter names
   * (offset, handle, live, cursor).
   *
   * PostgreSQL-specific options like table, where, columns, and replica
   * should be specified here.
   */
  params?: ExternalParamsRecord
  /**
   * Automatically fetch updates to the Shape. If you just want to sync the current
   * shape and stop, pass false.
   */
  subscribe?: boolean
  /**
   * @deprecated No longer experimental, use {@link liveSse} instead.
   */
  experimentalLiveSse?: boolean
  /**
   * Use Server-Sent Events (SSE) for live updates.
   */
  liveSse?: boolean
  /**
   * Initial data loading mode. Defaults to `full`.
   */
  log?: LogMode
  /** Aborting this signal stops the stream (see the class-level example). */
  signal?: AbortSignal
  /** Custom fetch implementation; defaults to the global `fetch`. */
  fetchClient?: typeof fetch
  /** Tuning for the exponential-backoff retry wrapper around fetch. */
  backoffOptions?: BackoffOptions
  /** Custom value parser passed to the MessageParser (see the parser module). */
  parser?: Parser<T>
  /**
   * Function to transform rows after parsing (e.g., for encryption, type coercion).
   * Applied to data received from Electric.
   *
   * **Note**: If you're using `transformer` solely for column name transformation
   * (e.g., snake_case → camelCase), consider using `columnMapper` instead, which
   * provides bidirectional transformation and automatically encodes WHERE clauses.
   *
   * **Execution order** when both are provided:
   * 1. `columnMapper.decode` runs first (renames columns)
   * 2. `transformer` runs second (transforms values)
   *
   * @example
   * ```typescript
   * // For column renaming only - use columnMapper
   * import { snakeCamelMapper } from '@electric-sql/client'
   * const stream = new ShapeStream({ columnMapper: snakeCamelMapper() })
   * ```
   *
   * @example
   * ```typescript
   * // For value transformation (encryption, etc.) - use transformer
   * const stream = new ShapeStream({
   *   transformer: (row) => ({
   *     ...row,
   *     encrypted_field: decrypt(row.encrypted_field)
   *   })
   * })
   * ```
   *
   * @example
   * ```typescript
   * // Use both together
   * const stream = new ShapeStream({
   *   columnMapper: snakeCamelMapper(), // Runs first: renames columns
   *   transformer: (row) => ({          // Runs second: transforms values
   *     ...row,
   *     encryptedData: decrypt(row.encryptedData)
   *   })
   * })
   * ```
   */
  transformer?: TransformFunction<T>
  /**
   * Bidirectional column name mapper for transforming between database column names
   * (e.g., snake_case) and application column names (e.g., camelCase).
   *
   * The mapper handles both:
   * - **Decoding**: Database → Application (applied to query results)
   * - **Encoding**: Application → Database (applied to WHERE clauses)
   *
   * @example
   * ```typescript
   * // Most common case: snake_case ↔ camelCase
   * import { snakeCamelMapper } from '@electric-sql/client'
   *
   * const stream = new ShapeStream({
   *   url: 'http://localhost:3000/v1/shape',
   *   params: { table: 'todos' },
   *   columnMapper: snakeCamelMapper()
   * })
   * ```
   *
   * @example
   * ```typescript
   * // Custom mapping
   * import { createColumnMapper } from '@electric-sql/client'
   *
   * const stream = new ShapeStream({
   *   columnMapper: createColumnMapper({
   *     user_id: 'userId',
   *     project_id: 'projectId',
   *     created_at: 'createdAt'
   *   })
   * })
   * ```
   */
  columnMapper?: ColumnMapper
  /**
   * A function for handling shapestream errors.
   *
   * **Automatic retries**: The client automatically retries 5xx server errors, network
   * errors, and 429 rate limits with exponential backoff. The `onError` callback is
   * only invoked after these automatic retries are exhausted, or for non-retryable
   * errors like 4xx client errors.
   *
   * When not provided, non-retryable errors will be thrown and syncing will stop.
   *
   * **Return value behavior**:
   * - Return an **object** (RetryOpts or empty `{}`) to retry syncing:
   *   - `{}` - Retry with the same params and headers
   *   - `{ params }` - Retry with modified params
   *   - `{ headers }` - Retry with modified headers (e.g., refreshed auth token)
   *   - `{ params, headers }` - Retry with both modified
   * - Return **void** or **undefined** to stop the stream permanently
   *
   * **Important**: If you want syncing to continue after an error (e.g., to retry
   * on network failures), you MUST return at least an empty object `{}`. Simply
   * logging the error and returning nothing will stop syncing.
   *
   * Supports async functions that return `Promise<void | RetryOpts>`.
   *
   * @example
   * ```typescript
   * // Retry on network errors, stop on others
   * onError: (error) => {
   *   console.error('Stream error:', error)
   *   if (error instanceof FetchError && error.status >= 500) {
   *     return {} // Retry with same params
   *   }
   *   // Return void to stop on other errors
   * }
   * ```
   *
   * @example
   * ```typescript
   * // Refresh auth token on 401
   * onError: async (error) => {
   *   if (error instanceof FetchError && error.status === 401) {
   *     const newToken = await refreshAuthToken()
   *     return { headers: { Authorization: `Bearer ${newToken}` } }
   *   }
   *   return {} // Retry other errors
   * }
   * ```
   */
  onError?: ShapeStreamErrorHandler
  /**
   * HTTP method to use for subset snapshot requests (`requestSnapshot`/`fetchSnapshot`).
   *
   * - `'GET'` (default): Sends subset params as URL query parameters. May fail with
   *   HTTP 414 errors for large queries with many parameters.
   * - `'POST'`: Sends subset params in request body as JSON. Recommended for queries
   *   with large parameter lists (e.g., `WHERE id = ANY($1)` with hundreds of IDs).
   *
   * This can be overridden per-request by passing `method` in the subset params.
   *
   * @example
   * ```typescript
   * const stream = new ShapeStream({
   *   url: 'http://localhost:3000/v1/shape',
   *   params: { table: 'items' },
   *   subsetMethod: 'POST', // Use POST for all subset requests
   * })
   * ```
   */
  subsetMethod?: `GET` | `POST`
}
export interface ShapeStreamInterface<T extends Row<unknown> = Row> {
  /**
   * Registers a subscriber invoked with each batch of messages.
   * @param callback - receives messages; may be async
   * @param onError - invoked with errors delivered to subscribers
   * @returns an unsubscribe function
   */
  subscribe(
    callback: (
      messages: Message<T>[]
    ) => MaybePromise<void> | { columns?: (keyof T)[] },
    onError?: (error: FetchError | Error) => void
  ): () => void
  /** Removes all registered subscribers. */
  unsubscribeAll(): void
  // NOTE(review): semantics of isLoading/lastSyncedAt/lastSynced are defined
  // in the implementation (not visible in this chunk) — lastSyncedAt is
  // presumably an epoch-ms timestamp and lastSynced an elapsed-ms value; confirm.
  isLoading(): boolean
  lastSyncedAt(): number | undefined
  lastSynced(): number
  /** Whether the client currently has an active connection to the server. */
  isConnected(): boolean
  /** Whether the stream has started its request loop. */
  hasStarted(): boolean
  /** True once the stream has caught up with the server's latest state. */
  isUpToDate: boolean
  /** Last processed offset in the shape log. */
  lastOffset: Offset
  /** Server-assigned shape handle, once known. */
  shapeHandle?: string
  /** Last unrecoverable error, if any. */
  error?: unknown
  /** The log mode in effect (`full` or `changes_only`). */
  mode: LogMode
  /** Aborts any in-flight request and immediately re-requests the shape. */
  forceDisconnectAndRefresh(): Promise<void>
  /** Requests a subset snapshot; resolves with its metadata and raw messages. */
  requestSnapshot(params: SubsetParams): Promise<{
    metadata: SnapshotMetadata
    data: Array<Message<T>>
  }>
  /** Fetches a subset snapshot; resolves with metadata and change messages only. */
  fetchSnapshot(opts: SubsetParams): Promise<{
    metadata: SnapshotMetadata
    data: Array<ChangeMessage<T>>
  }>
}
/**
 * Creates a canonical shape key from a URL excluding only Electric protocol
 * parameters (which vary between requests). Remaining params are sorted so
 * logically-identical shape URLs always map to the same key.
 */
export function canonicalShapeKey(url: URL): string {
  const canonical = new URL(url.origin + url.pathname)
  // append() keeps duplicate keys (e.g. ?table=a&table=b) intact.
  url.searchParams.forEach((value, key) => {
    if (ELECTRIC_PROTOCOL_QUERY_PARAMS.includes(key)) return
    canonical.searchParams.append(key, value)
  })
  canonical.searchParams.sort()
  return canonical.toString()
}
/**
* Reads updates to a shape from Electric using HTTP requests and long polling or
* Server-Sent Events (SSE).
* Notifies subscribers when new messages come in. Doesn't maintain any history of the
* log but does keep track of the offset position and is the best way
* to consume the HTTP `GET /v1/shape` api.
*
* @constructor
* @param {ShapeStreamOptions} options - configure the shape stream
* @example
* Register a callback function to subscribe to the messages.
* ```
* const stream = new ShapeStream(options)
* stream.subscribe(messages => {
* // messages is 1 or more row updates
* })
* ```
*
* To use Server-Sent Events (SSE) for real-time updates:
* ```
* const stream = new ShapeStream({
* url: `http://localhost:3000/v1/shape`,
* liveSse: true
* })
* ```
*
* To abort the stream, abort the `signal`
* passed in via the `ShapeStreamOptions`.
* ```
* const aborter = new AbortController()
* const issueStream = new ShapeStream({
 * url: `${BASE_URL}/${table}`,
* subscribe: true,
* signal: aborter.signal,
* })
* // Later...
* aborter.abort()
* ```
*/
export class ShapeStream<T extends Row<unknown> = Row>
implements ShapeStreamInterface<T>
{
// Convenience constants for the `replica` option.
static readonly Replica = {
  FULL: `full` as Replica,
  DEFAULT: `default` as Replica,
}
// Options as provided, with `subscribe` defaulted to true in the constructor.
readonly options: ShapeStreamOptions<GetExtensions<T>>
// Last unrecoverable error (exposed via the `error` getter).
#error: unknown = null
// Layered fetch stack: backoff -> chunk buffer -> header check -> consumed messages.
readonly #fetchClient: typeof fetch
// Same stack minus the consumed-messages wrapper; base for #fetchClient.
readonly #sseFetchClient: typeof fetch
readonly #messageParser: MessageParser<T>
// subscriber key -> [message callback, optional error callback]
readonly #subscribers = new Map<
  object,
  [
    (messages: Message<T>[]) => MaybePromise<void>,
    ((error: Error) => void) | undefined,
  ]
>()
#started = false
// State machine tracking offset/handle plus up-to-date/paused/error status.
#syncState: ShapeStreamState
#connected: boolean = false
#mode: LogMode
#onError?: ShapeStreamErrorHandler
// Abort controller for the in-flight request; recreated per request.
#requestAbortController?: AbortController
#refreshCount = 0
#snapshotCounter = 0
// Whether a refresh cycle is in progress (#refreshCount > 0).
get #isRefreshing(): boolean {
  return this.#refreshCount > 0
}
#tickPromise?: Promise<void>
#tickPromiseResolver?: () => void
#tickPromiseRejecter?: (reason?: unknown) => void
#messageChain = Promise.resolve<void[]>([]) // promise chain for incoming messages
#snapshotTracker = new SnapshotTracker()
// Coordinates pausing/resuming the stream (e.g. while requestSnapshot runs).
#pauseLock: PauseLock
#currentFetchUrl?: URL // Current fetch URL for computing shape key
#lastSseConnectionStartTime?: number
#minSseConnectionDuration = 1000 // Minimum expected SSE connection duration (1 second)
#maxShortSseConnections = 3 // Fall back to long polling after this many short connections
#sseBackoffBaseDelay = 100 // Base delay for exponential backoff (ms)
#sseBackoffMaxDelay = 5000 // Maximum delay cap (ms)
#unsubscribeFromVisibilityChanges?: () => void
#unsubscribeFromWakeDetection?: () => void
#maxStaleCacheRetries = 3
// Fast-loop detection: track recent non-live requests to detect tight retry
// loops caused by proxy/CDN misconfiguration or stale client-side caches
#recentRequestEntries: Array<{ timestamp: number; offset: string }> = []
#fastLoopWindowMs = 500
#fastLoopThreshold = 5
#fastLoopBackoffBaseMs = 100
#fastLoopBackoffMaxMs = 5_000
#fastLoopConsecutiveCount = 0
#fastLoopMaxCount = 5
// Cache buster carried across a paused/aborted request so the eventual retry
// still bypasses intermediary caches (see #requestShape).
#pendingRequestShapeCacheBuster?: string
#maxSnapshotRetries = 5
#expiredShapeRecoveryKey: string | null = null
#pendingSelfHealCheck: { shapeKey: string; staleHandle: string } | null = null
// Bounds the onError retry loop in #start.
#consecutiveErrorRetries = 0
#maxConsecutiveErrorRetries = 50
/**
 * @param options - stream configuration; `subscribe` defaults to true.
 *
 * Sets up the initial sync state, the pause/resume lock, the
 * columnMapper/transformer chain, and the layered fetch clients.
 */
constructor(options: ShapeStreamOptions<GetExtensions<T>>) {
  this.options = { subscribe: true, ...options }
  validateOptions(this.options)
  this.#syncState = createInitialState({
    offset: this.options.offset ?? `-1`,
    handle: this.options.handle,
  })
  this.#pauseLock = new PauseLock({
    onAcquired: () => {
      this.#syncState = this.#syncState.pause()
      // Abort the in-flight request so the pause takes effect immediately.
      if (this.#started) {
        this.#requestAbortController?.abort(PAUSE_STREAM)
      }
    },
    onReleased: () => {
      if (!this.#started) return
      if (this.options.signal?.aborted) return
      // Don't transition syncState here — let #requestShape handle
      // the PausedState→previous transition so it can detect
      // resumingFromPause and avoid live long-polling.
      this.#start().catch(() => {
        // Errors from #start are handled internally via onError.
        // This catch prevents unhandled promise rejection in Node/Bun.
      })
    },
  })
  // Build transformer chain: columnMapper.decode -> transformer
  // columnMapper transforms column names, transformer transforms values
  let transformer: TransformFunction<GetExtensions<T>> | undefined
  if (options.columnMapper) {
    // Rename each database column key to its application-level name.
    const applyColumnMapper = (
      row: Row<GetExtensions<T>>
    ): Row<GetExtensions<T>> => {
      const result: Record<string, unknown> = {}
      for (const [dbKey, value] of Object.entries(row)) {
        const appKey = options.columnMapper!.decode(dbKey)
        result[appKey] = value
      }
      return result as Row<GetExtensions<T>>
    }
    transformer = options.transformer
      ? (row: Row<GetExtensions<T>>) =>
          options.transformer!(applyColumnMapper(row))
      : applyColumnMapper
  } else {
    transformer = options.transformer
  }
  this.#messageParser = new MessageParser<T>(options.parser, transformer)
  this.#onError = this.options.onError
  this.#mode = this.options.log ?? `full`
  // Default to the global fetch when no custom client is supplied.
  const baseFetchClient =
    options.fetchClient ??
    ((...args: Parameters<typeof fetch>) => fetch(...args))
  const backOffOpts = {
    ...(options.backoffOptions ?? BackoffDefaults),
    onFailedAttempt: () => {
      // A failed attempt means we lost connectivity; surface that via isConnected().
      this.#connected = false
      options.backoffOptions?.onFailedAttempt?.()
    },
  }
  const fetchWithBackoffClient = createFetchWithBackoff(
    baseFetchClient,
    backOffOpts
  )
  this.#sseFetchClient = createFetchWithResponseHeadersCheck(
    createFetchWithChunkBuffer(fetchWithBackoffClient)
  )
  this.#fetchClient = createFetchWithConsumedMessages(this.#sseFetchClient)
  this.#subscribeToVisibilityChanges()
}
/** Server-assigned handle identifying the current shape, if known. */
get shapeHandle() {
  return this.#syncState.handle
}
/** The last unrecoverable stream error, or null if none occurred. */
get error() {
  return this.#error
}
/** Whether the stream has caught up with the server's latest state. */
get isUpToDate() {
  return this.#syncState.isUpToDate
}
/** The last shape-log offset processed by the stream. */
get lastOffset() {
  return this.#syncState.offset
}
/** The log mode in effect (`full` unless overridden via options.log). */
get mode() {
  return this.#mode
}
/**
 * Runs the request loop. On failure, routes the error through the onError
 * handler: an object return retries (merging any new params/headers into
 * options, bounded by #maxConsecutiveErrorRetries), a void return stops
 * the stream. Without a handler the error is rethrown after notifying
 * subscribers. Always tears down on exit.
 */
async #start(): Promise<void> {
  this.#started = true
  this.#subscribeToWakeDetection()
  try {
    await this.#requestShape()
  } catch (err) {
    this.#error = err
    if (err instanceof Error) {
      this.#syncState = this.#syncState.toErrorState(err)
    }
    // Check if onError handler wants to retry
    if (this.#onError) {
      const retryOpts = await this.#onError(err as Error)
      // Guard against null (typeof null === "object" in JavaScript)
      const isRetryable = !(err instanceof MissingHeadersError)
      if (retryOpts && typeof retryOpts === `object` && isRetryable) {
        // Update params/headers but don't reset offset
        // We want to continue from where we left off, not refetch everything
        if (retryOpts.params) {
          // Merge new params with existing params to preserve other parameters
          this.options.params = {
            ...(this.options.params ?? {}),
            ...retryOpts.params,
          }
        }
        if (retryOpts.headers) {
          // Merge new headers with existing headers to preserve other headers
          this.options.headers = {
            ...(this.options.headers ?? {}),
            ...retryOpts.headers,
          }
        }
        // Bound the onError retry loop to prevent unbounded retries
        this.#consecutiveErrorRetries++
        if (
          this.#consecutiveErrorRetries > this.#maxConsecutiveErrorRetries
        ) {
          console.warn(
            `[Electric] onError retry loop exhausted after ${this.#maxConsecutiveErrorRetries} consecutive retries. ` +
              `The error was never resolved by the onError handler. ` +
              `Error: ${err instanceof Error ? err.message : String(err)}`,
            new Error(`stack trace`)
          )
          if (err instanceof Error) {
            this.#sendErrorToSubscribers(err)
          }
          this.#teardown()
          return
        }
        // Clear the error since we're retrying
        this.#error = null
        if (this.#syncState instanceof ErrorState) {
          this.#syncState = this.#syncState.retry()
        }
        // Reset fast-loop detection so the retry isn't misflagged as a tight loop.
        this.#fastLoopConsecutiveCount = 0
        this.#recentRequestEntries = []
        // Restart from current offset
        this.#started = false
        return this.#start()
      }
      // onError returned void, meaning it doesn't want to retry
      // This is an unrecoverable error, notify subscribers
      if (err instanceof Error) {
        this.#sendErrorToSubscribers(err)
      }
      this.#teardown()
      return
    }
    // No onError handler provided, this is an unrecoverable error
    // Notify subscribers and throw
    if (err instanceof Error) {
      this.#sendErrorToSubscribers(err)
    }
    this.#teardown()
    throw err
  }
  this.#teardown()
}
/**
 * Marks the stream as disconnected, rejects any pending tick promise, and
 * unsubscribes from system-wake detection.
 */
#teardown() {
  this.#connected = false
  this.#tickPromiseRejecter?.()
  this.#unsubscribeFromWakeDetection?.()
}
/**
 * One iteration of the fetch loop: builds the request URL, performs the
 * fetch, and handles pause/abort/stale-cache/409 outcomes, then recurses
 * (by returning the next call's promise) until aborted or — when
 * `subscribe` is false — the stream is up to date.
 *
 * @param requestShapeCacheBuster - cache buster carried over from a 409
 *   retry so the retried URL is unique to intermediary caches
 */
async #requestShape(requestShapeCacheBuster?: string): Promise<void> {
  // ErrorState should never reach the request loop — re-throw so
  // #start's catch block can route it through onError properly.
  if (this.#syncState instanceof ErrorState) {
    throw this.#syncState.error
  }
  const activeCacheBuster =
    requestShapeCacheBuster ?? this.#pendingRequestShapeCacheBuster
  // If paused, stash the cache buster for the eventual resume and bail out.
  if (this.#pauseLock.isPaused) {
    if (activeCacheBuster) {
      this.#pendingRequestShapeCacheBuster = activeCacheBuster
    }
    return
  }
  // One-shot mode: stop once aborted or fully caught up.
  if (
    !this.options.subscribe &&
    (this.options.signal?.aborted || this.#syncState.isUpToDate)
  ) {
    return
  }
  // Only check for fast loops on non-live requests; live polling is expected to be rapid
  if (!this.#syncState.isUpToDate) {
    await this.#checkFastLoop()
  } else {
    this.#fastLoopConsecutiveCount = 0
    this.#recentRequestEntries = []
  }
  let resumingFromPause = false
  if (this.#syncState instanceof PausedState) {
    resumingFromPause = true
    this.#syncState = this.#syncState.resume()
  }
  const { url, signal } = this.options
  const { fetchUrl, requestHeaders } = await this.#constructUrl(
    url,
    resumingFromPause
  )
  if (activeCacheBuster) {
    fetchUrl.searchParams.set(CACHE_BUSTER_QUERY_PARAM, activeCacheBuster)
    fetchUrl.searchParams.sort()
  }
  const abortListener = await this.#createAbortListener(signal)
  const requestAbortController = this.#requestAbortController! // we know that it is not undefined because it is set by `this.#createAbortListener`
  // Re-check after async setup — the lock may have been acquired
  // during URL construction or abort controller creation (e.g., by
  // requestSnapshot), when the abort controller didn't exist yet.
  if (this.#pauseLock.isPaused) {
    if (abortListener && signal) {
      signal.removeEventListener(`abort`, abortListener)
    }
    if (activeCacheBuster) {
      this.#pendingRequestShapeCacheBuster = activeCacheBuster
    }
    this.#requestAbortController = undefined
    return
  }
  this.#pendingRequestShapeCacheBuster = undefined
  try {
    await this.#fetchShape({
      fetchUrl,
      requestAbortController,
      headers: requestHeaders,
      resumingFromPause,
    })
  } catch (e) {
    const abortReason = requestAbortController.signal.reason
    // Deliberate aborts (manual refresh, system wake) restart the loop
    // instead of surfacing as errors.
    const isRestartAbort =
      requestAbortController.signal.aborted &&
      (abortReason === FORCE_DISCONNECT_AND_REFRESH ||
        abortReason === SYSTEM_WAKE)
    if (
      (e instanceof FetchError || e instanceof FetchBackoffAbortError) &&
      isRestartAbort
    ) {
      return this.#requestShape()
    }
    if (e instanceof FetchBackoffAbortError) {
      return // interrupted
    }
    if (e instanceof StaleCacheError) {
      // Two paths throw StaleCacheError:
      // 1. Normal stale-retry: response handle matched expired handle,
      //    #staleCacheBuster set to bypass CDN cache on next request.
      // 2. Self-healing: stale retries exhausted, expired entry cleared,
      //    stream reset — retry without expired_handle param.
      return this.#requestShape()
    }
    if (!(e instanceof FetchError)) throw e // should never happen
    if (e.status == 409) {
      // Upon receiving a 409, start from scratch with the newly
      // provided shape handle (if present). An unconditional cache
      // buster ensures the retry URL is always unique regardless of
      // whether the server returns a new, same, or missing handle.
      // Store the current shape URL as expired to avoid future 409s
      if (this.#syncState.handle) {
        const shapeKey = canonicalShapeKey(fetchUrl)
        expiredShapesCache.markExpired(shapeKey, this.#syncState.handle)
      }
      const newShapeHandle = e.headers[SHAPE_HANDLE_HEADER]
      if (!newShapeHandle) {
        console.warn(
          `[Electric] Received 409 response without a shape handle header. ` +
            `This likely indicates a proxy or CDN stripping required headers.`,
          new Error(`stack trace`)
        )
      }
      const nextRequestShapeCacheBuster = createCacheBuster()
      this.#reset(newShapeHandle)
      // Notify subscribers that data must be re-fetched so they can
      // clear accumulated state (e.g., Shape clears its row map).
      // We publish a synthetic control message rather than the raw 409
      // body to avoid delivering stale data rows to subscribers.
      await this.#publish([{ headers: { control: `must-refetch` } }])
      return this.#requestShape(nextRequestShapeCacheBuster)
    } else {
      // errors that have reached this point are not actionable without
      // additional user input, such as 400s or failures to read the
      // body of a response, so we exit the loop and let #start handle it
      // Note: We don't notify subscribers here because onError might recover
      throw e
    }
  } finally {
    if (abortListener && signal) {
      signal.removeEventListener(`abort`, abortListener)
    }
    this.#requestAbortController = undefined
  }
  this.#tickPromiseResolver?.()
  return this.#requestShape()
}
/**
 * Detects tight retry loops (e.g., from stale client-side caches or
 * proxy/CDN misconfiguration) and attempts recovery. On first detection,
 * clears client-side caches (in-memory and localStorage) and resets the
 * stream to fetch from scratch.
 * If the loop persists, applies exponential backoff and eventually throws.
 *
 * @throws {FetchError} synthetic 502 once #fastLoopMaxCount detections
 *   occur without recovery
 */
async #checkFastLoop(): Promise<void> {
  const now = Date.now()
  const currentOffset = this.#syncState.offset
  // Keep only requests inside the detection window.
  this.#recentRequestEntries = this.#recentRequestEntries.filter(
    (e) => now - e.timestamp < this.#fastLoopWindowMs
  )
  this.#recentRequestEntries.push({ timestamp: now, offset: currentOffset })
  // Only flag as a fast loop if requests are stuck at the same offset.
  // Normal rapid syncing advances the offset with each response.
  const sameOffsetCount = this.#recentRequestEntries.filter(
    (e) => e.offset === currentOffset
  ).length
  if (sameOffsetCount < this.#fastLoopThreshold) return
  this.#fastLoopConsecutiveCount++
  if (this.#fastLoopConsecutiveCount >= this.#fastLoopMaxCount) {
    throw new FetchError(
      502,
      undefined,
      undefined,
      {},
      this.options.url,
      `Client is stuck in a fast retry loop ` +
        `(${this.#fastLoopThreshold} requests in ${this.#fastLoopWindowMs}ms at the same offset, ` +
        `repeated ${this.#fastLoopMaxCount} times). ` +
        `Client-side caches were cleared automatically on first detection, but the loop persists. ` +
        `This usually indicates a proxy or CDN misconfiguration. ` +
        `Common causes:\n` +
        `  - Proxy is not including query parameters (handle, offset) in its cache key\n` +
        `  - CDN is serving stale 409 responses\n` +
        `  - Proxy is stripping required Electric headers from responses\n` +
        `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL}`
    )
  }
  if (this.#fastLoopConsecutiveCount === 1) {
    console.warn(
      `[Electric] Detected fast retry loop ` +
        `(${this.#fastLoopThreshold} requests in ${this.#fastLoopWindowMs}ms at the same offset). ` +
        `Clearing client-side caches and resetting stream to recover. ` +
        `If this persists, check that your proxy includes all query parameters ` +
        `(especially 'handle' and 'offset') in its cache key, ` +
        `and that required Electric headers are forwarded to the client. ` +
        `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL}`,
      new Error(`stack trace`)
    )
    // Clear only this shape's cache entries when we know its key;
    // otherwise clear everything.
    if (this.#currentFetchUrl) {
      const shapeKey = canonicalShapeKey(this.#currentFetchUrl)
      expiredShapesCache.delete(shapeKey)
      upToDateTracker.delete(shapeKey)
    } else {
      expiredShapesCache.clear()
      upToDateTracker.clear()
    }
    this.#reset()
    this.#recentRequestEntries = []
    return
  }
  // Exponential backoff with full jitter
  const maxDelay = Math.min(
    this.#fastLoopBackoffMaxMs,
    this.#fastLoopBackoffBaseMs * Math.pow(2, this.#fastLoopConsecutiveCount)
  )
  const delayMs = Math.floor(Math.random() * maxDelay)
  await new Promise((resolve) => setTimeout(resolve, delayMs))
  this.#recentRequestEntries = []
}
/**
 * Builds the final request URL and resolved headers for the next fetch.
 *
 * Order of application matters and is preserved deliberately:
 * 1. user headers/params are resolved (possibly async) in parallel;
 * 2. PostgreSQL shape params (table/where/columns/replica/params plus any
 *    remaining custom params) are applied;
 * 3. subset (snapshot) params are applied, preferring structured
 *    expressions over legacy SQL strings;
 * 4. state-derived params (offset/handle/cursor) come from the sync state;
 * 5. the query string is sorted so equivalent requests produce
 *    byte-identical URLs and hit the same cache entries.
 *
 * @param url - Base shape endpoint URL.
 * @param resumingFromPause - True when resuming a paused stream; disables
 *   long-polling so `isConnected` doesn't appear false for ~20s.
 * @param subsetParams - Optional subset/snapshot query parameters; their
 *   presence also marks the request as a snapshot request for the state.
 * @returns `{ fetchUrl, requestHeaders }` ready to pass to fetch.
 */
async #constructUrl(
  url: string,
  resumingFromPause: boolean,
  subsetParams?: SubsetParams
) {
  // Resolve headers and params in parallel
  const [requestHeaders, params] = await Promise.all([
    resolveHeaders(this.options.headers),
    this.options.params
      ? toInternalParams(convertWhereParamsToObj(this.options.params))
      : undefined,
  ])
  // Validate params after resolution
  if (params) validateParams(params)
  const fetchUrl = new URL(url)
  // Add PostgreSQL-specific parameters
  if (params) {
    if (params.table) setQueryParam(fetchUrl, TABLE_QUERY_PARAM, params.table)
    if (params.where && typeof params.where === `string`) {
      // Column names embedded in the where clause may need columnMapper encoding
      const encodedWhere = encodeWhereClause(
        params.where,
        this.options.columnMapper?.encode
      )
      setQueryParam(fetchUrl, WHERE_QUERY_PARAM, encodedWhere)
    }
    if (params.columns) {
      // Get original columns array from options (before toInternalParams converted to string)
      const originalColumns = await resolveValue(this.options.params?.columns)
      if (Array.isArray(originalColumns)) {
        // Apply columnMapper encoding if present
        let encodedColumns = originalColumns.map(String)
        if (this.options.columnMapper) {
          encodedColumns = encodedColumns.map(
            this.options.columnMapper.encode
          )
        }
        // Quote each column name to handle special characters (commas, etc.)
        const serializedColumns = encodedColumns
          .map(quoteIdentifier)
          .join(`,`)
        setQueryParam(fetchUrl, COLUMNS_QUERY_PARAM, serializedColumns)
      } else {
        // Fallback: columns was already a string
        setQueryParam(fetchUrl, COLUMNS_QUERY_PARAM, params.columns)
      }
    }
    if (params.replica) setQueryParam(fetchUrl, REPLICA_PARAM, params.replica)
    if (params.params)
      setQueryParam(fetchUrl, WHERE_PARAMS_PARAM, params.params)
    // Add any remaining custom parameters
    // (clone so we can strip the well-known keys handled above)
    const customParams = { ...params }
    delete customParams.table
    delete customParams.where
    delete customParams.columns
    delete customParams.replica
    delete customParams.params
    for (const [key, value] of Object.entries(customParams)) {
      setQueryParam(fetchUrl, key, value)
    }
  }
  if (subsetParams) {
    // Prefer structured expressions when available (allows proper columnMapper application)
    // Fall back to legacy string format for backwards compatibility
    if (subsetParams.whereExpr) {
      // Compile structured expression with columnMapper applied
      const compiledWhere = compileExpression(
        subsetParams.whereExpr,
        this.options.columnMapper?.encode
      )
      setQueryParam(fetchUrl, SUBSET_PARAM_WHERE, compiledWhere)
      // Also send the structured expression for servers that support it
      fetchUrl.searchParams.set(
        SUBSET_PARAM_WHERE_EXPR,
        JSON.stringify(subsetParams.whereExpr)
      )
    } else if (subsetParams.where && typeof subsetParams.where === `string`) {
      // Legacy string format (no columnMapper applied to already-compiled SQL)
      const encodedWhere = encodeWhereClause(
        subsetParams.where,
        this.options.columnMapper?.encode
      )
      setQueryParam(fetchUrl, SUBSET_PARAM_WHERE, encodedWhere)
    }
    if (subsetParams.params)
      // Serialize params as JSON to keep the parameter name constant for proxy configs
      fetchUrl.searchParams.set(
        SUBSET_PARAM_WHERE_PARAMS,
        bigintSafeStringify(subsetParams.params)
      )
    if (subsetParams.limit !== undefined)
      setQueryParam(fetchUrl, SUBSET_PARAM_LIMIT, subsetParams.limit)
    if (subsetParams.offset !== undefined)
      setQueryParam(fetchUrl, SUBSET_PARAM_OFFSET, subsetParams.offset)
    // Prefer structured ORDER BY expressions when available
    if (subsetParams.orderByExpr) {
      // Compile structured ORDER BY with columnMapper applied
      const compiledOrderBy = compileOrderBy(
        subsetParams.orderByExpr,
        this.options.columnMapper?.encode
      )
      setQueryParam(fetchUrl, SUBSET_PARAM_ORDER_BY, compiledOrderBy)
      // Also send the structured expression for servers that support it
      fetchUrl.searchParams.set(
        SUBSET_PARAM_ORDER_BY_EXPR,
        JSON.stringify(subsetParams.orderByExpr)
      )
    } else if (
      subsetParams.orderBy &&
      typeof subsetParams.orderBy === `string`
    ) {
      // Legacy string format
      // NOTE(review): reuses encodeWhereClause for ORDER BY — presumably the
      // column-reference encoding rules are the same for both clauses; confirm.
      const encodedOrderBy = encodeWhereClause(
        subsetParams.orderBy,
        this.options.columnMapper?.encode
      )
      setQueryParam(fetchUrl, SUBSET_PARAM_ORDER_BY, encodedOrderBy)
    }
  }
  // Add state-specific parameters (offset, handle, live cache busters, etc.)
  this.#syncState.applyUrlParams(fetchUrl, {
    isSnapshotRequest: subsetParams !== undefined,
    // Don't long-poll when resuming from pause or refreshing — avoids
    // a 20s hold during which `isConnected` would be false
    canLongPoll: !this.#isRefreshing && !resumingFromPause,
  })
  fetchUrl.searchParams.set(LOG_MODE_QUERY_PARAM, this.#mode)
  // Add cache buster for shapes known to be expired to prevent 409s
  const shapeKey = canonicalShapeKey(fetchUrl)
  const expiredHandle = expiredShapesCache.getExpiredHandle(shapeKey)
  if (expiredHandle) {
    fetchUrl.searchParams.set(EXPIRED_HANDLE_QUERY_PARAM, expiredHandle)
  }
  // sort query params in-place for stable URLs and improved cache hits
  fetchUrl.searchParams.sort()
  return {
    fetchUrl,
    requestHeaders,
  }
}
/**
 * Installs a fresh per-request AbortController and, when the caller
 * supplied an external signal, forwards its aborts (with reason) to the
 * new controller.
 *
 * @param signal - Optional caller-owned abort signal to mirror.
 * @returns The installed `abort` listener (for later removal), or
 *   `undefined` when no signal was provided.
 */
async #createAbortListener(signal?: AbortSignal) {
  // Every request gets its own controller so aborting one request can
  // never cancel a later one.
  this.#requestAbortController = new AbortController()
  if (!signal) return
  // Propagate the caller's abort reason onto this request's controller.
  const forwardAbort = () => {
    this.#requestAbortController?.abort(signal.reason)
  }
  signal.addEventListener(`abort`, forwardAbort, { once: true })
  // Cover the race where the signal was aborted before we subscribed:
  // the `abort` event has already fired, so abort explicitly now.
  if (signal.aborted) {
    this.#requestAbortController?.abort(signal.reason)
  }
  return forwardAbort
}
/**
 * Processes response metadata (headers, status) and updates sync state.
 * Returns `true` if the response body should be processed by the caller,
 * or `false` if the response was ignored (stale) and the body should be skipped.
 * Throws on stale-retry (to trigger a retry with cache buster).
 *
 * @param response - The raw fetch Response whose headers/status drive the
 *   state transition; its body is only touched (cancelled) on stale-retry.
 * @throws StaleCacheError - to trigger a retry with a cache buster, or a
 *   one-shot self-healing reset after retries are exhausted.
 * @throws FetchError - (502) when stale responses persist even after the
 *   self-healing retry, i.e. an unrecoverable proxy/CDN misconfiguration.
 */
async #onInitialResponse(response: Response): Promise<boolean> {
  const { headers, status } = response
  const shapeHandle = headers.get(SHAPE_HANDLE_HEADER)
  // Canonical key for this shape in the client-side expired-handles cache;
  // null when no request URL has been recorded yet.
  const shapeKey = this.#currentFetchUrl
    ? canonicalShapeKey(this.#currentFetchUrl)
    : null
  const expiredHandle = shapeKey
    ? expiredShapesCache.getExpiredHandle(shapeKey)
    : null
  // If this response is the first one after a self-healing retry, check
  // whether the proxy/CDN returned the exact handle we just marked expired.
  // If so, the client is about to accept stale data silently — loudly warn
  // so operators can detect and fix the proxy misconfiguration.
  if (this.#pendingSelfHealCheck) {
    const { shapeKey: healedKey, staleHandle } = this.#pendingSelfHealCheck
    // One-shot check: consume it regardless of the outcome below.
    this.#pendingSelfHealCheck = null
    if (shapeKey === healedKey && shapeHandle === staleHandle) {
      console.warn(
        `[Electric] Self-healing retry received the same handle "${staleHandle}" that was just marked expired. ` +
          `This means your proxy/CDN is serving a stale cached response and ignoring cache-buster query params. ` +
          `The client will proceed with this stale data to avoid a permanent failure, but it may be out of date until the cache refreshes. ` +
          `Fix: configure your proxy/CDN to include all query parameters (especially 'handle' and 'offset') in its cache key. ` +
          `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL}`,
        new Error(`stack trace`)
      )
    }
  }
  // Let the sync state machine decide how to treat this response; it
  // returns the next state plus an action (`accepted`/`stale-retry`/`ignored`).
  const transition = this.#syncState.handleResponseMetadata({
    status,
    responseHandle: shapeHandle,
    responseOffset: headers.get(CHUNK_LAST_OFFSET_HEADER) as Offset | null,
    responseCursor: headers.get(LIVE_CACHE_BUSTER_HEADER),
    responseSchema: getSchemaFromHeaders(headers),
    expiredHandle,
    now: Date.now(),
    maxStaleCacheRetries: this.#maxStaleCacheRetries,
    createCacheBuster,
  })
  this.#syncState = transition.state
  // Clear recovery guard on 204 (no-content), since the empty body means
  // #onMessages won't run to clear it via the up-to-date path.
  if (status === 204) {
    this.#expiredShapeRecoveryKey = null
  }
  if (transition.action === `accepted` && status === 204) {
    this.#consecutiveErrorRetries = 0
  }
  if (transition.action === `stale-retry`) {
    // Cancel the response body to release the connection before retrying.
    await response.body?.cancel()
    if (transition.exceededMaxRetries) {
      if (shapeKey) {
        // Clear the expired entry — keeping it only poisons future sessions.
        expiredShapesCache.delete(shapeKey)
        // Try one self-healing retry per shape: reset the stream and
        // retry without the expired_handle param. Since handles are never
        // reused (see SPEC.md S0), the fresh response will have a new
        // handle and won't trigger stale detection.
        if (this.#expiredShapeRecoveryKey !== shapeKey) {
          console.warn(
            `[Electric] Stale cache retries exhausted (${this.#maxStaleCacheRetries} attempts). ` +
              `Clearing expired handle entry and attempting self-healing retry without the expired_handle parameter. ` +
              `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL}`,
            new Error(`stack trace`)
          )
          this.#expiredShapeRecoveryKey = shapeKey
          // Arm a post-self-heal check: if the next response comes back
          // with the same handle we just marked expired, the proxy/CDN is
          // still serving stale data and we'll warn loudly instead of
          // accepting it silently.
          if (shapeHandle) {
            this.#pendingSelfHealCheck = {
              shapeKey,
              staleHandle: shapeHandle,
            }
          }
          this.#reset()
          throw new StaleCacheError(
            `Expired handle entry evicted for self-healing retry`
          )
        }
      }
      // Self-heal already attempted for this shape (or no shapeKey):
      // surface a hard, actionable error instead of retrying forever.
      throw new FetchError(
        502,
        undefined,
        undefined,
        {},
        this.#currentFetchUrl?.toString() ?? ``,
        `CDN continues serving stale cached responses after ${this.#maxStaleCacheRetries} retry attempts. ` +
          `This indicates a severe proxy/CDN misconfiguration. ` +
          `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
          `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL}`
      )
    }
    // Retries remain: warn and throw so the caller retries with a random
    // cache buster to punch through the stale cache entry.
    console.warn(
      `[Electric] Received stale cached response with expired shape handle. ` +
        `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
        `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
        `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
        `For more information visit the troubleshooting guide: ${TROUBLESHOOTING_URL} ` +
        `Retrying with a random cache buster to bypass the stale cache (attempt ${this.#syncState.staleCacheRetryCount}/${this.#maxStaleCacheRetries}).`,
      new Error(`stack trace`)
    )
    throw new StaleCacheError(
      `Received stale cached response with expired handle "${shapeHandle}". ` +
        `This indicates a proxy/CDN caching misconfiguration. ` +
        `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key.`
    )
  }
  if (transition.action === `ignored`) {
    console.warn(
      `[Electric] Response was ignored by state "${this.#syncState.kind}". ` +
        `The response body will be skipped. ` +
        `This may indicate a proxy/CDN caching issue or a client state machine bug.`,
      new Error(`stack trace`)
    )
    return false
  }
  return true
}
/**
 * Handles one batch of incoming shape messages: advances the sync state,
 * records up-to-date markers for replay tracking, filters out change
 * messages already covered by the snapshot tracker, and publishes the rest.
 *
 * @param batch - Messages received from the server (must be an array).
 * @param isSseMessage - True when the batch arrived over SSE rather than
 *   a long-poll response.
 */
async #onMessages(batch: Array<Message<T>>, isSseMessage = false) {
  // Defensive guard: a non-array here means a client bug upstream.
  if (!Array.isArray(batch)) {
    console.warn(
      `[Electric] #onMessages called with non-array argument (${typeof batch}). ` +
        `This is a client bug — please report it.`,
      new Error(`stack trace`)
    )
    return
  }
  if (batch.length === 0) return

  // Any successfully-delivered batch resets the error-retry counter.
  this.#consecutiveErrorRetries = 0

  // Only the final message can carry the up-to-date control marker.
  const finalMessage = batch[batch.length - 1]
  const reachedUpToDate = isUpToDateMessage(finalMessage)
  const transition = this.#syncState.handleMessageBatch({
    hasMessages: true,
    hasUpToDateMessage: reachedUpToDate,
    isSse: isSseMessage,
    upToDateOffset: reachedUpToDate ? getOffset(finalMessage) : undefined,
    now: Date.now(),
    // Cursor as it was BEFORE this batch is applied.
    currentCursor: this.#syncState.liveCacheBuster,
  })
  this.#syncState = transition.state

  if (reachedUpToDate) {
    // The state machine may ask us to drop the whole batch (e.g. replay).
    if (transition.suppressBatch) return
    if (this.#currentFetchUrl) {
      // Record the post-transition cursor so a fresh stream can replay
      // cached responses instead of refetching.
      upToDateTracker.recordUpToDate(
        canonicalShapeKey(this.#currentFetchUrl),
        this.#syncState.liveCacheBuster
      )
      this.#expiredShapeRecoveryKey = null
    }
  }

  // Drop change messages the snapshot tracker rejects; control messages
  // always pass through.
  const accepted = batch.filter(
    (message) =>
      !isChangeMessage(message) ||
      !this.#snapshotTracker.shouldRejectMessage(message)
  )
  await this.#publish(accepted)
}
/**
* Fetches the shape from the server using either long polling or SSE.
* Upon receiving a successful response, the #onInitialResponse method is called.
* Afterwards, the #onMessages method is called for all the incoming updates.
* @param opts - The options for the request.
* @returns A promise that resolves when the request is complete (i.e. the long poll receives a response or the SSE connection is closed).
*/
async #fetchShape(opts: {
fetchUrl: URL
requestAbortController: AbortController
headers: Record<string, string>
resumingFromPause?: boolean
}): Promise<void> {
// Store current fetch URL for shape key computation
this.#currentFetchUrl = opts.fetchUrl
// Check if we should enter replay mode (replaying cached responses)
// This happens when we're starting fresh (offset=-1 or before first up-to-date)
// and there's a recent up-to-date in localStorage (< 60s)
if (!this.#syncState.isUpToDate && this.#syncState.canEnterReplayMode()) {
const shapeKey = canonicalShapeKey(opts.fetchUrl)
const lastSeenCursor = upToDateTracker.shouldEnterReplayMode(shapeKey)
if (lastSeenCursor) {
// Enter replay mode and store the last seen cursor
this.#syncState = this.#syncState.enterReplayMode(lastSeenCursor)
}
}
const useSse = this.options.liveSse ?? this.options.experimentalLiveSse
if (
this.#syncState.shouldUseSse({
liveSseEnabled: !!useSse,
isRefreshing: this.#isRefreshing,
resumi