@tanstack/electric-db-collection
ElectricSQL collection for TanStack DB
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const client = require("@electric-sql/client");
const store = require("@tanstack/store");
const DebugModule = require("debug");
const db = require("@tanstack/db");
const errors = require("./errors.cjs");
const sqlCompiler = require("./sql-compiler.cjs");
const tagIndex = require("./tag-index.cjs");
const debug = DebugModule.debug(`ts/db:electric`);
const ELECTRIC_TEST_HOOKS = Symbol(`electricTestHooks`);
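// Predicates for Electric protocol messages: each checks the `control`
// header of a control message, and hasTxids() detects messages that carry
// the originating Postgres transaction ids in their headers.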
function isUpToDateMessage(message) {
return client.isControlMessage(message) && message.headers.control === `up-to-date`;
}
function isMustRefetchMessage(message) {
return client.isControlMessage(message) && message.headers.control === `must-refetch`;
}
function isSnapshotEndMessage(message) {
return client.isControlMessage(message) && message.headers.control === `snapshot-end`;
}
function isSubsetEndMessage(message) {
return client.isControlMessage(message) && message.headers.control === `subset-end`;
}
function parseSnapshotMessage(message) {
return {
xmin: message.headers.xmin,
xmax: message.headers.xmax,
xip_list: message.headers.xip_list
};
}
function hasTxids(message) {
return `txids` in message.headers && Array.isArray(message.headers.txids);
}
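// Builds the loadSubset implementation for non-eager sync modes, wrapped in
// DeduplicatedLoadSubset so identical subset requests are issued only once.
// Eager mode syncs the whole shape up front, so it returns null. While a
// progressive initial sync is still buffering, subsets are fetched directly
// via fetchSnapshot and written immediately; in on-demand mode they are
// requested on the live stream via requestSnapshot (two requests when a
// cursor is present: all ties at the cursor position, then the next page).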
function createLoadSubsetDedupe({
stream,
syncMode,
isBufferingInitialSync,
begin,
write,
commit,
collectionId
}) {
if (syncMode === `eager`) {
return null;
}
const loadSubset = async (opts) => {
if (isBufferingInitialSync()) {
const snapshotParams = sqlCompiler.compileSQL(opts);
try {
const { data: rows } = await stream.fetchSnapshot(snapshotParams);
if (!isBufferingInitialSync()) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Ignoring snapshot - sync completed while fetching`
);
return;
}
if (rows.length > 0) {
begin();
for (const row of rows) {
write({
type: `insert`,
value: row.value,
metadata: {
...row.headers
}
});
}
commit();
debug(
`${collectionId ? `[${collectionId}] ` : ``}Applied snapshot with ${rows.length} rows`
);
}
} catch (error) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Error fetching snapshot: %o`,
error
);
throw error;
}
} else if (syncMode === `progressive`) {
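// Progressive mode after the initial sync has completed: the whole shape
// is already flowing on the live stream, so subset loads are a no-op.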
return;
} else {
const { cursor, where, orderBy, limit } = opts;
if (cursor) {
const promises = [];
const whereCurrentOpts = {
where: where ? db.and(where, cursor.whereCurrent) : cursor.whereCurrent,
orderBy
// No limit - get all ties
};
const whereCurrentParams = sqlCompiler.compileSQL(whereCurrentOpts);
promises.push(stream.requestSnapshot(whereCurrentParams));
debug(
`${collectionId ? `[${collectionId}] ` : ``}Requesting cursor.whereCurrent snapshot (all ties)`
);
const whereFromOpts = {
where: where ? db.and(where, cursor.whereFrom) : cursor.whereFrom,
orderBy,
limit
};
const whereFromParams = sqlCompiler.compileSQL(whereFromOpts);
promises.push(stream.requestSnapshot(whereFromParams));
debug(
`${collectionId ? `[${collectionId}] ` : ``}Requesting cursor.whereFrom snapshot (with limit ${limit})`
);
await Promise.all(promises);
} else {
const snapshotParams = sqlCompiler.compileSQL(opts);
await stream.requestSnapshot(snapshotParams);
}
}
};
return new db.DeduplicatedLoadSubset({ loadSubset });
}
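/**
 * Builds a TanStack DB collection config that syncs via an ElectricSQL
 * ShapeStream. Wraps the user's mutation handlers so that a returned
 * `{ txid }` blocks until that transaction is seen on the stream, and
 * exposes `utils.awaitTxId` / `utils.awaitMatch` for manual matching.
 *
 * A minimal usage sketch; the `url`, `table`, and `api` client below are
 * illustrative assumptions, not part of this module:
 *
 * @example
 * const todos = createCollection(
 *   electricCollectionOptions({
 *     id: `todos`,
 *     shapeOptions: {
 *       url: `https://example.com/v1/shape`,
 *       params: { table: `todos` },
 *     },
 *     getKey: (row) => row.id,
 *     onInsert: async ({ transaction }) => {
 *       // Hypothetical API call; it is assumed to return the Postgres txid
 *       // so the optimistic state is held until the write syncs back.
 *       const { txid } = await api.createTodo(transaction.mutations[0].modified)
 *       return { txid }
 *     },
 *   })
 * )
 */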
function electricCollectionOptions(config) {
const seenTxids = new store.Store(/* @__PURE__ */ new Set([]));
const seenSnapshots = new store.Store([]);
const internalSyncMode = config.syncMode ?? `eager`;
const finalSyncMode = internalSyncMode === `progressive` ? `on-demand` : internalSyncMode;
const pendingMatches = new store.Store(/* @__PURE__ */ new Map());
const currentBatchMessages = new store.Store([]);
const batchCommitted = new store.Store(false);
const removePendingMatches = (matchIds) => {
if (matchIds.length > 0) {
pendingMatches.setState((current) => {
const newMatches = new Map(current);
matchIds.forEach((id) => newMatches.delete(id));
return newMatches;
});
}
};
const resolveMatchedPendingMatches = () => {
const matchesToResolve = [];
pendingMatches.state.forEach((match, matchId) => {
if (match.matched) {
clearTimeout(match.timeoutId);
match.resolve(true);
matchesToResolve.push(matchId);
debug(
`${config.id ? `[${config.id}] ` : ``}awaitMatch resolved on up-to-date for match %s`,
matchId
);
}
});
removePendingMatches(matchesToResolve);
};
const sync = createElectricSync(config.shapeOptions, {
seenTxids,
seenSnapshots,
syncMode: internalSyncMode,
pendingMatches,
currentBatchMessages,
batchCommitted,
removePendingMatches,
resolveMatchedPendingMatches,
collectionId: config.id,
testHooks: config[ELECTRIC_TEST_HOOKS]
});
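// Resolves once the given txid has been observed on the stream, either
// directly via a message's `txids` header or because an already-synced
// snapshot's visibility window (xmin/xmax/xip_list) covers it; rejects
// with TimeoutWaitingForTxIdError after `timeout` ms (default 5000).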
const awaitTxId = async (txId, timeout = 5e3) => {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitTxId called with txid %d`,
txId
);
if (typeof txId !== `number`) {
throw new errors.ExpectedNumberInAwaitTxIdError(typeof txId, config.id);
}
const hasTxid = seenTxids.state.has(txId);
if (hasTxid) return true;
const hasSnapshot = seenSnapshots.state.some(
(snapshot) => client.isVisibleInSnapshot(txId, snapshot)
);
if (hasSnapshot) return true;
return new Promise((resolve, reject) => {
const timeoutId = setTimeout(() => {
unsubscribeSeenTxids();
unsubscribeSeenSnapshots();
reject(new errors.TimeoutWaitingForTxIdError(txId, config.id));
}, timeout);
const unsubscribeSeenTxids = seenTxids.subscribe(() => {
if (seenTxids.state.has(txId)) {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitTxId found match for txid %o`,
txId
);
clearTimeout(timeoutId);
unsubscribeSeenTxids();
unsubscribeSeenSnapshots();
resolve(true);
}
});
const unsubscribeSeenSnapshots = seenSnapshots.subscribe(() => {
const visibleSnapshot = seenSnapshots.state.find(
(snapshot) => client.isVisibleInSnapshot(txId, snapshot)
);
if (visibleSnapshot) {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitTxId found match for txid %o in snapshot %o`,
txId,
visibleSnapshot
);
clearTimeout(timeoutId);
unsubscribeSeenSnapshots();
unsubscribeSeenTxids();
resolve(true);
}
});
});
};
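// Resolves once `matchFn` returns true for a streamed message and the batch
// containing that message has committed (i.e. on `up-to-date`). The current
// batch buffer is checked first so a match that has already arrived is not
// missed; rejects with TimeoutWaitingForMatchError after `timeout` ms.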
const awaitMatch = async (matchFn, timeout = 3e3) => {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitMatch called with custom function`
);
return new Promise((resolve, reject) => {
const matchId = Math.random().toString(36);
const cleanupMatch = () => {
pendingMatches.setState((current) => {
const newMatches = new Map(current);
newMatches.delete(matchId);
return newMatches;
});
};
const onTimeout = () => {
cleanupMatch();
reject(new errors.TimeoutWaitingForMatchError(config.id));
};
const timeoutId = setTimeout(onTimeout, timeout);
const checkMatch = (message) => {
if (matchFn(message)) {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitMatch found matching message, waiting for up-to-date`
);
pendingMatches.setState((current) => {
const newMatches = new Map(current);
const existing = newMatches.get(matchId);
if (existing) {
newMatches.set(matchId, { ...existing, matched: true });
}
return newMatches;
});
return true;
}
return false;
};
for (const message of currentBatchMessages.state) {
if (matchFn(message)) {
if (batchCommitted.state) {
debug(
`${config.id ? `[${config.id}] ` : ``}awaitMatch found immediate match in committed batch, resolving immediately`
);
clearTimeout(timeoutId);
resolve(true);
return;
}
debug(
`${config.id ? `[${config.id}] ` : ``}awaitMatch found immediate match in current batch, waiting for up-to-date`
);
pendingMatches.setState((current) => {
const newMatches = new Map(current);
newMatches.set(matchId, {
matchFn: checkMatch,
resolve,
reject,
timeoutId,
matched: true
// Already matched, will resolve on up-to-date
});
return newMatches;
});
return;
}
}
pendingMatches.setState((current) => {
const newMatches = new Map(current);
newMatches.set(matchId, {
matchFn: checkMatch,
resolve,
reject,
timeoutId,
matched: false
});
return newMatches;
});
});
};
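// If a mutation handler returned `{ txid }` (or an array of txids), wait for
// every txid to round-trip through the stream before resolving, so the
// optimistic state is only dropped once the server change has synced back.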
const processMatchingStrategy = async (result) => {
if (result && `txid` in result) {
const timeout = result.timeout;
if (Array.isArray(result.txid)) {
await Promise.all(result.txid.map((txid) => awaitTxId(txid, timeout)));
} else {
await awaitTxId(result.txid, timeout);
}
}
};
const wrappedOnInsert = config.onInsert ? async (params) => {
const handlerResult = await config.onInsert(params);
await processMatchingStrategy(handlerResult);
return handlerResult;
} : void 0;
const wrappedOnUpdate = config.onUpdate ? async (params) => {
const handlerResult = await config.onUpdate(params);
await processMatchingStrategy(handlerResult);
return handlerResult;
} : void 0;
const wrappedOnDelete = config.onDelete ? async (params) => {
const handlerResult = await config.onDelete(params);
await processMatchingStrategy(handlerResult);
return handlerResult;
} : void 0;
const {
shapeOptions: _shapeOptions,
onInsert: _onInsert,
onUpdate: _onUpdate,
onDelete: _onDelete,
...restConfig
} = config;
return {
...restConfig,
syncMode: finalSyncMode,
sync,
onInsert: wrappedOnInsert,
onUpdate: wrappedOnUpdate,
onDelete: wrappedOnDelete,
utils: {
awaitTxId,
awaitMatch
}
};
}
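// Internal factory for the sync adapter: manages the ShapeStream
// subscription, transaction batching, txid/snapshot bookkeeping for
// awaitTxId, pending-match resolution for awaitMatch, tag tracking for
// move-out handling, and progressive-mode buffering.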
function createElectricSync(shapeOptions, options) {
const {
seenTxids,
seenSnapshots,
syncMode,
pendingMatches,
currentBatchMessages,
batchCommitted,
removePendingMatches,
resolveMatchedPendingMatches,
collectionId,
testHooks
} = options;
const MAX_BATCH_MESSAGES = 1e3;
const relationSchema = new store.Store(void 0);
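// Tag tracking: Electric annotates rows with tags (pipe-delimited strings)
// identifying the subset queries a row currently belongs to. The structures
// below map each row to its tags and index tag components by position so
// that `move-out` patterns can be resolved back to the affected rows.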
const tagCache = /* @__PURE__ */ new Map();
const parseTag = (tag) => {
const cachedTag = tagCache.get(tag);
if (cachedTag) {
return cachedTag;
}
const parsedTag = tag.split(`|`);
tagCache.set(tag, parsedTag);
return parsedTag;
};
const rowTagSets = /* @__PURE__ */ new Map();
const tagIndex$1 = [];
let tagLength = void 0;
const initializeTagIndex = (length) => {
if (tagIndex$1.length < length) {
for (let i = tagIndex$1.length; i < length; i++) {
tagIndex$1[i] = /* @__PURE__ */ new Map();
}
}
};
const addTagsToRow = (tags, rowId, rowTagSet) => {
for (const tag of tags) {
const parsedTag = parseTag(tag);
if (tagLength === void 0) {
tagLength = tagIndex.getTagLength(parsedTag);
initializeTagIndex(tagLength);
}
const currentTagLength = tagIndex.getTagLength(parsedTag);
if (currentTagLength !== tagLength) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Tag length mismatch: expected ${tagLength}, got ${currentTagLength}`
);
continue;
}
rowTagSet.add(tag);
tagIndex.addTagToIndex(parsedTag, rowId, tagIndex$1, tagLength);
}
};
const removeTagsFromRow = (removedTags, rowId, rowTagSet) => {
if (tagLength === void 0) {
return;
}
for (const tag of removedTags) {
const parsedTag = parseTag(tag);
rowTagSet.delete(tag);
tagIndex.removeTagFromIndex(parsedTag, rowId, tagIndex$1, tagLength);
tagCache.delete(tag);
}
};
const processTagsForChangeMessage = (tags, removedTags, rowId) => {
if (!rowTagSets.has(rowId)) {
rowTagSets.set(rowId, /* @__PURE__ */ new Set());
}
const rowTagSet = rowTagSets.get(rowId);
if (tags) {
addTagsToRow(tags, rowId, rowTagSet);
}
if (removedTags) {
removeTagsFromRow(removedTags, rowId, rowTagSet);
}
return rowTagSet;
};
const clearTagTrackingState = () => {
rowTagSets.clear();
tagIndex$1.length = 0;
tagLength = void 0;
};
const clearTagsForRow = (rowId) => {
if (tagLength === void 0) {
return;
}
const rowTagSet = rowTagSets.get(rowId);
if (!rowTagSet) {
return;
}
for (const tag of rowTagSet) {
const parsedTag = parseTag(tag);
const currentTagLength = tagIndex.getTagLength(parsedTag);
if (currentTagLength === tagLength) {
tagIndex.removeTagFromIndex(parsedTag, rowId, tagIndex$1, tagLength);
}
tagCache.delete(tag);
}
rowTagSets.delete(rowId);
};
const removeMatchingTagsFromRow = (rowId, pattern) => {
const rowTagSet = rowTagSets.get(rowId);
if (!rowTagSet) {
return false;
}
for (const tag of rowTagSet) {
const parsedTag = parseTag(tag);
if (tagIndex.tagMatchesPattern(parsedTag, pattern)) {
rowTagSet.delete(tag);
tagIndex.removeTagFromIndex(parsedTag, rowId, tagIndex$1, tagLength);
}
}
if (rowTagSet.size === 0) {
rowTagSets.delete(rowId);
return true;
}
return false;
};
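// Handles a `move-out` control message: rows whose tags match one of the
// given patterns lose those tags, and any row left with no tags is written
// as a delete. Returns whether a transaction is open afterwards.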
const processMoveOutEvent = (patterns, begin, write, transactionStarted) => {
if (tagLength === void 0) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Received move-out message but no tag length set yet, ignoring`
);
return transactionStarted;
}
let txStarted = transactionStarted;
for (const pattern of patterns) {
const affectedRowIds = tagIndex.findRowsMatchingPattern(pattern, tagIndex$1);
for (const rowId of affectedRowIds) {
if (removeMatchingTagsFromRow(rowId, pattern)) {
if (!txStarted) {
begin();
txStarted = true;
}
write({
type: `delete`,
key: rowId
});
}
}
}
return txStarted;
};
const getSyncMetadata = () => {
const schema = relationSchema.state || `public`;
return {
relation: shapeOptions.params?.table ? [schema, shapeOptions.params.table] : void 0
};
};
let unsubscribeStream;
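// The returned object is the TanStack DB sync adapter: `sync` runs when the
// collection starts, and `getSyncMetadata` reports the relation the shape
// reads from.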
return {
sync: (params) => {
const { begin, write, commit, markReady, truncate, collection } = params;
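// Test hook: in progressive mode, beforeMarkingReady can delay markReady
// so tests can observe the buffering window; otherwise ready is immediate.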
let progressiveReadyGate = null;
const wrappedMarkReady = (isBuffering) => {
if (isBuffering && syncMode === `progressive` && testHooks?.beforeMarkingReady) {
progressiveReadyGate = testHooks.beforeMarkingReady();
progressiveReadyGate.then(() => {
markReady();
});
} else {
markReady();
}
};
const abortController = new AbortController();
if (shapeOptions.signal) {
shapeOptions.signal.addEventListener(
`abort`,
() => {
abortController.abort();
},
{
once: true
}
);
if (shapeOptions.signal.aborted) {
abortController.abort();
}
}
abortController.signal.addEventListener(`abort`, () => {
pendingMatches.setState((current) => {
current.forEach((match) => {
clearTimeout(match.timeoutId);
match.reject(new errors.StreamAbortedError());
});
return /* @__PURE__ */ new Map();
});
});
const stream = new client.ShapeStream({
...shapeOptions,
// In on-demand mode, we only want to sync changes, so we set the log to `changes_only`
log: syncMode === `on-demand` ? `changes_only` : void 0,
// In on-demand mode, we only need the changes from the point of time the collection was created
// so we default to `now` when there is no saved offset.
offset: shapeOptions.offset ?? (syncMode === `on-demand` ? `now` : void 0),
signal: abortController.signal,
onError: (errorParams) => {
markReady();
if (shapeOptions.onError) {
return shapeOptions.onError(errorParams);
} else {
console.error(
`An error occurred while syncing collection: ${collection.id},
it has been marked as ready to avoid blocking apps waiting for '.preload()' to finish.
You can provide an 'onError' handler on the shapeOptions to handle this error, and this message will not be logged.`,
errorParams
);
}
return;
}
});
let transactionStarted = false;
const newTxids = /* @__PURE__ */ new Set();
const newSnapshots = [];
let hasReceivedUpToDate = false;
const isBufferingInitialSync = () => syncMode === `progressive` && !hasReceivedUpToDate;
const bufferedMessages = [];
const syncedKeys = /* @__PURE__ */ new Set();
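// Applies one change message to the open transaction. Overlapping subset
// snapshots can re-deliver a row, so an `insert` for an already-synced key
// is downgraded to an `update`; deletes also drop the row's tag state.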
const processChangeMessage = (changeMessage) => {
if (!client.isChangeMessage(changeMessage)) {
return;
}
const tags = changeMessage.headers.tags;
const removedTags = changeMessage.headers.removed_tags;
const hasTags = tags || removedTags;
const rowId = collection.getKeyFromItem(changeMessage.value);
const operation = changeMessage.headers.operation;
const isDelete = operation === `delete`;
const isDuplicateInsert = operation === `insert` && syncedKeys.has(rowId);
if (isDelete) {
syncedKeys.delete(rowId);
} else {
syncedKeys.add(rowId);
}
if (isDelete) {
clearTagsForRow(rowId);
} else if (hasTags) {
processTagsForChangeMessage(tags, removedTags, rowId);
}
write({
type: isDuplicateInsert ? `update` : operation,
value: changeMessage.value,
// Include the primary key and relation info in the metadata
metadata: {
...changeMessage.headers
}
});
};
const loadSubsetDedupe = createLoadSubsetDedupe({
stream,
syncMode,
isBufferingInitialSync,
begin,
write,
commit,
collectionId
});
unsubscribeStream = stream.subscribe((messages) => {
let commitPoint = null;
batchCommitted.setState(() => false);
for (const message of messages) {
if (client.isChangeMessage(message) || tagIndex.isMoveOutMessage(message)) {
currentBatchMessages.setState((currentBuffer) => {
const newBuffer = [...currentBuffer, message];
if (newBuffer.length > MAX_BATCH_MESSAGES) {
newBuffer.splice(0, newBuffer.length - MAX_BATCH_MESSAGES);
}
return newBuffer;
});
}
if (hasTxids(message) && (!isBufferingInitialSync() || transactionStarted)) {
message.headers.txids?.forEach((txid) => newTxids.add(txid));
}
const matchesToRemove = [];
pendingMatches.state.forEach((match, matchId) => {
if (!match.matched) {
try {
match.matchFn(message);
} catch (err) {
clearTimeout(match.timeoutId);
match.reject(
err instanceof Error ? err : new Error(String(err))
);
matchesToRemove.push(matchId);
debug(`matchFn error: %o`, err);
}
}
});
removePendingMatches(matchesToRemove);
if (client.isChangeMessage(message)) {
const schema = message.headers.schema;
if (schema && typeof schema === `string`) {
relationSchema.setState(() => schema);
}
if (isBufferingInitialSync() && !transactionStarted) {
bufferedMessages.push(message);
} else {
if (!transactionStarted) {
begin();
transactionStarted = true;
}
processChangeMessage(message);
}
} else if (isSnapshotEndMessage(message)) {
if (!isBufferingInitialSync() || transactionStarted) {
newSnapshots.push(parseSnapshotMessage(message));
}
} else if (isUpToDateMessage(message)) {
commitPoint = `up-to-date`;
} else if (isSubsetEndMessage(message)) {
if (commitPoint !== `up-to-date`) {
commitPoint = `subset-end`;
}
} else if (tagIndex.isMoveOutMessage(message)) {
if (isBufferingInitialSync() && !transactionStarted) {
bufferedMessages.push(message);
} else {
transactionStarted = processMoveOutEvent(
message.headers.patterns,
begin,
write,
transactionStarted
);
}
} else if (isMustRefetchMessage(message)) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Received must-refetch message, starting transaction with truncate`
);
if (!transactionStarted) {
begin();
transactionStarted = true;
}
truncate();
clearTagTrackingState();
syncedKeys.clear();
loadSubsetDedupe?.reset();
commitPoint = null;
hasReceivedUpToDate = false;
bufferedMessages.length = 0;
}
}
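// `up-to-date` and `subset-end` both end the batch. During a progressive
// initial sync the first `up-to-date` triggers the atomic swap: truncate,
// replay all buffered messages, and commit in a single transaction so
// readers never observe a half-loaded shape.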
if (commitPoint !== null) {
if (isBufferingInitialSync() && commitPoint === `up-to-date` && !transactionStarted) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}Progressive mode: Performing atomic swap with ${bufferedMessages.length} buffered messages`
);
begin();
truncate();
clearTagTrackingState();
syncedKeys.clear();
for (const bufferedMsg of bufferedMessages) {
if (client.isChangeMessage(bufferedMsg)) {
processChangeMessage(bufferedMsg);
if (hasTxids(bufferedMsg)) {
bufferedMsg.headers.txids?.forEach(
(txid) => newTxids.add(txid)
);
}
} else if (isSnapshotEndMessage(bufferedMsg)) {
newSnapshots.push(parseSnapshotMessage(bufferedMsg));
} else if (tagIndex.isMoveOutMessage(bufferedMsg)) {
processMoveOutEvent(
  bufferedMsg.headers.patterns,
  begin,
  write,
  // The swap transaction is already open (begin() above), so pass
  // true here rather than the stale `transactionStarted` flag, which
  // would let processMoveOutEvent call begin() a second time.
  true
);
}
}
commit();
bufferedMessages.length = 0;
debug(
`${collectionId ? `[${collectionId}] ` : ``}Progressive mode: Atomic swap complete, now in normal sync mode`
);
} else {
if (transactionStarted) {
commit();
transactionStarted = false;
}
}
wrappedMarkReady(isBufferingInitialSync());
if (commitPoint === `up-to-date`) {
hasReceivedUpToDate = true;
}
seenTxids.setState((currentTxids) => {
const clonedSeen = new Set(currentTxids);
if (newTxids.size > 0) {
debug(
`${collectionId ? `[${collectionId}] ` : ``}new txids synced from pg %O`,
Array.from(newTxids)
);
}
newTxids.forEach((txid) => clonedSeen.add(txid));
newTxids.clear();
return clonedSeen;
});
seenSnapshots.setState((currentSnapshots) => {
const seen = [...currentSnapshots, ...newSnapshots];
newSnapshots.forEach(
(snapshot) => debug(
`${collectionId ? `[${collectionId}] ` : ``}new snapshot synced from pg %o`,
snapshot
)
);
newSnapshots.length = 0;
return seen;
});
batchCommitted.setState(() => true);
resolveMatchedPendingMatches();
}
});
return {
loadSubset: loadSubsetDedupe?.loadSubset,
cleanup: () => {
unsubscribeStream();
abortController.abort();
loadSubsetDedupe?.reset();
}
};
},
// Expose the getSyncMetadata function
getSyncMetadata
};
}
Object.defineProperty(exports, "isChangeMessage", {
enumerable: true,
get: () => client.isChangeMessage
});
Object.defineProperty(exports, "isControlMessage", {
enumerable: true,
get: () => client.isControlMessage
});
exports.ELECTRIC_TEST_HOOKS = ELECTRIC_TEST_HOOKS;
exports.electricCollectionOptions = electricCollectionOptions;
//# sourceMappingURL=electric.cjs.map