@instantdb/core
Version:
Instant's core local abstraction
422 lines • 16.6 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getInfiniteQueryInitialSnapshot = exports.subscribeInfiniteQuery = void 0;
const index_ts_1 = require("./index.js");
const error_ts_1 = require("./utils/error.js");
// Example for {order: {value: "asc"}}
//
// 0
// <------------------|------------------------------------------------------>
// <- starter sub ->
//
// Bootstrap phase: until the limit (4 in this example) items are reached, the
// starter subscription is the only subscription and it writes to the forwardChunks map with the key PRE_BOOTSTRAP_CURSOR.
//
// When the limit is reached it automatically becomes a real forward chunk and has a definite start and end.
// A new reverse chunk gets added to watch for any new items at the start of the list.
//
// 0 1 2 3
// <------------------|------------------------------------------------------>
// <- starter sub ->
//
// ↓ BECOMES ↓
//
// 0 1 2 3
// <------------------|------------------------------------------------------>
// <-reverse chunk][forward chunk ]
//
// 0 1 2 3 4
// <------------------|------------------------------------------------------>
// <-reverse chunk][forward chunk ]
// When item 4 is added, the forward chunk subscription gets updated so that
// hasNextPage is `true`. This tells the user that a new page can be loaded.
//
// User clicks: loadNextPage
// 0 1 2 3 4
// <------------------|------------------------------------------------------>
// <-reverse chunk][ frozen forward chunk ][ new forward chunk ]
//
// More numbers get added
// 0 1 2 3 4 5 6 7 8
// <------------------|------------------------------------------------------>
// <-reverse chunk][ frozen forward chunk ][ forward chunk ] ^
// hasNextPage=true^
//
//
// User clicks: loadNextPage
//
// 0 1 2 3 4 5 6 7 8
// <------------------|------------------------------------------------------>
// <-reverse chunk][ frozen forward chunk ][ frozen forward chunk ][ new chunk
//
// The reverse chunks work in the same way as the forward chunks but the order in the query is reversed.
// When a reverse chunk receives an update it will check to see if more can be loaded and it will
// automatically freeze the chunk and add a new one. i.e. : works the same as if
// loadNextPage was automatically clicked when hasNextPage became true.
//
// Chunks are indexed by their starting point cursor, for forward chunks this is the "[" point.
// Their starting point cursor is inclusive in the query and exclusive from the following query
// Serialize a cursor so it can be used as a stable Map key.
const makeCursorKey = (cursor) => {
    return JSON.stringify(cursor);
};
// Inverse of makeCursorKey: recover the cursor value from its Map key.
const parseCursorKey = (cursorKey) => {
    return JSON.parse(cursorKey);
};
// True once the chunk's subscription has reported an end cursor.
const chunkHasEndCursor = (chunk) => Boolean(chunk.endCursor);
// A next page is loadable iff the last forward chunk reports more rows
// beyond its end cursor.
const readCanLoadNextPage = (forwardChunks) => {
    const lastChunk = Array.from(forwardChunks.values()).at(-1);
    return lastChunk?.hasMore || false;
};
// Builds keys for the subscription map. The chunk maps are keyed by cursor
// alone, but here we must also distinguish direction, because the first
// forward chunk and the first reverse chunk share the same starting cursor.
const chunkSubKey = (direction, cursor) => {
    const cursorKey = JSON.stringify(cursor);
    return `${direction}:${cursorKey}`;
};
// Flip the direction of a single-key order clause. When no usable key
// exists, fall back to ascending server creation order.
const reverseOrder = (order) => {
    const fallback = { serverCreatedAt: 'asc' };
    if (!order) {
        return fallback;
    }
    const key = Object.keys(order).at(0);
    if (!key) {
        return fallback;
    }
    const flipped = order[key] === 'asc' ? 'desc' : 'asc';
    return { [key]: flipped };
};
// Flatten both chunk maps into display order: reverse chunks come first
// (both the chunk sequence and each chunk's rows are stored reversed,
// so both get flipped), followed by the forward chunks as-is.
const normalizeChunks = (forwardChunks, reverseChunks) => {
    // Array.from copies, so reversing in place never mutates the map values.
    const reverseInOrder = Array.from(reverseChunks.values()).reverse();
    const forwardInOrder = Array.from(forwardChunks.values());
    const chunks = [...reverseInOrder, ...forwardInOrder];
    const data = [
        ...reverseInOrder.flatMap((chunk) => chunk.data.slice().reverse()),
        ...forwardInOrder.flatMap((chunk) => chunk.data),
    ];
    return { chunks, data };
};
// Sentinel cursor keying the single chunk that exists before the starter
// subscription has produced a full page (the "bootstrap phase" above).
const PRE_BOOTSTRAP_CURSOR = ['bootstrap', 'bootstrap', 'bootstrap', 1];
/**
 * Subscribes to a paginated ("infinite") query over exactly one entity.
 *
 * Maintains two ordered maps of chunks (see the diagram above):
 *  - forwardChunks: pages after the initial cursor, in load order
 *  - reverseChunks: pages of items arriving before the initial cursor;
 *    these freeze and advance automatically as new items come in
 *
 * @param db        client exposing `subscribeQuery` and `_reactor`
 * @param fullQuery query object with a single entity key
 * @param cb        receives `{ data, canLoadNextPage }` on every update, or
 *                  `{ error, data: undefined, canLoadNextPage: false }` on failure
 * @param opts      forwarded to every underlying `subscribeQuery` call
 * @returns `{ unsubscribe, loadNextPage }`
 * @throws QueryValidationError (via splitAndValidateQuery) for malformed queries
 */
const subscribeInfiniteQuery = (db, fullQuery, cb, opts) => {
    const { entityName, entityQuery: query } = splitAndValidateQuery(fullQuery);
    const pageSize = query.$?.limit || 10;
    const entity = entityName;
    const forwardChunks = new Map();
    const reverseChunks = new Map();
    // Keeps track of all subscriptions (besides starter sub)
    const allUnsubs = new Map();
    let hasKickstarted = false;
    let isActive = true;
    let lastReverseAdvancedChunkKey = null;
    let starterUnsub = null;
    const sendError = (err) => {
        cb({ error: err, data: undefined, canLoadNextPage: false });
    };
    // Recompute the flattened row list and notify the subscriber.
    const pushUpdate = () => {
        if (!isActive)
            return;
        const { chunks, data } = normalizeChunks(forwardChunks, reverseChunks);
        cb({
            data: { [entity]: data },
            // @ts-expect-error hidden debug variable
            chunks,
            canLoadNextPage: readCanLoadNextPage(forwardChunks),
        });
    };
    const setForwardChunk = (startCursor, chunk) => {
        forwardChunks.set(makeCursorKey(startCursor), chunk);
        pushUpdate();
    };
    const setReverseChunk = (startCursor, chunk) => {
        reverseChunks.set(makeCursorKey(startCursor), chunk);
        maybeAdvanceReverse();
        pushUpdate();
    };
    // Re-subscribe a reverse chunk with fixed bounds so its contents stop
    // growing; new items are picked up by the next reverse chunk instead.
    const freezeReverse = (chunkKey, chunk) => {
        const startCursor = parseCursorKey(chunkKey);
        const currentSub = allUnsubs.get(chunkSubKey('reverse', startCursor));
        currentSub?.();
        const nextSub = db.subscribeQuery({
            [entity]: {
                ...query,
                $: {
                    after: startCursor,
                    before: chunk.endCursor,
                    beforeInclusive: true,
                    where: query.$?.where,
                    fields: query.$?.fields,
                    order: reverseOrder(query.$?.order),
                },
            },
        }, (frozenData) => {
            if (frozenData.error) {
                return sendError(frozenData.error);
            }
            const rows = frozenData.data[entity];
            const pageInfo = frozenData.pageInfo[entity];
            (0, error_ts_1.assert)(rows && pageInfo, 'Expected query subscription to contain rows and pageInfo');
            setReverseChunk(startCursor, {
                data: rows,
                status: 'frozen',
                hasMore: pageInfo.hasNextPage,
                endCursor: pageInfo.endCursor,
            });
        }, opts);
        allUnsubs.set(chunkSubKey('reverse', startCursor), nextSub);
    };
    // Open a growing reverse chunk that watches for items before startCursor.
    const pushNewReverse = (startCursor) => {
        const querySub = db.subscribeQuery({
            [entity]: {
                ...query,
                $: {
                    limit: pageSize,
                    after: startCursor,
                    where: query.$?.where,
                    fields: query.$?.fields,
                    order: reverseOrder(query.$?.order),
                },
            },
        }, (windowData) => {
            if (windowData.error) {
                return sendError(windowData.error);
            }
            const rows = windowData.data[entity];
            const pageInfo = windowData.pageInfo[entity];
            (0, error_ts_1.assert)(rows && pageInfo, 'Expected rows and pageInfo');
            setReverseChunk(startCursor, {
                data: rows,
                status: 'bootstrapping',
                hasMore: pageInfo.hasNextPage,
                endCursor: pageInfo.endCursor,
            });
        }, opts);
        allUnsubs.set(chunkSubKey('reverse', startCursor), querySub);
    };
    // Open a growing forward chunk of up to pageSize items after startCursor.
    const pushNewForward = (startCursor, afterInclusive = false) => {
        const querySub = db.subscribeQuery({
            [entity]: {
                ...query,
                $: {
                    limit: pageSize,
                    after: startCursor,
                    afterInclusive,
                    where: query.$?.where,
                    fields: query.$?.fields,
                    order: query.$?.order,
                },
            },
        }, (windowData) => {
            if (windowData.error) {
                return sendError(windowData.error);
            }
            const rows = windowData.data[entity];
            const pageInfo = windowData.pageInfo[entity];
            (0, error_ts_1.assert)(rows && pageInfo, 'Page info and rows');
            setForwardChunk(startCursor, {
                data: rows,
                status: 'bootstrapping',
                hasMore: pageInfo.hasNextPage,
                endCursor: pageInfo.endCursor,
                afterInclusive,
            });
        }, opts);
        allUnsubs.set(chunkSubKey('forward', startCursor), querySub);
    };
    // Re-subscribe a forward chunk with fixed bounds so it no longer grows.
    const freezeForward = (startCursor) => {
        const key = makeCursorKey(startCursor);
        const currentSub = allUnsubs.get(chunkSubKey('forward', startCursor));
        currentSub?.();
        const chunk = forwardChunks.get(key);
        if (!chunk?.endCursor)
            return;
        const nextSub = db.subscribeQuery({
            [entity]: {
                ...query,
                $: {
                    after: startCursor,
                    afterInclusive: chunk.afterInclusive,
                    before: chunk.endCursor,
                    beforeInclusive: true,
                    where: query.$?.where,
                    fields: query.$?.fields,
                    order: query.$?.order,
                },
            },
        }, (frozenData) => {
            if (frozenData.error) {
                return sendError(frozenData.error);
            }
            const rows = frozenData.data[entity];
            const pageInfo = frozenData.pageInfo[entity];
            (0, error_ts_1.assert)(rows && pageInfo, 'Expected rows and pageInfo');
            setForwardChunk(startCursor, {
                data: rows,
                status: 'frozen',
                hasMore: pageInfo.hasNextPage,
                endCursor: pageInfo.endCursor,
                afterInclusive: chunk.afterInclusive,
            });
        }, opts);
        allUnsubs.set(chunkSubKey('forward', startCursor), nextSub);
    };
    // Consider order: {val: "asc"} with pageItems = 4
    // A reverse chunk captures all the new items coming in before us.
    // If we hit 4 then we freeze the current chunk and create a new reverse chunk
    const maybeAdvanceReverse = () => {
        const tailEntry = Array.from(reverseChunks.entries()).at(-1);
        if (!tailEntry)
            return;
        const [chunkKey, chunk] = tailEntry;
        // If a chunk has more, then it must have an endCursor
        if (!chunk?.hasMore)
            return;
        if (!chunkHasEndCursor(chunk))
            return;
        // maybeAdvanceReverse can run multiple times if multiple changes are made
        // to the reverse chunk
        // This prevents adding the same new reverse frame twice
        const advanceKey = `${chunkKey}:${makeCursorKey(chunk.endCursor)}`;
        if (advanceKey === lastReverseAdvancedChunkKey)
            return;
        lastReverseAdvancedChunkKey = advanceKey;
        freezeReverse(chunkKey, chunk);
        pushNewReverse(chunk.endCursor);
    };
    // Freeze the current tail forward chunk and open a fresh forward chunk
    // after its end cursor.
    const loadNextPage = () => {
        const tailEntry = Array.from(forwardChunks.entries()).at(-1);
        if (!tailEntry)
            return;
        const [chunkKey, chunk] = tailEntry;
        // A chunk can only be extended once it has an end cursor. We
        // deliberately do not gate on `chunk.hasMore` here: the next query is
        // issued even when the tail chunk currently reports no further rows.
        if (!chunk.endCursor)
            return;
        freezeForward(parseCursorKey(chunkKey));
        pushNewForward(chunk.endCursor);
    };
    starterUnsub = db.subscribeQuery({
        [entity]: {
            ...query,
            $: {
                limit: pageSize,
                where: query.$?.where,
                fields: query.$?.fields,
                order: query.$?.order,
            },
        },
    }, async (starterData) => {
        if (hasKickstarted)
            return;
        if (starterData.error) {
            return sendError(starterData.error);
        }
        const pageInfo = starterData.pageInfo[entity];
        const rows = starterData?.data?.[entity];
        (0, error_ts_1.assert)(rows && pageInfo, 'Expected rows and pageInfo');
        if (rows.length < pageSize) {
            // If the rows are less than the page size, then we don't need to
            // create forward and reverse chunks.
            // We just treat the starter query as a forward chunk
            setForwardChunk(PRE_BOOTSTRAP_CURSOR, {
                data: rows,
                status: 'pre-bootstrap',
            });
            return;
        }
        // Consider a query with no items; the server will return a result with
        // no start cursor. If we add {pageSize} optimistic updates we can
        // get here and still have no startCursor. By returning we are skipping
        // the optimistic update and just waiting for the result from the
        // server.
        const initialForwardCursor = pageInfo.startCursor;
        if (!initialForwardCursor) {
            return;
        }
        forwardChunks.delete(makeCursorKey(PRE_BOOTSTRAP_CURSOR));
        pushNewForward(initialForwardCursor, true);
        pushNewReverse(initialForwardCursor);
        hasKickstarted = true;
        // Flush the initial bootstrap querysub data
        // because immediately unsubscribing will never save it for offline in idb
        await db._reactor.querySubs.flush();
        // Unsubscribe the starter subscription
        starterUnsub?.();
        starterUnsub = null;
    }, opts);
    // Tear down the starter sub and every chunk sub; further updates are
    // suppressed via the isActive flag.
    const unsubscribe = () => {
        if (!isActive)
            return;
        isActive = false;
        starterUnsub?.();
        starterUnsub = null;
        for (const unsub of allUnsubs.values()) {
            unsub?.();
        }
        allUnsubs.clear();
    };
    return {
        unsubscribe,
        loadNextPage,
    };
};
exports.subscribeInfiniteQuery = subscribeInfiniteQuery;
/**
 * Synchronously reads a cached result to use as the initial snapshot for an
 * infinite query, before the live subscription delivers fresh data.
 *
 * `canLoadNextPage` is always false here; paging state only exists once
 * subscribeInfiniteQuery is running.
 *
 * @param db        client exposing `_reactor.getPreviousResult`
 * @param fullQuery query with a single entity key, or a falsy value
 * @param opts      optional; `ruleParams` is folded into the lookup query
 * @returns `{ canLoadNextPage, data, error }`
 * @throws QueryValidationError (via splitAndValidateQuery) for malformed queries
 */
const getInfiniteQueryInitialSnapshot = (db, fullQuery, opts) => {
    if (!fullQuery) {
        return {
            canLoadNextPage: false,
            data: undefined,
            error: undefined,
        };
    }
    const { entityName, entityQuery } = splitAndValidateQuery(fullQuery);
    const pageSize = entityQuery.$?.limit || 10;
    // fullQuery is known truthy past the guard above, so the query can be
    // coerced unconditionally (the old `fullQuery ? … : null` branch was dead).
    let coercedQuery = (0, index_ts_1.coerceQuery)({
        [entityName]: {
            ...entityQuery,
            $: {
                limit: pageSize,
                where: entityQuery.$?.where,
                fields: entityQuery.$?.fields,
                order: entityQuery.$?.order,
            },
        },
    });
    if (opts && 'ruleParams' in opts) {
        // NOTE(review): this replaces the coerced query with the raw fullQuery
        // (dropping the explicit limit applied above) — confirm the cache key
        // for rule-param queries really is the uncoerced query.
        coercedQuery = {
            $$ruleParams: opts.ruleParams,
            ...fullQuery,
        };
    }
    const queryResult = db._reactor.getPreviousResult(coercedQuery);
    return {
        canLoadNextPage: false,
        data: queryResult?.data || undefined,
        error: undefined,
    };
};
exports.getInfiniteQueryInitialSnapshot = getInfiniteQueryInitialSnapshot;
/**
 * Splits a single-entity query into its entity name and entity query.
 * @throws QueryValidationError when the query does not have exactly one
 *   entity, or the entity's query is missing
 * @param fullQuery a ValidQuery with one key (entity)
 */
const splitAndValidateQuery = (fullQuery) => {
    const entries = Object.entries(fullQuery);
    if (entries.length !== 1) {
        throw new index_ts_1.QueryValidationError('subscribeInfiniteQuery expects exactly one entity');
    }
    const [entityName, entityQuery] = entries[0];
    if (!entityName || !entityQuery) {
        throw new index_ts_1.QueryValidationError('No query provided for infinite query');
    }
    return { entityName, entityQuery };
};
//# sourceMappingURL=infiniteQuery.js.map