@magnetarjs/utils-firestore
Magnetar utils firestore
import { logWithFlair } from '@magnetarjs/utils';
import { arrGetOrSet, mapGetOrSet } from 'getorset-anything';
import { isEmptyObject, isNumber } from 'is-what';
import { merge as mergeObjects } from 'merge-anything';
import { removeProp } from 'remove-anything';
import { Countdown } from './Countdown.js';
// https://firebase.google.com/docs/firestore/manage-data/transactions#batched-writes
// A batched write can contain up to 500 operations.
const MAX_OPERATION_COUNT = 500;
const newStack = () => ({
operationCount: 0,
resolves: [],
rejects: [],
batch: { insert: new Map(), assign: new Map(), merge: new Map(), replace: new Map(), deleteProp: new Map(), delete: new Set() }, // prettier-ignore
});
/**
* Each write operation in a batch counts towards the 500 limit.
* Within a write operation, field transforms like serverTimestamp,
* arrayUnion, and increment each count as an additional operation.
*
 * @param {Record<string, unknown>} _payload
* @returns {number}
*/
function countOperations(_payload) {
const count = 1;
// todo: when actions like serverTimestamp, arrayUnion and increment are supported, count them here
return count;
}
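/**
 * Prepares a payload for batching and counts how many operations it will add to the write batch.
 *
 * @param {Record<string, unknown>} _payload
 * @returns {{ payload: Record<string, unknown>, operationCount: number }}
 */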
function preparePayload(_payload) {
// todo: properly handle any serverTimestamp, arrayUnion and increment in here
const payload = _payload;
const operationCount = countOperations(_payload);
return { payload, operationCount };
}
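/**
 * Creates the promise that is returned to the caller of a write action: it resolves with the stack's batch once the batch is committed, and rejects when the commit (or batch preparation) fails.
 */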
function prepareReturnPromise(stack) {
return new Promise((resolve, reject) => {
stack.resolves.push(() => resolve(stack.batch));
stack.rejects.push(reject);
});
}
/**
 * Creates a BatchSync instance that syncs to Firestore, automatically debouncing and batching writes
 *
 * @export
 * @returns {BatchSync}
 */
export function batchSyncFactory(firestorePluginOptions, createWriteBatch, applySyncBatch) {
const { db, syncDebounceMs, debug } = firestorePluginOptions;
const state = {
queue: [],
countdown: null,
};
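/**
 * Returns the first stack in the queue that still has room for `operationCount` more operations (staying below the 500 operation batch limit), creating a new stack at the end of the queue when needed.
 */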
async function prepareStack(operationCount, queueIndex = 0) {
const stack = arrGetOrSet(state.queue, queueIndex, () => newStack());
if (stack.operationCount + operationCount >= MAX_OPERATION_COUNT) {
return prepareStack(operationCount, queueIndex + 1);
}
stack.operationCount += operationCount;
return stack;
}
/**
 * Removes one `stack` entry from the `queue` and executes `batch.commit()`, making sure all queued actions are resolved or rejected once the commit settles (or rejected right away when preparing the batch throws)
 */
function executeSync() {
state.countdown = null;
const stack = state.queue.shift();
if (!stack) {
throw new Error('executeSync was called but the queue is empty');
}
const writeBatch = createWriteBatch(db);
try {
applySyncBatch(writeBatch, stack.batch, db);
}
catch (error) {
if (debug) {
logWithFlair('Error while preparing Firestore write batch', { error, batch: stack.batch });
}
stack.rejects.forEach((rej) => rej(error));
if (state.queue.length) {
triggerSync(0);
}
return;
}
if (debug) {
logWithFlair('Syncing to firestore...', stack.batch);
}
writeBatch
.commit()
.then(() => stack.resolves.forEach((res) => res()))
.catch((error) => {
if (debug) {
logWithFlair('Firestore batch commit failed', { error, batch: stack.batch });
}
stack.rejects.forEach((rej) => rej(error));
})
.finally(() => {
if (state.queue.length) {
triggerSync(0);
}
});
}
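/**
 * Forces the current countdown to finish early so the queued batch is committed without waiting for the full debounce duration. Resolves once the sync has been triggered, or immediately when no countdown is pending.
 */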
function forceSyncEarly() {
return new Promise((resolve) => {
// make sure all actions in same cycle are included
setTimeout(() => {
if (!state.countdown)
return resolve();
state.countdown.done.then(() => resolve());
state.countdown.forceFinish();
}, 0);
});
}
/**
 * Creates a new countdown if none exists yet and makes sure that `executeSync` runs once the countdown finishes
 *
 * @param {number} [debounceMsOverwrite] Pass a number to overwrite the batch sync countdown duration for this call. If not set, the globally set `syncDebounceMs` is used.
 * @returns {CountdownInstance}
 */
function prepareCountdown(debounceMsOverwrite) {
if (!state.countdown) {
const ms = isNumber(debounceMsOverwrite) ? debounceMsOverwrite : syncDebounceMs;
state.countdown = Countdown(ms);
state.countdown.done.then(() => executeSync());
}
return state.countdown;
}
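/**
 * (Re)starts the debounce countdown, so the queued batch gets committed once the debounce period passes without further writes.
 *
 * @param {number} [debounceMsOverwrite]
 */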
function triggerSync(debounceMsOverwrite) {
const countdown = prepareCountdown(debounceMsOverwrite);
countdown.restart(debounceMsOverwrite);
}
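/**
 * Queues an `insert` for the document at `documentPath`, discarding any other write already queued for that document in the current batch.
 */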
async function insert(documentPath, _payload, debounceMsOverwrite) {
const { payload, operationCount } = preparePayload(_payload);
const stack = await prepareStack(operationCount);
// discard any previously queued operations for this document that would be superseded by insert
stack.batch.merge.delete(documentPath);
stack.batch.assign.delete(documentPath);
stack.batch.deleteProp.delete(documentPath);
stack.batch.delete.delete(documentPath);
stack.batch.replace.delete(documentPath);
stack.batch.insert.set(documentPath, payload);
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
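/**
 * Queues an `assign` (shallow update) for the document at `documentPath`. When a `merge` or `deleteProp` is already queued for the same document, the queue is flushed first because those writes cannot be combined.
 */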
async function assign(documentPath, _payload, debounceMsOverwrite) {
const { payload, operationCount } = preparePayload(_payload);
let stack = await prepareStack(operationCount);
// flush! Because these writes cannot be combined
if (stack.batch.merge.has(documentPath) || stack.batch.deleteProp.has(documentPath)) {
await forceSyncEarly();
stack = await prepareStack(operationCount);
}
// we need to update the payload if we already have any of `replace`, `insert` or `assign`
const replacePayload = stack.batch.replace.get(documentPath);
const insertPayload = stack.batch.insert.get(documentPath);
const assignPayload = stack.batch.assign.get(documentPath);
if (replacePayload) {
stack.batch.replace.set(documentPath, { ...replacePayload, ...payload });
}
else if (insertPayload) {
stack.batch.insert.set(documentPath, { ...insertPayload, ...payload });
}
else if (assignPayload) {
stack.batch.assign.set(documentPath, { ...assignPayload, ...payload });
}
else {
stack.batch.assign.set(documentPath, payload);
}
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
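/**
 * Queues a `merge` (deep update) for the document at `documentPath`. Empty-object values are stripped from the payload first; when an `assign` or `deleteProp` is already queued for the same document, the queue is flushed first.
 */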
async function merge(documentPath, _payload, debounceMsOverwrite) {
const { payload, operationCount } = preparePayload(_payload);
let stack = await prepareStack(operationCount);
// remove any empty-object values from the payload
const payloadSafe = removeProp(payload, {});
// when there are no changes after removing all empty objects, return early
if (isEmptyObject(payloadSafe)) {
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
// flush! Because these writes cannot be combined
if (stack.batch.assign.has(documentPath) || stack.batch.deleteProp.has(documentPath)) {
await forceSyncEarly();
stack = await prepareStack(operationCount);
}
// we need to update the payload if we already have any of `replace`, `insert` or `merge`
const replacePayload = stack.batch.replace.get(documentPath);
const insertPayload = stack.batch.insert.get(documentPath);
const mergePayload = stack.batch.merge.get(documentPath);
if (replacePayload) {
stack.batch.replace.set(documentPath, mergeObjects(replacePayload, payloadSafe));
}
else if (insertPayload) {
stack.batch.insert.set(documentPath, mergeObjects(insertPayload, payloadSafe));
}
else if (mergePayload) {
stack.batch.merge.set(documentPath, mergeObjects(mergePayload, payloadSafe));
}
else {
stack.batch.merge.set(documentPath, payloadSafe);
}
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
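/**
 * Queues a `replace` (full overwrite) of the document at `documentPath`, discarding any other write already queued for that document in the current batch.
 */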
async function replace(documentPath, _payload, debounceMsOverwrite) {
const { payload, operationCount } = preparePayload(_payload);
const stack = await prepareStack(operationCount);
// discard any previously queued operations for this document that would be superseded by replace
stack.batch.deleteProp.delete(documentPath);
stack.batch.merge.delete(documentPath);
stack.batch.assign.delete(documentPath);
stack.batch.delete.delete(documentPath);
stack.batch.insert.delete(documentPath);
stack.batch.replace.set(documentPath, payload);
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
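/**
 * Queues the deletion of one or more props (field paths) on the document at `documentPath`. When any other write is already queued for the same document, the queue is flushed first; when the document itself is queued for deletion, the prop deletion is skipped.
 */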
async function deleteProp(documentPath, propPaths, debounceMsOverwrite) {
const operationCount = 1;
let stack = await prepareStack(operationCount);
// deleteProp cannot be combined meaningfully with other writes queued for this document, so be on the safe side and flush first
if (stack.batch.insert.has(documentPath) ||
stack.batch.replace.has(documentPath) ||
stack.batch.assign.has(documentPath) ||
stack.batch.merge.has(documentPath)) {
await forceSyncEarly();
stack = await prepareStack(operationCount);
}
// only apply if we're not deleting this document
if (!stack.batch.delete.has(documentPath)) {
const map = stack.batch.deleteProp;
const set = mapGetOrSet(map, documentPath, () => new Set());
propPaths.forEach((p) => set.add(p));
}
const promise = prepareReturnPromise(stack);
triggerSync(debounceMsOverwrite);
return promise;
}
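/**
 * Queues the deletion of the document at `documentPath`, discarding any other write already queued for that document in the current batch.
 */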
async function _delete(documentPath, debounceMsOverwrite) {
const operationCount = 1;
const stack = await prepareStack(operationCount);
const promise = prepareReturnPromise(stack);
// any other writes queued for this document are superseded by the delete, so remove them from stack.batch
stack.batch.insert.delete(documentPath);
stack.batch.replace.delete(documentPath);
stack.batch.merge.delete(documentPath);
stack.batch.assign.delete(documentPath);
stack.batch.deleteProp.delete(documentPath);
stack.batch.delete.add(documentPath);
triggerSync(debounceMsOverwrite);
return promise;
}
return { assign, merge, replace, insert, deleteProp, delete: _delete, forceSyncEarly };
}
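// Example usage (a minimal sketch, not part of this module): the factory expects the caller to
// supply the Firestore `db`, a `createWriteBatch` function, and an `applySyncBatch` function that
// translates a queued batch into write-batch calls. With the modular Firestore Web SDK this could
// look roughly like the sketch below; the `applySyncBatch` body is an assumption for illustration,
// not the actual implementation shipped by the Magnetar Firestore plugin.
//
//   import { getFirestore, writeBatch, doc, deleteField } from 'firebase/firestore'
//
//   const db = getFirestore()
//   const batchSync = batchSyncFactory(
//     { db, syncDebounceMs: 1000, debug: true },
//     (db) => writeBatch(db),
//     (wb, batch, db) => {
//       batch.insert.forEach((data, path) => wb.set(doc(db, path), data))
//       batch.assign.forEach((data, path) => wb.set(doc(db, path), data, { mergeFields: Object.keys(data) }))
//       batch.merge.forEach((data, path) => wb.set(doc(db, path), data, { merge: true }))
//       batch.replace.forEach((data, path) => wb.set(doc(db, path), data))
//       batch.deleteProp.forEach((propPaths, path) =>
//         wb.update(doc(db, path), Object.fromEntries([...propPaths].map((p) => [p, deleteField()])))
//       )
//       batch.delete.forEach((path) => wb.delete(doc(db, path)))
//     }
//   )
//
//   // writes are debounced and committed together as one Firestore write batch
//   batchSync.merge('users/abc', { settings: { darkMode: true } })
//   batchSync.deleteProp('users/abc', ['outdatedField'])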