/**
 * @clickup/ent-framework — a PostgreSQL graph-database-alike library with
 * microsharding and row-level security. (Compiled JavaScript; 143 lines,
 * 6.63 kB.)
 */
"use strict";
// CommonJS interop helper (standard tsc emit): modules that are already
// ES-module namespaces ({ __esModule: true }) pass through unchanged; plain
// CJS exports get wrapped so they appear under the "default" key.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// Mark this CJS module as transpiled-from-ESM and pre-declare the export slot
// (standard tsc emit; the real value is assigned at the bottom of the file).
Object.defineProperty(exports, "__esModule", { value: true });
exports.Batcher = void 0;
const delay_1 = __importDefault(require("delay"));
const p_defer_1 = __importDefault(require("p-defer"));
const DefaultMap_1 = require("../internal/DefaultMap");
const misc_1 = require("../internal/misc");
/**
* Batcher is similar to DataLoader, but with a few important differences:
* 1. It's strongly typed not only for the output, but for input too. And input
* can be arbitrary, not only strings (e.g. rows).
* 2. It does requests dedupping for all queries (including selects).
 * 3. It's not limited to read-only requests like DataLoader, and thus it
 *    doesn't do any caching. Caching is delegated to some other layer (either
 *    above Batcher or in Runner).
*/
class Batcher {
    /**
     * @param runner - strategy object; from the call sites below it must
     *   provide key(), runSingle(), optional runBatch(), maxBatchSize,
     *   default, shouldDebatchOnError() and delayForSingleQueryRetryOnError().
     * @param batchDelayMs - milliseconds to wait before flushing a
     *   freshly-started batch; may be a number or a zero-arg function
     *   (resolved via maybeCall on each run() call).
     * @param disableBatching - when true, inputs are always executed via
     *   runSingle(), even if runner.runBatch exists.
     */
    constructor(runner, batchDelayMs, disableBatching) {
        this.runner = runner;
        this.batchDelayMs = batchDelayMs;
        this.disableBatching = disableBatching;
        // key -> input
        this.queuedInputs = new Map();
        // key -> DeferredPromise[]
        this.queuedDefers = new DefaultMap_1.DefaultMap();
        // Dedupped annotations; each annotation identifies a caller of the query.
        this.queuedAnnotations = new Map();
        // Executes everything accumulated in the queue and settles the per-key
        // deferred promises. Defined as an arrow-function property so it's
        // pre-bound and can be passed directly to e.g. process.nextTick().
        this.flushQueue = async () => {
            if (!this.queuedInputs.size) {
                return;
            }
            // Detach the current queue up-front: any run() calls arriving while
            // we await below will start accumulating a brand new batch.
            const inputs = this.queuedInputs;
            const defers = this.queuedDefers;
            const annotations = [...this.queuedAnnotations.values()];
            this.queuedInputs = new Map();
            this.queuedDefers = new DefaultMap_1.DefaultMap();
            this.queuedAnnotations = new Map();
            let outputs = new Map();
            const errors = new Map();
            if (inputs.size === 1 || !this.runner.runBatch || this.disableBatching) {
                // Relatively rare since most of the requests come batched.
                await this.runSingleForEach(inputs, annotations, outputs, errors);
            }
            else {
                // Called most of the times.
                try {
                    outputs = await this.runner.runBatch(inputs, annotations);
                }
                catch (e) {
                    // Relatively rare under heavy load (since errors are rare).
                    if (this.runner.shouldDebatchOnError(e)) {
                        // Fall back to per-input queries (with attempt counters
                        // bumped), so one bad input can't poison the whole batch.
                        await this.runSingleForEach(inputs, incrementAttempt(annotations), outputs, errors);
                    }
                    else {
                        // Non-debatchable error: propagate it to every caller.
                        for (const key of defers.keys()) {
                            errors.set(key, e);
                        }
                    }
                }
            }
            for (const [key, defersOfKey] of defers.entries()) {
                const error = errors.get(key);
                const output = outputs.get(key);
                if (error === undefined) {
                    // A key with no output (e.g. no matching row) resolves to
                    // the runner-provided default value.
                    const outputOrDefault = output === undefined ? this.runner.default : output;
                    for (const { resolve } of defersOfKey) {
                        // There are typically multiple callers waiting for the query results
                        // (due to e.g. same-ID queries coalescing).
                        resolve(outputOrDefault);
                    }
                }
                else {
                    for (const { reject } of defersOfKey) {
                        reject(error);
                    }
                }
            }
        };
    }
    /**
     * Queues the input for execution (coalescing it with other inputs that map
     * to the same runner.key()) and returns a promise for its result. The
     * batch is flushed either immediately (when full or batching is disabled)
     * or at the end of the current event-loop spin / after batchDelayMs.
     */
    async run(input, annotation) {
        const key = this.runner.key(input);
        const delay = (0, misc_1.maybeCall)(this.batchDelayMs);
        // Queue return promise of this method.
        const defer = (0, p_defer_1.default)();
        this.queuedDefers.getOrAdd(key, Array).push(defer);
        // In case of dedupping by key, prefer the last value. E.g. if 2 UPDATEs
        // for the same ID have different values, then the last one will win, not
        // the 1st one.
        this.queuedInputs.set(key, input);
        // Annotations are dedupped by their content.
        this.queuedAnnotations.set(annotation.trace +
            annotation.vc +
            annotation.debugStack +
            annotation.whyClient, annotation);
        if (this.queuedInputs.size >= this.runner.maxBatchSize ||
            !this.runner.runBatch ||
            this.disableBatching) {
            // Batch is full (or batching is off): flush right away.
            (0, misc_1.runInVoid)(this.flushQueue);
        }
        else if (this.queuedInputs.size === 1) {
            // Defer calling of flushQueue() to the "end of the event loop's spin", to
            // have a chance to collect more run() calls for it to execute. We
            // actually defer twice (to the end of microtasks sub-loop and then once
            // again), just in case: the original DataLoader library wraps the
            // nextTick() call into a "global resolved Promise" object, so we do the
            // same here blindly. See some of details here:
            // https://github.com/graphql/dataloader/blob/fae38f14702e925d1e59051d7e5cb3a9a78bfde8/src/index.js#L234-L241
            // https://stackoverflow.com/a/27648394
            (0, misc_1.runInVoid)(Promise.resolve().then(() => delay > 0
                ? setTimeout(() => (0, misc_1.runInVoid)(this.flushQueue()), delay)
                : process.nextTick(this.flushQueue)));
        }
        return defer.promise;
    }
    /**
     * Runs each input individually (all in parallel), retrying a failed input
     * once when the runner's retry policy returns a numeric delay (awaited
     * before the retry) or anything other than "no_retry". Results and errors
     * are written into the outOutputs/outErrors maps keyed by input key; this
     * method itself never rejects.
     */
    async runSingleForEach(inputs, annotations, outOutputs, outErrors) {
        const promises = [];
        for (const [key, input] of inputs) {
            promises.push(this.runner
                .runSingle(input, annotations)
                .catch(async (error) => {
                const retryMs = this.runner.delayForSingleQueryRetryOnError(error);
                if (typeof retryMs === "number") {
                    await (0, delay_1.default)(retryMs);
                }
                if (retryMs !== "no_retry") {
                    return this.runner.runSingle(input, incrementAttempt(annotations));
                }
                throw error;
            })
                .then((output) => outOutputs.set(key, output))
                .catch((error) => outErrors.set(key, error)));
        }
        // Idiom fix: was Promise["all"](promises) — identical behavior, but the
        // bracket-string access defeats readability and tooling.
        await Promise.all(promises);
    }
}
exports.Batcher = Batcher;
/**
 * Returns a copy of the annotations array with each annotation's attempt
 * counter bumped by one. The input array and its elements are not mutated
 * (each element is shallow-copied).
 */
function incrementAttempt(annotations) {
    const bumped = [];
    for (const annotation of annotations) {
        bumped.push({ ...annotation, attempt: annotation.attempt + 1 });
    }
    return bumped;
}
//# sourceMappingURL=Batcher.js.map