@nerjs/batchloader
Version:
`BatchLoader` is a tool for batching data requests with support for deduplication, caching, and parallel task management. It is designed to enhance flexibility and performance in scenarios requiring asynchronous data processing. This module was inspired by the `dataloader` library.
81 lines • 3.77 kB
JavaScript
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.BatchAggregator = void 0;
const limited_timekeeper_1 = require("../timekeeper/limited.timekeeper");
const unlimited_timekeeper_1 = require("../timekeeper/unlimited.timekeeper");
const debug_1 = require("debug");
const errors_1 = require("../utils/errors");
const debug = (0, debug_1.default)('batchloader:aggregator');
const createTimekeeperMetrics = (metrics) => {
if (!metrics)
return undefined;
const tkMetrics = {};
if (metrics.resolveBatch)
tkMetrics.resolveTask = task => metrics.resolveBatch?.(task.data.requests.length);
if (metrics.rejectBatch)
tkMetrics.rejectTask = (_, task) => metrics.rejectBatch?.(task.data.requests.length);
if (metrics.parallelBatches)
tkMetrics.runTask = runnedSize => metrics.parallelBatches?.(runnedSize);
if (metrics.waitingBatches)
tkMetrics.waitTask = runnedSize => metrics.waitingBatches?.(runnedSize);
return tkMetrics;
};
class BatchAggregator {
constructor(batchLoaderFn, options, metrics) {
this.batchLoaderFn = batchLoaderFn;
this.options = options;
this.metrics = metrics;
this.batchRunner = async (task, signal) => {
this.metrics?.rejectBatch?.(task.data.requests.length);
debug(`Running batchRunner with a query array of length ${task.data.requests.length}. task id="${task.id}"`);
const response = await this.batchLoaderFn([...task.data.requests], signal);
if (!Array.isArray(response) || response.length !== task.data.requests.length)
throw new errors_1.LoaderError(`The result of batchLoadFn must be an array equal in length to the query array `);
task.data.responses = response;
};
const { concurrencyLimit, maxWaitingTimeMs, batchTimeMs: runMs, timeoutMs } = options;
const initialDataFactory = () => ({ requests: [], responses: [] });
this.timekeeper =
concurrencyLimit && concurrencyLimit > 0 && concurrencyLimit < Infinity
? new limited_timekeeper_1.LimitedTimekeeper({
concurrencyLimit,
initialDataFactory,
maxWaitingTimeMs: maxWaitingTimeMs || 60_000,
runMs,
runner: this.batchRunner,
timeoutMs,
callRejectedTask: false,
}, createTimekeeperMetrics(metrics))
: new unlimited_timekeeper_1.UnlimitedTimekeeper({
initialDataFactory,
runMs,
runner: this.batchRunner,
timeoutMs,
callRejectedTask: false,
}, createTimekeeperMetrics(metrics));
debug(`Create BatchAggregator with ${this.timekeeper.constructor.name}`);
}
getCurrentTask() {
const task = this.timekeeper.current();
if (task.data.requests.length >= this.options.maxBatchSize) {
debug(`The size of the current batch has reached the maximum. size=${task.data.requests.length}`);
this.timekeeper.run();
return this.getCurrentTask();
}
return task;
}
async load(request) {
const task = this.getCurrentTask();
const index = task.data.requests.length;
this.metrics?.loadBatchItem?.();
debug(`Load data. task id="${task.id}"; curent index="${index}"`);
task.data.requests.push(request);
await this.timekeeper.wait(task);
return task.data.responses[index];
}
clear() {
this.timekeeper.clear();
}
}
exports.BatchAggregator = BatchAggregator;
//# sourceMappingURL=batch-aggregator.js.map