/**
 * prom-utils — Promise utilities: rate limiting, queueing/batching, defer, etc.
 * (TypeScript declaration file)
 */
import makeError from 'make-error';
import { AddOptions, Deferred, GetTimeframe, QueueOptions, QueueOptionsParallel, QueueResult, QueueResultParallel, RateLimitOptions, ThroughputLimiterOptions, WaitOptions } from './types';
/** Error class for invalid options — presumably thrown when option validation fails (e.g., in `rateLimit`/`batchQueue`); confirm against the implementation. */
export declare const OptionsError: makeError.Constructor<makeError.BaseError>;
/** Error class for timed-out operations — presumably rejected by `waitUntil` when its timeout expires; confirm against the implementation. */
export declare const TimeoutError: makeError.Constructor<makeError.BaseError>;
/**
 * Limit the concurrency of promises. This can be used to control
 * how many requests are made to a server, for example. Note:
 * exceptions will be swallowed in order to prevent an UnhandledPromiseRejection
 * from being thrown in the case where the promise rejects before the limit is
 * reached. Therefore, you must handle exceptions on a per promise basis.
 * Wrapping `rateLimit` method calls in a try/catch will not work. You can
 * set `limit` to Infinity to disregard the limit.
 *
 * To limit the promises for a given period of time, use the `maxItemsPerPeriod`
 * option. Optionally, specify a time period using the `period` option (default is 1 second).
 * For example, the following limits the number of concurrent requests to 5
 * and ensures that the rate never exceeds 75 requests per minute.
 *
 * @param limit - Maximum number of concurrently pending promises; `Infinity` disables the cap.
 * @param options - Throughput options such as `maxItemsPerPeriod` and `period`.
 *
 * @example
 * ```typescript
 * const limiter = rateLimit(5, { maxItemsPerPeriod: 75, period: ms('1m') })
 * for (const url of urls) {
 * // Will wait for one promise to finish if limit is reached
 * await limiter.add(fetch(url))
 * }
 * // Wait for unresolved promises to resolve
 * await limiter.finish()
 * ```
 */
export declare const rateLimit: <T = unknown>(limit: number, options?: RateLimitOptions & ThroughputLimiterOptions) => {
/** Track a promise; resolves immediately unless the limit is reached, in which case it waits for capacity. */
add: (prom: Promise<T>, options?: AddOptions) => Promise<void>;
/** Wait for all still-pending promises to settle. */
finish: () => Promise<void>;
/** Number of pending promises. */
readonly length: number;
/** Throughput statistics; `itemsPerPeriod` is the current items-per-period rate. */
getStats: () => {
itemsPerPeriod: number;
};
};
/**
 * Return the elapsed time since the first entry in the sliding window.
 * This evenly distributes the rate over the period.
 * Timeframe strategy — presumably pluggable via `ThroughputLimiterOptions`; confirm in ./types.
 */
export declare const getTimeframeUsingElapsed: GetTimeframe;
/**
 * Return the elapsed time since the first entry in the sliding window or the period,
 * whichever is greater. This allows for high throughput at the start of the period.
 * Timeframe strategy — presumably pluggable via `ThroughputLimiterOptions`; confirm in ./types.
 */
export declare const getTimeframeUsingPeriod: GetTimeframe;
/**
 * Limit throughput by sleeping until the rate (units/period)
 * is less than `maxUnitsPerPeriod`. Units and period are
 * intentionally abstract since it could represent requests/min or bytes/sec,
 * for example.
 *
 * @param maxUnitsPerPeriod - Rate ceiling in units per period.
 * @param options - Sliding-window settings (period, timeframe strategy, etc.).
 *
 * @example
 * ```typescript
 * // Limit to at most 1000 items/sec
 * const limiter = throughputLimiter(1000)
 *
 * for(const batch of batches) {
 * // Will wait until the rate is < `maxUnitsPerPeriod`
 * await limiter.throttleAndAppend(batch.length)
 * console.log('Items/sec %d', limiter.getCurrentRate())
 * }
 * ```
 */
export declare const throughputLimiter: (maxUnitsPerPeriod: number, options?: ThroughputLimiterOptions) => {
/** Current rate (units/period) computed from the sliding window. */
getCurrentRate: () => number;
/** Sleep until the current rate drops below `maxUnitsPerPeriod`. */
throttle: () => Promise<void>;
/** Record `numUnits` in the sliding window without throttling. */
append: (numUnits: number) => void;
/** Throttle first, then record `numUnits`. */
throttleAndAppend: (numUnits: number) => Promise<void>;
/** Record `numUnits` first, then throttle. */
appendAndThrottle: (numUnits: number) => Promise<void>;
};
/**
 * Batch calls via a local queue. This can be used to batch values before
 * writing to a database, for example.
 *
 * Calls `fn` when either `batchSize`, `batchBytes`, or `timeout` is reached.
 * `batchSize` defaults to 500 and therefore will always be in effect if
 * no options are provided. You can pass `Infinity` to disregard `batchSize`.
 * If `timeout` is passed, the timer will be started when the first item is
 * enqueued and reset when `flush` is called explicitly or implicitly.
 *
 * Use `maxItemsPerSec` and/or `maxBytesPerSec` to limit throughput.
 * Call `queue.getStats()` to get the items/sec and bytes/sec rates.
 *
 * Call `queue.flush()` to flush explicitly.
 *
 * The last result of calling `fn` can be obtained by referencing `lastResult`
 * on the returned object.
 *
 * The cause of the last automatic queue flush can be obtained by referencing
 * `lastFlush` on the returned object.
 *
 * @param fn - Invoked with the accumulated batch when a threshold is reached or on `flush`.
 * @param options - Thresholds (`batchSize`, `batchBytes`, `timeout`) and throughput limits; see `QueueOptions`.
 *
 * ```typescript
 * const writeToDatabase = async (records) => {...}
 *
 * const queue = batchQueue(writeToDatabase)
 * for (const record of records) {
 * // Will call `fn` when a threshold is met
 * await queue.enqueue(record)
 * }
 * // Call `fn` with remaining queued items
 * await queue.flush()
 * ```
 */
export declare function batchQueue<A, B>(fn: (arr: A[]) => B, options?: QueueOptions): QueueResult<A, B>;
/**
 * Batch calls via a local queue. This can be used to batch values before
 * writing to a database, for example. Unlike `batchQueue`, this is safe to
 * be called concurrently. In particular, you can pair `rateLimit` with this.
 *
 * Calls `fn` when either `batchSize` or `batchBytes` is reached.
 * `batchSize` defaults to 500 and therefore will always be in effect if
 * no options are provided. You can pass `Infinity` to disregard `batchSize`.
 * Note: unlike `batchQueue`, no `timeout` threshold is supported here.
 *
 * Call `queue.flush()` to flush explicitly.
 *
 * @param fn - Invoked with the accumulated batch when a threshold is reached or on `flush`.
 * @param options - Thresholds (`batchSize`, `batchBytes`); see `QueueOptionsParallel`.
 */
export declare function batchQueueParallel<A, B>(fn: (arr: A[]) => B, options?: QueueOptionsParallel): QueueResultParallel<A, B>;
/**
 * Defer resolving a promise until `done` is called.
 * Returns a `Deferred` (see ./types) exposing the promise and the `done` resolver.
 */
export declare function defer(): Deferred;
/**
 * Pause a loop by awaiting `maybeBlock`. When `pause` is called `maybeBlock` will
 * return a promise that is resolved when `resume` is called. Otherwise,
 * `maybeBlock` will return immediately. If `timeout` is passed, `resume` will
 * be called after `timeout` if it is not manually called first.
 *
 * @param timeout - Optional ms after which a `pause` auto-resumes.
 *
 * ```typescript
 * const shouldProcess = pausable()
 *
 * onSomeCondition(shouldProcess.pause)
 * onSomeOtherCondition(shouldProcess.resume)
 *
 * for (const record of records) {
 * await shouldProcess.maybeBlock()
 * await processRecord(record)
 * }
 * ```
 */
export declare const pausable: (timeout?: number) => {
/** Enter the paused state; subsequent `maybeBlock` calls will block. */
pause: () => void;
/** Leave the paused state, releasing any pending `maybeBlock` promise. */
resume: () => void;
/** Returns a promise to await while paused; `undefined` when not paused. */
maybeBlock: () => Promise<void> | undefined;
/** Whether currently paused. */
readonly isPaused: boolean;
};
/**
 * Call heartbeatFn every interval until promise resolves or rejects.
 * `interval` defaults to 1000.
 * @param heartbeatFn - Side-effect callback invoked on each tick (e.g., a keep-alive).
 * @param promise - The promise being awaited; its settlement stops the heartbeat.
 * @param interval - Tick interval in ms (default 1000).
 * @returns The value of the resolved promise.
 */
export declare const pacemaker: <T>(heartbeatFn: () => void, promise: Promise<T>, interval?: number) => Promise<T>;
/**
 * Wait until the predicate returns truthy or the timeout expires.
 * Will not hang like other implementations found on NPM.
 * Inspired by https://www.npmjs.com/package/async-wait-until
 * @param pred - Sync or async predicate; polling stops once it returns truthy.
 * @param options - Timeout/polling settings; see `WaitOptions`.
 * @returns A promise that resolves or rejects, accordingly.
 * NOTE(review): on timeout this likely rejects with the exported `TimeoutError` — confirm in the implementation.
 *
 * @example
 * ```typescript
 * let isTruthy = false
 * setTimeout(() => { isTruthy = true }, 250)
 * await waitUntil(() => isTruthy)
 * ```
 */
export declare const waitUntil: (pred: () => Promise<boolean> | boolean, options?: WaitOptions) => Promise<void>;
/**
 * Sleep for `time` ms before resolving the Promise.
 * @param time - Milliseconds to sleep; default when omitted is not visible here — confirm in implementation.
 */
export declare const sleep: (time?: number) => Promise<unknown>;
/** Sentinel value returned by `raceTimeout` when the timeout wins the race. */
export declare const TIMEOUT: unique symbol;
/**
 * Returns the value of the promise if the promise resolves prior to timeout.
 * If the timeout happens first, the exported TIMEOUT symbol is returned.
 * @param prom - The promise to race against the clock.
 * @param timeout - Timeout in ms.
 *
 * @example
 * ```ts
 * const winner = await raceTimeout(someProm, 5)
 * if (winner === TIMEOUT) {
 * // Do something
 * }
 * ```
 */
export declare const raceTimeout: <A>(prom: Promise<A>, timeout: number) => Promise<typeof TIMEOUT | Awaited<A>>;