// faastjs — type declarations (TypeScript .d.ts)
// Serverless batch computing made simple.
import { PersistentCache } from "./cache";
/**
 * A promise paired with its `resolve` and `reject` functions, so the promise
 * can be settled externally after construction (the "deferred" pattern).
 * `T` defaults to `void` for pure signaling use.
 */
export declare class Deferred<T = void> {
    /** The underlying promise; settled via {@link Deferred.resolve} or {@link Deferred.reject}. */
    promise: Promise<T>;
    /** Fulfills {@link Deferred.promise} with a value or another thenable. */
    resolve: (arg: T | PromiseLike<T>) => void;
    /** Rejects {@link Deferred.promise} with an optional error. */
    reject: (err?: any) => void;
    constructor();
}
/**
 * A {@link Deferred} bound to the async `worker` function that will
 * eventually settle it. Queues (e.g. {@link Funnel}, {@link RateLimiter})
 * hold these and call {@link DeferredWorker.execute} when capacity allows.
 */
export declare class DeferredWorker<T = void> extends Deferred<T> {
    private worker;
    private cancel?;
    /**
     * @param worker - the async operation whose result settles this deferred.
     * @param cancel - optional check run before execution; presumably a
     * non-undefined return is a cancellation reason message — TODO confirm
     * against the implementation.
     */
    constructor(worker: () => Promise<T>, cancel?: (() => string | undefined) | undefined);
    /** Runs the worker (unless cancelled) and settles the deferred accordingly. */
    execute(): Promise<void>;
}
/**
 * Retry policy: either a maximum number of additional attempts, or a
 * predicate receiving the error and the count of prior retries that returns
 * `true` to retry again.
 */
export type RetryType = number | ((err: any, retries: number) => boolean);
/**
 * Executes `fn`, retrying rejected attempts according to `retryN`.
 * @param retryN - the retry policy; see {@link RetryType}.
 * @param fn - the operation; receives the number of retries so far
 * (0 on the first attempt).
 * @returns the result of the first successful attempt.
 */
export declare function retryOp<T>(retryN: RetryType, fn: (retries: number) => Promise<T>): Promise<T>;
/**
 * A concurrency limiter: workers pushed onto the funnel wait in a pending
 * queue and are moved to the executing queue only while fewer than
 * `concurrency` workers are running.
 */
export declare class Funnel<T = void> {
    /** Maximum number of workers allowed to execute simultaneously. */
    concurrency: number;
    /** Default retry policy applied to pushed workers; see {@link RetryType}. */
    protected shouldRetry?: RetryType | undefined;
    /** Workers waiting for capacity. */
    protected pendingQueue: Set<DeferredWorker<T>>;
    /** Workers currently running. */
    protected executingQueue: Set<DeferredWorker<T>>;
    /** Count of workers that have completed. */
    processed: number;
    /** Count of workers that failed. */
    errors: number;
    constructor(concurrency?: number, shouldRetry?: RetryType | undefined);
    /**
     * Enqueue a worker.
     * @param worker - async operation to run when capacity is available.
     * @param shouldRetry - per-call retry policy, overriding the constructor's.
     * @param cancel - optional cancellation check; see {@link DeferredWorker}.
     * @returns a promise for the worker's eventual result.
     */
    push(worker: () => Promise<T>, shouldRetry?: RetryType, cancel?: () => string | undefined): Promise<T>;
    /** Removes all pending and executing workers. */
    clear(): void;
    /** Promises for all workers currently tracked by the funnel. */
    promises(): Promise<T>[];
    /** Waits for all tracked workers to settle. */
    all(): Promise<(void | Awaited<T>)[]>;
    /** Number of workers tracked (pending plus executing) — TODO confirm. */
    size(): number;
    /** Adjusts the concurrency limit. */
    setMaxConcurrency(maxConcurrency: number): void;
    /** Number of workers currently executing — TODO confirm semantics. */
    getConcurrency(): number;
    /** Starts pending workers while capacity remains. */
    protected doWork(): void;
}
/**
 * Options for {@link Pump}.
 * @internal
 */
export interface PumpOptions {
    /** Number of workers the pump keeps running concurrently. */
    concurrency: number;
    /** Enable verbose logging — presumably diagnostic only; TODO confirm. */
    verbose?: boolean;
}
/**
 * A {@link Funnel} that continuously re-invokes a single worker function,
 * keeping up to `options.concurrency` invocations in flight until stopped.
 * @internal
 */
export declare class Pump<T = void> extends Funnel<T | void> {
    protected options: PumpOptions;
    protected worker: () => Promise<T>;
    /** True once {@link Pump.stop} has been called; halts re-invocation. */
    stopped: boolean;
    constructor(options: PumpOptions, worker: () => Promise<T>);
    /** Begin pumping: repeatedly invoke the worker up to the concurrency limit. */
    start(): void;
    /** Stop issuing new worker invocations (in-flight work may continue). */
    stop(): void;
    /** Stops the pump and waits for in-flight invocations to settle. */
    drain(): Promise<(void | Awaited<T>)[]>;
    /** Adjusts the number of concurrent worker invocations. */
    setMaxConcurrency(concurrency: number): void;
}
/**
 * Limits the rate at which queued workers are started. The `bucket`,
 * `burst`, and `lastTick` fields suggest a token-bucket scheme (refill at
 * `targetRequestsPerSecond`, capacity `burst`) — confirm against the
 * implementation.
 */
export declare class RateLimiter<T = void> {
    /** Sustained rate of worker starts per second. */
    protected targetRequestsPerSecond: number;
    /** Maximum number of workers that may start back-to-back. */
    protected burst: number;
    /** Timestamp of the last bucket refill. */
    protected lastTick: number;
    /** Current token count available for immediate starts. */
    protected bucket: number;
    /** Workers waiting for a token. */
    protected queue: Set<DeferredWorker<T>>;
    constructor(targetRequestsPerSecond: number, burst?: number);
    /**
     * Enqueue a worker to run when the rate limit allows.
     * @param worker - async operation to run.
     * @param cancel - optional cancellation check; see {@link DeferredWorker}.
     * @returns a promise for the worker's eventual result.
     */
    push(worker: () => Promise<T>, cancel?: () => string | undefined): Promise<T>;
    /** Refill tokens based on elapsed time since `lastTick`. */
    protected updateBucket(): void;
    /** Start queued workers while tokens remain. */
    protected drainQueue(): Promise<void>;
    /** Discard all queued workers. */
    clear(): void;
}
/**
 * Specify {@link throttle} limits. These limits shape the way throttle invokes
 * the underlying function.
 * @public
 */
export interface Limits {
    /**
     * The maximum number of concurrent executions of the underlying function to
     * allow. Must be supplied, there is no default. Specifying `0` or
     * `Infinity` is allowed and means there is no concurrency limit.
     */
    concurrency: number;
    /**
     * The maximum number of calls per second to allow to the underlying
     * function. Default: no rate limit.
     */
    rate?: number;
    /**
     * The maximum number of calls to the underlying function to "burst" -- e.g.
     * the number that can be issued immediately as long as the rate limit is
     * not exceeded. For example, if rate is 5 and burst is 5, and 10 calls are
     * made to the throttled function, 5 calls are made immediately and then
     * after 1 second, another 5 calls are made immediately. Setting burst to 1
     * means calls are issued uniformly every `1/rate` seconds. If `rate` is not
     * specified, then `burst` does not apply. Default: 1.
     */
    burst?: number;
    /**
     * Retry if the throttled function returns a rejected promise. `retry` can
     * be a number or a function. If it is a number `N`, then up to `N`
     * additional attempts are made in addition to the initial call. If retry is
     * a function, it should return `true` if another retry attempt should be
     * made, otherwise `false`. The first argument will be the value of the
     * rejected promise from the previous call attempt, and the second argument
     * will be the number of previous retry attempts (e.g. the first call will
     * have value 0). Default: 0 (no retry attempts).
     */
    retry?: number | ((err: any, retries: number) => boolean);
    /**
     * If `memoize` is `true`, then every call to the throttled function will be
     * saved as an entry in a map from arguments to return value. If same
     * arguments are seen again in a future call, the return value is retrieved
     * from the Map rather than calling the function again. This can be useful
     * for avoiding redundant calls that are expected to return the same results
     * given the same arguments.
     *
     * The arguments will be captured with `JSON.stringify`, therefore types
     * that do not stringify uniquely won't be distinguished from each other.
     * Care must be taken when specifying `memoize` to avoid incorrect
     * results.
     */
    memoize?: boolean;
    /**
     * Similar to `memoize` except the map from function arguments to results is
     * stored in a persistent cache on disk. This is useful to prevent redundant
     * calls to APIs which are expected to return the same results for the same
     * arguments, and which are likely to be called across many faast.js module
     * instantiations. This is used internally by faast.js for caching cloud
     * prices for AWS, and for saving the last garbage collection
     * date for AWS. Persistent cache entries expire after a period of time. See
     * {@link PersistentCache}.
     */
    cache?: PersistentCache;
    /**
     * A promise that, if resolved, causes cancellation of pending throttled
     * invocations. This is typically created using `Deferred`. The idea is to
     * use the resolving of the promise as an asynchronous signal that any
     * pending invocations in this throttled function should be cleared.
     * @internal
     */
    cancel?: Promise<void>;
}
/**
 * Wraps `fn` so that results are cached in `cache`, keyed by the stringified
 * arguments (presumably via `JSON.stringify`, as with {@link Limits.memoize}
 * — confirm against the implementation). Repeated calls with the same
 * arguments return the cached value without re-invoking `fn`.
 */
export declare function memoizeFn<A extends any[], R>(fn: (...args: A) => R, cache?: Map<string, R>): (...args: A) => R;
/**
 * Wraps an async `fn` so that results are stored in an on-disk
 * {@link PersistentCache}, keyed by the call arguments; see
 * {@link Limits.cache} for the intended use.
 *
 * NOTE(review): the declared return is `Promise<any>` rather than
 * `Promise<R>`, which loses type information for callers — likely a
 * consequence of (de)serializing cached values; verify against the
 * implementation before tightening.
 */
export declare function cacheFn<A extends any[], R>(cache: PersistentCache, fn: (...args: A) => Promise<R>): (...args: A) => Promise<any>;
/**
 * A decorator for rate limiting, concurrency limiting, retry, memoization, and
 * on-disk caching. See {@link Limits}.
 * @remarks
 * When programming against cloud services, databases, and other resources, it
 * is often necessary to control the rate of request issuance to avoid
 * overwhelming the service provider. In many cases the provider has built-in
 * safeguards against abuse, which automatically fail requests if they are
 * coming in too fast. Some systems don't have safeguards and precipitously
 * degrade their service level or fail outright when faced with excessive load.
 *
 * With faast.js it becomes very easy to (accidentally) generate requests from
 * thousands of cloud functions. The `throttle` function can help manage request
 * flow without resorting to setting up a separate service. This is in keeping
 * with faast.js' zero-ops philosophy.
 *
 * Usage is simple:
 *
 * ```typescript
 * async function operation() { ... }
 * const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
 * for(let i = 0; i < 100; i++) {
 * // at most 10 concurrent executions at a rate of 5 invocations per second.
 * throttledOperation();
 * }
 * ```
 *
 * Note that each invocation to `throttle` creates a separate function with a
 * separate limits. Therefore it is likely that you want to use `throttle` in a
 * global context, not within a dynamic context:
 *
 * ```typescript
 * async function operation() { ... }
 * for(let i = 0; i < 100; i++) {
 * // WRONG - each iteration creates a separate throttled function that's only called once.
 * const throttledOperation = throttle({ concurrency: 10, rate: 5 }, operation);
 * throttledOperation();
 * }
 * ```
 *
 * A better way to use throttle avoids creating a named `operation` function
 * altogether, ensuring it cannot be accidentally called without throttling:
 *
 * ```typescript
 * const operation = throttle({ concurrency: 10, rate: 5 }, async () => {
 * ...
 * });
 * ```
 *
 * Throttle supports functions with arguments and automatically infers the
 * correct type for the returned function:
 *
 * ```typescript
 * // `operation` inferred to have type (str: string) => Promise<string>
 * const operation = throttle({ concurrency: 10, rate: 5 }, async (str: string) => {
 * return str;
 * });
 * ```
 *
 * In addition to limiting concurrency and invocation rate, `throttle` also
 * supports retrying failed invocations, memoizing calls, and on-disk caching.
 * See {@link Limits} for details.
 *
 * @param limits - see {@link Limits}.
 * @param fn - The function to throttle. It can take any arguments, but must
 * return a Promise (which includes `async` functions).
 * @returns Returns a throttled function with the same signature as the argument
 * `fn`.
 * @public
 */
export declare function throttle<A extends any[], R>(limits: Limits, fn: (...args: A) => Promise<R>): (...args: A) => Promise<R>;
/**
 * An unbounded async producer/consumer queue. `enqueue` and `next` may be
 * called in any order; pending `next` calls (the `deferred` array) are matched
 * against values that arrive later (the `enqueued` array).
 */
export declare class AsyncQueue<T> {
    /** Consumers waiting for a value to arrive. */
    protected deferred: Array<Deferred<T>>;
    /** Values (or pending values) not yet consumed. */
    protected enqueued: Promise<T>[];
    /** Adds a value (or a promise of one) to the queue. */
    enqueue(value: T | Promise<T>): void;
    /** Resolves with the next value, waiting if none is available yet. */
    next(): Promise<T>;
    /** Discards all waiting consumers and queued values. */
    clear(): void;
}
/**
 * An {@link AsyncQueue} exposed as an async iterable: `push` supplies values,
 * `done` terminates iteration, and `for await` consumes the stream.
 */
export declare class AsyncIterableQueue<T> extends AsyncQueue<IteratorResult<T>> {
    /** Enqueue a value for iteration (wrapped as a non-done IteratorResult). */
    push(value: T | Promise<T>): void;
    /** Signal end of iteration (enqueues the done IteratorResult). */
    done(): void;
    /** Enables `for await (const v of queue)` consumption. */
    [Symbol.asyncIterator](): this;
}
/**
 * An async queue that releases values in sequence-number order: values pushed
 * out of order are held in `arrived` until all lower sequence numbers
 * (tracked by `current`) have been delivered.
 */
export declare class AsyncOrderedQueue<T> {
    /** Underlying queue delivering values in order. */
    protected queue: AsyncQueue<T>;
    /** Out-of-order arrivals, keyed by sequence number. */
    protected arrived: Map<number, Promise<T>>;
    /** Next sequence number expected for delivery. */
    protected current: number;
    /** Submit a value with its sequence number; released in order. */
    push(value: T | Promise<T>, sequence: number): void;
    /** Bypass ordering and deliver `value` immediately — TODO confirm interaction with pending sequences. */
    pushImmediate(value: T | Promise<T>): void;
    /** Record a pending value for `sequence` without consuming it yet — TODO confirm difference from `push`. */
    enqueue(value: Promise<T>, sequence: number): void;
    /** Resolves with the next in-order value. */
    next(): Promise<T>;
    /** Discards all queued and out-of-order values. */
    clear(): void;
}