faastjs
Serverless batch computing made simple.
/**
* Faast.js transforms ordinary JavaScript modules into serverless cloud
* functions that can run on AWS Lambda.
*
* The main entry point to faast.js is the {@link faast} function, which returns
* an object that implements the {@link FaastModule} interface. The most common
* options are {@link CommonOptions}. Using faast.js requires writing two
* modules, one containing the functions to upload to the cloud, and the other
* that invokes faast.js and calls the resulting cloud functions.
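 *
 * For example, a minimal sketch of the two-module pattern (the file names and
 * the `hello` function are illustrative):
 * ```typescript
 * // functions.ts - the module to run in the cloud
 * export function hello(name: string) {
 *     return `hello ${name}!`;
 * }
 *
 * // index.ts - invokes faast.js locally
 * import { faast } from "faastjs";
 * import * as funcs from "./functions";
 *
 * async function main() {
 *     const m = await faast("aws", funcs);
 *     try {
 *         console.log(await m.functions.hello("world"));
 *     } finally {
 *         await m.cleanup();
 *     }
 * }
 * main();
 * ```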
* @packageDocumentation
*/
/// <reference types="node" />
import childProcess from 'child_process';
import { CloudWatchLogs } from '@aws-sdk/client-cloudwatch-logs';
import { CreateFunctionRequest } from '@aws-sdk/client-lambda';
import { default as debug_2 } from 'debug';
import { IAM } from '@aws-sdk/client-iam';
import { Lambda } from '@aws-sdk/client-lambda';
import { Pricing } from '@aws-sdk/client-pricing';
import { S3 } from '@aws-sdk/client-s3';
import { SNS } from '@aws-sdk/client-sns';
import { SQS } from '@aws-sdk/client-sqs';
import { STS } from '@aws-sdk/client-sts';
import { VError } from 'verror';
import webpack from 'webpack';
import { Writable } from 'stream';
declare type AnyFunction = (...args: any[]) => any;
/**
 * `Async<T>` maps regular values to Promises and Iterators to AsyncIterators.
* If `T` is already a Promise or an AsyncIterator, it remains the same. This
* type is used to infer the return value of cloud functions from the types of
* the functions in the user's input module.
* @public
*/
export declare type Async<T> = T extends AsyncGenerator<infer R> ? AsyncGenerator<R> : T extends Generator<infer R> ? AsyncGenerator<R> : T extends Promise<infer R> ? Promise<R> : Promise<T>;
/**
 * `AsyncDetail<T>` is similar to {@link Async} except it maps return values R to
* `Detail<R>`, which is the return value with additional information about each
* cloud function invocation.
* @public
*/
export declare type AsyncDetail<T> = T extends AsyncGenerator<infer R> ? AsyncGenerator<Detail<R>> : T extends Generator<infer R> ? AsyncGenerator<Detail<R>> : T extends Promise<infer R> ? Promise<Detail<R>> : Promise<Detail<T>>;
declare class AsyncIterableQueue<T> extends AsyncQueue<IteratorResult<T>> {
push(value: T | Promise<T>): void;
done(): void;
[Symbol.asyncIterator](): this;
}
declare class AsyncQueue<T> {
protected deferred: Array<Deferred<T>>;
protected enqueued: Promise<T>[];
enqueue(value: T | Promise<T>): void;
next(): Promise<T>;
clear(): void;
}
/**
* Factory for AWS service clients, which allows for custom construction and configuration.
 * See {@link https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html#configuration | AWS Configuration}.
* @public
* @remarks
* This is an advanced option. This provides a way for a faast.js client to
* instantiate AWS service objects for itself to provide custom options.
* Note that if you create a service object yourself, it won't have the
* default options that faast.js would use, which are:
*
* - maxAttempts (faast.js default: 6)
* - region (faast.js default: "us-west-2")
* - logger (faast.js default: log.awssdk)
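 *
 * For example, a minimal sketch of a factory that overrides the region for the
 * Lambda client (the region is illustrative, and `funcs` is assumed to be your
 * module of cloud functions):
 * ```typescript
 * import { Lambda } from "@aws-sdk/client-lambda";
 *
 * const awsClientFactory: AwsClientFactory = {
 *     createLambda: () => new Lambda({ region: "us-east-1", maxAttempts: 6 })
 * };
 * const m = await faastAws(funcs, { awsClientFactory });
 * ```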
*/
export declare interface AwsClientFactory {
createCloudWatchLogs?: () => CloudWatchLogs;
createIAM?: () => IAM;
createLambda?: () => Lambda;
/**
* A special AWS Lambda factory for creating lambda functions that are
* used for faast.js invocations. These special clients have the following
* options set by default in faast.js:
*
     * ```typescript
     * // Retries are handled by faast.js, not the sdk.
     * maxAttempts: 0,
     * ```
*/
createLambdaForInvocations?: () => Lambda;
createPricing?: () => Pricing;
createS3?: () => S3;
createSNS?: () => SNS;
createSQS?: () => SQS;
createSts?: () => STS;
}
/**
* The return type of {@link faastAws}. See {@link FaastModuleProxy}.
* @public
*/
export declare type AwsFaastModule<M extends object = object> = FaastModuleProxy<M, AwsOptions, AwsState>;
declare type AwsGcWork = {
type: "SetLogRetention";
logGroupName: string;
retentionInDays: number;
} | {
type: "DeleteResources";
resources: AwsResources;
} | {
type: "DeleteLayerVersion";
LayerName: string;
VersionNumber: number;
};
declare interface AwsLayerInfo {
Version: number;
LayerVersionArn: string;
LayerName: string;
}
declare class AwsMetrics {
outboundBytes: number;
sns64kRequests: number;
sqs64kRequests: number;
}
/**
* AWS-specific options for {@link faastAws}.
* @public
*/
export declare interface AwsOptions extends CommonOptions {
/**
* The region to create resources in. Garbage collection is also limited to
* this region. Default: `"us-west-2"`.
*/
region?: AwsRegion;
/**
* The role that the lambda function will assume when executing user code.
* Default: `"faast-cached-lambda-role"`. Rarely used.
* @remarks
* When a lambda executes, it first assumes an
* {@link https://docs.aws.amazon.com/lambda/latest/dg/lambda-intro-execution-role.html | execution role}
* to grant access to resources.
*
* By default, faast.js creates this execution role for you and leaves it
* permanently in your account (the role is shared across all lambda
* functions created by faast.js). By default, faast.js grants administrator
* privileges to this role so your code can perform any AWS operation it
* requires.
*
* You can
* {@link https://console.aws.amazon.com/iam/home#/roles | create a custom role}
* that specifies more limited permissions if you prefer not to grant
* administrator privileges. Any role you assign for faast.js modules needs
* at least the following permissions:
*
* - Execution Role:
* ```json
* {
* "Version": "2012-10-17",
* "Statement": [
* {
* "Effect": "Allow",
* "Action": ["logs:*"],
* "Resource": "arn:aws:logs:*:*:log-group:faast-*"
* },
* {
* "Effect": "Allow",
* "Action": ["sqs:*"],
* "Resource": "arn:aws:sqs:*:*:faast-*"
* }
* ]
* }
* ```
*
* - Trust relationship (also known as `AssumeRolePolicyDocument` in the AWS
* SDK):
* ```json
* {
* "Version": "2012-10-17",
* "Statement": [
* {
* "Effect": "Allow",
* "Principal": {
* "Service": "lambda.amazonaws.com"
* },
* "Action": "sts:AssumeRole"
* }
* ]
* }
* ```
*
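     * For example, a sketch of selecting a custom role by name (the role name
     * shown is hypothetical and must already exist in your account):
     * ```typescript
     * const m = await faastAws(funcs, { RoleName: "my-restricted-faast-role" });
     * ```
     *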
*/
RoleName?: string;
/**
* Additional options to pass to AWS Lambda creation. See
* {@link https://docs.aws.amazon.com/lambda/latest/dg/API_CreateFunction.html | CreateFunction}.
* @remarks
* If you need specialized options, you can pass them to the AWS Lambda SDK
* directly. Note that if you override any settings set by faast.js, you may
* cause faast.js to not work:
*
* ```typescript
* const request: aws.Lambda.CreateFunctionRequest = {
* FunctionName,
* Role,
* Runtime: "nodejs18.x",
* Handler: "index.trampoline",
* Code,
* Description: "faast trampoline function",
* Timeout,
* MemorySize,
* ...awsLambdaOptions
* };
* ```
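     *
     * For example, a sketch that raises Lambda's ephemeral storage (field name
     * per the AWS CreateFunction API; the size is illustrative):
     * ```typescript
     * const m = await faastAws(funcs, {
     *     awsLambdaOptions: { EphemeralStorage: { Size: 1024 } }
     * });
     * ```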
*/
awsLambdaOptions?: Partial<CreateFunctionRequest>;
/**
* AWS service factories. See {@link AwsClientFactory}.
*/
awsClientFactory?: AwsClientFactory;
/** @internal */
_gcWorker?: (work: AwsGcWork, services: AwsServices) => Promise<void>;
}
/**
* Valid AWS
* {@link https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html | regions}.
* Not all of these regions have Lambda support.
* @public
*/
export declare type AwsRegion = "us-east-1" | "us-east-2" | "us-west-1" | "us-west-2" | "ca-central-1" | "eu-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "ap-northeast-1" | "ap-northeast-2" | "ap-northeast-3" | "ap-southeast-1" | "ap-southeast-2" | "ap-south-1" | "sa-east-1";
declare interface AwsResources {
FunctionName: string;
RoleName: string;
region: AwsRegion;
ResponseQueueUrl?: string;
ResponseQueueArn?: string;
RequestTopicArn?: string;
SNSLambdaSubscriptionArn?: string;
logGroupName: string;
layer?: AwsLayerInfo;
Bucket?: string;
}
declare interface AwsServices {
readonly lambda: Lambda;
readonly lambda2: Lambda;
readonly cloudwatch: CloudWatchLogs;
readonly iam: IAM;
readonly sqs: SQS;
readonly sns: SNS;
readonly pricing: Pricing;
readonly sts: STS;
readonly s3: S3;
}
/**
* @public
*/
declare interface AwsState {
/** @internal */
resources: AwsResources;
/** @internal */
services: AwsServices;
/** @internal */
options: Required<AwsOptions>;
/** @internal */
metrics: AwsMetrics;
/** @internal */
gcPromise?: Promise<"done" | "skipped">;
}
declare interface CallId {
callId: string;
}
declare type CallId_2 = string;
declare interface CallingContext {
call: FunctionCall;
startTime: number;
logUrl?: string;
executionId?: string;
instanceId?: string;
}
/**
* Options that apply to the {@link FaastModule.cleanup} method.
* @public
*/
export declare interface CleanupOptions {
/**
* If true, delete provider cloud resources. Default: true.
* @remarks
* The cleanup operation has two functions: stopping the faast.js runtime
* and deleting cloud resources that were instantiated. If `deleteResources`
* is false, then only the runtime is stopped and no cloud resources are
* deleted. This can be useful for debugging and examining the state of
* resources created by faast.js.
*
* It is supported to call {@link FaastModule.cleanup} twice: once with
* `deleteResources` set to `false`, which only stops the runtime, and then
* again set to `true` to delete resources. This can be useful for testing.
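     *
     * A sketch of this two-phase pattern:
     * ```typescript
     * // Stop the faast.js runtime but keep cloud resources for inspection.
     * await faastModule.cleanup({ deleteResources: false });
     * // ...examine logs and resources...
     * // Later, delete the resources (deleteResources defaults to true).
     * await faastModule.cleanup();
     * ```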
*/
deleteResources?: boolean;
/**
* If true, delete cached resources. Default: false.
* @remarks
* Some resources are cached persistently between calls for performance
* reasons. If this option is set to true, these cached resources are
* deleted when cleanup occurs, instead of being left behind for future use.
* For example, on AWS this includes the Lambda Layers that are created for
* {@link CommonOptions.packageJson} dependencies. Note that only the cached
* resources created by this instance of FaastModule are deleted, not cached
* resources from other FaastModules. This is similar to setting
* `useCachedDependencies` to `false` during function construction, except
* `deleteCaches` can be set at function cleanup time, and any other
* FaastModules created before cleanup may use the cached Layers.
*/
deleteCaches?: boolean;
/**
* Number of seconds to wait for garbage collection. Default: 10.
* @remarks
* Garbage collection can still be operating when cleanup is called; this
* option limits the amount of time faast waits for the garbage collector.
* If set to 0, the wait is unlimited.
*/
gcTimeout?: number;
}
/**
* Options common across all faast.js providers. Used as argument to {@link faast}.
* @remarks
* There are also more specific options for each provider. See
* {@link AwsOptions} and {@link LocalOptions}.
* @public
*/
export declare interface CommonOptions {
/**
* If true, create a child process to isolate user code from faast
* scaffolding. Default: true.
* @remarks
* If a child process is not created, faast runs in the same node instance
* as the user code and may not execute in a timely fashion because user
* code may
* {@link https://nodejs.org/en/docs/guides/dont-block-the-event-loop/ | block the event loop}.
* Creating a child process for user code allows faast.js to continue
* executing even if user code never yields. This provides better
* reliability and functionality:
*
* - Detect timeout errors more reliably, even if the function doesn't
* relinquish the CPU. Not applicable to AWS, which sends separate failure
* messages in case of timeout. See {@link CommonOptions.timeout}.
*
     * - Collect CPU metrics used for detecting invocations with high latency,
     *   which can be used for automatically retrying calls to reduce tail latency.
*
* The cost of creating a child process is mainly in the memory overhead of
* creating another node process.
*/
childProcess?: boolean;
/**
* When childProcess is true, the child process will be spawned with the
* value of this property as the setting for --max-old-space-size.
* @remarks
* This is useful if a function requires the node process to limit its
* memory so that another spawned process (e.g. a browser instance) can use
* the rest.
* @public
*/
childProcessMemoryMb?: number;
/**
* The maximum number of concurrent invocations to allow. Default: 100,
* except for the `local` provider, where the default is 10.
* @remarks
* The concurrency limit applies to all invocations of all of the faast
* functions summed together. It is not a per-function limit. To apply a
* per-function limit, use {@link throttle}. A value of 0 is equivalent to
* Infinity. A value of 1 ensures mutually exclusive invocations.
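     *
     * For example, a sketch of a per-function limit using {@link throttle}
     * (`processItem` is a hypothetical function in your cloud module):
     * ```typescript
     * import { throttle } from "faastjs";
     *
     * const limitedProcessItem = throttle(
     *     { concurrency: 5 },
     *     faastModule.functions.processItem
     * );
     * ```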
*/
concurrency?: number;
/**
* A user-supplied description for this function, which may make it easier
* to track different functions when multiple functions are created.
*/
description?: string;
/**
* Exclude a subset of files included by {@link CommonOptions.include}.
* @remarks
* The exclusion can be a directory or glob. Exclusions apply to all included
* entries.
*/
exclude?: string[];
/**
* Rate limit invocations (invocations/sec). Default: no rate limit.
* @remarks
* Some services cannot handle more than a certain number of requests per
* second, and it is easy to overwhelm them with a large number of cloud
* functions. Specify a rate limit in invocation/second to restrict how
* faast.js issues requests.
*/
rate?: number;
/**
* Environment variables available during serverless function execution.
* Default: \{\}.
*/
env?: {
[key: string]: string;
};
/**
* Garbage collector mode. Default: `"auto"`.
* @remarks
* Garbage collection deletes resources that were created by previous
* instantiations of faast that were not cleaned up by
* {@link FaastModule.cleanup}, either because it was not called or because
* the process terminated and did not execute this cleanup step. In `"auto"`
* mode, garbage collection may be throttled to run up to once per hour no
* matter how many faast.js instances are created. In `"force"` mode,
* garbage collection is run without regard to whether another gc has
* already been performed recently. In `"off"` mode, garbage collection is
* skipped entirely. This can be useful for performance-sensitive tests, or
* for more control over when gc is performed.
*
* Garbage collection is cloud-specific, but in general garbage collection
* should not interfere with the behavior or performance of faast cloud
* functions. When {@link FaastModule.cleanup} runs, it waits for garbage
* collection to complete. Therefore the cleanup step can in some
* circumstances take a significant amount of time even after all
* invocations have returned.
*
* It is generally recommended to leave garbage collection in `"auto"` mode,
* otherwise garbage resources may accumulate over time and you will
* eventually hit resource limits on your account.
*
* Also see {@link CommonOptions.retentionInDays}.
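     *
     * For example, a sketch of disabling gc for a performance-sensitive test:
     * ```typescript
     * const m = await faast("aws", funcs, { gc: "off" });
     * ```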
*/
gc?: "auto" | "force" | "off";
/**
* Include files to make available in the remote function. See
* {@link IncludeOption}.
* @remarks
* Each include entry is a directory or glob pattern. Paths can be specified
* as relative or absolute paths. Relative paths are resolved relative to
* the current working directory, or relative to the `cwd` option.
*
* If the include entry is a directory `"foo/bar"`, the directory
* `"./foo/bar"` will be available in the cloud function. Directories are
* recursively added.
*
* Glob patterns use the syntax of
* {@link https://github.com/isaacs/node-glob | node glob}.
*
* Also see {@link CommonOptions.exclude} for file exclusions.
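     *
     * For example, a sketch that ships a data directory but excludes its
     * markdown files (the paths are illustrative):
     * ```typescript
     * const m = await faast("aws", funcs, {
     *     include: ["data/**"],
     *     exclude: ["data/*.md"]
     * });
     * ```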
*/
include?: (string | IncludeOption)[];
/**
* Maximum number of times that faast will retry each invocation. Default: 2
     * (invocations can therefore be attempted 3 times in total).
* @remarks
* Retries are automatically attempted for transient infrastructure-level
     * failures such as rate limits or network failures. User-level exceptions
* are not retried automatically. In addition to retries performed by faast,
* some providers automatically attempt retries. These are not controllable
* by faast. But as a result, your function may be retried many more times
* than this setting suggests.
*/
maxRetries?: number;
/**
* Memory limit for each function in MB. This setting has an effect on
* pricing. Default varies by provider.
* @remarks
* Each provider has different settings for memory size, and performance
* varies depending on the setting. By default faast picks a likely optimal
* value for each provider.
*
* - aws: 1728MB
*
* - local: 512MB (however, memory size limits aren't reliable in local mode.)
*/
memorySize?: number;
/**
* Specify invocation mode. Default: `"auto"`.
* @remarks
* Modes specify how invocations are triggered. In https mode, the functions
* are invoked through an https request or the provider's API. In queue
* mode, a provider-specific queue is used to invoke functions. Queue mode
* adds additional latency and (usually negligible) cost, but may scale
* better for some providers. In auto mode the best default is chosen for
* each provider depending on its particular performance characteristics.
*
* The defaults are:
*
* - aws: `"auto"` is `"https"`. In https mode, the AWS SDK api
* is used to invoke functions. In queue mode, an AWS SNS topic is created
* and triggers invocations. The AWS API Gateway service is never used by
* faast, as it incurs a higher cost and is not needed to trigger
* invocations.
*
* - local: The local provider ignores the mode setting and always uses an
* internal asynchronous queue to schedule calls.
*
* Size limits are affected by the choice of mode. On AWS the limit is 256kb
* for arguments and return values in `"queue"` mode, and 6MB for `"https"`
* mode.
*
* Note that no matter which mode is selected, faast.js always creates a
     * queue for sending back intermediate results for bookkeeping and
* performance monitoring.
*/
mode?: "https" | "queue" | "auto";
/**
* Specify a package.json file to include with the code package.
* @remarks
* By default, faast.js will use webpack to bundle dependencies your remote
* module imports. In normal usage there is no need to specify a separate
* package.json, as webpack will statically analyze your imports and
* determine which files to bundle.
*
* However, there are some use cases where this is not enough. For example,
     * some dependencies contain native code compiled during installation, which
     * webpack cannot bundle, or are specifically not designed to work with
     * webpack. In
* these cases, you can create a separate `package.json` for these
* dependencies and pass the filename as the `packageJson` option. If
* `packageJson` is an `object`, it is assumed to be a parsed JSON object
* with the same structure as a package.json file (useful for specifying a
* synthetic `package.json` directly in code).
*
* The way the `packageJson` is handled varies by provider:
*
* - local: Runs `npm install` in a temporary directory it prepares for the
* function.
*
* - aws: Recursively calls faast.js to run `npm install` inside a separate
* lambda function specifically created for this purpose. Faast.js uses
* lambda to install dependencies to ensure that native dependencies are
* compiled in an environment that can produce binaries linked against
* lambda's
* {@link https://aws.amazon.com/blogs/compute/running-executables-in-aws-lambda/ | execution environment}.
* Packages are saved in a Lambda Layer.
*
* For AWS, if {@link CommonOptions.useDependencyCaching} is `true` (which
* is the default), then the Lambda Layer created will be reused in future
* function creation requests if the contents of `packageJson` are the same.
*
* The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
* `packageJson` issues.
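     *
     * For example, a sketch passing a synthetic `package.json` object for a
     * native dependency (the dependency and version are illustrative):
     * ```typescript
     * const m = await faast("aws", funcs, {
     *     packageJson: {
     *         dependencies: { sharp: "^0.32.0" }
     *     }
     * });
     * ```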
*/
packageJson?: string | object;
/**
* Cache installed dependencies from {@link CommonOptions.packageJson}. Only
* applies to AWS. Default: true.
* @remarks
     * If `useDependencyCaching` is `true`, the resulting `node_modules` folder
* is cached in a Lambda Layer with the name `faast-${key}`, where `key` is
* the SHA1 hash of the `packageJson` contents. These cache entries are
* removed by garbage collection, by default after 24h. Using caching
* reduces the need to install and upload dependencies every time a function
* is created. This is important for AWS because it creates an entirely
* separate lambda function to install dependencies remotely, which can
* substantially increase function deployment time.
*
* If `useDependencyCaching` is false, the lambda layer is created with the
* same name as the lambda function, and then is deleted when cleanup is
* run.
*/
useDependencyCaching?: boolean;
/**
* Specify how many days to wait before reclaiming cloud garbage. Default:
* 1.
* @remarks
* Garbage collection only deletes resources after they age beyond a certain
* number of days. This option specifies how many days old a resource needs
* to be before being considered garbage by the collector. Note that this
* setting is not recorded when the resources are created. For example,
* suppose this is the sequence of events:
*
* - Day 0: `faast()` is called with `retentionInDays` set to 5. Then, the
* function crashes (or omits the call to {@link FaastModule.cleanup}).
*
* - Day 1: `faast()` is called with `retentionInDays` set to 1.
*
* In this sequence of events, on Day 0 the garbage collector runs and
* removes resources with age older than 5 days. Then the function leaves
* new garbage behind because it crashed or did not complete cleanup. On Day
* 1, the garbage collector runs and deletes resources at least 1 day old,
* which includes garbage left behind from Day 0 (based on the creation
* timestamp of the resources). This deletion occurs even though retention
* was set to 5 days when resources were created on Day 0.
*
* Note that if `retentionInDays` is set to 0, garbage collection will
* remove all resources, even ones that may be in use by other running faast
* instances. Not recommended.
*
* See {@link CommonOptions.gc}.
*/
retentionInDays?: number;
/**
* Reduce tail latency by retrying invocations that take substantially
* longer than other invocations of the same function. Default: 3.
* @remarks
* faast.js automatically measures the mean and standard deviation (σ) of
* the time taken by invocations of each function. Retries are attempted
* when the time for an invocation exceeds the mean time by a certain
* threshold. `speculativeRetryThreshold` specifies how many multiples of σ
* an invocation needs to exceed the mean for a given function before retry
* is attempted.
*
     * The default threshold is 3. This means a call to a function is retried
* when the time to execute exceeds three standard deviations from the mean
* of all prior executions of the same function.
*
* This feature is experimental.
* @beta
*/
speculativeRetryThreshold?: number;
/**
* Execution time limit for each invocation, in seconds. Default: 60.
* @remarks
* Each provider has a maximum time limit for how long invocations can run
* before being automatically terminated (or frozen). The following are the
* maximum time limits as of February 2019:
*
* - aws:
* {@link https://docs.aws.amazon.com/lambda/latest/dg/limits.html | 15 minutes}
*
* - local: unlimited
*
* Faast.js has a proactive timeout detection feature. It automatically
* attempts to detect when the time limit is about to be reached and
* proactively sends a timeout exception. Faast does this because not all
* providers reliably send timely feedback when timeouts occur, leaving
* developers to look through cloud logs. In general faast.js' timeout will
     * be up to 5s earlier than the timeout specified, in order to give
     * faast.js time to send a timeout message. Proactive timeout detection
* only works with {@link CommonOptions.childProcess} set to `true` (the
* default).
*/
timeout?: number;
/**
* Extra webpack options to use to bundle the code package.
* @remarks
* By default, faast.js uses webpack to bundle the code package. Webpack
* automatically handles finding and bundling dependencies, adding source
* mappings, etc. If you need specialized bundling, use this option to add
* or override the default webpack configuration. The library
* {@link https://github.com/survivejs/webpack-merge | webpack-merge} is
* used to combine configurations.
*
* ```typescript
* const config: webpack.Configuration = merge({
* entry,
* mode: "development",
* output: {
* path: "/",
* filename: outputFilename,
* libraryTarget: "commonjs2"
* },
* target: "node",
* resolveLoader: { modules: [__dirname, `${__dirname}/dist`] },
* node: { global: true, __dirname: false, __filename: false }
* },
* webpackOptions);
* ```
*
* Take care when setting the values of `entry`, `output`, or
* `resolveLoader`. If these options are overwritten, faast.js may fail to
* bundle your code. In particular, setting `entry` to an array value will
* help `webpack-merge` to concatenate its value instead of replacing the
* value that faast.js inserts for you.
*
* Default:
*
* - aws: `{ externals: [new RegExp("^aws-sdk/?")] }`. In the lambda
* environment `"aws-sdk"` is available in the ambient environment and
* does not need to be bundled.
*
* - other providers: `{}`
*
* The `FAAST_PACKAGE_DIR` environment variable can be useful for debugging
* webpack issues.
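     *
     * For example, a sketch that adds an extra external on top of the default
     * configuration (the module name is illustrative):
     * ```typescript
     * const m = await faast("aws", funcs, {
     *     webpackOptions: { externals: [/^fsevents$/] }
     * });
     * ```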
*/
webpackOptions?: webpack.Configuration;
/**
* Check arguments and return values from cloud functions are serializable
* without losing information. Default: true.
* @remarks
* Arguments to cloud functions are automatically serialized with
* `JSON.stringify` with a custom replacer that handles built-in JavaScript
* types such as `Date` and `Buffer`. Return values go through the same
* process. Some JavaScript objects cannot be serialized. By default
* `validateSerialization` will verify that every argument and return value
* can be serialized and deserialized without losing information. A
* `FaastError` will be thrown if faast.js detects a problem according to
* the following procedure:
*
* 1. Serialize arguments and return values with `JSON.stringify` using a
* special `replacer` function.
*
* 2. Deserialize the values with `JSON.parse` with a special `reviver`
* function.
*
* 3. Use
     * {@link https://nodejs.org/api/assert.html#assert_assert_deepstrictequal_actual_expected_message | assert.deepStrictEqual}
* to compare the original object with the deserialized object from step
* 2.
*
* There is some overhead to this process because each argument is
* serialized and deserialized, which can be costly if arguments or return
* values are large.
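     *
     * For example, a sketch that trades this safety check for speed when
     * arguments are large and known to serialize cleanly:
     * ```typescript
     * const m = await faast("aws", funcs, { validateSerialization: false });
     * ```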
*/
validateSerialization?: boolean;
/**
* Debugging output options.
* @internal
*/
debugOptions?: {
[key: string]: boolean;
};
}
/**
* Analyze the cost of a workload across many provider configurations.
* @public
*/
export declare namespace CostAnalyzer {
/**
* An input to {@link CostAnalyzer.analyze}, specifying one
* configuration of faast.js to run against a workload. See
* {@link AwsOptions}.
* @public
*/
export type Configuration = {
provider: "aws";
options: AwsOptions;
};
/**
* Default AWS cost analyzer configurations include all memory sizes for AWS
* Lambda.
* @remarks
* The default AWS cost analyzer configurations include every memory size
* from 128MB to 3008MB in 64MB increments. Each configuration has the
* following settings:
*
* ```typescript
* {
* provider: "aws",
* options: {
* mode: "https",
* memorySize,
* timeout: 300,
* gc: "off",
* childProcess: true
* }
* }
* ```
*
* Use `Array.map` to change or `Array.filter` to remove some of these
* configurations. For example:
*
* ```typescript
     * const configsWithAtLeast1GB = awsConfigurations.filter(
     *     c => (c.options.memorySize ?? 0) >= 1024
     * );
     * const shorterTimeout = awsConfigurations.map(
     *     c => ({ ...c, options: { ...c.options, timeout: 60 } })
     * );
* ```
* @public
*/
const awsConfigurations: Configuration[];
/**
* User-defined custom metrics for a workload. These are automatically
* summarized in the output; see {@link CostAnalyzer.Workload}.
* @public
*/
export type WorkloadAttribute<A extends string> = {
[attr in A]: number;
};
/**
* A user-defined cost analyzer workload for {@link CostAnalyzer.analyze}.
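     * @remarks
     * A sketch of a workload with one custom attribute (`funcs` and
     * `listItems` are hypothetical):
     * ```typescript
     * const workload: CostAnalyzer.Workload<typeof funcs, "itemCount"> = {
     *     funcs,
     *     work: async faastModule => {
     *         const items = await faastModule.functions.listItems();
     *         return { itemCount: items.length };
     *     }
     * };
     * ```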
* @public
*/
export interface Workload<T extends object, A extends string> {
/**
* The imported module that contains the cloud functions to test.
*/
funcs: T;
/**
* A function that executes cloud functions on
* `faastModule.functions.*`. The work function should return `void` if
* there are no custom workload attributes. Otherwise, it should return
* a {@link CostAnalyzer.WorkloadAttribute} object which maps
* user-defined attribute names to numerical values for the workload.
* For example, this might measure bandwidth or some other metric not
         * tracked by faast.js, but is relevant for evaluating the
* cost-performance tradeoff of the configurations analyzed by the cost
* analyzer.
*/
work: (faastModule: FaastModule<T>) => Promise<WorkloadAttribute<A> | void>;
/**
* An array of configurations to run the work function against (see
* {@link CostAnalyzer.Configuration}). For example, each entry in the
* array may specify a provider, memory size, and other options.
* Default: {@link CostAnalyzer.awsConfigurations}.
*/
configurations?: Configuration[];
/**
* Combine {@link CostAnalyzer.WorkloadAttribute} instances returned
* from multiple workload executions (caused by value of
* {@link CostAnalyzer.Workload.repetitions}). The default is a function
* that takes the average of each attribute.
*/
summarize?: (summaries: WorkloadAttribute<A>[]) => WorkloadAttribute<A>;
/**
* Format an attribute value for console output. This is displayed by
* the cost analyzer when all of the repetitions for a configuration
* have completed. The default returns
* `${attribute}:${value.toFixed(1)}`.
*/
format?: (attr: A, value: number) => string;
/**
* Format an attribute value for CSV. The default returns
* `value.toFixed(1)`.
*/
formatCSV?: (attr: A, value: number) => string;
/**
* If true, do not output live results to the console. Can be useful for
* running the cost analyzer as part of automated tests. Default: false.
*/
silent?: boolean;
/**
* The number of repetitions to run the workload for each cost analyzer
* configuration. Higher repetitions help reduce the jitter in the
* results. Repetitions execute in the same FaastModule instance.
* Default: 10.
*/
repetitions?: number;
/**
* The amount of concurrency to allow. Concurrency can arise from
         * multiple repetitions of the same configuration, or concurrent
* executions of different configurations. This concurrency limit
* throttles the total number of concurrent workload executions across
* both of these sources of concurrency. Default: 64.
*/
concurrency?: number;
}
/**
* A cost estimate result for a specific cost analyzer configuration.
* @public
*/
export interface Estimate<A extends string> {
/**
* The cost snapshot for the cost analysis of the specific (workload,
* configuration) combination. See {@link CostSnapshot}.
*/
costSnapshot: CostSnapshot;
/**
         * The workload configuration that was analyzed. See
* {@link CostAnalyzer.Configuration}.
*/
config: Configuration;
/**
* Additional workload metrics returned from the work function. See
* {@link CostAnalyzer.WorkloadAttribute}.
*/
extraMetrics: WorkloadAttribute<A>;
}
/**
* Estimate the cost of a workload using multiple configurations and
* providers.
* @param userWorkload - a {@link CostAnalyzer.Workload} object specifying
* the workload to run and additional parameters.
* @returns A promise for a {@link CostAnalyzer.Result}
* @public
* @remarks
* It can be deceptively difficult to set optimal parameters for AWS Lambda
* and similar services. On the surface there appears to be only one
* parameter: memory size. Choosing more memory also gives more CPU
* performance, but it's unclear how much. It's also unclear where single
* core performance stops getting better. The workload cost analyzer solves
* these problems by making it easy to run cost experiments.
* ```text
* (AWS)
* ┌───────┐
* ┌────▶│ 128MB │
* │ └───────┘
* │ ┌───────┐
* ┌─────────────────┐ ├────▶│ 256MB │
* ┌──────────────┐ │ │ │ └───────┘
* │ workload │───▶│ │ │ ...
* └──────────────┘ │ │ │ ┌───────┐
* │ cost analyzer │─────┼────▶│3008MB │
* ┌──────────────┐ │ │ └───────┘
* │configurations│───▶│ │
* └──────────────┘ │ │
* └─────────────────┘
*
* ```
     * `CostAnalyzer.analyze` is the entry point. It automatically runs this workload
* against multiple configurations in parallel. Then it uses faast.js' cost
* snapshot mechanism to automatically determine the price of running the
* workload with each configuration.
*
* Example:
*
* ```typescript
* // functions.ts
* export function randomNumbers(n: number) {
* let sum = 0;
* for (let i = 0; i < n; i++) {
* sum += Math.random();
* }
* return sum;
* }
*
* // cost-analyzer-example.ts
* import { writeFileSync } from "fs";
* import { CostAnalyzer, FaastModule } from "faastjs";
* import * as funcs from "./functions";
*
* async function work(faastModule: FaastModule<typeof funcs>) {
* await faastModule.functions.randomNumbers(100000000);
* }
*
* async function main() {
* const results = await CostAnalyzer.analyze({ funcs, work });
* writeFileSync("cost.csv", results.csv());
* }
*
* main();
* ```
*
     * Example output (this is printed to `console.log` unless
     * {@link CostAnalyzer.Workload.silent} is `true`):
* ```text
* ✔ aws 128MB queue 15.385s 0.274σ $0.00003921
* ✔ aws 192MB queue 10.024s 0.230σ $0.00003576
* ✔ aws 256MB queue 8.077s 0.204σ $0.00003779
* ▲ ▲ ▲ ▲ ▲ ▲
* │ │ │ │ │ │
* provider │ mode │ stdev average
* │ │ execution estimated
* memory │ time cost
* size │
* average cloud
* execution time
* ```
*
     * The output lists the provider, memory size, mode ({@link CommonOptions.mode}),
     * the average time of a single execution of the workload, the standard
     * deviation (in seconds) of the execution time, and the average estimated
     * cost for a single run of the workload.
*
* The "execution time" referenced here is not wall clock time, but rather
* execution time in the cloud function. The execution time does not include
* any time the workload spends waiting locally. If the workload invokes
* multiple cloud functions, their execution times will be summed even if
* they happen concurrently. This ensures the execution time and cost are
* aligned.
*/
export function analyze<T extends object, A extends string>(userWorkload: Workload<T, A>): Promise<Result<T, A>>;
/**
* Cost analyzer results for each workload and configuration.
* @remarks
* The `estimates` property has the cost estimates for each configuration.
* See {@link CostAnalyzer.Estimate}.
* @public
*/
export class Result<T extends object, A extends string> {
/** The workload analyzed. */
readonly workload: Required<Workload<T, A>>;
/**
* Cost estimates for each configuration of the workload. See
* {@link CostAnalyzer.Estimate}.
*/
readonly estimates: Estimate<A>[];
/** @internal */
constructor(
/** The workload analyzed. */
workload: Required<Workload<T, A>>,
/**
* Cost estimates for each configuration of the workload. See
* {@link CostAnalyzer.Estimate}.
*/
estimates: Estimate<A>[]);
/**
* Comma-separated output of cost analyzer. One line per cost analyzer
* configuration.
* @remarks
* The columns are:
*
* - `memory`: The memory size allocated.
*
* - `cloud`: The cloud provider.
*
* - `mode`: See {@link CommonOptions.mode}.
*
* - `options`: A string summarizing other faast.js options applied to the
* `workload`. See {@link CommonOptions}.
*
* - `completed`: Number of repetitions that successfully completed.
*
* - `errors`: Number of invocations that failed.
*
* - `retries`: Number of retries that were attempted.
*
* - `cost`: The average cost of executing the workload once.
*
* - `executionTime`: the aggregate time spent executing on the provider for
* all cloud function invocations in the workload. This is averaged across
* repetitions.
*
* - `executionTimeStdev`: The standard deviation of `executionTime`.
*
         * - `billedTime`: the same as `executionTime`, except rounded up to the next
* 100ms for each invocation. Usually very close to `executionTime`.
*/
csv(): string;
}
}
/**
* A line item in the cost estimate, including the resource usage metric
* measured and its pricing.
* @public
*/
export declare class CostMetric {
/** The name of the cost metric, e.g. `functionCallDuration` */
readonly name: string;
/** The price in USD per unit measured. */
readonly pricing: number;
/** The name of the units that pricing is measured in for this metric. */
readonly unit: string;
/** The measured value of the cost metric, in units. */
readonly measured: number;
/**
* The plural form of the unit name. By default the plural form will be the
* name of the unit with "s" appended at the end, unless the last letter is
* capitalized, in which case there is no plural form (e.g. "GB").
*/
readonly unitPlural?: string;
/**
* An optional comment, usually providing a link to the provider's pricing
* page and other data.
*/
readonly comment?: string;
/**
* True if this cost metric is only for informational purposes (e.g. AWS's
* `logIngestion`) and does not contribute cost.
*/
readonly informationalOnly?: boolean;
/** @internal */
constructor(arg: PropertiesExcept<CostMetric, AnyFunction>);
/**
* The cost contribution of this cost metric. Equal to
* {@link CostMetric.pricing} * {@link CostMetric.measured}.
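     *
     * For example, in the AWS cost snapshot shown in {@link CostSnapshot}, the
     * `functionCallDuration` metric is priced at $0.00002813 per second with
     * 0.6 seconds measured, so its cost is 0.00002813 * 0.6, about $0.00001688.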
*/
cost(): number;
/**
* Return a string with the cost estimate for this metric, omitting
* comments.
*/
describeCostOnly(): string;
/** Describe this cost metric, including comments. */
toString(): string;
}
/**
* A summary of the costs incurred by a faast.js module at a point in time.
* Output of {@link FaastModule.costSnapshot}.
* @remarks
* Cost information provided by faast.js is an estimate. It is derived from
* internal faast.js measurements and not by consulting data provided by your
* cloud provider.
*
* **Faast.js does not guarantee the accuracy of cost estimates.**
*
* **Use at your own risk.**
*
* Example using AWS:
* ```typescript
* const faastModule = await faast("aws", m);
* try {
* // Invoke faastModule.functions.*
* } finally {
* await faastModule.cleanup();
* console.log(`Cost estimate:`);
* console.log(`${await faastModule.costSnapshot()}`);
* }
* ```
*
* AWS example output:
* ```text
* Cost estimate:
* functionCallDuration $0.00002813/second 0.6 second $0.00001688 68.4% [1]
* sqs $0.00000040/request 9 requests $0.00000360 14.6% [2]
* sns $0.00000050/request 5 requests $0.00000250 10.1% [3]
* functionCallRequests $0.00000020/request 5 requests $0.00000100 4.1% [4]
* outboundDataTransfer $0.09000000/GB 0.00000769 GB $0.00000069 2.8% [5]
* logIngestion $0.50000000/GB 0 GB $0 0.0% [6]
* ---------------------------------------------------------------------------------------
* $0.00002467 (USD)
*
* * Estimated using highest pricing tier for each service. Limitations apply.
* ** Does not account for free tier.
* [1]: https://aws.amazon.com/lambda/pricing (rate = 0.00001667/(GB*second) * 1.6875 GB = 0.00002813/second)
* [2]: https://aws.amazon.com/sqs/pricing
* [3]: https://aws.amazon.com/sns/pricing
* [4]: https://aws.amazon.com/lambda/pricing
* [5]: https://aws.amazon.com/ec2/pricing/on-demand/#Data_Transfer
* [6]: https://aws.amazon.com/cloudwatch/pricing/ - Log ingestion costs not currently included.
* ```
*
* A cost snapshot contains several {@link CostMetric} values. Each `CostMetric`
* summarizes one component of the overall cost of executing the functions so
* far. Some cost metrics are common to all faast providers, and other metrics
* are provider-specific. The common metrics are:
*
* - `functionCallDuration`: the estimated billed CPU time (rounded to the next
* 100ms) consumed by completed cloud function calls. This is the metric that
* usually dominates cost.
*
* - `functionCallRequests`: the number of invocation requests made. Most
* providers charge for each invocation.
*
* Provider-specific metrics vary. For example, AWS has the following additional
* metrics:
*
* - `sqs`: AWS Simple Queueing Service. This metric captures the number of
* queue requests made to insert and retrieve queued results (each 64kb chunk
* is counted as an additional request). SQS is used even if
* {@link CommonOptions.mode} is not set to `"queue"`, because it is necessary
* for monitoring cloud function invocations.
*
* - `sns`: AWS Simple Notification Service. SNS is used to invoke Lambda
* functions when {@link CommonOptions.mode} is `"queue"`.
*
* - `outboundDataTransfer`: an estimate of the network data transferred out
* from the cloud provider for this faast.js module. This estimate only counts
* data returned from cloud function invocations and infrastructure that
* faast.js sets up. It does not count any outbound data sent by your cloud
* functions that are not known to faast.js. Note that if you run faast.js on
* EC2 in the same region (see {@link AwsOptions.region}), then the data
* transfer costs will be zero (however, the cost snapshot will not include
* EC2 costs). Also note that if your cloud function transfers data from/to S3
* buckets in the same region, there is no cost as long as that data is not
* returned from the function.
*
* - `logIngestion`: this cost metric is always zero for AWS. It is present to
* remind the user that AWS charges for log data ingested by CloudWatch Logs
* that are not measured by faast.js. Log entries may arrive significantly
* after function execution completes, and there is no way for faast.js to
* know exactly how long to wait, therefore it does not attempt to measure
* this cost. In practice, if your cloud functions do not perform extensive
* logging on all invocations, log ingestion costs from faast.js are likely to
* be low or fall within the free tier.
*
* The Local provider has no extra metrics.
*
* Prices are retrieved dynamically from AWS and cached locally.
* Cached prices expire after 24h. For each cost metric, faast.js uses the
* highest price tier to compute estimated pricing.
*
* Cost estimates do not take free tiers into account.
* @public
*/
export declare class CostSnapshot {
/** The {@link Provider}, e.g. "aws" */
readonly provider: string;
/**
* The options used to initialize the faast.js module where this cost
* snapshot was generated.
*/
readonly options: CommonOptions | AwsOptions;
/** The function statistics that were used to compute prices. */
readonly stats: FunctionStats;
/**
* The cost metric components for this cost snapshot. See
* {@link CostMetric}.
*/
readonly costMetrics: CostMetric[];
/** @internal */
constructor(
/** The {@link Provider}, e