import { StreamEventSource, BaseStreamEventSourceProps } from './stream';
import { ISecurityGroup, IVpc, SubnetSelection } from '../../aws-ec2';
import { IKey } from '../../aws-kms';
import * as lambda from '../../aws-lambda';
import { ISchemaRegistry } from '../../aws-lambda/lib/schema-registry';
import * as secretsmanager from '../../aws-secretsmanager';
import { Duration } from '../../core';
/**
* Properties for a Kafka event source
*/
export interface KafkaEventSourceProps extends BaseStreamEventSourceProps {
/**
* The Kafka topic to subscribe to
*/
readonly topic: string;
/**
* The secret with the Kafka credentials. See https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html for details.
* This field is required if your Kafka brokers are accessed over the internet.
*
* @default - none
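*
* @example
* // A minimal sketch (hypothetical secret name): reference an existing
* // Secrets Manager secret that stores the SASL/SCRAM credentials.
* import { Stack } from 'aws-cdk-lib';
* import * as secretsmanager from 'aws-cdk-lib/aws-secretsmanager';
* declare const stack: Stack;
* const kafkaCredentials = secretsmanager.Secret.fromSecretNameV2(stack, 'KafkaCredentials', 'my-kafka-secret');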
*/
readonly secret?: secretsmanager.ISecret;
/**
* The identifier for the Kafka consumer group to join. The consumer group ID must be unique among all your Kafka
* event sources. After creating a Kafka event source mapping with the consumer group ID specified, you cannot
* update this value. The value must have a length between 1 and 200 and match the pattern '[a-zA-Z0-9-\/*:_+=.@-]*'.
* @see https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html#services-msk-consumer-group-id
*
* @default - none
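*
* @example
* // A minimal sketch (hypothetical ID) satisfying the length and pattern constraints above
* const consumerGroupId = 'my-consumer-group-1';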
*/
readonly consumerGroupId?: string;
/**
* Add filter criteria to the event source.
* @see https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventfiltering.html
*
* @default - none
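*
* @example
* // A minimal sketch (assumed field names): only deliver records whose JSON value
* // has `status` equal to 'ready', built with the FilterCriteria and FilterRule
* // helpers from aws-cdk-lib/aws-lambda.
* import * as lambda from 'aws-cdk-lib/aws-lambda';
* const filters = [
*   lambda.FilterCriteria.filter({
*     value: { status: lambda.FilterRule.isEqual('ready') },
*   }),
* ];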
*/
readonly filters?: Array<{
[key: string]: any;
}>;
/**
* Add a customer managed KMS key to encrypt the filter criteria.
* By default, Lambda encrypts the filter criteria using AWS managed keys.
* @see https://docs.aws.amazon.com/lambda/latest/dg/invocation-eventfiltering.html
* @see https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk
*
* @default - none
*/
readonly filterEncryption?: IKey;
/**
* Add an on-failure destination for this Kafka event source.
*
* Supported destinations:
* - {@link KafkaDlq} - Send failed records to a Kafka topic
* - SNS topics - Send failed records to an SNS topic
* - SQS queues - Send failed records to an SQS queue
* - S3 buckets - Send failed records to an S3 bucket
*
* @default - discarded records are ignored
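*
* @example
* // A minimal sketch, assuming the S3OnFailureDestination helper from this
* // module: send records that exhaust their retries to an S3 bucket.
* import { S3OnFailureDestination } from 'aws-cdk-lib/aws-lambda-event-sources';
* import { IBucket } from 'aws-cdk-lib/aws-s3';
* declare const bucket: IBucket;
* const onFailure = new S3OnFailureDestination(bucket);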
*/
readonly onFailure?: lambda.IEventSourceDlq;
/**
* The time from which to start reading, in Unix time seconds.
*
* @default - no timestamp
*/
readonly startingPositionTimestamp?: number;
/**
* Specific configuration settings for a Kafka schema registry.
*
* @default - none
*/
readonly schemaRegistryConfig?: ISchemaRegistry;
/**
* If the function returns an error, split the batch in two and retry.
*
* @default false
*/
readonly bisectBatchOnError?: boolean;
/**
* The maximum age of a record that Lambda sends to a function for processing.
*
* The default value is -1, which sets the maximum age to infinite.
* When the value is set to infinite, Lambda never discards old records.
* Records are valid until they expire in the event source.
*
* @default -1
*/
readonly maxRecordAge?: Duration;
/**
* Maximum number of retry attempts.
*
* Set to -1 for infinite retries (until the record expires in the event source).
*
* @default -1 (infinite retries)
*/
readonly retryAttempts?: number;
/**
* Allow functions to return partially successful responses for a batch of records.
*
* @default false
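*
* @example
* // A minimal sketch: when enabled, the handler reports partial batch failures
* // by returning { batchItemFailures: [{ itemIdentifier: '...' }] }, so only
* // the failed records are retried rather than the whole batch.
* const reportBatchItemFailures = true;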
*/
readonly reportBatchItemFailures?: boolean;
}
/**
* Properties for an MSK event source
*/
export interface ManagedKafkaEventSourceProps extends KafkaEventSourceProps {
/**
* The ARN of the MSK cluster to consume from
*/
readonly clusterArn: string;
}
/**
* The authentication method to use with SelfManagedKafkaEventSource
*/
export declare enum AuthenticationMethod {
/**
* SASL_SCRAM_512_AUTH authentication method for your Kafka cluster
*/
SASL_SCRAM_512_AUTH = "SASL_SCRAM_512_AUTH",
/**
* SASL_SCRAM_256_AUTH authentication method for your Kafka cluster
*/
SASL_SCRAM_256_AUTH = "SASL_SCRAM_256_AUTH",
/**
* BASIC_AUTH (SASL/PLAIN) authentication method for your Kafka cluster
*/
BASIC_AUTH = "BASIC_AUTH",
/**
* CLIENT_CERTIFICATE_TLS_AUTH (mTLS) authentication method for your Kafka cluster
*/
CLIENT_CERTIFICATE_TLS_AUTH = "CLIENT_CERTIFICATE_TLS_AUTH"
}
/**
* Properties for a self-managed Kafka cluster event source.
* If your Kafka cluster is only reachable from within a VPC, make sure to configure `vpc`, `vpcSubnets` and `securityGroup`, as shown in the example below.
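*
* @example
* // A minimal sketch (hypothetical broker addresses and names): brokers that are
* // only reachable from inside a VPC need vpc, vpcSubnets and securityGroup set.
* import { SelfManagedKafkaEventSource } from 'aws-cdk-lib/aws-lambda-event-sources';
* import { StartingPosition, Function } from 'aws-cdk-lib/aws-lambda';
* import { ISecurityGroup, IVpc, SubnetType } from 'aws-cdk-lib/aws-ec2';
* import { ISecret } from 'aws-cdk-lib/aws-secretsmanager';
* declare const myFunction: Function;
* declare const vpc: IVpc;
* declare const securityGroup: ISecurityGroup;
* declare const kafkaCredentials: ISecret;
* myFunction.addEventSource(new SelfManagedKafkaEventSource({
*   bootstrapServers: ['kafka-broker1.internal.example.com:9092'],
*   topic: 'orders-topic',
*   secret: kafkaCredentials,
*   startingPosition: StartingPosition.LATEST,
*   vpc,
*   vpcSubnets: { subnetType: SubnetType.PRIVATE_WITH_EGRESS },
*   securityGroup,
* }));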
*/
export interface SelfManagedKafkaEventSourceProps extends KafkaEventSourceProps {
/**
* The list of host and port pairs that are the addresses of the Kafka brokers in a "bootstrap" Kafka cluster that
* a Kafka client connects to initially to bootstrap itself. They are in the format `abc.xyz.com:xxxx`.
*/
readonly bootstrapServers: string[];
/**
* If your Kafka brokers are only reachable from within a VPC, provide the VPC here
*
* @default - none
*/
readonly vpc?: IVpc;
/**
* If your Kafka brokers are only reachable from within a VPC, provide the subnet selection here
*
* @default - none, required if setting vpc
*/
readonly vpcSubnets?: SubnetSelection;
/**
* If your Kafka brokers are only reachable from within a VPC, provide the security group here
*
* @default - none, required if setting vpc
*/
readonly securityGroup?: ISecurityGroup;
/**
* The authentication method for your Kafka cluster
*
* @default AuthenticationMethod.SASL_SCRAM_512_AUTH
*/
readonly authenticationMethod?: AuthenticationMethod;
/**
* The secret with the root CA certificate used by your Kafka brokers for TLS encryption.
* This field is required if your Kafka brokers use certificates signed by a private CA.
*
* @default - none
*/
readonly rootCACertificate?: secretsmanager.ISecret;
}
/**
* Use an MSK cluster as a streaming source for AWS Lambda
*
* @example
* import { ManagedKafkaEventSource } from 'aws-cdk-lib/aws-lambda-event-sources';
* import { StartingPosition, Function } from 'aws-cdk-lib/aws-lambda';
*
* // With provisioned pollers and poller group for cost optimization
* declare const myFunction: Function;
* myFunction.addEventSource(new ManagedKafkaEventSource({
* clusterArn: 'arn:aws:kafka:us-east-1:123456789012:cluster/my-cluster/abcd1234-abcd-cafe-abab-9876543210ab-4',
* topic: 'orders-topic',
* startingPosition: StartingPosition.LATEST,
* provisionedPollerConfig: {
* minimumPollers: 2,
* maximumPollers: 10,
* pollerGroupName: 'shared-kafka-pollers',
* },
* }));
*/
export declare class ManagedKafkaEventSource extends StreamEventSource {
private innerProps;
private _eventSourceMappingId?;
private _eventSourceMappingArn?;
constructor(props: ManagedKafkaEventSourceProps);
bind(target: lambda.IFunction): void;
private sourceAccessConfigurations;
/**
* The identifier for this EventSourceMapping
*/
get eventSourceMappingId(): string;
/**
* The ARN for this EventSourceMapping
*/
get eventSourceMappingArn(): string;
}
/**
* Use a self-hosted Kafka installation as a streaming source for AWS Lambda.
*
* @example
* import { SelfManagedKafkaEventSource, AuthenticationMethod } from 'aws-cdk-lib/aws-lambda-event-sources';
* import { StartingPosition, Function } from 'aws-cdk-lib/aws-lambda';
* import { ISecret } from 'aws-cdk-lib/aws-secretsmanager';
*
* // With provisioned pollers and poller group for cost optimization
* declare const myFunction: Function;
* declare const kafkaCredentials: ISecret;
* myFunction.addEventSource(new SelfManagedKafkaEventSource({
* bootstrapServers: ['kafka-broker1.example.com:9092', 'kafka-broker2.example.com:9092'],
* topic: 'events-topic',
* secret: kafkaCredentials,
* startingPosition: StartingPosition.LATEST,
* authenticationMethod: AuthenticationMethod.SASL_SCRAM_512_AUTH,
* provisionedPollerConfig: {
* minimumPollers: 1,
* maximumPollers: 8,
* pollerGroupName: 'self-managed-kafka-group', // Group pollers to reduce costs
* },
* }));
*/
export declare class SelfManagedKafkaEventSource extends StreamEventSource {
private innerProps;
constructor(props: SelfManagedKafkaEventSourceProps);
bind(target: lambda.IFunction): void;
private mappingId;
private sourceAccessConfigurations;
}