@aws-lambda-powertools/batch

The batch processing package for the Powertools for AWS Lambda (TypeScript) library.
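For context, the class in the file below is the foundation of the package's public BatchProcessor. A minimal sketch of typical usage, assuming the BatchProcessor, EventType, and processPartialResponse exports documented for this package and the SQS types from @types/aws-lambda:

import { BatchProcessor, EventType, processPartialResponse } from '@aws-lambda-powertools/batch';
import type { Context, SQSBatchResponse, SQSEvent, SQSRecord } from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);

// Throwing inside the record handler marks that record as a partial failure
const recordHandler = async (record: SQSRecord): Promise<void> => {
  const payload = JSON.parse(record.body);
  console.log(payload);
};

export const handler = async (event: SQSEvent, context: Context): Promise<SQSBatchResponse> => {
  // Returns { batchItemFailures: [...] } so Lambda only retries the failed records
  return processPartialResponse(event, recordHandler, processor, { context });
};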

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.BasePartialBatchProcessor = void 0; const env_1 = require("@aws-lambda-powertools/commons/utils/env"); const BasePartialProcessor_js_1 = require("./BasePartialProcessor.js"); const constants_js_1 = require("./constants.js"); const errors_js_1 = require("./errors.js"); /** * Base abstract class for processing batch records with partial failure handling * * This class extends the {@link BasePartialProcessor} class and adds additional * functionality to handle batch processing. Specifically, it provides methods * to collect failed records and build the partial failure response. */ class BasePartialBatchProcessor extends BasePartialProcessor_js_1.BasePartialProcessor { /** * Mapping of event types to their respective failure collectors * * Each service expects a different format for partial failure reporting, * this mapping ensures that the correct format is used for each event type. */ COLLECTOR_MAPPING; /** * Response to be returned after processing */ batchResponse; /** * Type of event that the processor is handling */ eventType; /** * A logger instance to be used for logging debug, warning, and error messages. * * When no logger is provided, we'll only log warnings and errors using the global `console` object. */ logger; /** * The configuration options for the parser integration */ parserConfig; /** * Initializes base batch processing class * * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) */ constructor(eventType, parserConfig) { super(); this.eventType = eventType; this.batchResponse = constants_js_1.DEFAULT_RESPONSE; this.COLLECTOR_MAPPING = { [constants_js_1.EventType.SQS]: () => this.collectSqsFailures(), [constants_js_1.EventType.KinesisDataStreams]: () => this.collectKinesisFailures(), [constants_js_1.EventType.DynamoDBStreams]: () => this.collectDynamoDBFailures(), }; this.parserConfig = parserConfig; const alcLogLevel = (0, env_1.getStringFromEnv)({ key: 'AWS_LAMBDA_LOG_LEVEL', defaultValue: '', }); this.logger = parserConfig?.logger ?? { debug: alcLogLevel === 'DEBUG' ? console.debug : () => undefined, error: console.error, warn: console.warn, }; } /** * Clean up logic to be run after processing a batch * * If the entire batch failed this method will throw a {@link FullBatchFailureError | `FullBatchFailureError`} with the list of * errors that occurred during processing, unless the `throwOnFullBatchFailure` option is set to `false`. * * Otherwise, it will build the partial failure response based on the event type. */ clean() { if (!this.hasMessagesToReport()) { return; } if (this.options?.throwOnFullBatchFailure !== false && this.entireBatchFailed()) { throw new errors_js_1.FullBatchFailureError(this.errors); } const messages = this.getMessagesToReport(); this.batchResponse = { batchItemFailures: messages }; } /** * Collect the identifiers of failed items for a DynamoDB stream * * The failures are collected based on the sequence number of the record * and formatted as a list of objects with the key `itemIdentifier` as * expected by the service. 
*/ collectDynamoDBFailures() { const failures = []; for (const msg of this.failureMessages) { const msgId = msg.dynamodb?.SequenceNumber; if (msgId) { failures.push({ itemIdentifier: msgId }); } } return failures; } /** * Collect identifiers of failed items for a Kinesis batch * * The failures are collected based on the sequence number of the record * and formatted as a list of objects with the key `itemIdentifier` as * expected by the service. */ collectKinesisFailures() { const failures = []; for (const msg of this.failureMessages) { const msgId = msg.kinesis.sequenceNumber; failures.push({ itemIdentifier: msgId }); } return failures; } /** * Collect identifiers of failed items for an SQS batch * * The failures are collected based on the message ID of the record * and formatted as a list of objects with the key `itemIdentifier` as * expected by the service. */ collectSqsFailures() { const failures = []; for (const msg of this.failureMessages) { const msgId = msg.messageId; failures.push({ itemIdentifier: msgId }); } return failures; } /** * Determine if the entire batch failed * * If the number of errors is equal to the number of records, then the * entire batch failed and this method will return `true`. */ entireBatchFailed() { return this.errors.length === this.records.length; } /** * Collect identifiers for failed batch items * * The method will call the appropriate collector based on the event type * and return the list of failed items. */ getMessagesToReport() { return this.COLLECTOR_MAPPING[this.eventType](); } /** * Determine if there are any failed records to report * * If there are no failed records, then the batch was successful * and this method will return `false`. */ hasMessagesToReport() { return this.failureMessages.length !== 0; } /** * Set up the processor with the initial state ready for processing */ prepare() { this.successMessages.length = 0; this.failureMessages.length = 0; this.errors.length = 0; this.batchResponse = constants_js_1.DEFAULT_RESPONSE; } /** * Get the response from the batch processing */ response() { return this.batchResponse; } /** * Forward a record to the appropriate batch type * * Based on the event type that the processor was initialized with, this method * will cast the record to the appropriate batch type handler. * * @param record The record to be processed * @param eventType The type of event to process */ toBatchType(record, eventType) { return constants_js_1.DATA_CLASS_MAPPING[eventType](record); } } exports.BasePartialBatchProcessor = BasePartialBatchProcessor;
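
To show how the methods above fit together, here is a sketch that drives a processor manually instead of going through processPartialResponse. It assumes the register() and process() methods inherited from BasePartialProcessor (defined in a sibling file, not shown here); process() runs prepare() before handling each record and clean() afterwards, and response() returns the batchItemFailures payload built by the collector for the configured event type.

import { BatchProcessor, EventType } from '@aws-lambda-powertools/batch';
import type { Context, SQSBatchResponse, SQSEvent, SQSRecord } from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);

const recordHandler = async (record: SQSRecord): Promise<void> => {
  if (!record.body) {
    // Failed records end up in failureMessages and are reported back to Lambda
    throw new Error('No payload in record');
  }
};

export const handler = async (event: SQSEvent, context: Context): Promise<SQSBatchResponse> => {
  // register() and process() are inherited from BasePartialProcessor (assumed here);
  // process() calls prepare(), invokes the handler per record, then clean()
  processor.register(event.Records, recordHandler, { context });
  await processor.process();

  // response() returns { batchItemFailures: [{ itemIdentifier: '...' }, ...] },
  // assembled by collectSqsFailures() for the SQS event type
  return processor.response();
};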