@aws-lambda-powertools/batch
The batch processing package for the Powertools for AWS Lambda (TypeScript) library.
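A minimal usage sketch (TypeScript, assumed names) of how the package is typically wired into an SQS-triggered handler; `recordHandler` and the payload shape are illustrative, not part of this module:

import { BatchProcessor, EventType, processPartialResponse } from '@aws-lambda-powertools/batch';
import type { Context, SQSEvent, SQSRecord } from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);

// Illustrative per-record handler: throwing here marks the record as a partial batch failure
const recordHandler = async (record: SQSRecord): Promise<void> => {
  const payload = JSON.parse(record.body);
  console.debug(payload);
};

export const handler = async (event: SQSEvent, context: Context) =>
  processPartialResponse(event, recordHandler, processor, { context });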
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.parser = void 0;
const constants_js_1 = require("./constants.js");
const errors_js_1 = require("./errors.js");
/**
* Extend the schema according to the event type passed.
*
* If a `transformer` is specified, the inner schema is first wrapped with the matching
* opinionated transformer (`json`, `base64`, or `unmarshall`); otherwise it is used as-is.
*
* The caller has already verified at runtime, via `StandardSchemaV1['~standard'].vendor`,
* that the inner schema is a Zod schema.
*
* @param options - The options for creating the extended schema
* @param options.eventType - The type of event to process (SQS, Kinesis, DynamoDB)
* @param options.innerSchema - The Standard Schema to be used for parsing. To avoid forcing a direct dependency on Zod, it is typed as `unknown`, which is not ideal but necessary.
* @param options.transformer - The optional transformer to apply to the inner schema (`json`, `base64`, or `unmarshall`)
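*
* @example
* // Illustrative internal call, assuming a Zod inner schema and a JSON-stringified SQS body
* const recordSchema = await createExtendedSchema({
*   eventType: EventType.SQS,
*   innerSchema: z.object({ orderId: z.string() }),
*   transformer: 'json',
* });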
*/
const createExtendedSchema = async (options) => {
const { eventType, innerSchema, transformer } = options;
let schema = innerSchema;
switch (transformer) {
case 'json': {
const { JSONStringified } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/helpers')));
// @ts-expect-error - we know it's a Zod schema due to the runtime check earlier
schema = JSONStringified(innerSchema);
break;
}
case 'base64': {
const { Base64Encoded } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/helpers')));
// @ts-expect-error - we know it's a Zod schema due to the runtime check earlier
schema = Base64Encoded(innerSchema);
break;
}
case 'unmarshall': {
const { DynamoDBMarshalled } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/helpers/dynamodb')));
// @ts-expect-error - we know it's a Zod schema due to the runtime check earlier
schema = DynamoDBMarshalled(innerSchema);
break;
}
}
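// Wrap the (possibly transformed) schema in the record schema that matches the event source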
if (eventType === constants_js_1.EventType.SQS) {
const { SqsRecordSchema } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/schemas/sqs')));
return SqsRecordSchema.extend({
body: schema,
});
}
if (eventType === constants_js_1.EventType.KinesisDataStreams) {
const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/schemas/kinesis')));
return KinesisDataStreamRecord.extend({
kinesis: KinesisDataStreamRecordPayload.extend({
data: schema,
}),
});
}
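// The remaining event type is DynamoDB Streams: apply the schema to both OldImage and NewImage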
const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser/schemas/dynamodb')));
return DynamoDBStreamRecord.extend({
dynamodb: DynamoDBStreamChangeRecordBase.extend({
OldImage: schema,
NewImage: schema,
}),
});
};
/**
* Parse the record with the given schema and return the parsed data,
* or throw a `ParsingError` if parsing fails.
*
* @param record - The record to be parsed
* @param schema - The modified schema to parse with
* @param logger - A logger instance for logging
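*
* @example
* // On failure, issue paths and messages are aggregated into a single ParsingError message,
* // e.g. "body.orderId: Required; body.total: Expected number, received string" (illustrative values)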
*/
const parseWithErrorHandling = async (record, schema, logger) => {
const { parse } = await Promise.resolve().then(() => __importStar(require('@aws-lambda-powertools/parser')));
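// The fourth argument requests safe parsing, so failures come back as a result with issues instead of being thrown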
const result = parse(record, undefined, schema, true);
if (result.success) {
return result.data;
}
const issues = result.error.cause;
const errorMessage = issues
.map((issue) => `${issue.path?.join('.')}: ${issue.message}`)
.join('; ');
logger.debug(`Failed to parse record: ${errorMessage}`);
throw new errors_js_1.ParsingError(errorMessage);
};
/**
* Parse the record according to the schema and event type passed.
*
* If the passed schema is already an extended (full record) schema,
* it is used directly to parse the record.
*
* The schema-extension path of the parser integration within BatchProcessor relies on Zod
* for schema transformations, while a pre-extended schema can come from any other
* Standard Schema-compatible library.
*
* @param record - The record to be parsed
* @param eventType - The type of event to process
* @param logger - A logger instance for logging
* @param parserConfig - The parser configuration options
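*
* @example
* // Illustrative `parserConfig` shapes: either a full record schema, or an inner schema to extend
* //   { schema: mySqsRecordSchema }
* //   { innerSchema: z.object({ orderId: z.string() }), transformer: 'json' }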
*/
const parser = async (record, eventType, logger, parserConfig) => {
const { schema, innerSchema, transformer } = parserConfig;
// If a full, already extended schema is provided, use it directly to parse the record
if (schema) {
return parseWithErrorHandling(record, schema, logger);
}
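// Otherwise, build an extended record schema from the inner schema for the given event type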
if (innerSchema) {
// Only proceed with schema extension if it's a Zod schema
if (innerSchema['~standard'].vendor !== constants_js_1.SchemaVendor.Zod) {
logger.error('The schema provided is not supported. Only Zod schemas are supported for extension.');
throw new errors_js_1.ParsingError('Unsupported schema type');
}
return parseWithErrorHandling(record, await createExtendedSchema({
eventType,
innerSchema,
...(transformer ? { transformer } : {}),
}), logger);
}
logger.error('There was no schema or innerSchema provided');
throw new errors_js_1.ParsingError('Either schema or innerSchema is required for parsing');
};
exports.parser = parser;