@allma/core-sdk
Version:
Core SDK with shared utilities (logging, auth, S3 utils) for building on the Allma serverless AI orchestration platform.
JavaScript
import { S3Client, GetObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3';
import { ENV_VAR_NAMES } from '@allma/core-types';
import { log_info, log_error, log_warn, log_debug } from './logger.js';
const s3Client = new S3Client({});
const MAX_CONTEXT_DATA_SIZE_BYTES_DEFAULT = 10 * 1024; // 10 KiB default
// Read the offload threshold from the environment, falling back to the
// default when the variable is unset or not a valid integer.
const parsedThreshold = parseInt(process.env[ENV_VAR_NAMES.MAX_CONTEXT_DATA_SIZE_BYTES] ?? '', 10);
const PAYLOAD_OFFLOAD_THRESHOLD_BYTES = Number.isNaN(parsedThreshold)
    ? MAX_CONTEXT_DATA_SIZE_BYTES_DEFAULT
    : parsedThreshold;
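// Example (hypothetical value): an env value of '262144' raises the
// threshold to 256 KiB; when the variable is unset or malformed, the
// 10 KiB default above applies.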
/**
* Fetches the actual data from an S3 pointer.
* Tries to parse the content as JSON. If parsing fails (e.g. for plain text files),
* returns the raw string content.
*/
export async function resolveS3Pointer(s3Pointer, correlationId) {
    log_info('Resolving S3 data pointer', { s3Pointer }, correlationId);
    try {
        const command = new GetObjectCommand({
            Bucket: s3Pointer.bucket,
            Key: s3Pointer.key,
        });
        const { Body } = await s3Client.send(command);
        if (Body) {
            const content = await Body.transformToString();
            try {
                return JSON.parse(content);
            }
            catch (jsonError) {
                // If the content is not valid JSON, return it as a raw string.
                // This supports hydration of text files (e.g. from a FILE_DOWNLOAD step).
                log_debug('S3 content is not JSON, returning as raw string.', { key: s3Pointer.key }, correlationId);
                return content;
            }
        }
        throw new Error('S3 object body for data pointer is empty.');
    }
    catch (e) {
        log_error('Failed to fetch or parse data from S3 pointer', { s3Pointer, error: e.message }, correlationId);
        throw new Error(`Failed to resolve S3 data pointer: ${e.message}`);
    }
}
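/*
 * Usage sketch (hypothetical bucket, key, and correlation ID). A pointer has
 * the shape { bucket, key }, matching what offloadIfLarge below stores under
 * `_s3_output_pointer`:
 *
 *   const data = await resolveS3Pointer(
 *       { bucket: 'my-flow-bucket', key: 'step_outputs/flow-123/step-1_2024-01-01T00:00:00.000Z.json' },
 *       'corr-abc-123'
 *   );
 *   // `data` is the parsed JSON value, or the raw string for non-JSON content.
 */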
/**
* Checks the size of a payload and offloads it to S3 if it exceeds the defined threshold.
* This is a generic utility for any Lambda to use for its return payload.
*
* @param payload The object to potentially offload.
* @param bucketName The S3 bucket to upload to.
* @param keyPrefix A prefix for the S3 key (e.g., 'step_outputs/flow-id/step-id').
* @param correlationId For logging.
* @param thresholdBytes The size threshold to trigger offloading. Defaults to the configured environment variable.
* @returns The original payload if it's small, or an S3OutputPointerWrapper if it was offloaded.
*/
export async function offloadIfLarge(payload, bucketName, keyPrefix, correlationId, thresholdBytes = PAYLOAD_OFFLOAD_THRESHOLD_BYTES) {
    // Nothing to offload for null/undefined (or otherwise falsy) payloads.
    if (!payload)
        return undefined;
    try {
        const payloadString = JSON.stringify(payload);
        const payloadSize = Buffer.byteLength(payloadString, 'utf-8');
        log_debug(`offloadIfLarge for ${bucketName} ${keyPrefix}...`, { thresholdBytes }, correlationId);
        if (payloadSize > thresholdBytes) {
            // Append an ISO timestamp so repeated writes under the same prefix do not collide.
            const s3Key = `${keyPrefix}_${new Date().toISOString()}.json`;
            log_warn(`Payload is large (${payloadSize} bytes). Offloading to S3.`, { s3Key, thresholdBytes }, correlationId);
            await s3Client.send(new PutObjectCommand({
                Bucket: bucketName,
                Key: s3Key,
                Body: payloadString,
                ContentType: 'application/json',
            }));
            const s3Pointer = { bucket: bucketName, key: s3Key };
            return { _s3_output_pointer: s3Pointer }; // Return the pointer wrapper instead of the payload.
        }
        // Payload is small enough; return it unchanged.
        return payload;
    }
    catch (e) {
        log_error(`Failed to offload payload to S3 for key prefix '${keyPrefix}'`, { error: e.message }, correlationId);
        throw new Error(`Failed during S3 offload attempt: ${e.message}`);
    }
}
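/*
 * Usage sketch (hypothetical bucket, prefix, and correlation ID). A payload
 * over the threshold is replaced by a pointer wrapper that resolveS3Pointer
 * above can hydrate later:
 *
 *   const result = await offloadIfLarge(
 *       stepOutput,                     // any JSON-serializable value
 *       'my-flow-bucket',               // hypothetical bucket name
 *       'step_outputs/flow-123/step-1', // key prefix; a timestamp suffix is appended
 *       'corr-abc-123'
 *   );
 *   if (result && result._s3_output_pointer) {
 *       const hydrated = await resolveS3Pointer(result._s3_output_pointer, 'corr-abc-123');
 *   }
 */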