UNPKG

lambda-live-debugger

Version:

Debug Lambda functions locally like it is running in the cloud

123 lines (122 loc) 4.87 kB
import { AwsCredentials } from './awsCredentials.mjs';
import { Configuration } from './configuration.mjs';
import { IoTService } from './ioTService.mjs';
import { Logger } from './logger.mjs';
import { NodeHandler } from './nodeHandler.mjs';

// Max characters of a JSON payload shown in non-verbose log lines.
const PREVIEW_LENGTH = 100;

let ioTServiceConnection;
let topic;

// Function ids currently being processed in observable mode, used to
// drop duplicate events for the same function while one is in flight.
const lambdasProcessingObservableMode = new Set();

/**
 * Connect to the IoT Service
 *
 * Builds the events topic from the configured debugger id and opens the
 * IoT connection, wiring incoming messages to onMessageFromLambda.
 */
async function connect() {
  topic = `${Configuration.config.debuggerId}/events`;
  ioTServiceConnection = await IoTService.connect({
    onMessage: onMessageFromLambda,
    topic,
    region: Configuration.config.region,
    credentialsProvider: AwsCredentials.getCredentialsProvider({
      profile: Configuration.config.profile,
      region: Configuration.config.region,
      role: Configuration.config.role,
    }),
  });
}

/**
 * Build a short single-line preview of a payload for logging.
 *
 * The JSON form is truncated to PREVIEW_LENGTH characters, and an
 * ellipsis is appended only when truncation actually happened
 * (the original code appended '...' to any preview of 50+ characters,
 * even when the payload was shown in full).
 *
 * @param payload value to preview; may be null/undefined
 * @returns {string} preview text, '' for a missing payload
 */
function formatPayloadPreview(payload) {
  if (payload === undefined || payload === null) {
    return '';
  }
  const json = JSON.stringify(payload);
  return json.length > PREVIEW_LENGTH
    ? `${json.substring(0, PREVIEW_LENGTH)}...`
    : json;
}

/**
 * Handle incoming messages from the IoT Service
 *
 * Acknowledges the message, invokes the local Lambda handler, and
 * publishes the result (or error) back unless running in observable
 * mode, where events are observed but responses are not returned.
 *
 * @param message IoT message ({ type, data: { workerId, requestId, functionId, ... } })
 * @throws {Error} when the message type is not 'INVOKE'
 */
async function onMessageFromLambda(message) {
  if (!Configuration.config.observable) {
    // immediately respond to the ping message to confirm the local debugging is alive
    await ioTServiceConnection.publish(
      {
        type: 'PING',
        data: {
          workerId: message.data.workerId,
          requestId: message.data.requestId,
          functionId: message.data.functionId,
        },
      },
      `${topic}/${message.data.workerId}`,
    );
  }
  if (message.type !== 'INVOKE') {
    throw new Error(`Unexpected message type: ${message.type}`);
  }
  try {
    if (Configuration.config.observable) {
      // if we are in observable mode, we don't want to process the same
      // worker at the same time
      if (lambdasProcessingObservableMode.has(message.data.functionId)) {
        return;
      }
      lambdasProcessingObservableMode.add(message.data.functionId);
      // after `interval` elapses (setTimeout delay, i.e. milliseconds),
      // remove the function from the processing list so a new event can
      // be picked up
      if (Configuration.config.interval > 0) {
        setTimeout(() => {
          lambdasProcessingObservableMode.delete(message.data.functionId);
        }, Configuration.config.interval);
      }
    }
    if (Configuration.config.verbose) {
      // fixed: this logs the incoming request (the original mislabeled it 'Response')
      Logger.log(
        `[Function ${message.data.functionId}] Request: `,
        JSON.stringify(message.data, null, 2),
      );
    } else {
      Logger.log(
        `[Function ${message.data.functionId}] Request: ${formatPayloadPreview(message.data)}`,
      );
    }
    const response = await NodeHandler.invokeLambda(message.data);
    if (Configuration.config.verbose) {
      Logger.log(
        `[Function ${message.data.functionId}] Response: `,
        JSON.stringify(response, null, 2),
      );
    } else {
      Logger.log(
        `[Function ${message.data.functionId}] Response: ${formatPayloadPreview(response)}`,
      );
    }
    if (Configuration.config.observable) {
      // if we are in observable mode, mark the worker as processed
      lambdasProcessingObservableMode.delete(message.data.functionId);
    }
    const payload = {
      type: 'SUCCESS',
      data: {
        functionId: message.data.functionId,
        requestId: message.data.requestId,
        workerId: message.data.workerId,
        body: response,
      },
    };
    if (!Configuration.config.observable) {
      await ioTServiceConnection.publish(payload, `${topic}/${message.data.workerId}`);
    }
  } catch (e) {
    Logger.error(`[Function ${message.data.functionId}] Error: `, e);
    const payload = {
      type: 'ERROR',
      data: {
        functionId: message.data.functionId,
        requestId: message.data.requestId,
        workerId: message.data.workerId,
        errorType: e.errorType,
        errorMessage: e.errorMessage,
        trace: e.trace,
      },
    };
    if (!Configuration.config.observable) {
      await ioTServiceConnection.publish(payload, `${topic}/${message.data.workerId}`);
    } else {
      // if we are in Observability mode, mark the worker as processed
      lambdasProcessingObservableMode.delete(message.data.functionId);
    }
  }
}

export const LambdaConnection = {
  connect,
};