UNPKG

@sentry/node

Version:

Sentry Node SDK using OpenTelemetry for performance instrumentation

262 lines (227 loc) 11.6 kB
Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });

const core = require('@sentry/core');
const instrument = require('../../../otel/instrument.js');
const addOriginToSpan = require('../../../utils/addOriginToSpan.js');
const ai_sdk_attributes = require('./ai_sdk_attributes.js');
const constants = require('./constants.js');
const instrumentation = require('./instrumentation.js');

// Registers the OpenTelemetry instrumentation for the `ai` package exactly once.
const instrumentVercelAi = instrument.generateInstrumentOnce(
  constants.INTEGRATION_NAME,
  () => new instrumentation.SentryVercelAiInstrumentation({}),
);

/**
 * Determines if the integration should be forced based on environment and package availability.
 * Returns true if the 'ai' package is available.
 *
 * NOTE(review): relies on the 'Modules' integration, which only reports modules in CJS mode.
 *
 * @param {import('@sentry/core').Client} client - The Sentry client to query.
 * @returns {boolean} True when the 'Modules' integration reports the 'ai' package.
 */
function shouldForceIntegration(client) {
  const modules = client.getIntegrationByName('Modules');
  return !!modules?.getModules?.()?.ai;
}

/**
 * Moves `attributes[oldKey]` to `attributes[newKey]` (deleting the old key)
 * when the old key is present (i.e. neither null nor undefined).
 *
 * @param {Record<string, unknown>} attributes - Span attributes to mutate in place.
 * @param {string} oldKey - AI SDK attribute name to remove.
 * @param {string} newKey - Standardized gen_ai attribute name to write.
 */
function renameAttributeKey(attributes, oldKey, newKey) {
  if (attributes[oldKey] != undefined) {
    attributes[newKey] = attributes[oldKey];
    delete attributes[oldKey];
  }
}

const _vercelAIIntegration = (options = {}) => {
  // Holds the instrumentation instance created in setupOnce; intentionally a
  // separate name from the module-level `instrumentation` require to avoid shadowing.
  let vercelAiInstrumentation;

  return {
    name: constants.INTEGRATION_NAME,
    options,
    setupOnce() {
      vercelAiInstrumentation = instrumentVercelAi();
    },
    afterAllSetup(client) {
      // Attaches the span-start hook (op/name normalization) and the event
      // processor (attribute renaming) to the client.
      function registerProcessors() {
        client.on('spanStart', span => {
          const { data: attributes, description: name } = core.spanToJSON(span);
          if (!name) {
            return;
          }

          // Tool call spans
          // https://ai-sdk.dev/docs/ai-sdk-core/telemetry#tool-call-spans
          if (
            attributes[ai_sdk_attributes.AI_TOOL_CALL_NAME_ATTRIBUTE] &&
            attributes[ai_sdk_attributes.AI_TOOL_CALL_ID_ATTRIBUTE] &&
            name === 'ai.toolCall'
          ) {
            addOriginToSpan.addOriginToSpan(span, 'auto.vercelai.otel');
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.execute_tool');
            span.setAttribute('gen_ai.tool.call.id', attributes[ai_sdk_attributes.AI_TOOL_CALL_ID_ATTRIBUTE]);
            span.setAttribute('gen_ai.tool.name', attributes[ai_sdk_attributes.AI_TOOL_CALL_NAME_ATTRIBUTE]);
            span.updateName(`execute_tool ${attributes[ai_sdk_attributes.AI_TOOL_CALL_NAME_ATTRIBUTE]}`);
            return;
          }

          // The AI and Provider must be defined for generate, stream, and embed spans.
          // The id of the model
          const aiModelId = attributes[ai_sdk_attributes.AI_MODEL_ID_ATTRIBUTE];
          // the provider of the model
          const aiModelProvider = attributes[ai_sdk_attributes.AI_MODEL_PROVIDER_ATTRIBUTE];
          if (typeof aiModelId !== 'string' || typeof aiModelProvider !== 'string' || !aiModelId || !aiModelProvider) {
            return;
          }

          addOriginToSpan.addOriginToSpan(span, 'auto.vercelai.otel');

          const nameWithoutAi = name.replace('ai.', '');
          span.setAttribute('ai.pipeline.name', nameWithoutAi);
          span.updateName(nameWithoutAi);

          // If a Telemetry name is set and it is a pipeline span, use that as the operation name.
          // Pipeline spans are the ones with exactly one '.' in their name (e.g. 'ai.generateText').
          const functionId = attributes[ai_sdk_attributes.AI_TELEMETRY_FUNCTION_ID_ATTRIBUTE];
          if (functionId && typeof functionId === 'string' && name.split('.').length - 1 === 1) {
            span.updateName(`${nameWithoutAi} ${functionId}`);
            span.setAttribute('ai.pipeline.name', functionId);
          }

          if (attributes[ai_sdk_attributes.AI_PROMPT_ATTRIBUTE]) {
            span.setAttribute('gen_ai.prompt', attributes[ai_sdk_attributes.AI_PROMPT_ATTRIBUTE]);
          }
          // Default the response model to the request model when the provider did not report one.
          if (
            attributes[ai_sdk_attributes.AI_MODEL_ID_ATTRIBUTE] &&
            !attributes[ai_sdk_attributes.GEN_AI_RESPONSE_MODEL_ATTRIBUTE]
          ) {
            span.setAttribute(
              ai_sdk_attributes.GEN_AI_RESPONSE_MODEL_ATTRIBUTE,
              attributes[ai_sdk_attributes.AI_MODEL_ID_ATTRIBUTE],
            );
          }
          span.setAttribute('ai.streaming', name.includes('stream'));

          // Generate Spans
          if (name === 'ai.generateText') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_text');
            return;
          }

          if (name === 'ai.generateText.doGenerate') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_text');
            span.updateName(`generate_text ${aiModelId}`);
            return;
          }

          if (name === 'ai.streamText') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_text');
            return;
          }

          if (name === 'ai.streamText.doStream') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_text');
            // Fix: single-space separator — the span name previously contained an embedded
            // newline, inconsistent with every other `<op> <modelId>` span name here.
            span.updateName(`stream_text ${aiModelId}`);
            return;
          }

          if (name === 'ai.generateObject') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.generate_object');
            return;
          }

          if (name === 'ai.generateObject.doGenerate') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_object');
            span.updateName(`generate_object ${aiModelId}`);
            return;
          }

          if (name === 'ai.streamObject') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.stream_object');
            return;
          }

          if (name === 'ai.streamObject.doStream') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_object');
            span.updateName(`stream_object ${aiModelId}`);
            return;
          }

          if (name === 'ai.embed') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed');
            return;
          }

          if (name === 'ai.embed.doEmbed') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed');
            span.updateName(`embed ${aiModelId}`);
            return;
          }

          if (name === 'ai.embedMany') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.pipeline.embed_many');
            return;
          }

          if (name === 'ai.embedMany.doEmbed') {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed_many');
            span.updateName(`embed_many ${aiModelId}`);
            return;
          }

          if (name.startsWith('ai.stream')) {
            span.setAttribute(core.SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run');
            return;
          }
        });

        client.addEventProcessor(event => {
          if (event.type === 'transaction' && event.spans?.length) {
            for (const span of event.spans) {
              const { data: attributes, description: name } = span;

              // Only touch spans this integration created.
              if (!name || span.origin !== 'auto.vercelai.otel') {
                continue;
              }

              // Map AI SDK token usage to the standardized gen_ai token attributes.
              renameAttributeKey(
                attributes,
                ai_sdk_attributes.AI_USAGE_COMPLETION_TOKENS_ATTRIBUTE,
                ai_sdk_attributes.GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
              );
              renameAttributeKey(
                attributes,
                ai_sdk_attributes.AI_USAGE_PROMPT_TOKENS_ATTRIBUTE,
                ai_sdk_attributes.GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
              );

              // Derive the total only when both halves are numeric.
              if (
                typeof attributes[ai_sdk_attributes.GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE] === 'number' &&
                typeof attributes[ai_sdk_attributes.GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE] === 'number'
              ) {
                attributes['gen_ai.usage.total_tokens'] =
                  attributes[ai_sdk_attributes.GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE] +
                  attributes[ai_sdk_attributes.GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE];
              }

              // Rename AI SDK attributes to standardized gen_ai attributes
              renameAttributeKey(attributes, ai_sdk_attributes.AI_PROMPT_MESSAGES_ATTRIBUTE, 'gen_ai.request.messages');
              renameAttributeKey(attributes, ai_sdk_attributes.AI_RESPONSE_TEXT_ATTRIBUTE, 'gen_ai.response.text');
              renameAttributeKey(
                attributes,
                ai_sdk_attributes.AI_RESPONSE_TOOL_CALLS_ATTRIBUTE,
                'gen_ai.response.tool_calls',
              );
              renameAttributeKey(
                attributes,
                ai_sdk_attributes.AI_PROMPT_TOOLS_ATTRIBUTE,
                'gen_ai.request.available_tools',
              );
            }
          }

          return event;
        });
      }

      // Auto-detect if we should force the integration when running with 'ai' package available
      // Note that this can only be detected if the 'Modules' integration is available, and running in CJS mode
      const shouldForce = options.force ?? shouldForceIntegration(client);

      if (shouldForce) {
        registerProcessors();
      } else {
        vercelAiInstrumentation?.callWhenPatched(registerProcessors);
      }
    },
  };
};

/**
 * Adds Sentry tracing instrumentation for the [ai](https://www.npmjs.com/package/ai) library.
 *
 * For more information, see the [`ai` documentation](https://sdk.vercel.ai/docs/ai-sdk-core/telemetry).
 *
 * @example
 * ```javascript
 * const Sentry = require('@sentry/node');
 *
 * Sentry.init({
 *  integrations: [Sentry.vercelAIIntegration()],
 * });
 * ```
 *
 * The integration automatically detects when to force registration in CommonJS environments
 * when the 'ai' package is available. You can still manually set the `force` option if needed.
 *
 * By default this integration adds tracing support to all `ai` function calls. If you need to disable
 * collecting spans for a specific call, you can do so by setting `experimental_telemetry.isEnabled` to
 * `false` in the first argument of the function call.
 *
 * ```javascript
 * const result = await generateText({
 *   model: openai('gpt-4-turbo'),
 *   experimental_telemetry: { isEnabled: false },
 * });
 * ```
 *
 * If you want to collect inputs and outputs for a specific call, you must specifically opt-in to each
 * function call by setting `experimental_telemetry.recordInputs` and `experimental_telemetry.recordOutputs`
 * to `true`.
 *
 * ```javascript
 * const result = await generateText({
 *   model: openai('gpt-4-turbo'),
 *   experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
 * });
 * ```
 */
const vercelAIIntegration = core.defineIntegration(_vercelAIIntegration);

exports.instrumentVercelAi = instrumentVercelAi;
exports.vercelAIIntegration = vercelAIIntegration;
//# sourceMappingURL=index.js.map