@sentry/core

vercel-ai-attributes.js
/**
 * AI SDK Telemetry Attributes
 * Based on https://ai-sdk.dev/docs/ai-sdk-core/telemetry#collected-data
 */

// =============================================================================
// SHARED ATTRIBUTES
// =============================================================================

/**
 * `generateText` function - `ai.generateText` span
 * `streamText` function - `ai.streamText` span
 *
 * The prompt that was used when calling the function
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#generatetext-function
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#streamtext-function
 */
const AI_PROMPT_ATTRIBUTE = 'ai.prompt';

// =============================================================================
// GENERATETEXT FUNCTION - UNIQUE ATTRIBUTES
// =============================================================================

/**
 * `generateText` function - `ai.generateText` span
 *
 * The text that was generated
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#generatetext-function
 */
const AI_RESPONSE_TEXT_ATTRIBUTE = 'ai.response.text';

/**
 * `generateText` function - `ai.generateText` span
 *
 * The tool calls that were made as part of the generation (stringified JSON)
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#generatetext-function
 */
const AI_RESPONSE_TOOL_CALLS_ATTRIBUTE = 'ai.response.toolCalls';

/**
 * `generateText` function - `ai.generateText.doGenerate` span
 *
 * The messages that were passed into the provider
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#generatetext-function
 */
const AI_PROMPT_MESSAGES_ATTRIBUTE = 'ai.prompt.messages';

/**
 * `generateText` function - `ai.generateText.doGenerate` span
 *
 * Array of stringified tool definitions
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#generatetext-function
 */
const AI_PROMPT_TOOLS_ATTRIBUTE = 'ai.prompt.tools';

/**
 * Basic LLM span information
 * Multiple spans
 *
 * The id of the model
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#basic-llm-span-information
 */
const AI_MODEL_ID_ATTRIBUTE = 'ai.model.id';

/**
 * Basic LLM span information
 * Multiple spans
 *
 * The provider of the model
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#basic-llm-span-information
 */
const AI_MODEL_PROVIDER_ATTRIBUTE = 'ai.model.provider';

/**
 * Basic LLM span information
 * Multiple spans
 *
 * The functionId that was set through `telemetry.functionId`
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#basic-llm-span-information
 */
const AI_TELEMETRY_FUNCTION_ID_ATTRIBUTE = 'ai.telemetry.functionId';

/**
 * Basic LLM span information
 * Multiple spans
 *
 * The number of completion tokens that were used
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#basic-llm-span-information
 */
const AI_USAGE_COMPLETION_TOKENS_ATTRIBUTE = 'ai.usage.completionTokens';

/**
 * Basic LLM span information
 * Multiple spans
 *
 * The number of prompt tokens that were used
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#basic-llm-span-information
 */
const AI_USAGE_PROMPT_TOKENS_ATTRIBUTE = 'ai.usage.promptTokens';

/**
 * Semantic Conventions for GenAI operations
 * Individual LLM call spans
 *
 * The model that was used to generate the response
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#call-llm-span-information
 */
const GEN_AI_RESPONSE_MODEL_ATTRIBUTE = 'gen_ai.response.model';

/**
 * Semantic Conventions for GenAI operations
 * Individual LLM call spans
 *
 * The number of prompt tokens that were used
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#call-llm-span-information
 */
const GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE = 'gen_ai.usage.input_tokens';

/**
 * Semantic Conventions for GenAI operations
 * Individual LLM call spans
 *
 * The number of completion tokens that were used
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#call-llm-span-information
 */
const GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE = 'gen_ai.usage.output_tokens';

// =============================================================================
// TOOL CALL SPANS
// =============================================================================

/**
 * Tool call spans
 * `ai.toolCall` span
 *
 * The name of the tool
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#tool-call-spans
 */
const AI_TOOL_CALL_NAME_ATTRIBUTE = 'ai.toolCall.name';

/**
 * Tool call spans
 * `ai.toolCall` span
 *
 * The id of the tool call
 * @see https://ai-sdk.dev/docs/ai-sdk-core/telemetry#tool-call-spans
 */
const AI_TOOL_CALL_ID_ATTRIBUTE = 'ai.toolCall.id';

export { AI_MODEL_ID_ATTRIBUTE, AI_MODEL_PROVIDER_ATTRIBUTE, AI_PROMPT_ATTRIBUTE, AI_PROMPT_MESSAGES_ATTRIBUTE, AI_PROMPT_TOOLS_ATTRIBUTE, AI_RESPONSE_TEXT_ATTRIBUTE, AI_RESPONSE_TOOL_CALLS_ATTRIBUTE, AI_TELEMETRY_FUNCTION_ID_ATTRIBUTE, AI_TOOL_CALL_ID_ATTRIBUTE, AI_TOOL_CALL_NAME_ATTRIBUTE, AI_USAGE_COMPLETION_TOKENS_ATTRIBUTE, AI_USAGE_PROMPT_TOKENS_ATTRIBUTE, GEN_AI_RESPONSE_MODEL_ATTRIBUTE, GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE, GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE };
//# sourceMappingURL=vercel-ai-attributes.js.map
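
For context, the keys above are the attribute names that the Vercel AI SDK records on its telemetry spans (normally emitted automatically when telemetry is enabled on an AI SDK call) and that Sentry reads when enriching AI spans. The snippet below is a minimal, illustrative sketch of attaching the same keys to a manually created span. It assumes `@sentry/node` v8+ (for `Sentry.startSpan` and `span.setAttributes`); `callMyModel` is a hypothetical stand-in for a real LLM client call, and the model id/provider values are example data. None of this is part of the module shown above.

// Illustrative usage sketch (not part of this module).
import * as Sentry from '@sentry/node';

async function generateHaiku() {
  return Sentry.startSpan({ name: 'ai.generateText' }, async span => {
    const prompt = 'Write a haiku about telemetry';

    // Hypothetical helper standing in for an actual LLM client call.
    const response = await callMyModel(prompt);

    // The string literals mirror the constants exported above.
    span.setAttributes({
      'ai.model.id': 'gpt-4o-mini',                                  // AI_MODEL_ID_ATTRIBUTE (example value)
      'ai.model.provider': 'openai',                                 // AI_MODEL_PROVIDER_ATTRIBUTE (example value)
      'ai.prompt': prompt,                                           // AI_PROMPT_ATTRIBUTE
      'ai.response.text': response.text,                             // AI_RESPONSE_TEXT_ATTRIBUTE
      'ai.usage.promptTokens': response.usage.promptTokens,          // AI_USAGE_PROMPT_TOKENS_ATTRIBUTE
      'ai.usage.completionTokens': response.usage.completionTokens,  // AI_USAGE_COMPLETION_TOKENS_ATTRIBUTE
    });

    return response.text;
  });
}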