axiom
Version: 0.13.0
Axiom AI SDK provides an API to wrap your AI calls with observability instrumentation.
1,565 lines (1,546 loc) • 56.5 kB
JavaScript
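/*
 * Illustrative usage sketch (not part of the shipped bundle). Based on the
 * "./ai" export map below, applications import this module as `axiom/ai` and
 * use the helpers defined further down in this file: initAxiomAI(),
 * wrapAISDKModel() and withSpan(). The OpenAI provider, the model id and the
 * `generateText` call are assumptions for the example, not requirements.
 *
 *   import { trace } from "@opentelemetry/api";
 *   import { initAxiomAI, wrapAISDKModel, withSpan } from "axiom/ai";
 *   import { openai } from "@ai-sdk/openai";
 *   import { generateText } from "ai";
 *
 *   // Register the tracer once, after the OpenTelemetry SDK has started.
 *   initAxiomAI({ tracer: trace.getTracer("my-app") });
 *
 *   // Wrap any Vercel AI SDK model so its calls emit gen_ai spans.
 *   const model = wrapAISDKModel(openai("gpt-4o-mini"));
 *
 *   // Group the call under a capability/step pair (recorded as span attributes).
 *   const { text } = await withSpan({ capability: "support", step: "draft-reply" }, () =>
 *     generateText({ model, prompt: "Hello!" })
 *   );
 */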
import {
Attr,
SCHEMA_BASE_URL,
SCHEMA_VERSION
} from "./chunk-EFEYUIIG.js";
import {
__publicField
} from "./chunk-KEXKKQVW.js";
// src/otel/initAxiomAI.ts
import { trace } from "@opentelemetry/api";
// package.json
var package_default = {
name: "axiom",
version: "0.13.0",
type: "module",
author: "Axiom, Inc.",
contributors: [
"Islam Shehata <islam@axiom.co>",
"Chris Ehrlich <chris@axiom.co>",
"Gabriel de Andrade <gabriel@axiom.co>"
],
scripts: {
dev: "tsup --watch",
build: "tsup && chmod +x dist/bin.js",
format: "prettier --write .",
"format:check": "prettier --check .",
lint: "eslint './**/*.{js,ts}'",
typecheck: "tsc --noEmit",
test: "vitest run",
"test:watch": "vitest --watch",
publint: "npx publint"
},
types: "./dist/index.d.ts",
main: "./dist/index.cjs",
module: "./dist/index.js",
bin: {
axiom: "./dist/bin.js"
},
exports: {
"./ai": {
import: {
types: "./dist/index.d.ts",
default: "./dist/index.js"
},
require: {
types: "./dist/index.d.cts",
default: "./dist/index.cjs"
}
},
"./ai/evals": {
import: {
types: "./dist/evals.d.ts",
default: "./dist/evals.js"
},
require: {
types: "./dist/evals.d.cts",
default: "./dist/evals.cjs"
}
}
},
keywords: [
"axiom",
"logging",
"ai",
"otel",
"opentelemetry"
],
repository: {
type: "git",
url: "git+https://github.com/axiomhq/ai.git",
directory: "packages/ai"
},
license: "MIT",
dependencies: {
"@next/env": "^15.4.2",
"@opentelemetry/auto-instrumentations-node": "^0.60.1",
"@opentelemetry/context-async-hooks": "^2.0.1",
"@opentelemetry/exporter-trace-otlp-http": "^0.202.0",
"@opentelemetry/resources": "^2.0.1",
"@opentelemetry/sdk-node": "^0.202.0",
"@opentelemetry/sdk-trace-node": "^2.0.1",
"@opentelemetry/semantic-conventions": "^1.36.0",
"@sinclair/typebox": "^0.34.37",
commander: "^14.0.0",
"console-table-printer": "^2.14.6",
esbuild: "^0.25.8",
handlebars: "^4.7.8",
nanoid: "^5.1.5",
vitest: "catalog:",
zod: "catalog:"
},
peerDependencies: {
"@opentelemetry/api": "^1.9.0"
},
devDependencies: {
"@ai-sdk/anthropicv1": "npm:@ai-sdk/anthropic@^1.2.12",
"@ai-sdk/anthropicv2": "npm:@ai-sdk/anthropic@2.0.0-beta.9",
"@ai-sdk/openaiv1": "npm:@ai-sdk/openai@^1.3.23",
"@ai-sdk/openaiv2": "npm:@ai-sdk/openai@2.0.0-beta.12",
"@ai-sdk/providerv1": "npm:@ai-sdk/provider@^1.1.3",
"@ai-sdk/providerv2": "npm:@ai-sdk/provider@2.0.0-beta.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/core": "^2.0.1",
"@opentelemetry/sdk-trace-base": "^2.0.1",
"@opentelemetry/sdk-trace-node": "^2.0.1",
"@repo/eslint-config": "workspace:*",
"@types/node": "^22.15.29",
"@vitest/coverage-v8": "^3.2.4",
aiv4: "npm:ai@^4.3.19",
aiv5: "npm:ai@^5.0.0",
eslint: "catalog:",
prettier: "catalog:",
tsup: "catalog:",
typescript: "catalog:",
vitest: "catalog:"
},
files: [
"dist"
],
packageManager: "pnpm@10.11.1"
};
// src/otel/initAxiomAI.ts
var AXIOM_AI_SCOPE_KEY = Symbol.for("__axiom_ai_scope__");
function extractTracerScope(tracer) {
const tracerAny = tracer;
const name = tracerAny._instrumentationScope?.name || tracerAny.instrumentationLibrary?.name || package_default.name;
const version = tracerAny._instrumentationScope?.version || tracerAny.instrumentationLibrary?.version || package_default.version;
return { name, version };
}
function initAxiomAI(config) {
const newScope = extractTracerScope(config.tracer);
const existingScope = globalThis[AXIOM_AI_SCOPE_KEY];
if (existingScope && existingScope.name === newScope.name && existingScope.version === newScope.version) {
return;
}
if (existingScope) {
console.warn(
`[AxiomAI] initAxiomAI() called multiple times with different scopes. Previous: ${existingScope.name}@${existingScope.version}, New: ${newScope.name}@${newScope.version}`
);
}
globalThis[AXIOM_AI_SCOPE_KEY] = newScope;
}
function getGlobalTracer() {
const scope = globalThis[AXIOM_AI_SCOPE_KEY];
if (!scope) {
console.warn(
"[AxiomAI] AXIOM_AI_SCOPE_KEY is undefined. This probably means that initAxiomAI() was never called. Make sure to call initAxiomAI({ tracer }) in your instrumentation setup."
);
}
let { name, version } = scope || { name: package_default.name, version: package_default.version };
if (!name || !version) {
name = package_default.name;
version = package_default.version;
if (!name || !version) {
name = "axiom";
version = "unknown";
}
}
return trace.getTracer(name, version);
}
function resetAxiomAI() {
globalThis[AXIOM_AI_SCOPE_KEY] = void 0;
}
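/*
 * Illustrative initialization order (a sketch; the exporter choice is an
 * assumption). The warnings emitted by getGlobalTracer() and getTracer() expect
 * initAxiomAI() to run only after the OpenTelemetry SDK has started, so that
 * trace.getTracer() resolves against a real TracerProvider instead of the
 * no-op default.
 *
 *   import { NodeSDK } from "@opentelemetry/sdk-node";
 *   import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
 *   import { trace } from "@opentelemetry/api";
 *   import { initAxiomAI } from "axiom/ai";
 *
 *   const sdk = new NodeSDK({ traceExporter: new OTLPTraceExporter() });
 *   sdk.start();
 *   initAxiomAI({ tracer: trace.getTracer("my-service") });
 */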
// src/otel/middleware.ts
import "@opentelemetry/api";
// src/otel/utils/contentSanitizer.ts
import { createHash } from "crypto";
function extractImageMetadata(url) {
if (url.startsWith("data:")) {
const [header, base64Data] = url.split(",");
const formatMatch = header.match(/data:image\/(\w+)/);
const format = formatMatch?.[1];
const sizeBytes = base64Data ? Math.floor(base64Data.length * 3 / 4) : 0;
const hash = base64Data ? createHash("sha256").update(base64Data).digest("hex").slice(0, 16) : "unknown";
return {
format,
size_bytes: sizeBytes,
hash,
is_data_url: true
};
} else {
const hash = createHash("sha256").update(url).digest("hex").slice(0, 16);
return {
hash,
is_data_url: false
};
}
}
function sanitizeImageUrl(url, detail) {
const metadata = extractImageMetadata(url);
if (metadata.is_data_url) {
const formatPart = metadata.format ? `:${metadata.format}` : "";
const sizePart = metadata.size_bytes ? `:${metadata.size_bytes}b` : "";
return {
url: `[IMAGE${formatPart}${sizePart}:${metadata.hash}]`,
detail,
...metadata
};
} else {
return {
url,
detail,
...metadata
};
}
}
function sanitizeMultimodalContent(content) {
if (Array.isArray(content)) {
return content.map((part) => {
if (part && typeof part === "object" && "type" in part && part.type === "image_url") {
const imagePart = part;
if (imagePart.image_url?.url) {
return {
...part,
image_url: sanitizeImageUrl(imagePart.image_url.url, imagePart.image_url.detail)
};
}
}
return part;
});
}
return content;
}
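/*
 * Example of the sanitization above (values illustrative). A base64 data URL in
 * an OpenAI-style "image_url" part is replaced by a compact placeholder so raw
 * image bytes never end up in span attributes; plain URLs are kept and only
 * annotated with a hash.
 *
 *   sanitizeImageUrl("data:image/png;base64,iVBORw0KG...", "low");
 *   // => {
 *   //   url: "[IMAGE:png:1234b:ab12cd34ef56ab12]",
 *   //   detail: "low",
 *   //   format: "png",
 *   //   size_bytes: 1234,
 *   //   hash: "ab12cd34ef56ab12",
 *   //   is_data_url: true
 *   // }
 */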
// src/otel/completionUtils.ts
function createSimpleCompletion({ text }) {
const assistantMessage = {
role: "assistant",
content: text ?? ""
};
return [assistantMessage];
}
// src/util/promptUtils.ts
function appendToolCalls(prompt, toolCalls, toolResults, assistantContent) {
const updatedPrompt = [...prompt];
updatedPrompt.push({
role: "assistant",
content: assistantContent || null,
tool_calls: toolCalls.map((toolCall) => ({
id: toolCall.toolCallId,
function: {
name: toolCall.toolName,
arguments: typeof toolCall.args === "string" ? toolCall.args : JSON.stringify(toolCall.args)
},
type: "function"
}))
});
for (const toolCall of toolCalls) {
const realToolResult = toolResults.get(toolCall.toolName);
if (realToolResult) {
updatedPrompt.push({
role: "tool",
tool_call_id: toolCall.toolCallId,
content: JSON.stringify(realToolResult)
});
}
}
return updatedPrompt;
}
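/*
 * Example of the shape produced above (values illustrative). After a
 * tool-calling turn, the captured prompt gains an OpenAI-style assistant
 * message carrying tool_calls, plus one "tool" message per resolved result:
 *
 *   { role: "assistant", content: null,
 *     tool_calls: [{ id: "call_1", type: "function",
 *                    function: { name: "getWeather", arguments: "{\"city\":\"Berlin\"}" } }] },
 *   { role: "tool", tool_call_id: "call_1", content: "{\"tempC\":21}" }
 */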
function extractToolResultsFromRawPrompt(rawPrompt) {
const toolResultsMap = /* @__PURE__ */ new Map();
if (!Array.isArray(rawPrompt)) {
return toolResultsMap;
}
for (const message of rawPrompt) {
if (message?.role === "user" && Array.isArray(message.parts)) {
for (const part of message.parts) {
if (part?.functionResponse) {
const functionResponse = part.functionResponse;
if (functionResponse.name && functionResponse.response) {
toolResultsMap.set(
functionResponse.name,
functionResponse.response.content || functionResponse.response
);
}
}
}
}
if (message?.role === "tool" && message?.tool_call_id && message?.content) {
}
}
return toolResultsMap;
}
function extractToolResultsFromPromptV2(prompt) {
const idToName = /* @__PURE__ */ new Map();
const results = /* @__PURE__ */ new Map();
for (const message of prompt) {
if (message.role === "assistant" && Array.isArray(message.content)) {
for (const part of message.content) {
if (part.type === "tool-call") {
idToName.set(part.toolCallId, part.toolName);
}
}
}
}
for (const message of prompt) {
if (message.role === "tool" && Array.isArray(message.content)) {
for (const part of message.content) {
if (part.toolCallId && part.output !== void 0) {
const toolName = idToName.get(part.toolCallId);
if (toolName) {
results.set(toolName, part.output);
}
}
}
}
}
return results;
}
// src/otel/utils/wrapperUtils.ts
import {
trace as trace2,
context,
propagation,
SpanStatusCode as SpanStatusCode2
} from "@opentelemetry/api";
// src/otel/startActiveSpan.ts
import { SpanStatusCode } from "@opentelemetry/api";
var createStartActiveSpan = (tracer) => async (name, options, fn, callbacks) => {
return tracer.startActiveSpan(name, { ...options ?? {} }, async (span) => {
try {
const result = await fn(span);
callbacks?.onSuccess?.(span);
return result;
} catch (error) {
callbacks?.onError?.(error, span);
if (error instanceof Error) {
span.recordException(error);
span.setStatus({
code: SpanStatusCode.ERROR,
message: error.message
});
}
throw error;
} finally {
callbacks?.onFinally?.(span);
span.end();
}
});
};
// src/otel/withSpanBaggageKey.ts
var WITHSPAN_BAGGAGE_KEY = "__withspan_gen_ai_call";
// src/otel/utils/wrapperUtils.ts
function classifyError(err) {
if (err == null) return void 0;
if (err instanceof Error) {
const name = err.name.toLowerCase();
if (name.includes("timeout")) return "timeout";
if (name.includes("abort")) return "timeout";
if (name.includes("network") || name.includes("fetch")) return "network";
if (name.includes("validation")) return "validation";
if (name.includes("auth")) return "authentication";
if (name.includes("parse") || name.includes("json")) return "parsing";
if (name.includes("permission") || name.includes("forbidden")) return "authorization";
if (name.includes("rate") && name.includes("limit")) return "rate_limit";
if (name.includes("quota") || name.includes("limit")) return "quota_exceeded";
return void 0;
}
return void 0;
}
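/*
 * Example (illustrative): classification only inspects the error's name, so an
 * abort maps to "timeout" and anything unrecognized stays undefined (later
 * written to the span as "unknown").
 *
 *   const err = new Error("request was aborted");
 *   err.name = "AbortError";
 *   classifyError(err);               // => "timeout"
 *   classifyError(new Error("boom")); // => undefined
 */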
function classifyToolError(err, span) {
if (err instanceof Error) {
span.recordException(err);
} else {
span.recordException({
message: String(err),
name: "UnknownError"
});
}
span.setStatus({
code: SpanStatusCode2.ERROR,
message: err instanceof Error ? err.message : String(err)
});
let errorType = "unknown";
let statusCode;
if (err && typeof err === "object") {
const errObj = err;
const name = errObj.name?.toLowerCase() || "";
const message = errObj.message?.toLowerCase() || "";
if (name.includes("timeout") || name.includes("abort") || message.includes("timeout")) {
errorType = "timeout";
} else if (name.includes("validation") || errObj.code === "VALIDATION_ERROR" || message.includes("validation")) {
errorType = "validation";
} else if (name.includes("fetch") || name.includes("network") || message.includes("network") || message.includes("fetch failed")) {
errorType = "network";
statusCode = errObj.status || errObj.code;
} else if (name.includes("auth") || message.includes("auth") || message.includes("unauthorized")) {
errorType = "authentication";
} else if (name.includes("permission") || name.includes("forbidden") || message.includes("forbidden")) {
errorType = "authorization";
} else if (name.includes("rate") && (name.includes("limit") || message.includes("rate limit"))) {
errorType = "rate_limit";
} else if (name.includes("quota") || message.includes("quota") || message.includes("limit exceeded")) {
errorType = "quota_exceeded";
} else if (name.includes("parse") || name.includes("json") || message.includes("json") || message.includes("parse")) {
errorType = "parsing";
}
}
span.setAttribute(Attr.Error.Type, errorType);
if (err instanceof Error && err.message) {
span.setAttribute(Attr.Error.Message, err.message);
}
if (statusCode !== void 0) {
span.setAttribute(Attr.HTTP.Response.StatusCode, statusCode);
}
}
function isNoOpTracerProvider() {
const provider = trace2.getTracerProvider();
if (provider.constructor.name === "NoopTracerProvider") {
return true;
}
if (typeof provider.getTracer !== "function") {
return true;
}
return false;
}
function getTracer() {
const tracer = getGlobalTracer();
if (isNoOpTracerProvider()) {
console.warn(
"[AxiomAI] No TracerProvider registered - spans will be no-op. Make sure to call initAxiomAI() after your OpenTelemetry SDK has started (sdk.start())."
);
}
return tracer;
}
function createGenAISpanName(operation, suffix) {
return suffix ? `${operation} ${suffix}` : operation;
}
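// Reads the capability/step entries that withSpan() (defined later in this file)
// places on the active baggage, and copies them onto the span as attributes.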
function setScopeAttributes(span) {
const bag = propagation.getActiveBaggage();
if (bag) {
const capability = bag.getEntry("capability")?.value;
if (capability) {
span.setAttribute(Attr.GenAI.Capability.Name, capability);
}
const step = bag.getEntry("step")?.value;
if (step) {
span.setAttribute(Attr.GenAI.Step.Name, step);
}
}
}
function setAxiomBaseAttributes(span) {
span.setAttributes({
[Attr.Axiom.GenAI.SchemaURL]: `${SCHEMA_BASE_URL}${SCHEMA_VERSION}`,
[Attr.Axiom.GenAI.SDK.Name]: package_default.name,
[Attr.Axiom.GenAI.SDK.Version]: package_default.version
});
}
function setBaseAttributes(span, provider, modelId) {
span.setAttributes({
[Attr.GenAI.Operation.Name]: Attr.GenAI.Operation.Name_Values.Chat,
[Attr.GenAI.Request.Model]: modelId
});
const systemValue = mapProviderToSystem(provider);
if (systemValue) {
span.setAttribute(Attr.GenAI.System, systemValue);
}
setAxiomBaseAttributes(span);
}
function setRequestParameterAttributes(span, params) {
const {
maxTokens,
frequencyPenalty,
presencePenalty,
temperature,
topP,
topK,
seed,
stopSequences
} = params;
if (maxTokens !== void 0) {
span.setAttribute(Attr.GenAI.Request.MaxTokens, maxTokens);
}
if (frequencyPenalty !== void 0) {
span.setAttribute(Attr.GenAI.Request.FrequencyPenalty, frequencyPenalty);
}
if (presencePenalty !== void 0) {
span.setAttribute(Attr.GenAI.Request.PresencePenalty, presencePenalty);
}
if (temperature !== void 0) {
span.setAttribute(Attr.GenAI.Request.Temperature, temperature);
}
if (topP !== void 0) {
span.setAttribute(Attr.GenAI.Request.TopP, topP);
}
if (topK !== void 0) {
span.setAttribute(Attr.GenAI.Request.TopK, topK);
}
if (seed !== void 0) {
span.setAttribute(Attr.GenAI.Request.Seed, seed);
}
if (stopSequences && stopSequences.length > 0) {
span.setAttribute(Attr.GenAI.Request.StopSequences, JSON.stringify(stopSequences));
}
}
function createStreamChildSpan(parentSpan, operationName) {
const tracer = getTracer();
const spanContext = trace2.setSpan(context.active(), parentSpan);
const childSpan = tracer.startSpan(operationName, void 0, spanContext);
childSpan.setAttributes({
[Attr.GenAI.Operation.Name]: Attr.GenAI.Operation.Name_Values.Chat
});
return childSpan;
}
function handleStreamError(span, err) {
if (err instanceof Error) {
span.recordException(err);
} else {
span.recordException({
message: String(err),
name: "UnknownError"
});
}
span.setStatus({
code: SpanStatusCode2.ERROR,
message: err instanceof Error ? err.message : String(err)
});
const errorType = classifyError(err);
span.setAttribute(Attr.Error.Type, errorType ?? "unknown");
if (err instanceof Error && err.message) {
span.setAttribute(Attr.Error.Message, err.message);
}
if (err && typeof err === "object" && "status" in err) {
span.setAttribute(Attr.HTTP.Response.StatusCode, err.status);
}
}
async function withSpanHandling(modelId, operation) {
const bag = propagation.getActiveBaggage();
const isWithinWithSpan = bag?.getEntry(WITHSPAN_BAGGAGE_KEY)?.value === "true";
const context3 = {
originalPrompt: [],
rawCall: void 0
};
if (isWithinWithSpan) {
const activeSpan = trace2.getActiveSpan();
if (!activeSpan) {
throw new Error("Expected active span when within withSpan");
}
activeSpan.updateName(createGenAISpanName(Attr.GenAI.Operation.Name_Values.Chat, modelId));
try {
return await operation(activeSpan, context3);
} catch (err) {
if (err instanceof Error) {
activeSpan.recordException(err);
} else {
activeSpan.recordException({
message: String(err),
name: "UnknownError"
});
}
activeSpan.setStatus({
code: SpanStatusCode2.ERROR,
message: err instanceof Error ? err.message : String(err)
});
const errorType = classifyError(err);
activeSpan.setAttribute(Attr.Error.Type, errorType ?? "unknown");
if (err instanceof Error && err.message) {
activeSpan.setAttribute(Attr.Error.Message, err.message);
}
if (err && typeof err === "object" && "status" in err) {
activeSpan.setAttribute(Attr.HTTP.Response.StatusCode, err.status);
}
throw err;
}
} else {
const tracer = getTracer();
const startActiveSpan = createStartActiveSpan(tracer);
const name = createGenAISpanName(Attr.GenAI.Operation.Name_Values.Chat, modelId);
return startActiveSpan(name, null, (span) => operation(span, context3), {
onError: (err, span) => {
const errorType = classifyError(err);
span.setAttribute(Attr.Error.Type, errorType ?? "unknown");
if (err instanceof Error && err.message) {
span.setAttribute(Attr.Error.Message, err.message);
}
if (err && typeof err === "object" && "status" in err) {
span.setAttribute(Attr.HTTP.Response.StatusCode, err.status);
}
}
});
}
}
function determineOutputTypeV1(options) {
if (options.responseFormat?.type) {
switch (options.responseFormat.type) {
case "json":
return Attr.GenAI.Output.Type_Values.Json;
case "text":
return Attr.GenAI.Output.Type_Values.Text;
}
}
if (options.mode?.type === "object-json" || options.mode?.type === "object-tool") {
return Attr.GenAI.Output.Type_Values.Json;
}
if (options.mode?.type === "regular") {
return Attr.GenAI.Output.Type_Values.Text;
}
return void 0;
}
function determineOutputTypeV2(options) {
if (options.responseFormat?.type) {
switch (options.responseFormat.type) {
case "json":
return Attr.GenAI.Output.Type_Values.Json;
case "text":
return Attr.GenAI.Output.Type_Values.Text;
}
}
return void 0;
}
function mapProviderToSystem(provider) {
const OTHER_VALUE = "_OTHER";
switch (provider) {
case "amazon-bedrock":
return Attr.GenAI.System_Values.AWSBedrock;
case "anthropic":
return Attr.GenAI.System_Values.Anthropic;
case "gateway":
return OTHER_VALUE;
case "google":
return Attr.GenAI.System_Values.GCPGemini;
case "groq":
return Attr.GenAI.System_Values.Groq;
case "mistral":
return Attr.GenAI.System_Values.MistralAI;
case "openai":
return Attr.GenAI.System_Values.OpenAI;
case "openai-compatible":
return OTHER_VALUE;
case "perplexity":
return Attr.GenAI.System_Values.Perplexity;
case "replicate":
return OTHER_VALUE;
case "togetherai":
return OTHER_VALUE;
case "xai":
return Attr.GenAI.System_Values.XAI;
// Specialized providers that should not have system attribute
case "assemblyai":
case "deepgram":
case "gladia":
case "revai":
return void 0;
// startswith + fall through
default: {
if (provider.startsWith("azure.")) {
return Attr.GenAI.System_Values.AzureAIOpenAI;
}
if (provider.startsWith("cerebras.")) {
return OTHER_VALUE;
}
if (provider.startsWith("cohere.")) {
return Attr.GenAI.System_Values.Cohere;
}
if (provider.startsWith("deepinfra.")) {
return OTHER_VALUE;
}
if (provider.startsWith("deepseek.")) {
return Attr.GenAI.System_Values.Deepseek;
}
if (provider.startsWith("elevenlabs.")) {
return OTHER_VALUE;
}
if (provider.startsWith("fal.")) {
return OTHER_VALUE;
}
if (provider.startsWith("fireworks.")) {
return OTHER_VALUE;
}
if (provider.startsWith("google.vertex.")) {
return Attr.GenAI.System_Values.GCPVertexAI;
}
if (provider.startsWith("hume.")) {
return OTHER_VALUE;
}
if (provider.startsWith("lmnt.")) {
return OTHER_VALUE;
}
if (provider.startsWith("luma.")) {
return OTHER_VALUE;
}
if (provider.startsWith("vercel.")) {
return OTHER_VALUE;
}
return void 0;
}
}
}
// src/otel/utils/normalized.ts
function normalizeV1ToolCall(toolCall) {
return {
toolCallId: toolCall.toolCallId,
toolName: toolCall.toolName,
args: typeof toolCall.args === "string" ? toolCall.args : JSON.stringify(toolCall.args),
toolCallType: "function"
};
}
function normalizeV2ToolCall(toolCall) {
return {
toolCallId: toolCall.toolCallId,
toolName: toolCall.toolName,
args: typeof toolCall.input === "string" ? toolCall.input.replace(/:\s+/g, ":") : JSON.stringify(toolCall.input),
toolCallType: "function"
};
}
function normalizeV1ToolCalls(toolCalls) {
return toolCalls.map(normalizeV1ToolCall);
}
function normalizeV2ToolCalls(toolCalls) {
return toolCalls.map(normalizeV2ToolCall);
}
function promptV1ToOpenAI(prompt) {
const results = [];
for (const message of prompt) {
switch (message.role) {
case "system":
results.push({
role: "system",
content: message.content
});
break;
case "assistant":
const textPart = message.content.find((part) => part.type === "text");
const toolCallParts = message.content.filter(
(part) => part.type === "tool-call"
);
results.push({
role: "assistant",
content: textPart?.text || null,
...toolCallParts.length > 0 ? {
tool_calls: toolCallParts.map((part) => ({
id: part.toolCallId,
function: {
name: part.toolName,
arguments: JSON.stringify(part.args)
},
type: "function"
}))
} : {}
});
break;
case "user":
results.push({
role: "user",
content: message.content.map((part) => {
switch (part.type) {
case "text":
return {
type: "text",
text: part.text
};
case "image":
return {
type: "image_url",
image_url: {
url: part.image.toString()
}
};
default:
return {
type: "text",
text: `[${part.type}]` + (typeof part === "object" && part !== null ? JSON.stringify(part) : String(part))
};
}
})
});
break;
case "tool":
for (const part of message.content) {
results.push({
role: "tool",
tool_call_id: part.toolCallId,
content: JSON.stringify(part.result)
});
}
break;
}
}
return results;
}
function promptV2ToOpenAI(prompt) {
const results = [];
for (const message of prompt) {
switch (message.role) {
case "system":
results.push({
role: "system",
content: message.content
});
break;
case "assistant":
const textContent = message.content.find(
(part) => part.type === "text"
);
const toolCalls = message.content.filter(
(part) => part.type === "tool-call"
);
results.push({
role: "assistant",
content: textContent?.text || null,
...toolCalls.length > 0 ? {
tool_calls: toolCalls.map((part) => ({
id: part.toolCallId,
function: {
name: part.toolName,
arguments: typeof part.input === "string" ? part.input : JSON.stringify(part.input)
},
type: "function"
}))
} : {}
});
break;
case "user":
results.push({
role: "user",
content: message.content.map((part) => {
switch (part.type) {
case "text":
return {
type: "text",
text: part.text
};
case "image":
return {
type: "image_url",
image_url: {
url: part.image.toString()
}
};
default:
return part;
}
})
});
break;
case "tool":
for (const part of message.content) {
results.push({
role: "tool",
tool_call_id: part.toolCallId,
content: formatV2ToolCallOutput(part.output)
});
}
break;
}
}
return results;
}
function formatV2ToolCallOutput(output) {
switch (output.type) {
case "text":
return output.value;
case "json":
return typeof output.value === "string" ? output.value : JSON.stringify(output.value);
case "error-text":
return output.value;
case "error-json":
return typeof output.value === "string" ? output.value : JSON.stringify(output.value);
case "content":
return JSON.stringify(output.value);
}
}
// src/util/currentUnixTime.ts
function currentUnixTime() {
return Date.now() / 1e3;
}
// src/otel/streaming/aggregators.ts
var ToolCallAggregator = class {
constructor() {
__publicField(this, "calls", {});
}
handleChunk(chunk) {
switch (chunk.type) {
case "tool-call":
this.calls[chunk.toolCallId] = {
toolCallType: chunk.toolCallType,
toolCallId: chunk.toolCallId,
toolName: chunk.toolName,
args: chunk.args
};
break;
case "tool-call-delta":
if (!this.calls[chunk.toolCallId]) {
this.calls[chunk.toolCallId] = {
toolCallType: chunk.toolCallType,
toolCallId: chunk.toolCallId,
toolName: chunk.toolName,
args: ""
};
}
this.calls[chunk.toolCallId].args += chunk.argsTextDelta;
break;
}
}
get result() {
return Object.values(this.calls);
}
};
var TextAggregator = class {
constructor() {
__publicField(this, "content", "");
}
feed(chunk) {
if (chunk.type === "text-delta") {
this.content += chunk.textDelta;
}
}
get text() {
return this.content || void 0;
}
};
var StreamStats = class {
constructor() {
__publicField(this, "startTime");
__publicField(this, "timeToFirstToken");
__publicField(this, "_usage");
__publicField(this, "_finishReason");
__publicField(this, "_responseId");
__publicField(this, "_responseModelId");
this.startTime = currentUnixTime();
}
feed(chunk) {
if (this.timeToFirstToken === void 0) {
this.timeToFirstToken = currentUnixTime() - this.startTime;
}
switch (chunk.type) {
case "response-metadata":
if (chunk.id) {
this._responseId = chunk.id;
}
if (chunk.modelId) {
this._responseModelId = chunk.modelId;
}
break;
case "finish":
this._usage = chunk.usage;
this._finishReason = chunk.finishReason;
break;
}
}
get result() {
return {
response: this._responseId || this._responseModelId ? {
id: this._responseId,
modelId: this._responseModelId
} : void 0,
finishReason: this._finishReason,
usage: this._usage
};
}
get firstTokenTime() {
return this.timeToFirstToken;
}
};
var ToolCallAggregatorV2 = class {
constructor() {
__publicField(this, "calls", {});
}
handleChunk(chunk) {
if (chunk.type === "tool-call") {
this.calls[chunk.toolCallId] = chunk;
}
}
get result() {
return Object.values(this.calls);
}
};
var TextAggregatorV2 = class {
constructor() {
__publicField(this, "content", "");
}
feed(chunk) {
switch (chunk.type) {
case "text-start":
this.content = "";
break;
case "text-delta":
this.content += chunk.delta;
break;
case "text-end":
break;
}
}
get text() {
return this.content || void 0;
}
};
var StreamStatsV2 = class {
constructor() {
__publicField(this, "startTime");
__publicField(this, "timeToFirstToken");
__publicField(this, "_usage");
__publicField(this, "_finishReason");
__publicField(this, "_responseMetadata");
this.startTime = currentUnixTime();
}
feed(chunk) {
if (this.timeToFirstToken === void 0) {
this.timeToFirstToken = currentUnixTime() - this.startTime;
}
switch (chunk.type) {
case "response-metadata":
this._responseMetadata = {
id: chunk.id,
modelId: chunk.modelId,
timestamp: chunk.timestamp
};
break;
case "finish":
this._usage = chunk.usage;
this._finishReason = chunk.finishReason;
break;
}
}
get result() {
return {
response: this._responseMetadata,
finishReason: this._finishReason,
usage: this._usage
};
}
get firstTokenTime() {
return this.timeToFirstToken;
}
};
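/*
 * Example (illustrative): the aggregators above are fed every chunk of a
 * streamed response so the middleware can set prompt/completion attributes once
 * the stream flushes. For a v1 text stream:
 *
 *   const agg = new TextAggregator();
 *   agg.feed({ type: "text-delta", textDelta: "Hello, " });
 *   agg.feed({ type: "text-delta", textDelta: "world" });
 *   agg.text; // => "Hello, world"
 */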
// src/otel/middleware.ts
var appendPromptMetadataToSpan = (span, messages) => {
const lastMessage = messages?.[messages.length - 1];
// Guard against an empty or missing message list before probing its metadata.
if (!lastMessage) return;
let axiomMeta;
if ("providerMetadata" in lastMessage) {
axiomMeta = lastMessage?.providerMetadata?._axiomMeta;
} else if ("providerOptions" in lastMessage) {
axiomMeta = lastMessage?.providerOptions?._axiomMeta;
}
if (axiomMeta) {
if (axiomMeta.id) span.setAttribute(Attr.GenAI.PromptMetadata.ID, axiomMeta.id);
if (axiomMeta.name) span.setAttribute(Attr.GenAI.PromptMetadata.Name, axiomMeta.name);
if (axiomMeta.slug) span.setAttribute(Attr.GenAI.PromptMetadata.Slug, axiomMeta.slug);
if (axiomMeta.version) span.setAttribute(Attr.GenAI.PromptMetadata.Version, axiomMeta.version);
}
};
function axiomAIMiddlewareV1() {
return {
wrapGenerate: async ({ doGenerate, params, model }) => {
return withSpanHandling(model.modelId, async (span, commonContext) => {
const context3 = {
...commonContext,
originalPrompt: [],
rawCall: void 0
};
appendPromptMetadataToSpan(span, params.prompt);
setScopeAttributes(span);
setPreCallAttributesV1(span, params, context3, model);
const res = await doGenerate();
context3.rawCall = res.rawCall;
await setPostCallAttributesV1(span, res, context3, model);
return res;
});
},
wrapStream: async ({ doStream, params, model }) => {
return withSpanHandling(model.modelId, async (span, commonContext) => {
const context3 = {
...commonContext,
originalPrompt: [],
rawCall: void 0
};
appendPromptMetadataToSpan(span, params.prompt);
setScopeAttributes(span);
setPreCallAttributesV1(span, params, context3, model);
const { stream, ...head } = await doStream();
const childSpan = createStreamChildSpan(span, `chat ${model.modelId} stream`);
const stats = new StreamStats();
const toolAggregator = new ToolCallAggregator();
const textAggregator = new TextAggregator();
return {
...head,
stream: stream.pipeThrough(
new TransformStream({
transform(chunk, controller) {
try {
stats.feed(chunk);
toolAggregator.handleChunk(chunk);
textAggregator.feed(chunk);
controller.enqueue(chunk);
} catch (err) {
handleStreamError(childSpan, err);
childSpan.end();
controller.error(err);
}
},
async flush(controller) {
try {
await setPostCallAttributesV1(
span,
{
...head,
...stats.result,
toolCalls: toolAggregator.result.length > 0 ? toolAggregator.result : void 0,
text: textAggregator.text
},
context3,
model
);
childSpan.end();
controller.terminate();
} catch (err) {
handleStreamError(childSpan, err);
childSpan.end();
controller.error(err);
}
}
})
)
};
});
}
};
}
function axiomAIMiddleware(config) {
if (config.model.specificationVersion === "v1") {
return axiomAIMiddlewareV1();
} else if (config.model.specificationVersion === "v2") {
return axiomAIMiddlewareV2();
} else {
console.warn(
// @ts-expect-error - not allowed at type level, but users can still do it...
`Unsupported model specification version: ${JSON.stringify(config.model.specificationVersion)}. Creating no-op middleware instead.`
);
return {};
}
}
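/*
 * Sketch of plugging this middleware into the Vercel AI SDK. The
 * wrapLanguageModel() helper from the "ai" package and the OpenAI provider are
 * assumptions for the example; axiomAIMiddleware() itself only needs the model
 * to pick the v1 or v2 implementation.
 *
 *   import { wrapLanguageModel, generateText } from "ai";
 *   import { openai } from "@ai-sdk/openai";
 *   import { axiomAIMiddleware } from "axiom/ai";
 *
 *   const base = openai("gpt-4o-mini");
 *   const model = wrapLanguageModel({
 *     model: base,
 *     middleware: axiomAIMiddleware({ model: base })
 *   });
 *   const { text } = await generateText({ model, prompt: "Say hi" });
 */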
function axiomAIMiddlewareV2() {
return {
wrapGenerate: async ({ doGenerate, params, model }) => {
return withSpanHandling(model.modelId, async (span, commonContext) => {
const context3 = {
...commonContext,
originalPrompt: [],
originalV2Prompt: []
};
appendPromptMetadataToSpan(span, params.prompt);
setScopeAttributes(span);
setPreCallAttributesV2(span, params, context3, model);
const res = await doGenerate();
await setPostCallAttributesV2(span, res, context3, model);
return res;
});
},
wrapStream: async ({ doStream, params, model }) => {
return withSpanHandling(model.modelId, async (span, commonContext) => {
const context3 = {
...commonContext,
originalPrompt: [],
originalV2Prompt: []
};
appendPromptMetadataToSpan(span, params.prompt);
setScopeAttributes(span);
setPreCallAttributesV2(span, params, context3, model);
const ret = await doStream();
const childSpan = createStreamChildSpan(span, `chat ${model.modelId} stream`);
const stats = new StreamStatsV2();
const toolAggregator = new ToolCallAggregatorV2();
const textAggregator = new TextAggregatorV2();
return {
...ret,
stream: ret.stream.pipeThrough(
new TransformStream({
transform(chunk, controller) {
try {
stats.feed(chunk);
toolAggregator.handleChunk(chunk);
textAggregator.feed(chunk);
controller.enqueue(chunk);
} catch (err) {
handleStreamError(childSpan, err);
childSpan.end();
controller.error(err);
}
},
async flush(controller) {
try {
const streamResult = {
...stats.result,
content: [
...textAggregator.text ? [{ type: "text", text: textAggregator.text }] : [],
...toolAggregator.result
]
};
await setPostCallAttributesV2(span, streamResult, context3, model);
childSpan.end();
controller.terminate();
} catch (err) {
handleStreamError(childSpan, err);
childSpan.end();
controller.error(err);
}
}
})
)
};
});
}
};
}
function setPreCallAttributesV1(span, options, context3, model) {
const {
prompt,
maxTokens,
frequencyPenalty,
presencePenalty,
temperature,
topP,
topK,
seed,
stopSequences,
responseFormat,
mode
} = options;
const processedPrompt = promptV1ToOpenAI(prompt);
context3.originalPrompt = processedPrompt;
span.setAttribute(Attr.GenAI.Prompt, JSON.stringify(sanitizeMultimodalContent(processedPrompt)));
setBaseAttributes(span, model.provider, model.modelId);
const outputType = determineOutputTypeV1({ responseFormat, mode });
if (outputType) {
span.setAttribute(Attr.GenAI.Output.Type, outputType);
}
setRequestParameterAttributes(span, {
maxTokens,
frequencyPenalty,
presencePenalty,
temperature,
topP,
topK,
seed,
stopSequences
});
}
async function setPostCallAttributesV1(span, result, context3, _model) {
if (result.toolCalls && result.toolCalls.length > 0) {
const originalPrompt = context3.originalPrompt || [];
const normalizedToolCalls = normalizeV1ToolCalls(result.toolCalls);
const toolResultsMap = context3.rawCall?.rawPrompt ? extractToolResultsFromRawPrompt(context3.rawCall.rawPrompt) : /* @__PURE__ */ new Map();
const updatedPrompt = appendToolCalls(
originalPrompt,
normalizedToolCalls,
toolResultsMap,
result.text
);
span.setAttribute(Attr.GenAI.Prompt, JSON.stringify(sanitizeMultimodalContent(updatedPrompt)));
}
if (result.text) {
const completion = createSimpleCompletion({
text: result.text
});
span.setAttribute(Attr.GenAI.Completion, JSON.stringify(completion));
}
if (result.response?.id) {
span.setAttribute(Attr.GenAI.Response.ID, result.response.id);
}
if (result.response?.modelId) {
span.setAttribute(Attr.GenAI.Response.Model, result.response.modelId);
}
if (result.usage?.promptTokens) {
if (Number.isNaN(result.usage.promptTokens)) {
console.warn(
"usage.promptTokens is NaN. You might need to enable `compatibility: strict`. See: https://github.com/vercel/ai/discussions/1882",
result.usage.promptTokens
);
} else {
span.setAttribute(Attr.GenAI.Usage.InputTokens, result.usage.promptTokens);
}
}
if (result.usage?.completionTokens) {
if (Number.isNaN(result.usage.completionTokens)) {
console.warn(
"usage.completionTokens is NaN. You might need to enable `compatibility: strict`. See: https://github.com/vercel/ai/discussions/1882",
result.usage.completionTokens
);
} else {
span.setAttribute(Attr.GenAI.Usage.OutputTokens, result.usage.completionTokens);
}
}
if (result.finishReason) {
span.setAttribute(Attr.GenAI.Response.FinishReasons, JSON.stringify([result.finishReason]));
}
}
function setPreCallAttributesV2(span, options, context3, model) {
setBaseAttributes(span, model.provider, model.modelId);
const outputType = determineOutputTypeV2(options);
if (outputType) {
span.setAttribute(Attr.GenAI.Output.Type, outputType);
}
setRequestParameterAttributes(span, {
maxTokens: options.maxOutputTokens,
frequencyPenalty: options.frequencyPenalty,
presencePenalty: options.presencePenalty,
temperature: options.temperature,
topP: options.topP,
topK: options.topK,
seed: options.seed,
stopSequences: options.stopSequences
});
const processedPrompt = promptV2ToOpenAI(options.prompt);
context3.originalV2Prompt = options.prompt;
context3.originalPrompt = processedPrompt;
span.setAttribute(Attr.GenAI.Prompt, JSON.stringify(sanitizeMultimodalContent(processedPrompt)));
}
async function setPostCallAttributesV2(span, result, context3, _model) {
const toolCalls = result.content?.filter(
(c) => c.type === "tool-call"
);
const alreadySet = span.attributes?.[Attr.GenAI.Response.FinishReasons] !== void 0;
if (!alreadySet) {
if (result.response?.id) {
span.setAttribute(Attr.GenAI.Response.ID, result.response.id);
}
if (result.response?.modelId) {
span.setAttribute(Attr.GenAI.Response.Model, result.response.modelId);
}
if (result.usage?.inputTokens !== void 0) {
span.setAttribute(Attr.GenAI.Usage.InputTokens, result.usage.inputTokens);
}
if (result.usage?.outputTokens !== void 0) {
span.setAttribute(Attr.GenAI.Usage.OutputTokens, result.usage.outputTokens);
}
}
if (toolCalls && toolCalls.length > 0) {
const originalPrompt = context3.originalPrompt || [];
const normalizedToolCalls = normalizeV2ToolCalls(toolCalls);
const toolResultsMap = extractToolResultsFromPromptV2(context3.originalV2Prompt || []);
const textContent = result.content?.find((c) => c.type === "text");
const assistantText = textContent?.type === "text" ? textContent.text : void 0;
const updatedPrompt = appendToolCalls(
originalPrompt,
normalizedToolCalls,
toolResultsMap,
assistantText
);
span.setAttribute(Attr.GenAI.Prompt, JSON.stringify(sanitizeMultimodalContent(updatedPrompt)));
}
if (result.content && result.content.length > 0) {
await processToolCallsAndCreateSpansV2(span, result.content);
} else if (result.finishReason) {
const completion = createSimpleCompletion({
text: ""
});
span.setAttribute(Attr.GenAI.Completion, JSON.stringify(completion));
}
if (result.finishReason && !alreadySet) {
span.setAttribute(Attr.GenAI.Response.FinishReasons, JSON.stringify([result.finishReason]));
}
}
async function processToolCallsAndCreateSpansV2(parentSpan, content) {
const textContent = content.find((c) => c.type === "text");
const assistantText = textContent?.type === "text" ? textContent.text : void 0;
const toolCalls = content.filter((c) => c.type === "tool-call");
if (toolCalls.length === 0) {
const completion = [
{
role: "assistant",
content: sanitizeMultimodalContent(
content.length === 1 && assistantText ? assistantText : content
)
}
];
parentSpan.setAttribute(Attr.GenAI.Completion, JSON.stringify(completion));
}
}
// src/otel/AxiomWrappedLanguageModelV1.ts
function isLanguageModelV1(model) {
return model != null && typeof model === "object" && "specificationVersion" in model && "provider" in model && "modelId" in model && model.specificationVersion === "v1" && typeof model.provider === "string" && typeof model.modelId === "string";
}
var AxiomWrappedLanguageModelV1 = class {
constructor(model) {
const middleware = axiomAIMiddlewareV1();
return {
specificationVersion: model.specificationVersion,
provider: model.provider,
modelId: model.modelId,
defaultObjectGenerationMode: model.defaultObjectGenerationMode,
supportsImageUrls: model.supportsImageUrls,
supportsStructuredOutputs: model.supportsStructuredOutputs,
supportsUrl: model.supportsUrl?.bind(model),
doGenerate: async (params) => {
return middleware.wrapGenerate({
doGenerate: () => model.doGenerate(params),
doStream: () => model.doStream(params),
params,
model
});
},
doStream: async (params) => {
return middleware.wrapStream({
doGenerate: () => model.doGenerate(params),
doStream: () => model.doStream(params),
params,
model
});
}
};
}
};
// src/otel/AxiomWrappedLanguageModelV2.ts
function isLanguageModelV2(model) {
return model?.specificationVersion === "v2" && typeof model?.provider === "string" && typeof model?.modelId === "string";
}
var AxiomWrappedLanguageModelV2 = class {
constructor(model) {
const middleware = axiomAIMiddlewareV2();
return {
specificationVersion: model.specificationVersion,
provider: model.provider,
modelId: model.modelId,
supportedUrls: model.supportedUrls,
doGenerate: async (params) => {
return middleware.wrapGenerate({
doGenerate: () => model.doGenerate(params),
doStream: () => model.doStream(params),
params,
model
});
},
doStream: async (params) => {
return middleware.wrapStream({
doGenerate: () => model.doGenerate(params),
doStream: () => model.doStream(params),
params,
model
});
}
};
}
};
// src/otel/vercel.ts
function wrapAISDKModel(model) {
if (isLanguageModelV2(model)) {
return new AxiomWrappedLanguageModelV2(model);
} else if (isLanguageModelV1(model)) {
return new AxiomWrappedLanguageModelV1(model);
} else {
console.warn("Unsupported AI SDK model. Not wrapping.");
return model;
}
}
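/*
 * Example (illustrative provider and model id): wrapAISDKModel() dispatches on
 * specificationVersion, so both AI SDK v4-style ("v1") and v5-style ("v2")
 * provider models are supported; anything else is returned unwrapped with a
 * warning.
 *
 *   import { wrapAISDKModel } from "axiom/ai";
 *   import { anthropic } from "@ai-sdk/anthropic";
 *
 *   const model = wrapAISDKModel(anthropic("claude-3-5-sonnet-latest"));
 */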
// src/otel/withSpan.ts
import {
context as context2,
propagation as propagation2,
trace as trace3,
SpanStatusCode as SpanStatusCode3
} from "@opentelemetry/api";
function withSpan(meta, fn, opts) {
const tracer = opts?.tracer ?? getTracer();
const span = tracer.startSpan("gen_ai.call_llm");
const spanContext = trace3.setSpan(context2.active(), span);
return context2.with(spanContext, async () => {
if (!span.isRecording()) {
const provider = trace3.getTracerProvider();
const providerIsNoOp = provider.constructor.name === "NoopTracerProvider";
if (providerIsNoOp) {
console.warn(
"[AxiomAI] No TracerProvider registered - spans are no-op. Make sure to call initAxiomAI() after your OpenTelemetry SDK has started."
);
}
}
const bag = propagation2.createBaggage({
capability: { value: meta.capability },
step: { value: meta.step },
// TODO: maybe we can just check the active span name instead?
[WITHSPAN_BAGGAGE_KEY]: { value: "true" }
// Mark that we're inside withSpan
});
const ctx = propagation2.setBaggage(context2.active(), bag);
let spanEnded = false;
const safeEndSpan = () => {
if (!spanEnded) {
spanEnded = true;
span.end();
}
};
const timeoutMs = opts?.timeoutMs ?? 6e5;
const timeoutId = setTimeout(() => {
safeEndSpan();
}, timeoutMs);
try {
const result = await context2.with(ctx, () => fn(span));
if (result instanceof Response && result.body) {
if (result.body.locked) {
console.warn("[AxiomAI] Response body is already locked, cannot instrument stream");
clearTimeout(timeoutId);
safeEndSpan();
return result;
}
const originalReader = result.body.getReader();
const wrappedStream = new ReadableStream({
async pull(controller) {
try {
const { value, done } = await context2.with(ctx, () => originalReader.read());
if (done) {
originalReader.releaseLock?.();
clearTimeout(timeoutId);
span.setStatus({ code: SpanStatusCode3.OK });
safeEndSpan();
controller.close();
} else {
controller.enqueue(value);
}
} catch (err) {
originalReader.releaseLock?.();
clearTimeout(timeoutId);
span.recordException(err);
span.setStatus({
code: SpanStatusCode3.ERROR,
message: err instanceof Error ? err.message : String(err)
});
safeEndSpan();
controller.error(err);
}
},
async cancel(reason) {
try {
originalReader.releaseLock?.();
clearTimeout(timeoutId);
if (reason instanceof Error) {
span.recordException(reason);
} else if (reason) {
span.recordException({ message: String(reason), name: "CancelError" });
}
span.setStatus({
code: SpanStatusCode3.ERROR,
message: reason instanceof Error ? reason.message : String(reason)
});
safeEndSpan();
await originalReader.cancel(reason);
} catch (_err) {
}
}
});
return new Response(wrappedStream, {
status: result.status,
statusText: result.statusText,
headers: result.headers
});
}
if (result && typeof result === "object" && "textStream" in result) {
console.warn(
"[AxiomAI] Detected streaming object with textStream. For proper sp