/*
 * @tanstack/ai
 * Version: (unspecified)
 * Core TanStack AI library - Open source AI SDK
 * 625 lines (624 loc) • 21.1 kB • JavaScript
 */
import { SpanStatusCode, SpanKind, trace, context } from "@opentelemetry/api";
// Per-chat middleware state, keyed by the chat context object itself.
// A WeakMap lets abandoned contexts be garbage-collected without explicit
// cleanup if onFinish/onError/onAbort never fires.
const stateByCtx = /* @__PURE__ */ new WeakMap();
// Default cap (100 000 chars) on the buffered assistant completion text
// when content capture is enabled; see appendAssistantText below.
const DEFAULT_MAX_CONTENT_LENGTH = 1e5;
// Placeholder recorded instead of content when a user-supplied redact()
// callback throws — avoids both crashing and leaking unredacted text.
const REDACTION_FAILED_SENTINEL = "[redaction_failed]";
/**
 * Flattens a message `content` value into a single display string.
 *
 * Plain strings pass through unchanged. Arrays of content parts are
 * rendered part-by-part: text parts contribute their text (falling back
 * to a `content` property), known media parts become bracketed
 * placeholders like `[image]`, and anything else becomes
 * `[<type>]` / `[unknown]`. Non-object entries are skipped.
 * Any other input (null, numbers, objects) yields "".
 *
 * @param {string | Array<object> | unknown} content - Message content.
 * @returns {string} Space-joined rendering of the content parts.
 */
function serializeContent(content) {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  // Media kinds that are represented only by a placeholder token.
  const placeholderKinds = new Set(["image", "audio", "video", "document"]);
  const rendered = [];
  for (const piece of content) {
    // Skip null and any non-object entry (strings, numbers, booleans).
    if (piece === null || typeof piece !== "object") continue;
    const kind = piece.type;
    if (kind === "text") {
      rendered.push(String(piece.text ?? piece.content ?? ""));
    } else if (placeholderKinds.has(kind)) {
      rendered.push(`[${kind}]`);
    } else {
      rendered.push(`[${kind ?? "unknown"}]`);
    }
  }
  return rendered.join(" ");
}
/**
 * Builds the OpenTelemetry GenAI semantic-convention event name for a
 * chat message with the given role, e.g. "user" -> "gen_ai.user.message".
 *
 * Every arm of the original switch — including the default — produced the
 * identical `gen_ai.<role>.message` pattern, so the switch was redundant
 * and is collapsed to a single template literal. Behavior is unchanged
 * for all inputs.
 *
 * @param {string} role - Message role ("user", "assistant", "tool",
 *   "system", or any custom role string).
 * @returns {string} Event name in the form `gen_ai.<role>.message`.
 */
function messageEventName(role) {
  return `gen_ai.${role}.message`;
}
/**
 * Extracts a human-readable message from an arbitrary thrown value.
 *
 * Accepts Error instances, bare strings, or error-like objects with a
 * string `message` property. Anything else yields `undefined`.
 *
 * @param {unknown} err - The thrown/rejected value.
 * @returns {string | undefined} The message, or undefined if none found.
 */
function errorMessage(err) {
  if (err instanceof Error) return err.message;
  if (typeof err === "string") return err;
  // Error-like object: only trust a string-valued `message` property.
  const candidate =
    err && typeof err === "object" && "message" in err ? err.message : void 0;
  return typeof candidate === "string" ? candidate : void 0;
}
/**
 * Derives an error-type label from an arbitrary thrown value.
 *
 * Error instances report their `name` (falling back to "Error" when the
 * name is empty/falsy). Error-like objects report a string `name`
 * verbatim — even an empty string. Everything else maps to "Error".
 *
 * @param {unknown} err - The thrown/rejected value.
 * @returns {string} Error-type label for metrics/attributes.
 */
function errorTypeName(err) {
  if (err instanceof Error) {
    // Real Error: an empty name still degrades to the generic "Error".
    return err.name || "Error";
  }
  if (err !== null && typeof err === "object" && "name" in err) {
    const label = err.name;
    // Note: unlike the Error branch, a string name here is returned as-is.
    if (typeof label === "string") return label;
  }
  return "Error";
}
/**
 * Runs `fn` and swallows any exception, logging it with the given label.
 *
 * Used to guarantee that user-supplied hooks (formatters, enrichers,
 * onSpanEnd, ...) can never break the instrumented chat run.
 *
 * @param {string} label - Label used in the warning log on failure.
 * @param {() => T} fn - Zero-argument function to invoke.
 * @returns {T | undefined} fn's result, or undefined if it threw.
 * @template T
 */
function safeCall(label, fn) {
  let result;
  try {
    result = fn();
  } catch (err) {
    // Deliberate best-effort: log and continue rather than propagate.
    console.warn(`[otelMiddleware] ${label} failed`, err);
    result = void 0;
  }
  return result;
}
/**
 * Builds a TanStack AI middleware that instruments chat runs with
 * OpenTelemetry traces and, when a meter is supplied, metrics.
 *
 * Span topology per chat() invocation:
 *   - one root INTERNAL span ("chat <model>") for the whole call,
 *   - one CLIENT child span per model iteration ("chat <model> #<n>"),
 *   - one INTERNAL child span per tool call ("execute_tool <name>").
 *
 * All user-supplied callbacks (redact, spanNameFormatter, attributeEnricher,
 * onBeforeSpanStart, onSpanEnd) are invoked through safeCall/redactContent,
 * so a throwing callback degrades gracefully instead of breaking the run.
 *
 * @param {object} options
 * @param {object} options.tracer - OpenTelemetry Tracer (required).
 * @param {object} [options.meter] - OpenTelemetry Meter; enables the
 *   duration and token-usage histograms when present.
 * @param {boolean} [options.captureContent=false] - Attach prompt /
 *   completion / tool content to spans (always redacted first).
 * @param {(s: string) => string} [options.redact] - Redaction hook applied
 *   to every piece of captured content; defaults to identity.
 * @param {number} [options.maxContentLength=100000] - Cap on the buffered
 *   assistant completion text; values <= 0 disable truncation.
 * @param {Function} [options.spanNameFormatter] - Optional span-name builder.
 * @param {Function} [options.attributeEnricher] - Optional builder of extra
 *   attributes applied right after each span starts.
 * @param {Function} [options.onBeforeSpanStart] - May return replacement
 *   SpanOptions for a span about to start.
 * @param {Function} [options.onSpanEnd] - Notified just before a span ends.
 * @returns {object} Middleware object with lifecycle hooks consumed by the
 *   chat loop (onStart, onConfig, onChunk, onUsage, onBeforeToolCall,
 *   onAfterToolCall, onError, onAbort, onFinish).
 */
function otelMiddleware(options) {
  const {
    tracer,
    meter,
    captureContent = false,
    redact = (s) => s,
    maxContentLength = DEFAULT_MAX_CONTENT_LENGTH,
    spanNameFormatter,
    attributeEnricher,
    onBeforeSpanStart,
    onSpanEnd
  } = options;
  // Metrics are optional: histograms exist only when a meter was provided,
  // and every .record() below is guarded on the histogram's existence.
  const durationHistogram = meter?.createHistogram(
    "gen_ai.client.operation.duration",
    {
      description: "GenAI client operation duration",
      unit: "s"
    }
  );
  const tokenHistogram = meter?.createHistogram("gen_ai.client.token.usage", {
    description: "GenAI client token usage",
    unit: "{token}"
  });
  // Never let a faulty redactor crash instrumentation or leak raw content:
  // on throw, substitute the sentinel instead of the unredacted text.
  const redactContent = (text) => {
    try {
      return redact(text);
    } catch (err) {
      console.warn("[otelMiddleware] otel.redact failed", err);
      return REDACTION_FAILED_SENTINEL;
    }
  };
  // Append streamed assistant text to the per-run buffer, truncating at
  // maxContentLength and marking truncation with a trailing ellipsis.
  // A cap of <= 0 means "unlimited" (the guard below is skipped).
  const appendAssistantText = (state, delta) => {
    if (maxContentLength > 0) {
      // Once truncated, all further deltas are dropped.
      if (state.assistantTextBufferTruncated) return;
      const remaining = maxContentLength - state.assistantTextBuffer.length;
      if (remaining <= 0) {
        state.assistantTextBufferTruncated = true;
        state.assistantTextBuffer += "…";
        return;
      }
      if (delta.length > remaining) {
        state.assistantTextBuffer += delta.slice(0, remaining) + "…";
        state.assistantTextBufferTruncated = true;
        return;
      }
    }
    state.assistantTextBuffer += delta;
  };
  // End the currently open iteration span (if any), notifying onSpanEnd
  // first. iterationCount was incremented when the span opened, so the
  // span's own index is iterationCount - 1.
  const closeIterationSpan = (state, ctx) => {
    if (!state.currentIterationSpan) return;
    const span = state.currentIterationSpan;
    const iteration = state.iterationCount - 1;
    safeCall(
      "otel.onSpanEnd",
      () => onSpanEnd?.(
        { kind: "iteration", ctx, iteration },
        span
      )
    );
    span.end();
    state.currentIterationSpan = null;
  };
  return {
    name: "otel",
    // Chat run begins: open the root span and initialize per-run state.
    onStart(ctx) {
      safeCall("otel.onStart", () => {
        const info = { kind: "chat", ctx };
        const name = safeCall("otel.spanNameFormatter", () => spanNameFormatter?.(info)) ?? `chat ${ctx.model}`;
        const baseOptions = {
          kind: SpanKind.INTERNAL,
          attributes: {
            "gen_ai.system": ctx.provider,
            "gen_ai.request.model": ctx.model
            // NOTE: `gen_ai.operation.name` is deliberately NOT set on the
            // root span. The root represents a `chat()` invocation that may
            // span multiple model calls; only iteration spans correspond to
            // a single chat operation. Backends that map `operation.name=chat`
            // to a "generation" event (e.g. PostHog LLM Analytics) would
            // otherwise emit a duplicate generation for the wrapper span.
          }
        };
        // The consumer may swap in entirely different SpanOptions.
        const spanOptions = safeCall(
          "otel.onBeforeSpanStart",
          () => onBeforeSpanStart?.(info, baseOptions)
        ) ?? baseOptions;
        const rootSpan = tracer.startSpan(name, spanOptions);
        const enriched = safeCall(
          "otel.attributeEnricher",
          () => attributeEnricher?.(info)
        );
        if (enriched) rootSpan.setAttributes(enriched);
        // Fresh mutable state for this run, keyed by ctx in the WeakMap.
        stateByCtx.set(ctx, {
          rootSpan,
          currentIterationSpan: null,
          toolSpans: /* @__PURE__ */ new Map(),
          iterationCount: 0,
          assistantTextBuffer: "",
          assistantTextBufferTruncated: false,
          startTime: Date.now()
        });
      });
    },
    // Fired with the resolved model config; the "beforeModel" phase marks
    // the start of one model iteration: close the previous iteration span,
    // open a new one, and (optionally) record the prompt content.
    onConfig(ctx, config) {
      if (ctx.phase !== "beforeModel") return;
      safeCall("otel.onConfig", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        closeIterationSpan(state, ctx);
        const info = {
          kind: "iteration",
          ctx,
          iteration: ctx.iteration
        };
        const name = safeCall("otel.spanNameFormatter", () => spanNameFormatter?.(info)) ?? `chat ${ctx.model} #${ctx.iteration}`;
        const baseAttrs = {
          "gen_ai.system": ctx.provider,
          "gen_ai.operation.name": "chat",
          "gen_ai.request.model": ctx.model,
          "tanstack.ai.iteration": ctx.iteration
        };
        // Request parameters are attached only when explicitly configured.
        if (config.temperature !== void 0)
          baseAttrs["gen_ai.request.temperature"] = config.temperature;
        if (config.topP !== void 0)
          baseAttrs["gen_ai.request.top_p"] = config.topP;
        if (config.maxTokens !== void 0)
          baseAttrs["gen_ai.request.max_tokens"] = config.maxTokens;
        const baseOptions = {
          kind: SpanKind.CLIENT,
          attributes: baseAttrs
        };
        const spanOptions = safeCall(
          "otel.onBeforeSpanStart",
          () => onBeforeSpanStart?.(info, baseOptions)
        ) ?? baseOptions;
        // Parent the iteration span under the root span explicitly.
        const parentCtx = trace.setSpan(
          context.active(),
          state.rootSpan
        );
        let iterSpan;
        // context.with runs synchronously; iterSpan is set before use below.
        context.with(parentCtx, () => {
          iterSpan = tracer.startSpan(name, spanOptions, parentCtx);
        });
        const enriched = safeCall(
          "otel.attributeEnricher",
          () => attributeEnricher?.(info)
        );
        if (enriched) iterSpan.setAttributes(enriched);
        state.currentIterationSpan = iterSpan;
        // Reset the completion buffer for the new iteration.
        state.assistantTextBuffer = "";
        state.assistantTextBufferTruncated = false;
        if (captureContent) {
          // Emit each prompt message as a gen_ai.* span event (redacted).
          for (const sys of config.systemPrompts) {
            iterSpan.addEvent("gen_ai.system.message", {
              content: redactContent(sys)
            });
          }
          for (const m of config.messages) {
            const body = serializeContent(m.content);
            if (body.length === 0) continue;
            iterSpan.addEvent(messageEventName(m.role), {
              content: redactContent(body)
            });
          }
          // Additionally aggregate the same prompt into a single JSON
          // attribute for backends that read gen_ai.input.messages /
          // Langfuse observation attributes.
          const inputMessages = [];
          for (const sys of config.systemPrompts) {
            inputMessages.push({
              role: "system",
              content: redactContent(sys)
            });
          }
          for (const m of config.messages) {
            const body = serializeContent(m.content);
            if (body.length === 0) continue;
            inputMessages.push({
              role: m.role,
              content: redactContent(body)
            });
          }
          if (inputMessages.length > 0) {
            const inputJson = JSON.stringify(inputMessages);
            iterSpan.setAttribute("gen_ai.input.messages", inputJson);
            iterSpan.setAttribute("langfuse.observation.input", inputJson);
            // Only the first iteration's input represents the trace-level
            // input on the root span.
            if (state.iterationCount === 0) {
              state.rootSpan.setAttribute(
                "langfuse.observation.input",
                inputJson
              );
              state.rootSpan.setAttribute("langfuse.trace.input", inputJson);
            }
          }
        }
        state.iterationCount += 1;
      });
      // Middleware contract: returning undefined leaves config unchanged.
      return void 0;
    },
    // Streamed chunk handler: buffers assistant text and, on RUN_FINISHED,
    // stamps the iteration span with response metadata and output content.
    onChunk(ctx, chunk) {
      safeCall("otel.onChunk", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        if (captureContent && chunk.type === "TEXT_MESSAGE_CONTENT") {
          appendAssistantText(state, chunk.delta);
        }
        if (chunk.type !== "RUN_FINISHED") return;
        const span = state.currentIterationSpan;
        if (!span) return;
        if (chunk.finishReason) {
          // Semconv expects an array of finish reasons.
          span.setAttribute("gen_ai.response.finish_reasons", [
            chunk.finishReason
          ]);
        }
        if (chunk.model) span.setAttribute("gen_ai.response.model", chunk.model);
        if (chunk.usage) {
          span.setAttributes({
            "gen_ai.usage.input_tokens": chunk.usage.promptTokens,
            "gen_ai.usage.output_tokens": chunk.usage.completionTokens
          });
        }
        if (captureContent && state.assistantTextBuffer.length > 0) {
          const completion = redactContent(state.assistantTextBuffer);
          const outputJson = JSON.stringify([
            { role: "assistant", content: completion }
          ]);
          span.addEvent("gen_ai.choice", { content: completion });
          span.setAttribute("gen_ai.output.messages", outputJson);
          span.setAttribute("langfuse.observation.output", outputJson);
          // Root span carries the latest iteration's output as trace output.
          state.rootSpan.setAttribute("langfuse.observation.output", outputJson);
          state.rootSpan.setAttribute("langfuse.trace.output", outputJson);
          state.assistantTextBuffer = "";
          state.assistantTextBufferTruncated = false;
        }
      });
      return void 0;
    },
    // Token usage report: record histograms (when metered) and mirror the
    // counts onto the current iteration span (or root span as fallback).
    onUsage(ctx, usage) {
      safeCall("otel.onUsage", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        if (tokenHistogram) {
          const metricAttrs = {
            "gen_ai.system": ctx.provider,
            "gen_ai.operation.name": "chat",
            "gen_ai.request.model": ctx.model
          };
          tokenHistogram.record(usage.promptTokens, {
            ...metricAttrs,
            "gen_ai.token.type": "input"
          });
          tokenHistogram.record(usage.completionTokens, {
            ...metricAttrs,
            "gen_ai.token.type": "output"
          });
        }
        const span = state.currentIterationSpan ?? state.rootSpan;
        span.setAttributes({
          "gen_ai.usage.input_tokens": usage.promptTokens,
          "gen_ai.usage.output_tokens": usage.completionTokens
        });
      });
    },
    // Tool execution starts: open an execute_tool span under the current
    // iteration (or root) span and remember it by toolCallId.
    onBeforeToolCall(ctx, hookCtx) {
      safeCall("otel.onBeforeToolCall", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        const parent = state.currentIterationSpan ?? state.rootSpan;
        const info = {
          kind: "tool",
          ctx,
          toolName: hookCtx.toolName,
          toolCallId: hookCtx.toolCallId,
          iteration: state.iterationCount - 1
        };
        const name = safeCall("otel.spanNameFormatter", () => spanNameFormatter?.(info)) ?? `execute_tool ${hookCtx.toolName}`;
        const baseAttrs = {
          "gen_ai.tool.name": hookCtx.toolName,
          "gen_ai.tool.call.id": hookCtx.toolCallId,
          "gen_ai.tool.type": "function"
        };
        const baseOptions = {
          kind: SpanKind.INTERNAL,
          attributes: baseAttrs
        };
        const spanOptions = safeCall(
          "otel.onBeforeSpanStart",
          () => onBeforeSpanStart?.(info, baseOptions)
        ) ?? baseOptions;
        const parentCtx = trace.setSpan(context.active(), parent);
        let toolSpan;
        // context.with runs synchronously; toolSpan is set before use below.
        context.with(parentCtx, () => {
          toolSpan = tracer.startSpan(name, spanOptions, parentCtx);
        });
        const enriched = safeCall(
          "otel.attributeEnricher",
          () => attributeEnricher?.(info)
        );
        if (enriched) toolSpan.setAttributes(enriched);
        if (captureContent) {
          // Tool args may already be a string; otherwise serialize safely,
          // falling back to a sentinel on unserializable values (cycles etc.).
          const argsBody = typeof hookCtx.args === "string" ? hookCtx.args : safeCall(
            "otel.serializeToolArgs",
            () => JSON.stringify(hookCtx.args ?? null)
          ) ?? "[unserializable_tool_args]";
          const redactedArgs = redactContent(argsBody);
          const toolInputJson = JSON.stringify([
            { role: "tool", content: redactedArgs }
          ]);
          toolSpan.setAttribute("gen_ai.input.messages", toolInputJson);
          toolSpan.setAttribute("langfuse.observation.input", toolInputJson);
        }
        // Track the open span so onAfterToolCall / onError / onAbort /
        // onFinish can close it by id.
        state.toolSpans.set(hookCtx.toolCallId, {
          span: toolSpan,
          toolName: hookCtx.toolName
        });
      });
      return void 0;
    },
    // Tool execution finished: stamp outcome (and error/result content),
    // notify onSpanEnd, end the span, and drop it from the open-span map.
    onAfterToolCall(ctx, info) {
      safeCall("otel.onAfterToolCall", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        const entry = state.toolSpans.get(info.toolCallId);
        if (!entry) return;
        const { span: toolSpan } = entry;
        const outcome = info.ok ? "success" : "error";
        toolSpan.setAttribute("tanstack.ai.tool.outcome", outcome);
        if (!info.ok && info.error !== void 0) {
          toolSpan.recordException(info.error);
          toolSpan.setStatus({
            code: SpanStatusCode.ERROR,
            message: errorMessage(info.error)
          });
        }
        if (captureContent) {
          const body = typeof info.result === "string" ? info.result : safeCall(
            "otel.serializeToolResult",
            () => JSON.stringify(info.result ?? null)
          ) ?? "[unserializable_tool_result]";
          const redactedBody = redactContent(body);
          // The tool result is also surfaced as an event on the iteration
          // span, mirroring how the model will see it in the transcript.
          if (state.currentIterationSpan) {
            state.currentIterationSpan.addEvent("gen_ai.tool.message", {
              content: redactedBody,
              tool_call_id: info.toolCallId
            });
          }
          const toolOutputJson = JSON.stringify([
            { role: "tool", content: redactedBody }
          ]);
          toolSpan.setAttribute("gen_ai.output.messages", toolOutputJson);
          toolSpan.setAttribute("langfuse.observation.output", toolOutputJson);
        }
        safeCall(
          "otel.onSpanEnd",
          () => onSpanEnd?.(
            {
              kind: "tool",
              ctx,
              toolName: info.toolName,
              toolCallId: info.toolCallId,
              iteration: state.iterationCount - 1
            },
            toolSpan
          )
        );
        toolSpan.end();
        state.toolSpans.delete(info.toolCallId);
      });
    },
    // Run failed: error-close the iteration span, every still-open tool
    // span, and the root span; record the duration metric with error.type;
    // then discard the per-run state.
    onError(ctx, info) {
      safeCall("otel.onError", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        const errType = errorTypeName(info.error);
        const message = errorMessage(info.error);
        const exception = info.error;
        if (state.currentIterationSpan) {
          state.currentIterationSpan.recordException(exception);
          state.currentIterationSpan.setStatus({
            code: SpanStatusCode.ERROR,
            message
          });
          safeCall(
            "otel.onSpanEnd",
            () => onSpanEnd?.(
              {
                kind: "iteration",
                ctx,
                iteration: state.iterationCount - 1
              },
              state.currentIterationSpan
            )
          );
          state.currentIterationSpan.end();
          state.currentIterationSpan = null;
        }
        for (const [id, entry] of state.toolSpans) {
          const { span, toolName } = entry;
          span.recordException(exception);
          span.setStatus({ code: SpanStatusCode.ERROR, message });
          safeCall(
            "otel.onSpanEnd",
            () => onSpanEnd?.(
              {
                kind: "tool",
                ctx,
                toolCallId: id,
                toolName,
                iteration: state.iterationCount - 1
              },
              span
            )
          );
          span.end();
          state.toolSpans.delete(id);
        }
        state.rootSpan.recordException(exception);
        state.rootSpan.setStatus({ code: SpanStatusCode.ERROR, message });
        if (durationHistogram) {
          // info.duration is in milliseconds; histogram unit is seconds.
          durationHistogram.record(info.duration / 1e3, {
            "gen_ai.system": ctx.provider,
            "gen_ai.operation.name": "chat",
            "gen_ai.request.model": ctx.model,
            "error.type": errType
          });
        }
        safeCall(
          "otel.onSpanEnd",
          () => onSpanEnd?.({ kind: "chat", ctx }, state.rootSpan)
        );
        state.rootSpan.end();
        stateByCtx.delete(ctx);
      });
    },
    // Run cancelled: same closing sequence as onError, but every span is
    // marked with the "cancelled" completion reason/status instead of the
    // underlying exception, and error.type is fixed to "cancelled".
    onAbort(ctx, info) {
      safeCall("otel.onAbort", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        const closeCancelled = (span) => {
          span.setAttribute("tanstack.ai.completion.reason", "cancelled");
          span.setStatus({ code: SpanStatusCode.ERROR, message: "cancelled" });
        };
        if (state.currentIterationSpan) {
          closeCancelled(state.currentIterationSpan);
          safeCall(
            "otel.onSpanEnd",
            () => onSpanEnd?.(
              {
                kind: "iteration",
                ctx,
                iteration: state.iterationCount - 1
              },
              state.currentIterationSpan
            )
          );
          state.currentIterationSpan.end();
          state.currentIterationSpan = null;
        }
        for (const [id, entry] of state.toolSpans) {
          const { span, toolName } = entry;
          closeCancelled(span);
          safeCall(
            "otel.onSpanEnd",
            () => onSpanEnd?.(
              {
                kind: "tool",
                ctx,
                toolCallId: id,
                toolName,
                iteration: state.iterationCount - 1
              },
              span
            )
          );
          span.end();
          state.toolSpans.delete(id);
        }
        closeCancelled(state.rootSpan);
        if (durationHistogram) {
          // info.duration is in milliseconds; histogram unit is seconds.
          durationHistogram.record(info.duration / 1e3, {
            "gen_ai.system": ctx.provider,
            "gen_ai.operation.name": "chat",
            "gen_ai.request.model": ctx.model,
            "error.type": "cancelled"
          });
        }
        safeCall(
          "otel.onSpanEnd",
          () => onSpanEnd?.({ kind: "chat", ctx }, state.rootSpan)
        );
        state.rootSpan.end();
        stateByCtx.delete(ctx);
      });
    },
    // Run completed normally: close any tool spans that never reported back
    // (outcome "unknown"), close the last iteration span, record metrics and
    // final attributes on the root span, end it, and discard state.
    onFinish(ctx, info) {
      safeCall("otel.onFinish", () => {
        const state = stateByCtx.get(ctx);
        if (!state) return;
        for (const [id, entry] of state.toolSpans) {
          const { span, toolName } = entry;
          // Tool span left open at finish: onAfterToolCall never fired.
          span.setAttribute("tanstack.ai.tool.outcome", "unknown");
          safeCall(
            "otel.onSpanEnd",
            () => onSpanEnd?.(
              {
                kind: "tool",
                ctx,
                toolCallId: id,
                toolName,
                iteration: state.iterationCount - 1
              },
              span
            )
          );
          span.end();
          state.toolSpans.delete(id);
        }
        closeIterationSpan(state, ctx);
        if (durationHistogram) {
          // info.duration is in milliseconds; histogram unit is seconds.
          durationHistogram.record(info.duration / 1e3, {
            "gen_ai.system": ctx.provider,
            "gen_ai.operation.name": "chat",
            "gen_ai.request.model": ctx.model
          });
        }
        if (info.usage) {
          state.rootSpan.setAttributes({
            "gen_ai.usage.input_tokens": info.usage.promptTokens,
            "gen_ai.usage.output_tokens": info.usage.completionTokens
          });
        }
        if (info.finishReason) {
          state.rootSpan.setAttribute("gen_ai.response.finish_reasons", [
            info.finishReason
          ]);
        }
        state.rootSpan.setAttribute(
          "tanstack.ai.iterations",
          state.iterationCount
        );
        safeCall(
          "otel.onSpanEnd",
          () => onSpanEnd?.({ kind: "chat", ctx }, state.rootSpan)
        );
        state.rootSpan.end();
        stateByCtx.delete(ctx);
      });
    }
  };
}
// Public API: the OTel middleware factory is this module's sole export.
export {
  otelMiddleware
};
//# sourceMappingURL=otel.js.map