@traceloop/instrumentation-langchain
Version: 0.19.0
OpenTelemetry instrumentation for LangchainJS
464 lines (458 loc) • 20.2 kB
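A minimal registration sketch (assumes an OpenTelemetry provider is already configured elsewhere; only LangChainInstrumentation and its traceContent option come from this package, the rest is standard OpenTelemetry setup):

const { registerInstrumentations } = require("@opentelemetry/instrumentation");
const { LangChainInstrumentation } = require("@traceloop/instrumentation-langchain");

// traceContent defaults to true; set it to false to keep prompts and completions out of span attributes
const langchainInstrumentation = new LangChainInstrumentation({ traceContent: true });

registerInstrumentations({
  instrumentations: [langchainInstrumentation],
});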
JavaScript
'use strict';
var api = require('@opentelemetry/api');
var instrumentation = require('@opentelemetry/instrumentation');
var aiSemanticConventions = require('@traceloop/ai-semantic-conventions');
var tslib = require('tslib');
var base = require('@langchain/core/callbacks/base');
/*
* Copyright Traceloop
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
class TraceloopCallbackHandler extends base.BaseCallbackHandler {
constructor(tracer, traceContent = true) {
super();
this.name = "traceloop_callback_handler";
this.spans = new Map();
this.tracer = tracer;
this.traceContent = traceContent;
}
handleChatModelStart(llm, messages, runId, _parentRunId, _extraParams, _tags, _metadata, _runName) {
return tslib.__awaiter(this, void 0, void 0, function* () {
var _a;
const className = ((_a = llm.id) === null || _a === void 0 ? void 0 : _a[llm.id.length - 1]) || "unknown";
const vendor = this.detectVendor(llm);
const spanBaseName = this.convertClassNameToSpanName(className);
// Create single LLM span like Python implementation
const span = this.tracer.startSpan(spanBaseName, {
kind: api.SpanKind.CLIENT,
});
const flatMessages = messages.flat();
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_SYSTEM]: vendor,
[aiSemanticConventions.SpanAttributes.LLM_REQUEST_TYPE]: "chat",
});
// Add prompts if tracing content
if (this.traceContent && flatMessages.length > 0) {
flatMessages.forEach((message, idx) => {
const role = this.mapMessageTypeToRole(message._getType());
span.setAttributes({
[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${idx}.role`]: role,
[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${idx}.content`]: typeof message.content === "string"
? message.content
: JSON.stringify(message.content),
});
});
}
this.spans.set(runId, { span, runId });
});
}
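// Illustrative attribute layout (the example message contents are assumed): for a
// chat call with one system and one human message, the loop above records on the
// single client span
//   `${SpanAttributes.LLM_PROMPTS}.0.role`    = "system"
//   `${SpanAttributes.LLM_PROMPTS}.0.content` = "You are a helpful assistant."
//   `${SpanAttributes.LLM_PROMPTS}.1.role`    = "user"
//   `${SpanAttributes.LLM_PROMPTS}.1.content` = "Hello!"
// alongside LLM_SYSTEM (the detected vendor) and LLM_REQUEST_TYPE = "chat".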
handleLLMStart(llm, prompts, runId, _parentRunId, _extraParams, _tags, _metadata, _runName) {
return tslib.__awaiter(this, void 0, void 0, function* () {
var _a;
const className = ((_a = llm.id) === null || _a === void 0 ? void 0 : _a[llm.id.length - 1]) || "unknown";
const vendor = this.detectVendor(llm);
const spanBaseName = this.convertClassNameToSpanName(className);
// Create single LLM span like handleChatModelStart
const span = this.tracer.startSpan(spanBaseName, {
kind: api.SpanKind.CLIENT,
});
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_SYSTEM]: vendor,
[aiSemanticConventions.SpanAttributes.LLM_REQUEST_TYPE]: "completion",
});
if (this.traceContent && prompts.length > 0) {
prompts.forEach((prompt, idx) => {
span.setAttributes({
[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${idx}.role`]: "user",
[`${aiSemanticConventions.SpanAttributes.LLM_PROMPTS}.${idx}.content`]: prompt,
});
});
}
this.spans.set(runId, { span, runId });
});
}
handleLLMEnd(output, runId, _parentRunId, _tags, _extraParams) {
return tslib.__awaiter(this, void 0, void 0, function* () {
var _a, _b;
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
if (this.traceContent &&
output.generations &&
output.generations.length > 0) {
output.generations.forEach((generation, idx) => {
if (generation && generation.length > 0) {
span.setAttributes({
[`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.${idx}.role`]: "assistant",
[`${aiSemanticConventions.SpanAttributes.LLM_COMPLETIONS}.${idx}.content`]: generation[0].text,
});
}
});
}
// Extract model name from response only, like Python implementation
const modelName = this.extractModelNameFromResponse(output);
// Set both request and response model attributes like Python implementation
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_REQUEST_MODEL]: modelName || "unknown",
[aiSemanticConventions.SpanAttributes.LLM_RESPONSE_MODEL]: modelName || "unknown",
});
// Add usage metrics if available
if ((_a = output.llmOutput) === null || _a === void 0 ? void 0 : _a.usage) {
const usage = output.llmOutput.usage;
if (usage.input_tokens) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: usage.input_tokens,
});
}
if (usage.output_tokens) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: usage.output_tokens,
});
}
const totalTokens = (usage.input_tokens || 0) + (usage.output_tokens || 0);
if (totalTokens > 0) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: totalTokens,
});
}
}
// Also check for tokenUsage format (for compatibility)
if ((_b = output.llmOutput) === null || _b === void 0 ? void 0 : _b.tokenUsage) {
const usage = output.llmOutput.tokenUsage;
if (usage.promptTokens) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_PROMPT_TOKENS]: usage.promptTokens,
});
}
if (usage.completionTokens) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_COMPLETION_TOKENS]: usage.completionTokens,
});
}
if (usage.totalTokens) {
span.setAttributes({
[aiSemanticConventions.SpanAttributes.LLM_USAGE_TOTAL_TOKENS]: usage.totalTokens,
});
}
}
span.setStatus({ code: api.SpanStatusCode.OK });
span.end();
this.spans.delete(runId);
});
}
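// Two token-usage shapes are handled above; both examples below are typical
// provider outputs and are assumed, not exhaustive:
//   output.llmOutput.usage      -> { input_tokens: 12, output_tokens: 34 }                      (e.g. Anthropic/Bedrock style)
//   output.llmOutput.tokenUsage -> { promptTokens: 12, completionTokens: 34, totalTokens: 46 }  (e.g. OpenAI style)
// For the first shape the total is derived as input_tokens + output_tokens.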
handleChatModelEnd(output, runId, _parentRunId, _tags, _extraParams) {
return tslib.__awaiter(this, void 0, void 0, function* () {
// Same as handleLLMEnd for chat models
return this.handleLLMEnd(output, runId, _parentRunId, _tags, _extraParams);
});
}
handleLLMError(err, runId, _parentRunId, _tags, _extraParams) {
return tslib.__awaiter(this, void 0, void 0, function* () {
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
span.recordException(err);
span.setStatus({ code: api.SpanStatusCode.ERROR, message: err.message });
span.end();
this.spans.delete(runId);
});
}
handleChainStart(chain, inputs, runId, _parentRunId, _tags, metadata, runType, runName) {
return tslib.__awaiter(this, void 0, void 0, function* () {
var _a;
const chainName = ((_a = chain.id) === null || _a === void 0 ? void 0 : _a[chain.id.length - 1]) || "unknown";
const spanName = `${chainName}.workflow`;
const span = this.tracer.startSpan(spanName, {
kind: api.SpanKind.CLIENT,
});
span.setAttributes({
"traceloop.span.kind": "workflow",
"traceloop.workflow.name": runName || chainName,
});
if (this.traceContent) {
span.setAttributes({
"traceloop.entity.input": JSON.stringify(inputs),
});
}
this.spans.set(runId, { span, runId });
});
}
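// Resulting span sketch for a chain whose class name is "RunnableSequence"
// (the name is assumed for illustration):
//   span name: "RunnableSequence.workflow"
//   traceloop.span.kind     = "workflow"
//   traceloop.workflow.name = runName || "RunnableSequence"
//   traceloop.entity.input  = JSON.stringify(inputs)   // only when traceContent is true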
handleChainEnd(outputs, runId, _parentRunId, _tags, _kwargs) {
return tslib.__awaiter(this, void 0, void 0, function* () {
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
if (this.traceContent) {
span.setAttributes({
"traceloop.entity.output": JSON.stringify(outputs),
});
}
span.setStatus({ code: api.SpanStatusCode.OK });
span.end();
this.spans.delete(runId);
});
}
handleChainError(err, runId, _parentRunId, _tags, _kwargs) {
return tslib.__awaiter(this, void 0, void 0, function* () {
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
span.recordException(err);
span.setStatus({ code: api.SpanStatusCode.ERROR, message: err.message });
span.end();
this.spans.delete(runId);
});
}
handleToolStart(tool, input, runId, _parentRunId, _tags, _metadata, _runName) {
return tslib.__awaiter(this, void 0, void 0, function* () {
var _a;
const toolName = ((_a = tool.id) === null || _a === void 0 ? void 0 : _a[tool.id.length - 1]) || "unknown";
const spanName = `${toolName}.task`;
const span = this.tracer.startSpan(spanName, {
kind: api.SpanKind.CLIENT,
});
span.setAttributes({
"traceloop.span.kind": "task",
"traceloop.entity.name": toolName,
});
if (this.traceContent) {
span.setAttributes({
"traceloop.entity.input": JSON.stringify({ args: [input] }),
});
}
this.spans.set(runId, { span, runId });
});
}
handleToolEnd(output, runId, _parentRunId, _tags) {
return tslib.__awaiter(this, void 0, void 0, function* () {
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
if (this.traceContent) {
span.setAttributes({
"traceloop.entity.output": JSON.stringify(output),
});
}
span.setStatus({ code: api.SpanStatusCode.OK });
span.end();
this.spans.delete(runId);
});
}
handleToolError(err, runId, _parentRunId, _tags) {
return tslib.__awaiter(this, void 0, void 0, function* () {
const spanData = this.spans.get(runId);
if (!spanData)
return;
const { span } = spanData;
span.recordException(err);
span.setStatus({ code: api.SpanStatusCode.ERROR, message: err.message });
span.end();
this.spans.delete(runId);
});
}
extractModelNameFromResponse(output) {
// Follow Python implementation - extract from llm_output first
if (output.llmOutput) {
const modelName = output.llmOutput.model_name ||
output.llmOutput.model_id ||
output.llmOutput.model;
if (modelName && typeof modelName === "string") {
return modelName;
}
}
return null;
}
convertClassNameToSpanName(className) {
// Convert PascalCase to lowercase with dots; each capital letter (including
// consecutive ones) starts a new segment:
//   BedrockChat -> bedrock.chat
//   ChatOpenAI  -> chat.open.a.i
return className.replace(/([A-Z])/g, (match, char, index) => {
return index === 0 ? char.toLowerCase() : `.${char.toLowerCase()}`;
});
}
detectVendor(llm) {
var _a;
const className = ((_a = llm.id) === null || _a === void 0 ? void 0 : _a[llm.id.length - 1]) || "";
if (!className) {
return "Langchain";
}
// Follow Python implementation with exact matches and patterns
// Ordered by specificity (most specific first)
// Azure (most specific - check first)
if (["AzureChatOpenAI", "AzureOpenAI", "AzureOpenAIEmbeddings"].includes(className) ||
className.toLowerCase().includes("azure")) {
return "Azure";
}
// OpenAI
if (["ChatOpenAI", "OpenAI", "OpenAIEmbeddings"].includes(className) ||
className.toLowerCase().includes("openai")) {
return "openai";
}
// AWS Bedrock
if (["ChatBedrock", "BedrockEmbeddings", "Bedrock", "BedrockChat"].includes(className) ||
className.toLowerCase().includes("bedrock") ||
className.toLowerCase().includes("aws")) {
return "AWS";
}
// Anthropic
if (["ChatAnthropic", "AnthropicLLM"].includes(className) ||
className.toLowerCase().includes("anthropic")) {
return "Anthropic";
}
// Google (Vertex/PaLM/Gemini)
if ([
"ChatVertexAI",
"VertexAI",
"VertexAIEmbeddings",
"ChatGoogleGenerativeAI",
"GoogleGenerativeAI",
"GooglePaLM",
"ChatGooglePaLM",
].includes(className) ||
className.toLowerCase().includes("vertex") ||
className.toLowerCase().includes("google") ||
className.toLowerCase().includes("palm") ||
className.toLowerCase().includes("gemini")) {
return "Google";
}
// Cohere
if (["ChatCohere", "CohereEmbeddings", "Cohere"].includes(className) ||
className.toLowerCase().includes("cohere")) {
return "Cohere";
}
// HuggingFace
if ([
"HuggingFacePipeline",
"HuggingFaceTextGenInference",
"HuggingFaceEmbeddings",
"ChatHuggingFace",
].includes(className) ||
className.toLowerCase().includes("huggingface")) {
return "HuggingFace";
}
// Ollama
if (["ChatOllama", "OllamaEmbeddings", "Ollama"].includes(className) ||
className.toLowerCase().includes("ollama")) {
return "Ollama";
}
// Together
if (["Together", "ChatTogether"].includes(className) ||
className.toLowerCase().includes("together")) {
return "TogetherAI";
}
// Replicate
if (["Replicate", "ChatReplicate"].includes(className) ||
className.toLowerCase().includes("replicate")) {
return "Replicate";
}
return "Langchain";
}
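// detectVendor and the *Start handlers read the class name from the last
// element of the serialized id array; a typical (assumed) shape is
//   llm.id = ["langchain", "chat_models", "openai", "ChatOpenAI"]
// which yields className "ChatOpenAI" and vendor "openai".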
mapMessageTypeToRole(messageType) {
// Map LangChain message types to standard OpenTelemetry roles
switch (messageType) {
case "human":
return "user";
case "ai":
return "assistant";
case "system":
return "system";
case "function":
return "tool";
default:
return messageType;
}
}
}
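// The handler can also be attached manually as a LangChain callback (a sketch;
// the tracer name and the model/invoke call are assumed):
//
//   const handler = new TraceloopCallbackHandler(api.trace.getTracer("example"), true);
//   await model.invoke(messages, { callbacks: [handler] });
//
// The LangChainInstrumentation below normally injects the handler automatically
// by patching CallbackManager._configureSync, so this is only needed when the
// automatic path is not used.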
var version = "0.19.0";
/*
* Copyright Traceloop
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
class LangChainInstrumentation extends instrumentation.InstrumentationBase {
constructor(config = {}) {
super("@traceloop/instrumentation-langchain", version, config);
// Patch CallbackManager directly here since automatic module detection does not work for @langchain/core
this.instrumentCallbackManagerDirectly();
}
manuallyInstrument({ callbackManagerModule, }) {
if (callbackManagerModule) {
this._diag.debug("Manually instrumenting @langchain/core/callbacks/manager");
this.patchCallbackManager(callbackManagerModule.CallbackManager);
}
}
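// Usage sketch for manuallyInstrument, e.g. when the require() call in
// instrumentCallbackManagerDirectly cannot resolve @langchain/core (the dynamic
// import is the caller's responsibility):
//
//   const callbackManagerModule = await import("@langchain/core/callbacks/manager");
//   instrumentation.manuallyInstrument({ callbackManagerModule });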
init() {
// Return empty array since we handle patching in constructor
return [];
}
instrumentCallbackManagerDirectly() {
try {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const callbackManagerModule = require("@langchain/core/callbacks/manager");
if (callbackManagerModule === null || callbackManagerModule === void 0 ? void 0 : callbackManagerModule.CallbackManager) {
this.patchCallbackManager(callbackManagerModule.CallbackManager);
}
}
catch (error) {
this._diag.debug("Error instrumenting callback manager:", error);
}
}
patchCallbackManager(CallbackManager) {
const callbackManagerAny = CallbackManager;
if (callbackManagerAny._configureSync &&
!callbackManagerAny._traceloopPatched) {
const originalConfigureSync = callbackManagerAny._configureSync;
// eslint-disable-next-line @typescript-eslint/no-this-alias
const self = this;
callbackManagerAny._configureSync = function (inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata) {
// Add our callback handler to inheritable handlers
const callbackHandler = new TraceloopCallbackHandler(self.tracer, self._shouldSendPrompts());
const updatedInheritableHandlers = inheritableHandlers && Array.isArray(inheritableHandlers)
? [...inheritableHandlers, callbackHandler]
: [callbackHandler];
return originalConfigureSync.call(this, updatedInheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata);
};
// Mark as patched to avoid double patching
callbackManagerAny._traceloopPatched = true;
}
}
_shouldSendPrompts() {
const contextShouldSendPrompts = api.context
.active()
.getValue(aiSemanticConventions.CONTEXT_KEY_ALLOW_TRACE_CONTENT);
if (contextShouldSendPrompts !== undefined) {
return !!contextShouldSendPrompts;
}
return this._config.traceContent !== undefined
? this._config.traceContent
: true;
}
}
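// Content tracing can be turned off per instance or per request (a sketch; the
// chain and its input are assumed):
//
//   new LangChainInstrumentation({ traceContent: false });   // per instance
//
//   api.context.with(
//     api.context.active().setValue(aiSemanticConventions.CONTEXT_KEY_ALLOW_TRACE_CONTENT, false),
//     () => chain.invoke(input),                              // per request
//   );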
exports.LangChainInstrumentation = LangChainInstrumentation;
exports.TraceloopCallbackHandler = TraceloopCallbackHandler;
//# sourceMappingURL=index.js.map