// @traceloop/instrumentation-langchain
// Version: (unspecified)
// OpenTelemetry instrumentation for LangchainJS
// Type declarations — 62 lines (57 loc), 3.58 kB, TypeScript
import { InstrumentationConfig, InstrumentationBase, InstrumentationModuleDefinition } from '@opentelemetry/instrumentation';
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import { BaseMessage } from '@langchain/core/messages';
import { LLMResult } from '@langchain/core/outputs';
import { Serialized } from '@langchain/core/load/serializable';
import { ChainValues } from '@langchain/core/utils/types';
import { Tracer } from '@opentelemetry/api';
/**
 * Configuration accepted by {@link LangChainInstrumentation}, extending the
 * standard OpenTelemetry `InstrumentationConfig` with Traceloop-specific
 * options.
 */
interface LangChainInstrumentationConfig extends InstrumentationConfig {
    /**
     * Whether to log prompts, completions and embeddings on traces.
     * @default true
     */
    traceContent?: boolean;
    /**
     * A custom logger to log any exceptions that happen during span creation.
     */
    exceptionLogger?: (e: Error) => void;
}
/**
 * OpenTelemetry instrumentation for LangchainJS.
 *
 * Hooks into LangChain's callback-manager module (via the standard
 * `InstrumentationBase` module-definition mechanism, see {@link init}) so
 * that LangChain runs produce OpenTelemetry spans.
 * NOTE(review): the actual patching logic lives in the implementation file,
 * not in these declarations — confirm specifics there.
 */
declare class LangChainInstrumentation extends InstrumentationBase {
    // Active configuration; see LangChainInstrumentationConfig for options.
    protected _config: LangChainInstrumentationConfig;
    constructor(config?: LangChainInstrumentationConfig);
    /**
     * Manually instruments an already-loaded callback-manager module, for
     * environments where automatic module interception does not apply
     * (presumably bundled apps — NOTE(review): inferred from the name and
     * shape; verify against the implementation).
     */
    manuallyInstrument({ callbackManagerModule, }: {
        callbackManagerModule?: any;
    }): void;
    /** Returns the module definitions OpenTelemetry uses to auto-patch. */
    protected init(): InstrumentationModuleDefinition[];
    private instrumentCallbackManagerDirectly;
    private patchCallbackManager;
    // Presumably gated by traceContent — confirm in implementation.
    private _shouldSendPrompts;
}
/**
 * A LangChain callback handler that records OpenTelemetry spans for LLM,
 * chat-model, chain and tool runs.
 *
 * Every lifecycle hook receives LangChain's `runId`; the private `spans`
 * field presumably maps runIds to open spans so `*End`/`*Error` hooks can
 * close the span opened by the matching `*Start` hook — NOTE(review):
 * confirm against the implementation.
 */
declare class TraceloopCallbackHandler extends BaseCallbackHandler {
    name: string;
    // Tracer used to create spans; supplied by the caller.
    private tracer;
    // Open spans for in-flight runs — type not visible in this declaration.
    private spans;
    // Mirrors LangChainInstrumentationConfig.traceContent.
    private traceContent;
    /**
     * @param tracer       OpenTelemetry tracer used to create spans.
     * @param traceContent Whether prompts/completions are recorded on spans.
     */
    constructor(tracer: Tracer, traceContent?: boolean);
    // --- LLM / chat-model lifecycle hooks ---
    handleChatModelStart(llm: Serialized, messages: BaseMessage[][], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, _tags?: string[], _metadata?: Record<string, unknown>, _runName?: string): Promise<void>;
    handleLLMStart(llm: Serialized, prompts: string[], runId: string, _parentRunId?: string, _extraParams?: Record<string, unknown>, _tags?: string[], _metadata?: Record<string, unknown>, _runName?: string): Promise<void>;
    handleLLMEnd(output: LLMResult, runId: string, _parentRunId?: string, _tags?: string[], _extraParams?: Record<string, unknown>): Promise<void>;
    handleChatModelEnd(output: LLMResult, runId: string, _parentRunId?: string, _tags?: string[], _extraParams?: Record<string, unknown>): Promise<void>;
    handleLLMError(err: Error, runId: string, _parentRunId?: string, _tags?: string[], _extraParams?: Record<string, unknown>): Promise<void>;
    // --- Chain lifecycle hooks ---
    handleChainStart(chain: Serialized, inputs: ChainValues, runId: string, _parentRunId?: string, _tags?: string[], metadata?: Record<string, unknown>, runType?: string, runName?: string): Promise<void>;
    handleChainEnd(outputs: ChainValues, runId: string, _parentRunId?: string, _tags?: string[], _kwargs?: {
        inputs?: Record<string, unknown>;
    }): Promise<void>;
    handleChainError(err: Error, runId: string, _parentRunId?: string, _tags?: string[], _kwargs?: {
        inputs?: Record<string, unknown>;
    }): Promise<void>;
    // --- Tool lifecycle hooks ---
    handleToolStart(tool: Serialized, input: string, runId: string, _parentRunId?: string, _tags?: string[], _metadata?: Record<string, unknown>, _runName?: string): Promise<void>;
    handleToolEnd(output: any, runId: string, _parentRunId?: string, _tags?: string[]): Promise<void>;
    handleToolError(err: Error, runId: string, _parentRunId?: string, _tags?: string[]): Promise<void>;
    // --- Internal helpers (semantics not visible in this declaration) ---
    private extractModelNameFromResponse;
    private convertClassNameToSpanName;
    private detectVendor;
    private mapMessageTypeToRole;
}
// Public API: the instrumentation class and the standalone callback handler,
// plus the config type for consumers constructing the instrumentation.
export { LangChainInstrumentation, TraceloopCallbackHandler };
export type { LangChainInstrumentationConfig };