@traceloop/instrumentation-llamaindex
Version: 
Llamaindex Instrumentation
19 lines • 1.06 kB
TypeScript
import type * as llamaindex from "llamaindex";
import { Tracer, Span, Context, DiagLogger } from "@opentelemetry/api";
import { LlamaIndexInstrumentationConfig } from "./types";
// Local shorthand for the llamaindex LLM interface whose `chat` method is wrapped below.
type LLM = llamaindex.LLM;
// Non-streaming result shapes an LLM call may return (chat or completion).
type ResponseType = llamaindex.ChatResponse | llamaindex.CompletionResponse;
// Streaming result shapes: an async iterable of chat chunks, or of completion responses.
type AsyncResponseType = AsyncIterable<llamaindex.ChatResponseChunk> | AsyncIterable<llamaindex.CompletionResponse>;
/**
 * Instruments LlamaIndex LLM implementations by wrapping their `chat`
 * method with OpenTelemetry tracing.
 *
 * NOTE(review): this is an ambient declaration (.d.ts) — method bodies live
 * in the compiled implementation file; comments here describe only what the
 * signatures establish.
 */
export declare class CustomLLMInstrumentation {
    private config;
    private diag;
    private tracer;
    /**
     * @param config - Instrumentation configuration options.
     * @param diag - OpenTelemetry diagnostic logger.
     * @param tracer - Lazy accessor returning the Tracer; a thunk rather than
     *   a Tracer instance, presumably so the tracer can be resolved at call
     *   time — confirm against the implementation.
     */
    constructor(config: LlamaIndexInstrumentationConfig, diag: DiagLogger, tracer: () => Tracer);
    /**
     * Produces a patch function for `LLM.chat`: given the original method,
     * it returns a replacement with the same `this` binding and parameter
     * list. `className` names the concrete LLM class being instrumented
     * (presumably used for span naming/attributes — verify in implementation).
     */
    chatWrapper({ className }: {
        className: string;
    }): (original: LLM["chat"]) => (this: LLM, ...args: Parameters<LLM["chat"]>) => any;
    /**
     * Processes a non-streaming chat/completion result against the given
     * span and LLM metadata; type-preserving — returns a value of the same
     * type `T` as the input result.
     */
    handleResponse<T extends ResponseType>(result: T, span: Span, metadata: llamaindex.LLMMetadata): T;
    /**
     * Streaming counterpart of `handleResponse`: takes the async-iterable
     * result plus the active execution `Context` (needed because stream
     * consumption happens after the original call returns) and returns an
     * iterable of the same type `T`.
     */
    handleStreamingResponse<T extends AsyncResponseType>(result: T, span: Span, execContext: Context, metadata: llamaindex.LLMMetadata): T;
}
export {};
//# sourceMappingURL=custom-llm-instrumentation.d.ts.map