/**
 * @posthog/ai — PostHog Node.js AI integrations.
 * Type declarations for the LangChain callback handler.
 */
import { PostHog } from 'posthog-node';
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import { Serialized } from '@langchain/core/load/serializable';
import { ChainValues } from '@langchain/core/utils/types';
import { LLMResult } from '@langchain/core/outputs';
import { AgentAction, AgentFinish } from '@langchain/core/agents';
import { DocumentInterface } from '@langchain/core/documents';
import { BaseMessage } from '@langchain/core/messages';
/**
 * LangChain callback handler that forwards run lifecycle events
 * (chains, LLM/chat-model calls, tools, retrievers, agents) to PostHog.
 *
 * Register an instance in a LangChain `callbacks` array; each handler
 * method below is invoked by LangChain at the corresponding point in a
 * run's lifecycle. Implementation bodies are not visible in this
 * declaration file, so per-event capture semantics are not documented here.
 */
declare class LangChainCallbackHandler extends BaseCallbackHandler {
// Handler name reported to LangChain (inherited contract from BaseCallbackHandler).
name: string;
// PostHog client used to capture events.
private client;
// Optional distinct ID to attribute captured events to a person.
private distinctId?;
// Optional externally supplied trace ID; presumably generated internally
// when omitted (see _getTraceId) — TODO confirm against implementation.
private traceId?;
// Extra properties merged into captured events.
private properties;
// When enabled, presumably redacts inputs/outputs from captured events —
// TODO confirm against implementation.
private privacyMode;
// PostHog group identifiers attached to captured events.
private groups;
// Enables debug logging (see _logDebugEvent).
private debug;
// Per-run bookkeeping keyed by runId — shape not visible here.
private runs;
// Parent/child run relationships (see _setParentOfRun/_popParentOfRun/_findRootRun).
private parentTree;
/**
 * @param options.client - PostHog client instance (required).
 * @param options.distinctId - Person to attribute events to.
 * @param options.traceId - Trace ID to group events under.
 * @param options.properties - Additional event properties.
 * @param options.privacyMode - Presumably omits sensitive payloads when true.
 * @param options.groups - PostHog group mapping.
 * @param options.debug - Enable debug logging.
 */
constructor(options: {
client: PostHog;
distinctId?: string | number;
traceId?: string | number;
properties?: Record<string, any>;
privacyMode?: boolean;
groups?: Record<string, any>;
debug?: boolean;
});
// --- Chain lifecycle -------------------------------------------------
/** Called by LangChain when a chain run starts. */
handleChainStart(chain: Serialized, inputs: ChainValues, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, runType?: string, runName?: string): void;
/** Called when a chain run completes successfully. */
handleChainEnd(outputs: ChainValues, runId: string, parentRunId?: string, tags?: string[], kwargs?: {
inputs?: Record<string, unknown>;
}): void;
/** Called when a chain run fails with an error. */
handleChainError(error: Error, runId: string, parentRunId?: string, tags?: string[], kwargs?: {
inputs?: Record<string, unknown>;
}): void;
// --- LLM / chat-model lifecycle --------------------------------------
/** Called when a chat model run starts (message-based input). */
handleChatModelStart(serialized: Serialized, messages: BaseMessage[][], runId: string, parentRunId?: string, extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
/** Called when a plain LLM run starts (string-prompt input). */
handleLLMStart(serialized: Serialized, prompts: string[], runId: string, parentRunId?: string, extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
/** Called when an LLM/chat-model run completes with a result. */
handleLLMEnd(output: LLMResult, runId: string, parentRunId?: string, tags?: string[], extraParams?: Record<string, unknown>): void;
/** Called when an LLM/chat-model run fails with an error. */
handleLLMError(err: Error, runId: string, parentRunId?: string, tags?: string[], extraParams?: Record<string, unknown>): void;
// --- Tool lifecycle ---------------------------------------------------
/** Called when a tool run starts. */
handleToolStart(tool: Serialized, input: string, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
/** Called when a tool run completes. */
handleToolEnd(output: any, runId: string, parentRunId?: string, tags?: string[]): void;
/** Called when a tool run fails with an error. */
handleToolError(err: Error, runId: string, parentRunId?: string, tags?: string[]): void;
// --- Retriever lifecycle ----------------------------------------------
/** Called when a retriever run starts with the given query. */
handleRetrieverStart(retriever: Serialized, query: string, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
/** Called when a retriever run completes with fetched documents. */
handleRetrieverEnd(documents: DocumentInterface[], runId: string, parentRunId?: string, tags?: string[]): void;
/** Called when a retriever run fails with an error. */
handleRetrieverError(err: Error, runId: string, parentRunId?: string, tags?: string[]): void;
// --- Agent lifecycle --------------------------------------------------
/** Called when an agent decides on an action. */
handleAgentAction(action: AgentAction, runId: string, parentRunId?: string, tags?: string[]): void;
/** Called when an agent run finishes. */
handleAgentEnd(action: AgentFinish, runId: string, parentRunId?: string, tags?: string[]): void;
// --- Private helpers (bodies not visible in this declaration) ---------
// Run-tree maintenance:
private _setParentOfRun;
private _popParentOfRun;
private _findRootRun;
// Per-run metadata capture/teardown:
private _setTraceOrSpanMetadata;
private _setLLMMetadata;
private _popRunMetadata;
// ID resolution:
private _getTraceId;
private _getParentRunId;
// Event capture to PostHog:
private _popRunAndCaptureTraceOrSpan;
private _captureTraceOrSpan;
private _popRunAndCaptureGeneration;
private _captureGeneration;
private _logDebugEvent;
// LangChain payload conversion/extraction:
private _getLangchainRunName;
private _convertLcToolCallsToOai;
private _extractRawResponse;
private _convertMessageToDict;
// Token-usage parsing:
private _parseUsageModel;
private parseUsage;
}
export { LangChainCallbackHandler };