/**
 * @posthog/ai — PostHog Node.js AI integrations (type declarations)
 * Version: (unspecified in source)
 * 245 lines (235 loc) • 12.1 kB
 * Language: TypeScript
 */
import OpenAIOrignal, { OpenAI, APIPromise, ClientOptions as ClientOptions$1, AzureOpenAI } from 'openai';
import { PostHog } from 'posthog-node';
import { Stream } from 'openai/streaming';
import { ParsedResponse } from 'openai/resources/responses/responses';
import { LanguageModelV2 } from '@ai-sdk/provider';
import AnthropicOriginal from '@anthropic-ai/sdk';
import { RequestOptions as RequestOptions$2, APIPromise as APIPromise$1 } from '@anthropic-ai/sdk/core';
import { Stream as Stream$1 } from '@anthropic-ai/sdk/streaming';
import { GoogleGenAI } from '@google/genai';
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import { Serialized } from '@langchain/core/load/serializable';
import { ChainValues } from '@langchain/core/utils/types';
import { LLMResult } from '@langchain/core/outputs';
import { AgentAction, AgentFinish } from '@langchain/core/agents';
import { DocumentInterface } from '@langchain/core/documents';
import { BaseMessage } from '@langchain/core/messages';
/**
 * Optional PostHog monitoring options that can be intersected with a
 * provider SDK's request params (see the `create` overloads below). All
 * fields are optional and `posthog`-prefixed so they never collide with
 * the provider's own body fields.
 */
interface MonitoringParams {
/** Distinct ID of the person to attribute the captured event to. */
posthogDistinctId?: string;
/** Trace ID used to group related generations into one trace. */
posthogTraceId?: string;
/** Additional properties merged onto the captured event. */
posthogProperties?: Record<string, any>;
/** NOTE(review): presumably redacts prompt/completion content when true — confirm with implementation. */
posthogPrivacyMode?: boolean;
/** Group analytics mapping; presumably group type -> group key — confirm against PostHog capture API. */
posthogGroups?: Record<string, any>;
/** Overrides the model name reported to PostHog. */
posthogModelOverride?: string;
/** Overrides the provider name reported to PostHog. */
posthogProviderOverride?: string;
/** Manual cost figures used instead of automatic cost calculation. */
posthogCostOverride?: CostOverride;
/** NOTE(review): presumably sends the event immediately instead of batching — confirm. */
posthogCaptureImmediate?: boolean;
}
/**
 * Manually supplied input/output costs for a generation, used via
 * `posthogCostOverride`. Units are not stated here — presumably USD;
 * confirm with the implementation.
 */
interface CostOverride {
inputCost: number;
outputCost: number;
}
/* Base resource constructors from the OpenAI SDK, extended by the wrapped
 * (instrumented) classes below. */
declare const Chat: typeof OpenAI.Chat;
declare const Completions: typeof OpenAI.Chat.Completions;
declare const Responses: typeof OpenAI.Responses;
/* Shorthand aliases for OpenAI chat-completions request/result types.
 * The `$1` suffix disambiguates these from the identically-named aliases
 * used by the Azure wrapper later in this file. */
type ChatCompletion$1 = OpenAI.ChatCompletion;
type ChatCompletionChunk$1 = OpenAI.ChatCompletionChunk;
type ChatCompletionCreateParamsBase$1 = OpenAI.Chat.Completions.ChatCompletionCreateParams;
type ChatCompletionCreateParamsNonStreaming$1 = OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
type ChatCompletionCreateParamsStreaming$1 = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
/* Shorthand aliases for OpenAI Responses-API request types. */
type ResponsesCreateParamsBase = OpenAI.Responses.ResponseCreateParams;
type ResponsesCreateParamsNonStreaming = OpenAI.Responses.ResponseCreateParamsNonStreaming;
type ResponsesCreateParamsStreaming = OpenAI.Responses.ResponseCreateParamsStreaming;
/**
 * Configuration for `PostHogOpenAI`: the standard OpenAI client options
 * plus the PostHog client that events are captured to. `apiKey` and
 * `baseURL` are re-declared to make `apiKey` required.
 */
interface MonitoringOpenAIConfig$1 extends ClientOptions$1 {
apiKey: string;
posthog: PostHog;
baseURL?: string;
}
/** Loosely-typed per-request options forwarded to the underlying OpenAI SDK. */
type RequestOptions$1 = Record<string, any>;
/**
 * Drop-in replacement for the OpenAI client whose `chat` and `responses`
 * resources are the instrumented variants declared below, capturing to
 * the PostHog client supplied in the config.
 */
declare class PostHogOpenAI extends OpenAI {
private readonly phClient;
chat: WrappedChat$1;
responses: WrappedResponses;
constructor(config: MonitoringOpenAIConfig$1);
}
/** Chat resource whose `completions` is the PostHog-instrumented variant. */
declare class WrappedChat$1 extends Chat {
constructor(parentClient: PostHogOpenAI, phClient: PostHog);
completions: WrappedCompletions$1;
}
/**
 * Instrumented `chat.completions` resource. The `create` overloads mirror
 * the OpenAI SDK's (non-streaming -> ChatCompletion, streaming -> Stream
 * of chunks), each additionally accepting `MonitoringParams` fields in
 * the body.
 */
declare class WrappedCompletions$1 extends Completions {
private readonly phClient;
constructor(client: OpenAI, phClient: PostHog);
create(body: ChatCompletionCreateParamsNonStreaming$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<ChatCompletion$1>;
create(body: ChatCompletionCreateParamsStreaming$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<Stream<ChatCompletionChunk$1>>;
create(body: ChatCompletionCreateParamsBase$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<ChatCompletion$1 | Stream<ChatCompletionChunk$1>>;
}
/**
 * Instrumented Responses-API resource. `create` overloads mirror the SDK
 * (non-streaming -> Response, streaming -> Stream of events); `parse`
 * preserves the SDK's generic structured-output typing. All accept
 * `MonitoringParams` fields in the body.
 */
declare class WrappedResponses extends Responses {
private readonly phClient;
constructor(client: OpenAI, phClient: PostHog);
create(body: ResponsesCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAI.Responses.Response>;
create(body: ResponsesCreateParamsStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<Stream<OpenAI.Responses.ResponseStreamEvent>>;
create(body: ResponsesCreateParamsBase & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAI.Responses.Response | Stream<OpenAI.Responses.ResponseStreamEvent>>;
parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(body: Params & MonitoringParams, options?: RequestOptions$1): APIPromise<ParsedResponse<ParsedT>>;
}
/* Unsuffixed aliases for the same OpenAI chat-completions types, used by
 * the Azure wrapper below. NOTE(review): `OpenAIOrignal` is a misspelled
 * import alias ("Original") — renaming would touch the import line and
 * every use site, so it is left as-is here. */
type ChatCompletion = OpenAIOrignal.ChatCompletion;
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk;
type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming;
/**
 * Configuration for `PostHogAzureOpenAI`. NOTE(review): unlike
 * `MonitoringOpenAIConfig$1`, this does not extend the SDK client options
 * — confirm whether Azure-specific options (endpoint, apiVersion, …) are
 * intentionally excluded.
 */
interface MonitoringOpenAIConfig {
apiKey: string;
posthog: PostHog;
baseURL?: string;
}
/** Loosely-typed per-request options forwarded to the underlying Azure OpenAI SDK. */
type RequestOptions = Record<string, any>;
/**
 * Drop-in replacement for the AzureOpenAI client whose `chat` resource is
 * the instrumented variant declared below.
 */
declare class PostHogAzureOpenAI extends AzureOpenAI {
private readonly phClient;
chat: WrappedChat;
constructor(config: MonitoringOpenAIConfig);
}
/** Azure chat resource whose `completions` is the PostHog-instrumented variant. */
declare class WrappedChat extends AzureOpenAI.Chat {
constructor(parentClient: PostHogAzureOpenAI, phClient: PostHog);
completions: WrappedCompletions;
}
/**
 * Instrumented Azure `chat.completions` resource; overloads parallel
 * `WrappedCompletions$1` above, each accepting `MonitoringParams` fields
 * in the body.
 */
declare class WrappedCompletions extends AzureOpenAI.Chat.Completions {
private readonly phClient;
constructor(client: AzureOpenAI, phClient: PostHog);
create(body: ChatCompletionCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion>;
create(body: ChatCompletionCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<ChatCompletionChunk>>;
create(body: ChatCompletionCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>;
}
/**
 * Monitoring options for the Vercel AI SDK wrapper. NOTE(review):
 * field-for-field identical to `MonitoringParams` above — consider
 * deriving one from the other to avoid drift.
 */
interface ClientOptions {
posthogDistinctId?: string;
posthogTraceId?: string;
posthogProperties?: Record<string, any>;
posthogPrivacyMode?: boolean;
posthogGroups?: Record<string, any>;
posthogModelOverride?: string;
posthogProviderOverride?: string;
posthogCostOverride?: CostOverride;
posthogCaptureImmediate?: boolean;
}
/**
 * Wraps a Vercel AI SDK `LanguageModelV2` so its calls are captured to the
 * given PostHog client; returns a model with the same interface.
 * Exported as `withTracing`.
 */
declare const wrapVercelLanguageModel: (model: LanguageModelV2, phClient: PostHog, options: ClientOptions) => LanguageModelV2;
/* Shorthand aliases for Anthropic Messages-API request/result types. */
type MessageCreateParamsNonStreaming = AnthropicOriginal.Messages.MessageCreateParamsNonStreaming;
type MessageCreateParamsStreaming = AnthropicOriginal.Messages.MessageCreateParamsStreaming;
type Message = AnthropicOriginal.Messages.Message;
type RawMessageStreamEvent = AnthropicOriginal.Messages.RawMessageStreamEvent;
type MessageCreateParamsBase = AnthropicOriginal.Messages.MessageCreateParams;
/**
 * Configuration for `PostHogAnthropic`: API key, the PostHog client to
 * capture to, and an optional base URL override.
 */
interface MonitoringAnthropicConfig {
apiKey: string;
posthog: PostHog;
baseURL?: string;
}
/**
 * Drop-in replacement for the Anthropic client whose `messages` resource
 * is the instrumented variant declared below.
 */
declare class PostHogAnthropic extends AnthropicOriginal {
private readonly phClient;
messages: WrappedMessages;
constructor(config: MonitoringAnthropicConfig);
}
/**
 * Instrumented Anthropic `messages` resource. Each `create` overload
 * accepts the SDK params intersected with `MonitoringParams`, mirroring
 * the OpenAI/Azure wrappers above.
 *
 * Fix: the non-streaming overload previously lacked `& MonitoringParams`,
 * unlike its streaming/base siblings and every other wrapper in this file,
 * so callers could not pass `posthogDistinctId` etc. on non-streaming
 * calls without a type error. Adding the intersection is backward
 * compatible because all `MonitoringParams` fields are optional.
 */
declare class WrappedMessages extends AnthropicOriginal.Messages {
private readonly phClient;
constructor(parentClient: PostHogAnthropic, phClient: PostHog);
/** Non-streaming request; resolves to a complete Message. */
create(body: MessageCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions$2): APIPromise$1<Message>;
/** Streaming request; resolves to a stream of raw message events. */
create(body: MessageCreateParamsStreaming & MonitoringParams, options?: RequestOptions$2): APIPromise$1<Stream$1<RawMessageStreamEvent>>;
/** Combined signature covering both modes. */
create(body: MessageCreateParamsBase & MonitoringParams, options?: RequestOptions$2): APIPromise$1<Stream$1<RawMessageStreamEvent> | Message>;
}
/**
 * Loosely-typed Gemini request shape: a model name plus contents/config,
 * with an open index signature for any further SDK fields.
 */
type GenerateContentRequest = {
model: string;
contents: any;
config?: any;
[key: string]: any;
};
/**
 * Loosely-typed Gemini response shape; `usageMetadata` carries the token
 * counts presumably used for cost/usage capture — confirm with the
 * implementation.
 */
type GenerateContentResponse = {
text?: string;
candidates?: any[];
usageMetadata?: {
promptTokenCount?: number;
candidatesTokenCount?: number;
totalTokenCount?: number;
thoughtsTokenCount?: number;
cachedContentTokenCount?: number;
};
[key: string]: any;
};
/**
 * Configuration for `PostHogGoogleGenAI`. Either `apiKey` or the
 * Vertex-AI fields (`vertexai`/`project`/`location`) are presumably used
 * depending on deployment — confirm with the @google/genai client docs.
 */
interface MonitoringGeminiConfig {
apiKey?: string;
vertexai?: boolean;
project?: string;
location?: string;
apiVersion?: string;
posthog: PostHog;
}
/**
 * Wrapper around `GoogleGenAI` (composition, not inheritance — unlike the
 * other clients in this file) exposing an instrumented `models` resource.
 */
declare class PostHogGoogleGenAI {
private readonly phClient;
private readonly client;
models: WrappedModels;
constructor(config: MonitoringGeminiConfig);
}
/**
 * Instrumented Gemini models resource: `generateContent` resolves a full
 * response, `generateContentStream` yields chunks as an async generator.
 * Both accept `MonitoringParams` fields alongside the request.
 */
declare class WrappedModels {
private readonly phClient;
private readonly client;
constructor(client: GoogleGenAI, phClient: PostHog);
generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse>;
generateContentStream(params: GenerateContentRequest & MonitoringParams): AsyncGenerator<any, void, unknown>;
private formatInput;
private formatInputForPostHog;
}
/**
 * LangChain callback handler that captures chain/LLM/tool/retriever/agent
 * lifecycle events to PostHog. Tracks in-flight runs and their parent
 * relationships (`runs`, `parentTree`) to build a trace hierarchy.
 */
declare class LangChainCallbackHandler extends BaseCallbackHandler {
name: string;
private client;
private distinctId?;
private traceId?;
private properties;
private privacyMode;
private groups;
private debug;
private runs;
private parentTree;
constructor(options: {
client: PostHog;
distinctId?: string | number;
traceId?: string | number;
properties?: Record<string, any>;
privacyMode?: boolean;
groups?: Record<string, any>;
debug?: boolean;
});
/* Chain lifecycle callbacks. */
handleChainStart(chain: Serialized, inputs: ChainValues, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, runType?: string, runName?: string): void;
handleChainEnd(outputs: ChainValues, runId: string, parentRunId?: string, tags?: string[], kwargs?: {
inputs?: Record<string, unknown>;
}): void;
handleChainError(error: Error, runId: string, parentRunId?: string, tags?: string[], kwargs?: {
inputs?: Record<string, unknown>;
}): void;
/* LLM / chat-model lifecycle callbacks. */
handleChatModelStart(serialized: Serialized, messages: BaseMessage[][], runId: string, parentRunId?: string, extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
handleLLMStart(serialized: Serialized, prompts: string[], runId: string, parentRunId?: string, extraParams?: Record<string, unknown>, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
handleLLMEnd(output: LLMResult, runId: string, parentRunId?: string, tags?: string[], extraParams?: Record<string, unknown>): void;
handleLLMError(err: Error, runId: string, parentRunId?: string, tags?: string[], extraParams?: Record<string, unknown>): void;
/* Tool lifecycle callbacks. */
handleToolStart(tool: Serialized, input: string, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, runName?: string): void;
handleToolEnd(output: any, runId: string, parentRunId?: string, tags?: string[]): void;
handleToolError(err: Error, runId: string, parentRunId?: string, tags?: string[]): void;
/* Retriever lifecycle callbacks. */
handleRetrieverStart(retriever: Serialized, query: string, runId: string, parentRunId?: string, tags?: string[], metadata?: Record<string, unknown>, name?: string): void;
handleRetrieverEnd(documents: DocumentInterface[], runId: string, parentRunId?: string, tags?: string[]): void;
handleRetrieverError(err: Error, runId: string, parentRunId?: string, tags?: string[]): void;
/* Agent lifecycle callbacks. */
handleAgentAction(action: AgentAction, runId: string, parentRunId?: string, tags?: string[]): void;
handleAgentEnd(action: AgentFinish, runId: string, parentRunId?: string, tags?: string[]): void;
/* Internal helpers: run-tree bookkeeping, metadata assembly, and
 * PostHog trace/span/generation capture. */
private _setParentOfRun;
private _popParentOfRun;
private _findRootRun;
private _setTraceOrSpanMetadata;
private _setLLMMetadata;
private _popRunMetadata;
private _getTraceId;
private _getParentRunId;
private _popRunAndCaptureTraceOrSpan;
private _captureTraceOrSpan;
private _popRunAndCaptureGeneration;
private _captureGeneration;
private _logDebugEvent;
private _getLangchainRunName;
private _convertLcToolCallsToOai;
private _extractRawResponse;
private _convertMessageToDict;
private _parseUsageModel;
private parseUsage;
}
/* Public surface: each wrapped client is exported under the familiar SDK
 * name; the Vercel wrapper is exported as `withTracing`. */
export { PostHogAnthropic as Anthropic, PostHogAzureOpenAI as AzureOpenAI, PostHogGoogleGenAI as GoogleGenAI, LangChainCallbackHandler, PostHogOpenAI as OpenAI, wrapVercelLanguageModel as withTracing };