@posthog/ai
Version:
PostHog Node.js AI integrations
763 lines (730 loc) • 23.9 kB
JavaScript
import { OpenAI } from 'openai';
import { v4 } from 'uuid';
import { Buffer } from 'buffer';
// Encoding used when round-tripping strings through Buffer in sanitizeValues
// to replace invalid UTF-8 sequences.
const STRING_FORMAT = 'utf8';
/**
 * Pick the generation parameters worth tracking from a request body.
 * Returns {} when params is missing; keys explicitly set to undefined
 * are skipped, but null and other falsy values are kept.
 */
const getModelParams = params => {
  if (!params) {
    return {};
  }
  const TRACKED_KEYS = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
  const entries = TRACKED_KEYS
    .filter(key => key in params && params[key] !== undefined)
    .map(key => [key, params[key]]);
  return Object.fromEntries(entries);
};
/**
 * Normalize an OpenAI response (Chat Completions or Responses API) into
 * PostHog's `$ai_output_choices` shape: an array of { role, content }
 * entries whose content items are { type: 'text' | 'function' | 'image', ... }.
 * Choices/messages with no extractable content are dropped.
 */
const formatResponseOpenAI = response => {
  const output = [];
  // Chat Completions API: response.choices[].message
  if (response.choices) {
    for (const choice of response.choices) {
      const content = [];
      let role = 'assistant';
      if (choice.message) {
        if (choice.message.role) {
          role = choice.message.role;
        }
        if (choice.message.content) {
          content.push({
            type: 'text',
            text: choice.message.content
          });
        }
        if (choice.message.tool_calls) {
          for (const toolCall of choice.message.tool_calls) {
            content.push({
              type: 'function',
              id: toolCall.id,
              function: {
                name: toolCall.function.name,
                arguments: toolCall.function.arguments
              }
            });
          }
        }
      }
      if (content.length > 0) {
        output.push({
          role,
          content
        });
      }
    }
  }
  // Responses API: response.output[] of 'message' / 'function_call' items.
  // All message items are folded into a single { role, content } entry.
  if (response.output) {
    const content = [];
    let role = 'assistant';
    for (const item of response.output) {
      if (item.type === 'message') {
        role = item.role;
        if (item.content && Array.isArray(item.content)) {
          for (const contentItem of item.content) {
            // Any content item carrying a truthy `text` field becomes a text
            // part. (The original code had two branches here — one gated on
            // type === 'output_text' — with identical bodies; they are merged.)
            if (contentItem.text) {
              content.push({
                type: 'text',
                text: contentItem.text
              });
            } else if (contentItem.type === 'input_image' && contentItem.image_url) {
              content.push({
                type: 'image',
                image: contentItem.image_url
              });
            }
          }
        } else if (item.content) {
          // Non-array content: coerce to a single text part.
          content.push({
            type: 'text',
            text: String(item.content)
          });
        }
      } else if (item.type === 'function_call') {
        content.push({
          type: 'function',
          id: item.call_id || item.id || '',
          function: {
            name: item.name,
            arguments: item.arguments || {}
          }
        });
      }
    }
    if (content.length > 0) {
      output.push({
        role,
        content
      });
    }
  }
  return output;
};
/**
 * Return `input`, or null when privacy mode is enabled either on the
 * PostHog client or on the individual request.
 */
const withPrivacyMode = (client, privacyMode, input) => {
  if (client.privacy_mode || privacyMode) {
    return null;
  }
  return input;
};
/**
 * Extract the tool definitions supplied in the request parameters.
 * These are the tools made available to the LLM, not the tool calls
 * present in the response. The `provider` argument is unused but kept
 * for interface compatibility.
 */
const extractAvailableToolCalls = (provider, params) => {
  return params.tools || null;
};
/**
 * Recursively rebuild a JSON-serializable value, round-tripping every
 * string through Buffer so invalid UTF-8 sequences are replaced and the
 * payload is safe to send to PostHog.
 *
 * Returns null/undefined inputs unchanged. Values JSON cannot represent
 * at all (functions, symbols) yield undefined instead of throwing —
 * previously `JSON.parse(JSON.stringify(fn))` threw a SyntaxError.
 */
function sanitizeValues(obj) {
  if (obj === undefined || obj === null) {
    return obj;
  }
  const serialized = JSON.stringify(obj);
  if (serialized === undefined) {
    // Functions/symbols serialize to undefined; JSON.parse(undefined) throws.
    return undefined;
  }
  const jsonSafe = JSON.parse(serialized);
  if (typeof jsonSafe === 'string') {
    // Buffer round-trip replaces lone surrogates / invalid sequences.
    return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT);
  } else if (Array.isArray(jsonSafe)) {
    return jsonSafe.map(sanitizeValues);
  } else if (jsonSafe && typeof jsonSafe === 'object') {
    return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]));
  }
  return jsonSafe;
}
/**
 * Build and send a single `$ai_generation` event to the PostHog client.
 *
 * Input/output/error payloads are UTF-8-sanitized first; privacy mode
 * (client-level or per-request) nulls out input and output. Caller-supplied
 * `posthogProperties` may override the standard `$ai_*` properties, while
 * error and cost-override data always take final precedence.
 */
const sendEventToPosthog = async ({
  client,
  distinctId,
  traceId,
  model,
  provider,
  input,
  output,
  latency,
  baseURL,
  params,
  httpStatus = 200,
  usage = {},
  isError = false,
  error,
  tools,
  captureImmediate = false
}) => {
  // No-op for clients that cannot capture (e.g. disabled/stub clients).
  if (!client.capture) {
    return;
  }
  // Sanitize payloads for UTF-8 validity before sending.
  const safeInput = sanitizeValues(input);
  const safeOutput = sanitizeValues(output);
  const safeError = sanitizeValues(error);
  const privacyMode = params.posthogPrivacyMode ?? false;
  const properties = {
    $ai_provider: params.posthogProviderOverride ?? provider,
    $ai_model: params.posthogModelOverride ?? model,
    $ai_model_parameters: getModelParams(params),
    $ai_input: withPrivacyMode(client, privacyMode, safeInput),
    $ai_output_choices: withPrivacyMode(client, privacyMode, safeOutput),
    $ai_http_status: httpStatus,
    $ai_input_tokens: usage.inputTokens ?? 0,
    $ai_output_tokens: usage.outputTokens ?? 0
  };
  // Optional token-detail counts — only attached when non-zero.
  if (usage.reasoningTokens) {
    properties.$ai_reasoning_tokens = usage.reasoningTokens;
  }
  if (usage.cacheReadInputTokens) {
    properties.$ai_cache_read_input_tokens = usage.cacheReadInputTokens;
  }
  if (usage.cacheCreationInputTokens) {
    properties.$ai_cache_creation_input_tokens = usage.cacheCreationInputTokens;
  }
  properties.$ai_latency = latency;
  properties.$ai_trace_id = traceId;
  properties.$ai_base_url = baseURL;
  // Caller-supplied extras may override anything assigned above.
  Object.assign(properties, params.posthogProperties);
  // Without a distinct id the event must not create a person profile.
  if (!distinctId) {
    properties.$process_person_profile = false;
  }
  if (tools) {
    properties.$ai_tools = tools;
  }
  if (isError) {
    properties.$ai_is_error = true;
    properties.$ai_error = safeError;
  }
  // Explicit per-request cost override: compute USD costs from token counts.
  if (params.posthogCostOverride) {
    const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
    const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
    properties.$ai_input_cost_usd = inputCostUSD;
    properties.$ai_output_cost_usd = outputCostUSD;
    properties.$ai_total_cost_usd = inputCostUSD + outputCostUSD;
  }
  const event = {
    distinctId: distinctId ?? traceId,
    event: '$ai_generation',
    properties,
    groups: params.posthogGroups
  };
  if (captureImmediate) {
    // Await so the event is flushed before a serverless runtime freezes.
    await client.captureImmediate(event);
  } else {
    client.capture(event);
  }
};
// Type guards for safer type checking
// True only for primitive strings (String objects are excluded).
const isString = value => typeof value === 'string';
// True for plain-ish objects: non-null, typeof 'object', and not an array.
const isObject = value => {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === 'object';
};
// Placeholder substituted for base64 image payloads so captured events stay small.
const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
// ============================================
// Base64 Detection Helpers
// ============================================
// True for data URLs that declare base64 encoding, e.g. "data:image/png;base64,...".
const isBase64DataUrl = str => {
  const DATA_URL_PATTERN = /^data:([^;]+);base64,/;
  return DATA_URL_PATTERN.test(str);
};
/**
 * True when `str` parses as an absolute URL, or looks like a relative
 * URL / filesystem-style path.
 */
const isValidUrl = str => {
  try {
    new URL(str);
    return true;
  } catch {
    // Not absolute — accept common relative-path prefixes instead.
    return ['/', './', '../'].some(prefix => str.startsWith(prefix));
  }
};
/**
 * Heuristic for raw (un-prefixed) base64 payloads, e.g. inline images
 * sent without a data: URL wrapper. URLs and paths never match.
 */
const isRawBase64 = str => {
  if (isValidUrl(str)) {
    return false;
  }
  // Conservative: a long-ish string composed only of base64 alphabet chars.
  const BASE64_SHAPE = /^[A-Za-z0-9+/]+=*$/;
  return str.length > 20 && BASE64_SHAPE.test(str);
};
/**
 * Replace base64 image data — either a base64 data: URL or a raw base64
 * string (Vercel sends raw base64 for inline images) — with a short
 * placeholder. Anything else, including non-strings, passes through.
 */
function redactBase64DataUrl(str) {
  if (!isString(str)) {
    return str;
  }
  const looksLikeImageData = isBase64DataUrl(str) || isRawBase64(str);
  return looksLikeImageData ? REDACTED_IMAGE_PLACEHOLDER : str;
}
// ============================================
// Common Message Processing
// ============================================
/**
 * Apply `transformContent` to every content item of every message.
 * Accepts a single message or an array; content may be a string
 * (returned untouched), an array of items, or a single item object.
 * Messages that are not objects or lack a `content` key pass through.
 */
const processMessages = (messages, transformContent) => {
  if (!messages) return messages;

  const transformEntry = content => {
    // Strings and falsy content are left as-is.
    if (typeof content === 'string' || !content) {
      return content;
    }
    return Array.isArray(content) ? content.map(transformContent) : transformContent(content);
  };

  const transformMessage = message => {
    const isPlainObject = message !== null && typeof message === 'object' && !Array.isArray(message);
    if (!isPlainObject || !('content' in message)) {
      return message;
    }
    return {
      ...message,
      content: transformEntry(message.content)
    };
  };

  return Array.isArray(messages) ? messages.map(transformMessage) : transformMessage(messages);
};
// ============================================
// Provider-Specific Image Sanitizers
// ============================================
/**
 * Redact base64 payloads in Chat Completions image parts:
 * { type: 'image_url', image_url: { url } }. Other items pass through.
 */
const sanitizeOpenAIImage = item => {
  if (!isObject(item)) return item;
  const hasImageUrl =
    item.type === 'image_url' &&
    'image_url' in item &&
    isObject(item.image_url) &&
    'url' in item.image_url;
  if (!hasImageUrl) {
    return item;
  }
  const scrubbed = {
    ...item.image_url,
    url: redactBase64DataUrl(item.image_url.url)
  };
  return {
    ...item,
    image_url: scrubbed
  };
};
/**
 * Redact base64 payloads in Responses API image parts:
 * { type: 'input_image', image_url: <string> }. Other items pass through.
 */
const sanitizeOpenAIResponseImage = item => {
  if (!isObject(item)) return item;
  if (item.type !== 'input_image' || !('image_url' in item)) {
    return item;
  }
  return {
    ...item,
    image_url: redactBase64DataUrl(item.image_url)
  };
};
// Export individual sanitizers for tree-shaking
// Redact base64 images from Chat Completions messages before capture.
const sanitizeOpenAI = data => processMessages(data, sanitizeOpenAIImage);
// Redact base64 images from Responses API input before capture.
const sanitizeOpenAIResponse = data => processMessages(data, sanitizeOpenAIResponseImage);
// Resource classes pulled off the OpenAI SDK so the wrappers below can subclass them.
const Chat = OpenAI.Chat;
const Completions = Chat.Completions;
const Responses = OpenAI.Responses;
/**
 * Drop-in replacement for the OpenAI client that captures a PostHog
 * `$ai_generation` event for each chat completion / response call.
 *
 * `config.posthog` is the PostHog client; every other config key is
 * forwarded unchanged to the OpenAI constructor.
 */
class PostHogOpenAI extends OpenAI {
  constructor(config) {
    const {
      posthog,
      ...openAIConfig
    } = config;
    super(openAIConfig);
    this.phClient = posthog;
    // Swap in instrumented resources for chat completions and responses.
    this.chat = new WrappedChat(this, this.phClient);
    this.responses = new WrappedResponses(this, this.phClient);
  }
}
/**
 * Chat namespace wrapper whose only job is to replace the stock
 * `completions` resource with the PostHog-instrumented one.
 */
class WrappedChat extends Chat {
  constructor(parentClient, phClient) {
    super(parentClient);
    this.completions = new WrappedCompletions(parentClient, phClient);
  }
}
/**
 * Chat Completions resource instrumented to send a PostHog
 * `$ai_generation` event for every `create()` call, streaming or not.
 * Analytics failures are recorded as error events and never reject the
 * promise/stream handed back to the caller.
 */
class WrappedCompletions extends Completions {
  constructor(client, phClient) {
    super(client);
    this.phClient = phClient;
  }
  // --- Overload #1: Non-streaming
  // --- Overload #2: Streaming
  // --- Overload #3: Generic base
  // --- Implementation Signature
  create(body, options) {
    // Peel off the posthog* options; everything else goes to OpenAI verbatim.
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      posthogCaptureImmediate,
      ...openAIParams
    } = body;
    // Generate a trace id when the caller did not supply one.
    const traceId = posthogTraceId ?? v4();
    const startTime = Date.now();
    const parentPromise = super.create(openAIParams, options);
    if (openAIParams.stream) {
      return parentPromise.then(value => {
        if ('tee' in value) {
          // Duplicate the stream: one copy for analytics, one for the caller.
          const [stream1, stream2] = value.tee();
          // Fire-and-forget consumer; must never affect the caller's stream.
          (async () => {
            try {
              let accumulatedContent = '';
              let usage = {
                inputTokens: 0,
                outputTokens: 0
              };
              for await (const chunk of stream1) {
                // Only the first choice's text delta is accumulated;
                // streamed tool calls are not captured here.
                const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                accumulatedContent += delta;
                // Usage arrives on a late chunk when the API includes it.
                if (chunk.usage) {
                  usage = {
                    inputTokens: chunk.usage.prompt_tokens ?? 0,
                    outputTokens: chunk.usage.completion_tokens ?? 0,
                    reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,
                    cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0
                  };
                }
              }
              const latency = (Date.now() - startTime) / 1000;
              const availableTools = extractAvailableToolCalls('openai', openAIParams);
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                model: openAIParams.model,
                provider: 'openai',
                input: sanitizeOpenAI(openAIParams.messages),
                output: [{
                  content: accumulatedContent,
                  role: 'assistant'
                }],
                latency,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: 200,
                usage,
                tools: availableTools,
                captureImmediate: posthogCaptureImmediate
              });
            } catch (error) {
              // Stream failed mid-flight: record an error event instead.
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                model: openAIParams.model,
                provider: 'openai',
                input: sanitizeOpenAI(openAIParams.messages),
                output: [],
                latency: 0,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: error?.status ? error.status : 500,
                usage: {
                  inputTokens: 0,
                  outputTokens: 0
                },
                isError: true,
                error: JSON.stringify(error),
                captureImmediate: posthogCaptureImmediate
              });
            }
          })();
          // Return the other stream to the user
          return stream2;
        }
        return value;
      });
    } else {
      const wrappedPromise = parentPromise.then(async result => {
        if ('choices' in result) {
          const latency = (Date.now() - startTime) / 1000;
          const availableTools = extractAvailableToolCalls('openai', openAIParams);
          await sendEventToPosthog({
            client: this.phClient,
            distinctId: posthogDistinctId,
            traceId,
            model: openAIParams.model,
            provider: 'openai',
            input: sanitizeOpenAI(openAIParams.messages),
            output: formatResponseOpenAI(result),
            latency,
            baseURL: this.baseURL ?? '',
            params: body,
            httpStatus: 200,
            usage: {
              inputTokens: result.usage?.prompt_tokens ?? 0,
              outputTokens: result.usage?.completion_tokens ?? 0,
              reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,
              cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0
            },
            tools: availableTools,
            captureImmediate: posthogCaptureImmediate
          });
        }
        return result;
      }, async error => {
        // Request failed: capture an error event, then rethrow to the caller.
        await sendEventToPosthog({
          client: this.phClient,
          distinctId: posthogDistinctId,
          traceId,
          model: openAIParams.model,
          provider: 'openai',
          input: sanitizeOpenAI(openAIParams.messages),
          output: [],
          latency: 0,
          baseURL: this.baseURL ?? '',
          params: body,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error),
          captureImmediate: posthogCaptureImmediate
        });
        throw error;
      });
      return wrappedPromise;
    }
  }
}
/**
 * Responses API resource instrumented to send a PostHog `$ai_generation`
 * event for every `create()` or `parse()` call, streaming or not.
 * Analytics failures are recorded as error events and never reject the
 * promise/stream handed back to the caller.
 */
class WrappedResponses extends Responses {
  constructor(client, phClient) {
    super(client);
    this.phClient = phClient;
  }
  // --- Overload #1: Non-streaming
  // --- Overload #2: Streaming
  // --- Overload #3: Generic base
  // --- Implementation Signature
  create(body, options) {
    // Peel off the posthog* options; everything else goes to OpenAI verbatim.
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      posthogCaptureImmediate,
      ...openAIParams
    } = body;
    const traceId = posthogTraceId ?? v4();
    const startTime = Date.now();
    const parentPromise = super.create(openAIParams, options);
    if (openAIParams.stream) {
      return parentPromise.then(value => {
        if ('tee' in value && typeof value.tee === 'function') {
          // Duplicate the stream: one copy for analytics, one for the caller.
          const [stream1, stream2] = value.tee();
          // Fire-and-forget consumer; must never affect the caller's stream.
          (async () => {
            try {
              let finalContent = [];
              let usage = {
                inputTokens: 0,
                outputTokens: 0
              };
              for await (const chunk of stream1) {
                // The 'response.completed' event carries the full final output.
                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
                  finalContent = chunk.response.output;
                }
                if ('response' in chunk && chunk.response?.usage) {
                  usage = {
                    inputTokens: chunk.response.usage.input_tokens ?? 0,
                    outputTokens: chunk.response.usage.output_tokens ?? 0,
                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0
                  };
                }
              }
              const latency = (Date.now() - startTime) / 1000;
              const availableTools = extractAvailableToolCalls('openai', openAIParams);
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                //@ts-expect-error
                model: openAIParams.model,
                provider: 'openai',
                input: sanitizeOpenAIResponse(openAIParams.input),
                output: finalContent,
                latency,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: 200,
                usage,
                tools: availableTools,
                captureImmediate: posthogCaptureImmediate
              });
            } catch (error) {
              // Stream failed mid-flight: record an error event instead.
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                //@ts-expect-error
                model: openAIParams.model,
                provider: 'openai',
                input: sanitizeOpenAIResponse(openAIParams.input),
                output: [],
                latency: 0,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: error?.status ? error.status : 500,
                usage: {
                  inputTokens: 0,
                  outputTokens: 0
                },
                isError: true,
                error: JSON.stringify(error),
                captureImmediate: posthogCaptureImmediate
              });
            }
          })();
          return stream2;
        }
        return value;
      });
    } else {
      const wrappedPromise = parentPromise.then(async result => {
        if ('output' in result) {
          const latency = (Date.now() - startTime) / 1000;
          const availableTools = extractAvailableToolCalls('openai', openAIParams);
          await sendEventToPosthog({
            client: this.phClient,
            distinctId: posthogDistinctId,
            traceId,
            //@ts-expect-error
            model: openAIParams.model,
            provider: 'openai',
            input: sanitizeOpenAIResponse(openAIParams.input),
            output: formatResponseOpenAI({
              output: result.output
            }),
            latency,
            baseURL: this.baseURL ?? '',
            params: body,
            httpStatus: 200,
            usage: {
              inputTokens: result.usage?.input_tokens ?? 0,
              outputTokens: result.usage?.output_tokens ?? 0,
              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
            },
            tools: availableTools,
            captureImmediate: posthogCaptureImmediate
          });
        }
        return result;
      }, async error => {
        // Request failed: capture an error event, then rethrow to the caller.
        await sendEventToPosthog({
          client: this.phClient,
          distinctId: posthogDistinctId,
          traceId,
          //@ts-expect-error
          model: openAIParams.model,
          provider: 'openai',
          input: sanitizeOpenAIResponse(openAIParams.input),
          output: [],
          latency: 0,
          baseURL: this.baseURL ?? '',
          params: body,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error),
          captureImmediate: posthogCaptureImmediate
        });
        throw error;
      });
      return wrappedPromise;
    }
  }
  /**
   * Instrumented `parse()` (structured outputs). The SDK's parse() calls
   * this.create() internally, so the wrapped create is temporarily swapped
   * for the original to avoid capturing the same generation twice.
   */
  parse(body, options) {
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      posthogCaptureImmediate,
      ...openAIParams
    } = body;
    const traceId = posthogTraceId ?? v4();
    const startTime = Date.now();
    // Create a temporary instance that bypasses our wrapped create method
    const originalCreate = super.create.bind(this);
    const originalSelf = this;
    // NOTE(review): assigning to `create` shadows the prototype method with
    // an own property for the duration of the call; not safe under
    // concurrent parse() calls on the same instance — verify if relevant.
    const tempCreate = originalSelf.create;
    originalSelf.create = originalCreate;
    try {
      const parentPromise = super.parse(openAIParams, options);
      const wrappedPromise = parentPromise.then(async result => {
        const latency = (Date.now() - startTime) / 1000;
        await sendEventToPosthog({
          client: this.phClient,
          distinctId: posthogDistinctId,
          traceId,
          //@ts-expect-error
          model: openAIParams.model,
          provider: 'openai',
          input: sanitizeOpenAIResponse(openAIParams.input),
          output: result.output,
          latency,
          baseURL: this.baseURL ?? '',
          params: body,
          httpStatus: 200,
          usage: {
            inputTokens: result.usage?.input_tokens ?? 0,
            outputTokens: result.usage?.output_tokens ?? 0,
            reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
            cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
          },
          captureImmediate: posthogCaptureImmediate
        });
        return result;
      }, async error => {
        // Request failed: capture an error event, then rethrow to the caller.
        await sendEventToPosthog({
          client: this.phClient,
          distinctId: posthogDistinctId,
          traceId,
          //@ts-expect-error
          model: openAIParams.model,
          provider: 'openai',
          input: sanitizeOpenAIResponse(openAIParams.input),
          output: [],
          latency: 0,
          baseURL: this.baseURL ?? '',
          params: body,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error),
          captureImmediate: posthogCaptureImmediate
        });
        throw error;
      });
      return wrappedPromise;
    } finally {
      // Restore our wrapped create method
      originalSelf.create = tempCreate;
    }
  }
}
export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, WrappedResponses, PostHogOpenAI as default };
//# sourceMappingURL=index.mjs.map