// @posthog/ai — PostHog Node.js AI integrations (Google Gemini wrapper)
// Compiled CommonJS bundle • 466 lines (453 loc) • 13.3 kB • JavaScript
'use strict';
// Compiled CommonJS module: flag the exports object for ES-module interop.
Object.defineProperty(exports, '__esModule', { value: true });
var genai = require('@google/genai');
var uuid = require('uuid');
var buffer = require('buffer');
// Encoding used when round-tripping strings through Buffer in sanitizeValues.
const STRING_FORMAT = 'utf8';
/**
 * Pick the recognised model-tuning parameters out of a request params
 * object, dropping unknown keys and keys whose value is undefined.
 * @param {object|null|undefined} params - raw request parameters
 * @returns {object} subset containing only the known model parameters
 */
const getModelParams = params => {
  if (!params) {
    return {};
  }
  const recognised = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
  return Object.fromEntries(
    recognised
      .filter(name => name in params && params[name] !== undefined)
      .map(name => [name, params[name]])
  );
};
/**
 * Normalise a Gemini response into PostHog's output-choices shape: a list
 * of assistant messages, each carrying a list of typed content parts
 * (text parts and function-call parts).
 * @param {object} response - Gemini GenerateContent response
 * @returns {Array<{role: string, content: *}>}
 */
const formatResponseGemini = response => {
  const messages = [];
  if (Array.isArray(response.candidates)) {
    for (const candidate of response.candidates) {
      const parts = candidate.content ? candidate.content.parts : undefined;
      if (parts) {
        const content = [];
        for (const part of parts) {
          if (part.text) {
            content.push({
              type: 'text',
              text: part.text
            });
          } else if (part.functionCall) {
            content.push({
              type: 'function',
              function: {
                name: part.functionCall.name,
                arguments: part.functionCall.args
              }
            });
          }
        }
        // Skip candidates that yielded no recognised parts.
        if (content.length > 0) {
          messages.push({
            role: 'assistant',
            content
          });
        }
      } else if (candidate.text) {
        // Candidate without structured parts but with a plain text field.
        messages.push({
          role: 'assistant',
          content: [{
            type: 'text',
            text: candidate.text
          }]
        });
      }
    }
  } else if (response.text) {
    // Responses with no candidates array may expose aggregated text directly.
    messages.push({
      role: 'assistant',
      content: [{
        type: 'text',
        text: response.text
      }]
    });
  }
  return messages;
};
/**
 * Gate a payload behind privacy mode: returns null when either the client's
 * own privacy_mode flag or the per-call flag is set, otherwise the input.
 */
const withPrivacyMode = (client, privacyMode, input) => {
  if (client.privacy_mode || privacyMode) {
    return null;
  }
  return input;
};
/**
 * Extract the tool definitions supplied with the request parameters.
 * These are the tools made available to the LLM, not the tool calls in the
 * response.
 *
 * Fixes: the original dereferenced `params.config` unconditionally and
 * threw a TypeError when `params` was null/undefined; it also wrapped the
 * body in a redundant bare block. `|| null` preserves the original truthy
 * check on `tools`.
 *
 * @param {string} provider - provider identifier (unused for Gemini; kept
 *   for interface parity with other providers)
 * @param {object|null|undefined} params - request parameters; Gemini
 *   carries tools at `params.config.tools`
 * @returns {*} the tools provided, or null when none were
 */
const extractAvailableToolCalls = (provider, params) => {
  return params?.config?.tools || null;
};
/**
 * Make a value safe for event capture: round-trip it through JSON (which
 * drops functions/undefined and enforces serialisability) and force every
 * string through a UTF-8 encode/decode so invalid sequences (e.g. lone
 * surrogates) become replacement characters.
 * Note: inherits JSON.stringify limits — throws on circular structures.
 * @param {*} obj - arbitrary value
 * @returns {*} a JSON-safe deep copy with valid-UTF-8 strings
 */
function sanitizeValues(obj) {
  if (obj === null || obj === undefined) {
    return obj;
  }
  const plain = JSON.parse(JSON.stringify(obj));
  if (typeof plain === 'string') {
    // Encode/decode normalises any ill-formed UTF-8 content.
    return buffer.Buffer.from(plain, STRING_FORMAT).toString(STRING_FORMAT);
  }
  if (Array.isArray(plain)) {
    return plain.map(element => sanitizeValues(element));
  }
  if (plain !== null && typeof plain === 'object') {
    const result = {};
    for (const [key, value] of Object.entries(plain)) {
      result[key] = sanitizeValues(value);
    }
    return result;
  }
  return plain;
}
/**
 * Build a `$ai_generation` analytics event from a single LLM call and hand
 * it to the PostHog client.
 *
 * @param {object} args
 * @param {object} args.client - PostHog client; must expose `capture` (and
 *   `captureImmediate` when that path is used). No-ops if `capture` is absent.
 * @param {string} [args.distinctId] - person to attribute the event to;
 *   falls back to `traceId` with `$process_person_profile: false`.
 * @param {string} args.traceId - id correlating all events of one trace
 * @param {string} args.model - requested model name
 * @param {string} args.provider - provider label (e.g. 'gemini')
 * @param {*} args.input - request messages (nulled out in privacy mode)
 * @param {*} args.output - formatted response choices (nulled in privacy mode)
 * @param {number} args.latency - request latency (callers pass seconds)
 * @param {string} args.baseURL - provider API base URL
 * @param {object} args.params - original call params, including the
 *   `posthog*` override fields read below
 * @param {number} [args.httpStatus=200] - HTTP status of the provider call
 * @param {object} [args.usage={}] - token counts: inputTokens, outputTokens,
 *   reasoningTokens, cacheReadInputTokens, cacheCreationInputTokens
 * @param {boolean} [args.isError=false] - marks the call as failed
 * @param {*} [args.error] - error payload attached when `isError` is set
 * @param {*} [args.tools] - tool definitions that were offered to the model
 * @param {boolean} [args.captureImmediate=false] - await delivery instead of
 *   queueing (for serverless environments)
 * @returns {Promise<void>}
 */
const sendEventToPosthog = async ({
client,
distinctId,
traceId,
model,
provider,
input,
output,
latency,
baseURL,
params,
httpStatus = 200,
usage = {},
isError = false,
error,
tools,
captureImmediate = false
}) => {
// Bail out quietly if the client does not look like a PostHog client.
if (!client.capture) {
return Promise.resolve();
}
// sanitize input and output for UTF-8 validity
const safeInput = sanitizeValues(input);
const safeOutput = sanitizeValues(output);
const safeError = sanitizeValues(error);
// Failed calls additionally carry $ai_is_error / $ai_error.
let errorData = {};
if (isError) {
errorData = {
$ai_is_error: true,
$ai_error: safeError
};
}
// Optional per-call cost override: price-per-token times token counts.
let costOverrideData = {};
if (params.posthogCostOverride) {
const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
costOverrideData = {
$ai_input_cost_usd: inputCostUSD,
$ai_output_cost_usd: outputCostUSD,
$ai_total_cost_usd: inputCostUSD + outputCostUSD
};
}
// Optional token counters are only attached when non-zero/truthy.
const additionalTokenValues = {
...(usage.reasoningTokens ? {
$ai_reasoning_tokens: usage.reasoningTokens
} : {}),
...(usage.cacheReadInputTokens ? {
$ai_cache_read_input_tokens: usage.cacheReadInputTokens
} : {}),
...(usage.cacheCreationInputTokens ? {
$ai_cache_creation_input_tokens: usage.cacheCreationInputTokens
} : {})
};
// Spread order matters: user-supplied posthogProperties may override the
// defaults above, while errorData and costOverrideData override everything.
const properties = {
$ai_provider: params.posthogProviderOverride ?? provider,
$ai_model: params.posthogModelOverride ?? model,
$ai_model_parameters: getModelParams(params),
$ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
$ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
$ai_http_status: httpStatus,
$ai_input_tokens: usage.inputTokens ?? 0,
$ai_output_tokens: usage.outputTokens ?? 0,
...additionalTokenValues,
$ai_latency: latency,
$ai_trace_id: traceId,
$ai_base_url: baseURL,
...params.posthogProperties,
...(distinctId ? {} : {
$process_person_profile: false
}),
...(tools ? {
$ai_tools: tools
} : {}),
...errorData,
...costOverrideData
};
const event = {
distinctId: distinctId ?? traceId,
event: '$ai_generation',
properties,
groups: params.posthogGroups
};
if (captureImmediate) {
// await capture promise to send single event in serverless environments
await client.captureImmediate(event);
} else {
client.capture(event);
}
};
// Type guards for safer type checking
/** True only for plain, non-null, non-array objects. */
const isObject = value => typeof value === 'object' && value !== null && !Array.isArray(value);
const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
/**
 * Redact the base64 payload of one Gemini content part that uses the
 * inlineData format, keeping every other field intact. Non-object parts
 * and parts without inline data pass through unchanged.
 */
const sanitizeGeminiPart = part => {
  if (!isObject(part)) {
    return part;
  }
  const carriesInlineData = 'inlineData' in part && isObject(part.inlineData) && 'data' in part.inlineData;
  if (!carriesInlineData) {
    return part;
  }
  return {
    ...part,
    inlineData: {
      ...part.inlineData,
      data: REDACTED_IMAGE_PLACEHOLDER
    }
  };
};
/**
 * Sanitize a single Gemini content item; `parts` may be either an array of
 * parts or a single part object.
 */
const processGeminiItem = item => {
  if (!isObject(item) || !('parts' in item) || !item.parts) {
    return item;
  }
  const cleaned = Array.isArray(item.parts)
    ? item.parts.map(sanitizeGeminiPart)
    : sanitizeGeminiPart(item.parts);
  return {
    ...item,
    parts: cleaned
  };
};
/**
 * Gemini puts 'parts' directly on content items (instead of nesting them
 * under 'content' as other providers do), so it gets this custom sanitizer
 * rather than a generic processMessages pass.
 */
const sanitizeGemini = data => {
  if (!data) {
    return data;
  }
  return Array.isArray(data) ? data.map(processGeminiItem) : processGeminiItem(data);
};
// Types from @google/genai
/**
 * Drop-in wrapper around GoogleGenAI that reports model usage to PostHog.
 * Construct it with the usual GoogleGenAI options plus a `posthog` client;
 * use `instance.models` exactly like `GoogleGenAI.models`.
 */
class PostHogGoogleGenAI {
  constructor(config) {
    const { posthog, ...googleConfig } = config;
    this.phClient = posthog;
    this.client = new genai.GoogleGenAI(googleConfig);
    this.models = new WrappedModels(this.client, this.phClient);
  }
}
/**
 * Wraps `GoogleGenAI.models`: forwards `generateContent` /
 * `generateContentStream` to the real client while capturing a
 * `$ai_generation` PostHog event for every call, including failures.
 */
class WrappedModels {
/**
 * @param {object} client - the underlying GoogleGenAI client
 * @param {object} phClient - the PostHog client used for event capture
 */
constructor(client, phClient) {
this.client = client;
this.phClient = phClient;
}
/**
 * Non-streaming generation. Strips the `posthog*` options off the params,
 * forwards the rest to Gemini, then reports timing, usage and the formatted
 * input/output to PostHog. On provider failure it captures an error event
 * (status from `error.status`, default 500) and rethrows the original error.
 */
async generateContent(params) {
const {
posthogDistinctId,
posthogTraceId,
posthogProperties,
posthogGroups,
posthogCaptureImmediate,
...geminiParams
} = params;
// Generate a trace id when the caller did not supply one.
const traceId = posthogTraceId ?? uuid.v4();
const startTime = Date.now();
try {
const response = await this.client.models.generateContent(geminiParams);
// Latency is reported in seconds.
const latency = (Date.now() - startTime) / 1000;
const availableTools = extractAvailableToolCalls('gemini', geminiParams);
await sendEventToPosthog({
client: this.phClient,
distinctId: posthogDistinctId,
traceId,
model: geminiParams.model,
provider: 'gemini',
input: this.formatInputForPostHog(geminiParams.contents),
output: formatResponseGemini(response),
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params,
httpStatus: 200,
usage: {
inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
},
tools: availableTools,
captureImmediate: posthogCaptureImmediate
});
return response;
} catch (error) {
const latency = (Date.now() - startTime) / 1000;
// NOTE(review): JSON.stringify on an Error instance yields '{}' because
// Error properties are non-enumerable — consider serialising
// error.message/name instead; confirm desired payload shape.
await sendEventToPosthog({
client: this.phClient,
distinctId: posthogDistinctId,
traceId,
model: geminiParams.model,
provider: 'gemini',
input: this.formatInputForPostHog(geminiParams.contents),
output: [],
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params,
httpStatus: error?.status ?? 500,
usage: {
inputTokens: 0,
outputTokens: 0
},
isError: true,
error: JSON.stringify(error),
captureImmediate: posthogCaptureImmediate
});
throw error;
}
}
/**
 * Streaming generation: re-yields every chunk from the Gemini stream while
 * accumulating text (`chunk.text` only — function-call parts are not
 * collected into the captured output) and keeping the latest `usageMetadata`
 * seen on any chunk. Sends the PostHog event after the stream is exhausted,
 * or an error event (then rethrows) if the stream setup or iteration throws.
 */
async *generateContentStream(params) {
const {
posthogDistinctId,
posthogTraceId,
posthogProperties,
posthogGroups,
posthogCaptureImmediate,
...geminiParams
} = params;
const traceId = posthogTraceId ?? uuid.v4();
const startTime = Date.now();
let accumulatedContent = '';
let usage = {
inputTokens: 0,
outputTokens: 0
};
try {
const stream = await this.client.models.generateContentStream(geminiParams);
for await (const chunk of stream) {
if (chunk.text) {
accumulatedContent += chunk.text;
}
// Each usage-bearing chunk overwrites the previous counts; the final
// chunk's metadata is what gets reported.
if (chunk.usageMetadata) {
usage = {
inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
};
}
yield chunk;
}
const latency = (Date.now() - startTime) / 1000;
const availableTools = extractAvailableToolCalls('gemini', geminiParams);
await sendEventToPosthog({
client: this.phClient,
distinctId: posthogDistinctId,
traceId,
model: geminiParams.model,
provider: 'gemini',
input: this.formatInputForPostHog(geminiParams.contents),
output: [{
content: accumulatedContent,
role: 'assistant'
}],
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params,
httpStatus: 200,
usage,
tools: availableTools,
captureImmediate: posthogCaptureImmediate
});
} catch (error) {
const latency = (Date.now() - startTime) / 1000;
await sendEventToPosthog({
client: this.phClient,
distinctId: posthogDistinctId,
traceId,
model: geminiParams.model,
provider: 'gemini',
input: this.formatInputForPostHog(geminiParams.contents),
output: [],
latency,
baseURL: 'https://generativelanguage.googleapis.com',
params: params,
httpStatus: error?.status ?? 500,
usage: {
inputTokens: 0,
outputTokens: 0
},
isError: true,
error: JSON.stringify(error),
captureImmediate: posthogCaptureImmediate
});
throw error;
}
}
/**
 * Normalise Gemini `contents` into PostHog's chat format
 * `[{ role, content }]`. Accepts a plain string, an array of strings or
 * message objects (recognising `text`, `content`, or `parts` fields, in
 * that order), or a single object; anything unrecognised is stringified
 * as a user message.
 */
formatInput(contents) {
if (typeof contents === 'string') {
return [{
role: 'user',
content: contents
}];
}
if (Array.isArray(contents)) {
return contents.map(item => {
if (typeof item === 'string') {
return {
role: 'user',
content: item
};
}
if (item && typeof item === 'object') {
if (item.text) {
return {
role: item.role || 'user',
content: item.text
};
}
if (item.content) {
return {
role: item.role || 'user',
content: item.content
};
}
if (item.parts) {
// Flatten parts to their text where available; keep raw parts otherwise.
return {
role: item.role || 'user',
content: item.parts.map(part => part.text ? part.text : part)
};
}
}
// Fallback for primitives/unknown shapes inside the array.
return {
role: 'user',
content: String(item)
};
});
}
if (contents && typeof contents === 'object') {
if (contents.text) {
return [{
role: 'user',
content: contents.text
}];
}
if (contents.content) {
return [{
role: 'user',
content: contents.content
}];
}
}
// Last-resort fallback: stringify whatever was passed.
return [{
role: 'user',
content: String(contents)
}];
}
/**
 * Redact inline base64 image data from the contents, then convert them to
 * PostHog's input format.
 */
formatInputForPostHog(contents) {
const sanitized = sanitizeGemini(contents);
return this.formatInput(sanitized);
}
}
// Public API: the wrapper class is exported under several names for
// compatibility (`Gemini`, `PostHogGoogleGenAI`, and as the default export).
exports.Gemini = PostHogGoogleGenAI;
exports.PostHogGoogleGenAI = PostHogGoogleGenAI;
exports.WrappedModels = WrappedModels;
exports.default = PostHogGoogleGenAI;
//# sourceMappingURL=index.cjs.map