@helicone/helpers
Version:
A Node.js wrapper for some of Helicone's common functionalities
859 lines (855 loc) • 27.8 kB
JavaScript
// manual_logger/HeliconeLogBuilder.ts
// Accumulates one LLM request/response cycle (streaming or not) and forwards
// it to Helicone via the owning HeliconeManualLogger's sendLog().
var HeliconeLogBuilder = class {
  /**
   * Creates a new HeliconeLogBuilder
   * @param logger - The HeliconeManualLogger instance to use for logging
   * @param request - The request object to log
   * @param additionalHeaders - Additional headers to send with the request
   */
  constructor(logger, request, additionalHeaders) {
    this.endTime = 0; // 0 means "not finished"; sendLog() backfills with now()
    this.responseBody = ""; // non-streaming response text (set via setResponse)
    this.error = null; // captured via setError() or a failing stream pull
    this.streamTexts = []; // JSON-serialized stream chunks, one "\n"-terminated entry each
    this.status = 200; // status reported to Helicone (500 on error, -3 on cancel)
    this.wasCancelled = false; // set when the consumer cancels the ReadableStream
    this.streamState = {
      isPolling: false, // true while a wrapped stream is still being pulled
      alreadyAttached: false // guards against attaching more than one stream
    };
    this.attachedStream = null;
    this.logger = logger;
    this.request = request;
    this.additionalHeaders = additionalHeaders;
    // Monotonic clock; converted to unix epoch milliseconds in sendLog().
    this.startTime = performance.now();
  }
  /**
   * Sets an error that occurred during the request.
   * Marks the log as finished with status 500.
   * @param error - The error that occurred
   */
  setError(error) {
    this.error = error;
    this.endTime = performance.now();
    this.status = 500;
  }
  /**
   * Collects streaming responses and converts them to a readable stream
   * while also capturing the response for logging. Each chunk is re-serialized
   * as one JSON line, both for the client stream and for the log.
   * @param stream - The stream from an LLM provider response (any async iterable)
   * @returns A ReadableStream that can be returned to the client
   * @throws Error if a stream was already attached to this builder
   */
  toReadableStream(stream) {
    if (this.streamState.alreadyAttached) {
      throw new Error("Cannot attach multiple streams");
    }
    this.streamState.alreadyAttached = true;
    const self = stream;
    let iter;
    const encoder = new TextEncoder();
    // Capture `this` for the ReadableStream callbacks below.
    const builder = this;
    this.streamState.isPolling = true;
    return new ReadableStream({
      async start() {
        iter = self[Symbol.asyncIterator]();
      },
      async pull(ctrl) {
        try {
          const { value, done } = await iter.next();
          if (done) {
            // Source exhausted: stamp the end time and stop polling.
            builder.endTime = performance.now();
            builder.streamState.isPolling = false;
            return ctrl.close();
          }
          // NOTE(review): falsy check — a (theoretical) 0ms first-token delta
          // would be re-measured on the next chunk; confirm intended.
          if (!builder.timeToFirstToken) {
            builder.timeToFirstToken = performance.now() - builder.startTime;
          }
          const json = JSON.stringify(value) + "\n";
          builder.streamTexts.push(json);
          const bytes = encoder.encode(json);
          ctrl.enqueue(bytes);
        } catch (err) {
          // Record the failure for sendLog(), then propagate to the consumer.
          builder.error = err;
          builder.endTime = performance.now();
          builder.status = 500;
          builder.streamState.isPolling = false;
          ctrl.error(err);
        }
      },
      async cancel() {
        // Client stopped reading: mark cancelled (-3 in sendLog) and release
        // the underlying iterator if it supports early return.
        builder.wasCancelled = true;
        builder.endTime = performance.now();
        builder.streamState.isPolling = false;
        await iter.return?.();
      }
    });
  }
  /**
   * Merges extra headers into the ones sent with the log request.
   * Later keys override earlier ones.
   * @param headers - Headers to merge in
   */
  addAdditionalHeaders(headers) {
    this.additionalHeaders = {
      ...this.additionalHeaders,
      ...headers
    };
  }
  /**
   * Attaches a stream to the log builder, this will consume the stream and log it on sendLog
   * @param stream - The stream to attach
   * @throws Error if a stream was already attached
   */
  async attachStream(stream) {
    if (this.attachedStream) {
      throw new Error("Cannot attach multiple streams");
    }
    this.attachedStream = stream;
    // NOTE(review): if the stream errors, consumeStream() rejects and the
    // error escapes to the caller (it is also recorded for sendLog); confirm
    // callers are expected to catch here.
    await this.consumeStream();
  }
  /**
   * Sets the response body for non-streaming responses
   * @param body - The response body
   */
  setResponse(body) {
    this.responseBody = body;
    this.endTime = performance.now();
  }
  // Blocks until any in-flight stream finishes; polls every 100ms and throws
  // after 10 seconds (1e4 ms) to avoid hanging sendLog() forever.
  async waitForStreamToFinish() {
    const maxWaitTime = 1e4;
    const startTime = performance.now();
    while (this.streamState.isPolling) {
      if (performance.now() - startTime > maxWaitTime) {
        throw new Error("Stream took too long to finish");
      }
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
    return;
  }
  // Drains an attached stream (if nobody is already pulling it) so that its
  // chunks land in streamTexts for logging. Chunk bytes are discarded here.
  async consumeStream() {
    if (this.attachedStream && !this.streamState.isPolling) {
      const stream = this.toReadableStream(this.attachedStream);
      const reader = stream.getReader();
      while (true) {
        const { done } = await reader.read();
        if (done) {
          break;
        }
      }
    }
  }
  /**
   * Sends the log to Helicone
   * @returns A Promise that resolves when logging is complete
   * @throws Error if the underlying logger.sendLog call fails
   */
  async sendLog() {
    await this.waitForStreamToFinish();
    if (this.endTime === 0) {
      this.endTime = performance.now();
    }
    try {
      if (this.wasCancelled) {
        // -3 is Helicone's sentinel status for client-cancelled requests.
        this.status = -3;
      }
      let response = this.streamTexts.length > 0 ? this.streamTexts.join("") : this.responseBody;
      if (this.error && !this.wasCancelled) {
        // Prefix the captured error (stack preferred) onto the response text.
        response = (this.error instanceof Error ? this.error.stack || this.error.message : String(this.error)) + "\n\n" + response;
      }
      // Convert monotonic performance.now() timestamps to unix epoch ms.
      const startTimeUnix = Date.now() - Math.round(performance.now() - this.startTime);
      const endTimeUnix = Date.now() - Math.round(performance.now() - this.endTime);
      const timeToFirstTokenMs = this.timeToFirstToken ? Math.round(this.timeToFirstToken) : void 0;
      await this.logger.sendLog(this.request, response, {
        startTime: startTimeUnix,
        endTime: endTimeUnix,
        additionalHeaders: this.additionalHeaders,
        timeToFirstToken: timeToFirstTokenMs,
        status: this.status
      });
    } catch (error) {
      console.error("Error sending log to Helicone:", error);
      throw error;
    }
  }
};
// manual_logger/HeliconeManualLogger.ts
// Sends hand-built request/response logs to Helicone's custom-model log API.
var HeliconeManualLogger = class {
  /**
   * @param opts.apiKey - Helicone API key (sent as a Bearer token)
   * @param opts.headers - Optional headers merged into every log request
   * @param opts.loggingEndpoint - Optional override for the logging endpoint
   */
  constructor(opts) {
    this.LOGGING_ENDPOINT = "https://api.worker.helicone.ai/custom/v1/log";
    this.apiKey = opts.apiKey;
    this.headers = opts.headers || {};
    this.LOGGING_ENDPOINT = opts.loggingEndpoint || this.LOGGING_ENDPOINT;
  }
  /**
   * Resolves the provider-specific logging route for the configured endpoint.
   * Known providers map to dedicated routes (oai/anthropic/googleapis);
   * anything else falls back to "custom".
   * @param provider - Optional provider name (case-insensitive)
   * @returns The full logging URL for this provider
   */
  getLoggingEndpoint(provider) {
    let endpoint = this.LOGGING_ENDPOINT;
    const key = provider ? String(provider).toUpperCase() : void 0;
    const providerRoutes = /* @__PURE__ */ new Map([
      ["OPENAI", "oai"],
      ["ANTHROPIC", "anthropic"],
      ["GOOGLE", "googleapis"]
    ]);
    const route = key ? providerRoutes.get(key) || "custom" : "custom";
    const knownRouteRegex = /(\/(custom|oai|anthropic|googleapis)\/v1\/log)$/;
    // Swap an existing known route suffix in place; otherwise append the
    // route to the configured base URL (trimming any trailing slash).
    return knownRouteRegex.test(endpoint) ? endpoint.replace(knownRouteRegex, `/${route}/v1/log`) : endpoint.replace(/\/$/, "") + `/${route}/v1/log`;
  }
  /**
   * Creates a log builder for more flexible stream handling with error management
   * @param request - The request object to log
   * @param additionalHeaders - Additional headers to send with the request
   * @returns A HeliconeLogBuilder instance
   */
  logBuilder(request, additionalHeaders) {
    return new HeliconeLogBuilder(this, request, additionalHeaders);
  }
  /**
   * Logs a custom request to Helicone
   * @param request - The request object to log
   * @param operation - The operation which will be executed and logged; receives
   *   a HeliconeResultRecorder to collect the result payload
   * @param additionalHeaders - Additional headers to send with the request
   * @param provider - Optional provider name used to pick the logging route
   * @returns The result of the `operation` function
   * @throws Rethrows any error from `operation` (the failed request is NOT
   *   logged to Helicone in that case — only to the console)
   */
  async logRequest(request, operation, additionalHeaders, provider) {
    const startTime = Date.now();
    const resultRecorder = new HeliconeResultRecorder();
    try {
      const result = await operation(resultRecorder);
      const endTime = Date.now();
      await this.sendLog(request, resultRecorder.getResults(), {
        startTime,
        endTime,
        additionalHeaders,
        status: 200,
        provider
      });
      return result;
    } catch (error) {
      console.error("Error during operation:", error);
      throw error;
    }
  }
  /**
   * Logs a single stream to Helicone. Consumes the stream fully, decoding its
   * byte chunks to text, then sends one log entry.
   * @param request - The request object to log
   * @param stream - The ReadableStream to consume and log
   * @param additionalHeaders - Additional headers to send with the request
   * @returns A Promise that resolves when logging is complete
   */
  async logSingleStream(request, stream, additionalHeaders) {
    const startTime = Date.now();
    let firstChunkTimeUnix = null;
    const streamedData = [];
    const decoder = new TextDecoder();
    for await (const chunk of stream) {
      if (!firstChunkTimeUnix) {
        firstChunkTimeUnix = Date.now();
      }
      // stream: true keeps multi-byte UTF-8 sequences that are split across
      // chunk boundaries intact instead of decoding them as U+FFFD.
      streamedData.push(decoder.decode(chunk, { stream: true }));
    }
    // Flush any bytes the decoder buffered from a trailing partial sequence.
    streamedData.push(decoder.decode());
    await this.sendLog(request, streamedData.join(""), {
      startTime,
      endTime: Date.now(),
      additionalHeaders,
      timeToFirstToken: firstChunkTimeUnix ? firstChunkTimeUnix - startTime : void 0,
      status: 200
    });
  }
  /**
   * Logs a single request with a response body to Helicone
   * @param request - The request object to log
   * @param body - The response body as a string
   * @param options.additionalHeaders - Additional headers to send with the request
   * @param options.latencyMs - The latency of the request in milliseconds
   *   (the end time is synthesized as start + latencyMs)
   * @returns A Promise that resolves when logging is complete
   *
   * @example
   * ```typescript
   * helicone.logSingleRequest(request, body, { additionalHeaders: { "Helicone-User-Id": userId }, latencyMs: 1000 });
   * ```
   */
  async logSingleRequest(request, body, options) {
    const startTime = Date.now();
    const endTime = options.latencyMs ? startTime + options.latencyMs : Date.now();
    await this.sendLog(request, body, {
      startTime,
      endTime,
      additionalHeaders: options.additionalHeaders,
      status: 200
    });
  }
  /**
   * Logs a streaming operation to Helicone
   * @param request - The request object to log
   * @param operation - The operation which will be executed and logged, with access to a stream recorder
   * @param additionalHeaders - Additional headers to send with the request
   * @returns The result of the `operation` function
   *
   * @example
   * ```typescript
   * const response = await llmProvider.createChatCompletion({ stream: true, ... });
   * const [stream1, stream2] = response.tee();
   *
   * helicone.logStream(
   *   requestBody,
   *   async (resultRecorder) => {
   *     resultRecorder.attachStream(stream2.toReadableStream());
   *     return stream1;
   *   },
   *   { "Helicone-User-Id": userId }
   * );
   * ```
   */
  async logStream(request, operation, additionalHeaders) {
    const startTime = Date.now();
    const resultRecorder = new HeliconeStreamResultRecorder();
    const result = await operation(resultRecorder);
    try {
      // Drain every attached stream, then log the concatenated text.
      await resultRecorder.getStreamTexts().then(async (texts) => {
        const endTime = Date.now();
        await this.sendLog(request, texts.join(""), {
          startTime,
          endTime,
          additionalHeaders,
          timeToFirstToken: resultRecorder.firstChunkTimeUnix ? resultRecorder.firstChunkTimeUnix - startTime : void 0,
          status: 200
        });
      });
      return result;
    } catch (error) {
      console.error("Helicone error during stream logging:", error);
      throw error;
    }
  }
  /**
   * Low-level log submission. Builds the Helicone custom-log payload
   * (providerRequest / providerResponse / timing) and POSTs it.
   * Network and HTTP failures are logged to the console but never thrown,
   * so logging cannot break the caller's request path.
   * @param request - The original request object
   * @param response - Response payload: a string goes into textBody, an
   *   object is merged into the JSON body
   * @param options - startTime/endTime (unix ms), additionalHeaders,
   *   timeToFirstToken, status (default 200), provider
   */
  async sendLog(request, response, options) {
    const { startTime, endTime, additionalHeaders, status = 200 } = options;
    const providerRequest = {
      url: "custom-model-nopath",
      json: {
        ...request
      },
      meta: {}
    };
    const isResponseString = typeof response === "string";
    const providerResponse = {
      headers: this.headers,
      status,
      json: isResponseString ? {} : {
        ...response,
        _type: request._type,
        toolName: request.toolName
      },
      textBody: isResponseString ? response : void 0
    };
    // Helicone expects timestamps split into whole seconds + remainder ms.
    const timing = {
      startTime: {
        seconds: Math.trunc(startTime / 1e3),
        milliseconds: startTime % 1e3
      },
      endTime: {
        seconds: Math.trunc(endTime / 1e3),
        milliseconds: endTime % 1e3
      },
      timeToFirstToken: options.timeToFirstToken
    };
    const fetchOptions = {
      method: "POST",
      headers: {
        Authorization: `Bearer ${this.apiKey}`,
        "Content-Type": "application/json",
        ...this.headers,
        ...additionalHeaders
      },
      body: JSON.stringify({
        providerRequest,
        providerResponse,
        timing,
        provider: options.provider
      })
    };
    try {
      const response2 = await fetch(this.getLoggingEndpoint(options.provider), fetchOptions);
      if (!response2.ok) {
        console.error(
          "Error making request to Helicone log endpoint:",
          response2.statusText
        );
      }
    } catch (error) {
      console.error(
        "Error making request to Helicone log endpoint:",
        error?.message,
        error
      );
    }
  }
};
// Collects one or more byte streams and exposes their decoded contents,
// recording when the first chunk of data actually arrived.
var HeliconeStreamResultRecorder = class {
  constructor() {
    this.streams = []; // attached streams, drained by getStreamTexts()
    this.firstChunkTimeUnix = null; // unix ms of the first received chunk, or null
  }
  /**
   * Attaches a ReadableStream to be processed
   * @param stream - The ReadableStream to attach
   */
  attachStream(stream) {
    this.streams.push(stream);
  }
  /**
   * Processes all attached streams and returns their contents as strings.
   * Sets firstChunkTimeUnix when the first chunk of any stream arrives
   * (previously it was stamped before any data had been received, which
   * made time-to-first-token measurements meaningless).
   * @returns Promise resolving to an array of strings containing the content of each stream
   */
  async getStreamTexts() {
    return Promise.all(
      this.streams.map(async (stream) => {
        // One decoder per stream: with { stream: true } a decoder carries
        // partial-sequence state that must not be shared across streams.
        const decoder = new TextDecoder();
        const streamedData = [];
        for await (const chunk of stream) {
          if (!this.firstChunkTimeUnix) {
            // Stamp on actual arrival of the first chunk, not on call time.
            this.firstChunkTimeUnix = Date.now();
          }
          // stream: true keeps multi-byte UTF-8 sequences split across
          // chunks intact.
          streamedData.push(decoder.decode(chunk, { stream: true }));
        }
        // Flush any buffered trailing bytes.
        streamedData.push(decoder.decode());
        return streamedData.join("");
      })
    );
  }
};
// Accumulates result fragments for a non-streaming operation into a single
// object; later appends overwrite earlier keys.
var HeliconeResultRecorder = class {
  constructor() {
    // Running merge of everything passed to appendResults().
    this.results = {};
  }
  /**
   * Merges `data` into the accumulated results (shallow; later keys win).
   * @param data - The data to merge into the results
   */
  appendResults(data) {
    this.results = Object.assign({}, this.results, data);
  }
  /**
   * Returns the accumulated results object.
   * @returns The current results object
   */
  getResults() {
    return this.results;
  }
};
// ../../../packages/prompts/templates.ts
// Matches {{hc:NAME:type}} placeholders; capture 1 = variable name,
// capture 2 = declared type. Global flag: callers must reset lastIndex.
var TEMPLATE_REGEX = /\{\{\s*hc\s*:\s*([a-zA-Z_-][a-zA-Z0-9_-]*)\s*:\s*([a-zA-Z_-][a-zA-Z0-9_-]*)\s*\}\}/g;
// String spellings accepted as booleans (compared case-insensitively).
var BOOLEAN_VALUES = ["true", "false", "yes", "no"];
var HeliconeTemplateManager = class {
  /**
   * Extract all distinct variables and their types from a template string
   * @param template - The template string containing {{hc:NAME:type}} patterns
   * @returns Array of unique variables with their names and types
   *   (first occurrence of a name wins)
   */
  static extractVariables(template) {
    const variables = /* @__PURE__ */ new Map();
    let match;
    TEMPLATE_REGEX.lastIndex = 0;
    while ((match = TEMPLATE_REGEX.exec(template)) !== null) {
      const [fullMatch, name, type] = match;
      const trimmedName = name.trim();
      if (!variables.has(trimmedName)) {
        variables.set(trimmedName, {
          name: trimmedName,
          type: type.trim(),
          raw: fullMatch
        });
      }
    }
    return Array.from(variables.values());
  }
  /**
   * Check if actual type is compatible with expected type
   * @param value - The actual value to check
   * @param expectedType - The expected type from template
   * @returns True if types are compatible (unknown types always pass)
   */
  static isTypeCompatible(value, expectedType) {
    switch (expectedType) {
      case "string":
        // Anything can be stringified.
        return true;
      case "number":
        return typeof value === "number" || !isNaN(Number(value));
      case "boolean":
        return typeof value === "boolean" || typeof value === "string" && BOOLEAN_VALUES.includes(value.toLowerCase());
      default:
        return true;
    }
  }
  /**
   * Substitute variables in template with provided inputs after type validation.
   * Missing (null/undefined) inputs leave their placeholder untouched; present
   * falsy values such as 0, false and "" ARE substituted (previously a
   * truthiness check silently skipped them).
   * @param template - The template string containing {{hc:NAME:type}} patterns
   * @param inputs - Hash map of input values
   * @returns Result object with success status and either result string or errors
   */
  static substituteVariables(template, inputs) {
    const variables = this.extractVariables(template);
    const errors = [];
    for (const variable of variables) {
      const value = variable.name in inputs ? inputs[variable.name] : void 0;
      // Skip only genuinely absent values; 0 / false / "" still get
      // type-checked and substituted below.
      if (value == null) {
        continue;
      }
      if (!this.isTypeCompatible(value, variable.type)) {
        errors.push({
          variable: variable.name,
          expected: variable.type,
          value
        });
      }
    }
    if (errors.length > 0) {
      return {
        success: false,
        errors
      };
    }
    TEMPLATE_REGEX.lastIndex = 0;
    const result = template.replace(TEMPLATE_REGEX, (match, name) => {
      const value = name.trim() in inputs ? inputs[name.trim()] : void 0;
      // Nullish (not falsy) check so 0 / false / "" are written out.
      return value == null ? match : String(value);
    });
    return {
      success: true,
      result
    };
  }
  // Replaces every placeholder inside `str` with its stringified input value;
  // placeholders whose input is null/undefined are left as-is.
  static performRegexReplacement(str, inputs) {
    TEMPLATE_REGEX.lastIndex = 0;
    return str.replace(TEMPLATE_REGEX, (match, name) => {
      const value = inputs[name.trim()];
      return value !== void 0 && value !== null ? String(value) : match;
    });
  }
  // Recursively substitutes placeholders in strings, arrays and object
  // keys/values. A string that is exactly one placeholder is replaced by the
  // raw input value (preserving its type); otherwise replacement is textual.
  // Non-string replacement keys are reported into `errors` and skipped.
  static processObjectKV(obj, inputs, errors) {
    if (typeof obj === "string") {
      if (this.isWholeMatch(obj)) {
        const varName = this.getVariableName(obj);
        if (varName && inputs[varName] !== void 0) {
          return inputs[varName];
        }
      }
      return this.performRegexReplacement(obj, inputs);
    } else if (Array.isArray(obj)) {
      return obj.map((item) => this.processObjectKV(item, inputs, errors));
    } else if (obj !== null && typeof obj === "object") {
      const result = {};
      for (const [key, value] of Object.entries(obj)) {
        let processedKey = key;
        if (typeof key === "string") {
          if (this.isWholeMatch(key)) {
            const varName = this.getVariableName(key);
            // Nullish (not truthiness) check so a falsy-but-present
            // replacement key like "" is honored.
            if (varName && inputs[varName] != null) {
              const inputValue = inputs[varName];
              if (typeof inputValue === "string") {
                processedKey = inputValue;
              } else {
                errors.push({
                  variable: varName,
                  expected: "string",
                  value: inputValue
                });
                continue;
              }
            }
          } else {
            processedKey = this.performRegexReplacement(key, inputs);
          }
        }
        const processedValue = this.processObjectKV(value, inputs, errors);
        result[processedKey] = processedValue;
      }
      return result;
    }
    return obj;
  }
  /**
   * Substitute variables in JSON format object with provided inputs.
   * Unlike substituteVariables, every referenced variable must be present
   * (non-nullish) and type-compatible; otherwise errors are returned.
   * Present falsy values (0, false, "") are valid (previously rejected).
   * @param json - The JSON object containing "{{hc:NAME:type}}" patterns
   * @param inputs - Hash map of input values
   * @returns Result object with success status and either result object or errors
   */
  static substituteVariablesJSON(json, inputs) {
    const variables = this.extractVariables(JSON.stringify(json));
    const errors = [];
    for (const variable of variables) {
      const value = inputs[variable.name];
      // `== null` (not `!value`) so present falsy values pass validation.
      if (value == null || !this.isTypeCompatible(value, variable.type)) {
        errors.push({
          variable: variable.name,
          expected: variable.type,
          value
        });
      }
    }
    if (errors.length > 0) {
      return {
        success: false,
        errors
      };
    }
    try {
      const result = this.processObjectKV(json, inputs, errors);
      if (errors.length > 0) {
        return {
          success: false,
          errors
        };
      }
      return {
        success: true,
        result
      };
    } catch (error) {
      return {
        success: false,
        errors: [{
          variable: "unknown",
          expected: "valid",
          value: error
        }]
      };
    }
  }
  /**
   * Get a list of all variable names from a template (convenience method)
   * @param template - The template string
   * @returns Array of variable names
   */
  static getVariableNames(template) {
    return this.extractVariables(template).map((v) => v.name);
  }
};
// True when the entire string is exactly one {{hc:...}} placeholder.
HeliconeTemplateManager.isWholeMatch = (str) => {
  if (typeof str !== "string") return false;
  TEMPLATE_REGEX.lastIndex = 0;
  const match = TEMPLATE_REGEX.exec(str);
  return match !== null && match[0] === str;
};
// Variable name of the first placeholder in the string, or null.
HeliconeTemplateManager.getVariableName = (str) => {
  if (typeof str !== "string") return null;
  TEMPLATE_REGEX.lastIndex = 0;
  const match = TEMPLATE_REGEX.exec(str);
  return match ? match[1].trim() : null;
};
// ../../../packages/prompts/HeliconePromptManager.ts
// Pulls stored prompt bodies from the Helicone API (metadata) and S3 (body),
// then merges them with caller-supplied OpenAI-style chat parameters,
// substituting {{hc:NAME:type}} template variables via HeliconeTemplateManager.
var HeliconePromptManager = class {
  /**
   * @param options.apiKey - Helicone API key (sent as a Bearer token)
   * @param options.baseUrl - Optional API base URL (defaults to https://api.helicone.ai)
   */
  constructor(options) {
    this.apiKey = options.apiKey;
    this.baseUrl = options.baseUrl || "https://api.helicone.ai";
  }
  /**
   * Finds the prompt version dynamically based on prompt params.
   * Precedence: environment > version_id > production version.
   * @param params - Parameters containing prompt_id plus optional version_id / environment
   * @returns The prompt version record (including its s3_url)
   */
  async pullPromptVersion(params) {
    const { prompt_id, version_id, environment } = params;
    if (environment) {
      return await this.getEnvironmentVersion(prompt_id, environment);
    }
    if (version_id) {
      return await this.getPromptVersion(version_id);
    }
    return await this.getProductionVersion(prompt_id);
  }
  /**
   * Pulls a prompt body from Helicone storage.
   * @param params - Parameters containing prompt_id plus optional version_id /
   *   environment (same resolution rules as pullPromptVersion)
   * @returns The raw prompt body fetched from S3
   * @throws Rethrows any resolution/fetch error after logging it
   */
  async pullPromptBody(params) {
    try {
      const promptVersion = await this.pullPromptVersion(params);
      const promptBody = await this.fetchPromptBodyFromS3(
        promptVersion?.s3_url
      );
      return promptBody;
    } catch (error) {
      console.error("Error pulling prompt body:", error);
      throw error;
    }
  }
  /**
   * Pulls a prompt body from Helicone storage by version ID
   * @param versionId - The unique identifier of the prompt version
   * @returns The raw prompt body from storage
   * @throws Rethrows any resolution/fetch error after logging it
   */
  async pullPromptBodyByVersionId(versionId) {
    try {
      const promptVersion = await this.getPromptVersion(versionId);
      const promptBody = await this.fetchPromptBodyFromS3(
        promptVersion?.s3_url
      );
      return promptBody;
    } catch (error) {
      console.error("Error pulling prompt body:", error);
      throw error;
    }
  }
  /**
   * Merges a stored prompt body with caller parameters, substituting template
   * variables in message contents, response_format and tools. Substitution
   * failures are collected into `errors` while the original (unsubstituted)
   * content is kept, so the call still produces a usable body.
   * @param params - The chat completion parameters containing prompt_id, optional version_id, inputs, and other OpenAI parameters
   * @param sourcePromptBody - The prompt body pulled from storage
   * @returns Object containing the compiled prompt body and any validation/substitution errors
   */
  async mergePromptBody(params, sourcePromptBody) {
    const errors = [];
    const substitutionValues = params.inputs || {};
    // Stored prompt messages come first, then the caller's messages.
    const mergedMessages = [
      ...sourcePromptBody.messages || [],
      ...params.messages || []
    ];
    const substitutedMessages = mergedMessages.map((message) => {
      if (typeof message.content === "string") {
        const substituted = HeliconeTemplateManager.substituteVariables(
          message.content,
          substitutionValues
        );
        if (!substituted.success) {
          errors.push(...substituted.errors || []);
        }
        return {
          ...message,
          content: substituted.success ? substituted.result : message.content
        };
      }
      // Non-string content (e.g. multi-part arrays) is passed through untouched.
      return message;
    });
    // Caller's response_format wins over the stored one.
    let finalResponseFormat = params.response_format ?? sourcePromptBody.response_format;
    if (finalResponseFormat) {
      const substitutedResponseFormat = HeliconeTemplateManager.substituteVariablesJSON(
        finalResponseFormat,
        substitutionValues
      );
      if (!substitutedResponseFormat.success) {
        errors.push(...substitutedResponseFormat.errors || []);
      }
      finalResponseFormat = substitutedResponseFormat.success ? substitutedResponseFormat.result : finalResponseFormat;
    }
    let finalTools = [...sourcePromptBody.tools ?? [], ...params.tools ?? []];
    // NOTE(review): always truthy — finalTools is an array — so the merged
    // body always carries a `tools` key, possibly an empty array; confirm
    // downstream APIs accept `tools: []`.
    if (finalTools) {
      const substitutedTools = HeliconeTemplateManager.substituteVariablesJSON(
        finalTools,
        substitutionValues
      );
      if (!substitutedTools.success) {
        errors.push(...substitutedTools.errors || []);
      }
      finalTools = substitutedTools.success ? substitutedTools.result : finalTools;
    }
    // Strip Helicone-specific keys before spreading the remaining OpenAI params.
    const { prompt_id, version_id, inputs, environment, ...inputOpenaiParams } = params;
    const mergedBody = {
      ...sourcePromptBody,
      ...inputOpenaiParams,
      messages: substitutedMessages,
      response_format: finalResponseFormat,
      tools: finalTools
    };
    return { body: mergedBody, errors };
  }
  /**
   * Retrieves and merges prompt body with input parameters and variable substitution.
   * If no prompt_id is given, the params are returned as-is (minus the
   * Helicone-specific keys) with no errors.
   * @param params - The chat completion parameters containing prompt_id, optional version_id, inputs, and other OpenAI parameters
   * @returns Object containing the compiled prompt body and any validation/substitution errors
   */
  async getPromptBody(params) {
    if (!params.prompt_id) {
      const { prompt_id, version_id, inputs, environment, ...openaiParams } = params;
      return { body: openaiParams, errors: [] };
    }
    const pulledPromptBody = await this.pullPromptBody(params);
    try {
      return await this.mergePromptBody(params, pulledPromptBody);
    } catch (error) {
      console.error("Error getting prompt body:", error);
      throw error;
    }
  }
  // Fetches a specific prompt version record by its version ID.
  // Throws on HTTP failure or when the API reports an in-band error.
  async getPromptVersion(versionId) {
    const response = await fetch(
      `${this.baseUrl}/v1/prompt-2025/query/version`,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${this.apiKey}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify({
          promptVersionId: versionId
        })
      }
    );
    if (!response.ok) {
      throw new Error(`Failed to get prompt version: ${response.statusText}`);
    }
    const result = await response.json();
    if (result.error) {
      throw new Error(`API error: ${result.error}`);
    }
    return result.data;
  }
  // Fetches the production version record for a prompt.
  // Throws on HTTP failure or when the API reports an in-band error.
  async getProductionVersion(promptId) {
    const response = await fetch(
      `${this.baseUrl}/v1/prompt-2025/query/production-version`,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${this.apiKey}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify({
          promptId
        })
      }
    );
    if (!response.ok) {
      throw new Error(
        `Failed to get production version: ${response.statusText}`
      );
    }
    const result = await response.json();
    if (result.error) {
      throw new Error(`API error: ${result.error}`);
    }
    return result.data;
  }
  // Fetches the version record bound to a named environment for a prompt.
  // Throws on HTTP failure or when the API reports an in-band error.
  async getEnvironmentVersion(promptId, environment) {
    const response = await fetch(
      `${this.baseUrl}/v1/prompt-2025/query/environment-version`,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${this.apiKey}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify({
          promptId,
          environment
        })
      }
    );
    if (!response.ok) {
      throw new Error(
        `Failed to get environment version: ${response.statusText}`
      );
    }
    const result = await response.json();
    if (result.error) {
      throw new Error(`API error: ${result.error}`);
    }
    return result.data;
  }
  // Downloads the prompt body JSON from the (presigned) S3 URL carried by a
  // prompt version record.
  async fetchPromptBodyFromS3(s3Url) {
    if (!s3Url) {
      throw new Error("No S3 URL provided for prompt body");
    }
    const response = await fetch(s3Url);
    if (!response.ok) {
      throw new Error(
        `Failed to fetch prompt body from S3: ${response.statusText}`
      );
    }
    return await response.json();
  }
};
export {
HeliconeLogBuilder,
HeliconeManualLogger,
HeliconePromptManager
};