@helicone/async
A Node.js wrapper for logging LLM traces directly to Helicone via OpenLLMetry, bypassing the Helicone proxy.
TypeScript
import OpenAI from "openai";
import * as anthropic from "@anthropic-ai/sdk";
import * as azureOpenAI from "@azure/openai";
import * as cohere from "cohere-ai";
import * as bedrock from "@aws-sdk/client-bedrock-runtime";
import * as google_aiplatform from "@google-cloud/aiplatform";
import Together from "together-ai";
import * as ChainsModule from "langchain/chains";
import * as AgentsModule from "langchain/agents";
import * as ToolsModule from "langchain/tools";
type IHeliconeAsyncLoggerOptions = {
    apiKey: string;
    baseUrl?: string;
    providers: {
        openAI?: typeof OpenAI;
        anthropic?: typeof anthropic;
        azureOpenAI?: typeof azureOpenAI;
        cohere?: typeof cohere;
        bedrock?: typeof bedrock;
        google_aiplatform?: typeof google_aiplatform;
        together?: typeof Together;
        langchain?: {
            chainsModule?: typeof ChainsModule;
            agentsModule?: typeof AgentsModule;
            toolsModule?: typeof ToolsModule;
        };
    };
    headers?: Record<string, string>;
};
export declare class HeliconeAsyncLogger {
    private apiKey;
    private baseUrl;
    private openAI?;
    private anthropic?;
    private azureOpenAI?;
    private together?;
    private cohere?;
    private bedrock?;
    private google_aiplatform?;
    private chainsModule?;
    private agentsModule?;
    private toolsModule?;
    private headers?;
    constructor(opts: IHeliconeAsyncLoggerOptions);
    /** Initializes OpenLLMetry tracing for the providers passed in the options. */
    init(): void;
    /** Executes `fn`, associating the given custom properties with traces logged during the call. */
    withProperties(properties: Record<string, string>, fn: () => any): any;
}
export {};
//# sourceMappingURL=HeliconeAsyncLogger.d.ts.map
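
Below is a minimal usage sketch based only on the declarations above. It assumes the class is importable from the @helicone/async package root and that the API key lives in a HELICONE_API_KEY environment variable; the model name and the property keys passed to withProperties are illustrative, not part of the declared API.

TypeScript
import OpenAI from "openai";
import { HeliconeAsyncLogger } from "@helicone/async";

// Build the logger with a Helicone API key and the provider modules
// that should be instrumented for tracing (assumed env var name).
const logger = new HeliconeAsyncLogger({
  apiKey: process.env.HELICONE_API_KEY ?? "",
  providers: {
    openAI: OpenAI,
  },
});

// Set up instrumentation; provider calls made after this point are
// logged asynchronously to Helicone rather than routed through the proxy.
logger.init();

const openai = new OpenAI();

// Attach custom properties to the traces produced inside the callback
// (the sessionId key shown here is hypothetical).
const completion = logger.withProperties({ sessionId: "session-123" }, () =>
  openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: "Hello!" }],
  })
);

Since withProperties is typed as (properties: Record<string, string>, fn: () => any) => any, the callback's return value passes through unchanged, so the promise returned by the OpenAI call can be awaited as usual.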