UNPKG

@helicone/async

Version:

A Node.js wrapper for logging LLM traces directly to Helicone — bypassing the proxy — using OpenLLMetry

46 lines 1.55 kB
import type OpenAI from "openai";
import type * as anthropic from "@anthropic-ai/sdk";
import type * as cohere from "cohere-ai";
import type * as bedrock from "@aws-sdk/client-bedrock-runtime";
import type * as google_aiplatform from "@google-cloud/aiplatform";
import type Together from "together-ai";
import type * as ChainsModule from "langchain/chains";
import type * as AgentsModule from "langchain/agents";
import type * as ToolsModule from "langchain/tools";

/**
 * Constructor options for {@link HeliconeAsyncLogger}.
 */
type IHeliconeAsyncLoggerOptions = {
    /** Helicone API key (required). */
    apiKey: string;
    /** Optional override for the Helicone base URL. */
    baseUrl?: string;
    /**
     * Provider SDK modules to hand to the logger. All entries are optional;
     * supply only the SDKs your application actually uses.
     */
    providers: {
        openAI?: typeof OpenAI;
        anthropic?: typeof anthropic;
        cohere?: typeof cohere;
        bedrock?: typeof bedrock;
        google_aiplatform?: typeof google_aiplatform;
        together?: typeof Together;
        langchain?: {
            chainsModule?: typeof ChainsModule;
            agentsModule?: typeof AgentsModule;
            toolsModule?: typeof ToolsModule;
        };
    };
    /** Optional extra headers sent with logging requests. */
    headers?: Record<string, string>;
};

/**
 * Async logger that records LLM traces directly to Helicone
 * (without routing traffic through the Helicone proxy).
 */
export declare class HeliconeAsyncLogger {
    private apiKey;
    private baseUrl;
    private openAI?;
    private anthropic?;
    private together?;
    private cohere?;
    private bedrock?;
    private google_aiplatform?;
    private chainsModule?;
    private agentsModule?;
    private toolsModule?;
    private headers?;
    constructor(opts: IHeliconeAsyncLoggerOptions);
    /** Initializes the logger. Call once before use. */
    init(): void;
    /**
     * Executes `fn` in a context carrying the given Helicone custom
     * properties and, per the signature, returns `fn`'s result
     * (presumably — implementation not visible here; confirm in source).
     *
     * @param properties - Key/value custom properties to attach.
     * @param fn - Callback to run within the property scope.
     */
    withProperties(properties: Record<string, string>, fn: () => any): any;
}
export {};
//# sourceMappingURL=HeliconeAsyncLogger.d.ts.map