@just-every/ensemble

LLM provider abstraction layer with unified streaming interface

openai_chat.d.ts (20 lines, 1.36 kB)
import { ProviderStreamEvent, ResponseInput, AgentDefinition } from '../types/types.js';
import { BaseModelProvider } from './base_provider.js';
import OpenAI from 'openai';
import { ModelProviderID } from '../data/model_data.js';
export declare function addImagesToInput(input: OpenAI.Chat.Completions.ChatCompletionMessageParam[], images: Record<string, string>, source: string): Promise<OpenAI.Chat.Completions.ChatCompletionMessageParam[]>;
export declare class OpenAIChat extends BaseModelProvider {
    protected _client?: OpenAI;
    protected provider: ModelProviderID;
    protected baseURL: string | undefined;
    protected commonParams: any;
    protected apiKey?: string;
    protected defaultHeaders?: Record<string, string | null | undefined>;
    constructor(provider?: ModelProviderID, apiKey?: string, baseURL?: string, defaultHeaders?: Record<string, string | null | undefined>, commonParams?: any);
    private getEnvVarName;
    protected get client(): OpenAI;
    prepareParameters(requestParams: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming): OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
    private _parseAndPrepareSimulatedToolCalls;
    createResponseStream(messages: ResponseInput, model: string, agent: AgentDefinition): AsyncGenerator<ProviderStreamEvent>;
}
//# sourceMappingURL=openai_chat.d.ts.map
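
These declarations expose OpenAIChat as the OpenAI-compatible chat provider behind the package's unified streaming interface. The sketch below shows how the streaming entry point might be consumed, based only on the signatures in this file; the import specifier, the concrete shapes of ResponseInput, AgentDefinition, and ProviderStreamEvent, the provider id, and the model name are assumptions and may differ from the package's actual exports.

// Minimal usage sketch, assuming the class and its types are re-exported
// from the package root; this declaration file alone does not confirm that.
import { OpenAIChat } from '@just-every/ensemble';
import type {
    ResponseInput,
    AgentDefinition,
    ProviderStreamEvent,
} from '@just-every/ensemble';

// All constructor parameters are optional per the declaration; the provider
// id and API key source used here are illustrative assumptions.
const provider = new OpenAIChat('openai', process.env.OPENAI_API_KEY);

async function main(): Promise<void> {
    // ResponseInput's shape is not defined in this .d.ts; a chat-style
    // message array is assumed purely for illustration.
    const messages = [
        { role: 'user', content: 'Summarize the latest release notes.' },
    ] as unknown as ResponseInput;

    const agent = { name: 'summarizer' } as unknown as AgentDefinition;

    // createResponseStream returns an AsyncGenerator<ProviderStreamEvent>,
    // so events can be consumed with for await...of as they arrive.
    for await (const event of provider.createResponseStream(messages, 'gpt-4o-mini', agent)) {
        // ProviderStreamEvent's fields are not shown here; log the raw event.
        console.log(event);
    }
}

main().catch(console.error);

Because the constructor also accepts an optional baseURL and defaultHeaders, the same class can presumably be pointed at other OpenAI-compatible endpoints, though that is inferred from the signature rather than documented here.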