@unified-llm/core
Version:
Unified LLM interface.
31 lines (30 loc) • 1.09 kB
TypeScript
import OpenAI from 'openai';
import { UnifiedChatRequest, UnifiedChatResponse, Tool } from '../../types/unified-api';
import BaseProvider from '../base-provider';
/**
 * OpenAI implementation of {@link BaseProvider} for the unified LLM interface.
 *
 * Supports two transport paths — the Chat Completions API and the Responses
 * API — selected via the `useResponsesAPI` constructor option (the paired
 * `*WithChatCompletions` / `*WithResponsesAPI` members below suggest
 * `chat()`/`stream()` dispatch between them).
 *
 * NOTE(review): this is a declaration file; method bodies are not visible.
 * Summaries on the private members are inferred from their names — verify
 * against the implementation.
 */
export declare class OpenAIProvider extends BaseProvider {
/** OpenAI SDK client instance used by this provider. */
protected client: OpenAI;
/** API key captured at construction (type elided in this declaration). */
private apiKey;
/** Presumably routes calls through the Responses API when true — confirm in implementation. */
private useResponsesAPI;
/**
 * @param apiKey - OpenAI API key (required).
 * @param model - Optional default model identifier.
 * @param tools - Optional tool definitions in the unified format.
 * @param options - Provider options; `useResponsesAPI` selects the Responses API transport.
 */
constructor({ apiKey, model, tools, options }: {
apiKey: string;
model?: string;
tools?: Tool[];
options?: {
useResponsesAPI?: boolean;
};
});
/** Performs a single (non-streaming) chat request and resolves with the unified response. */
chat(request: UnifiedChatRequest): Promise<UnifiedChatResponse>;
/** chat() path backed by the Chat Completions API. */
private chatWithChatCompletions;
/** chat() path backed by the Responses API. */
private chatWithResponsesAPI;
/** Performs a streaming chat request, yielding unified response chunks as they arrive. */
stream(request: UnifiedChatRequest): AsyncIterableIterator<UnifiedChatResponse>;
/** stream() path backed by the Chat Completions API. */
private streamWithChatCompletions;
/** stream() path backed by the Responses API. */
private streamWithResponsesAPI;
/** Converts a unified request into Chat Completions request format. */
private convertToOpenAIFormat;
/** Converts a unified request into Responses API request format. */
private convertToResponsesAPIFormat;
/** Converts a Chat Completions response into the unified response format. */
private convertFromOpenAIFormat;
/** Converts a Responses API response into the unified response format. */
private convertFromResponsesAPIFormat;
/** Converts one Chat Completions stream chunk into a unified chunk. */
private convertStreamChunk;
/** Converts one Responses API stream chunk into a unified chunk. */
private convertResponsesStreamChunk;
/** Presumably normalizes SDK/transport errors into the unified error shape. */
private handleError;
/** Presumably maps a raw OpenAI error to a unified error-type category. */
private mapErrorType;
}