@unified-llm/core
Version: (not specified on this page)
Unified LLM interface (in-memory).
File: provider.d.ts — 20 lines • 774 B — TypeScript declaration file
import { UnifiedChatRequest, UnifiedChatResponse, UnifiedStreamEventResponse, Tool } from '../../types/unified-api.js';
import { MCPServerConfig } from '../../types/mcp.js';
import BaseProvider from '../base-provider.js';
/**
 * OpenAI-backed implementation of the unified LLM provider interface.
 *
 * Ambient declaration (.d.ts): the implementation lives in the emitted
 * JavaScript; only the public surface is visible here.
 */
export declare class OpenAIProvider extends BaseProvider {
// Internal provider/client handle; its type is erased in this declaration.
private provider;
/**
 * Creates an OpenAI provider instance.
 *
 * @param options - configuration bag:
 *   - `apiKey` (required): OpenAI API key.
 *   - `model`: model identifier; presumably a default is applied when
 *     omitted — TODO confirm against the implementation.
 *   - `baseURL`: API endpoint override — NOTE(review): assumed to target
 *     OpenAI-compatible servers/proxies; verify.
 *   - `tools`: unified tool definitions made available to the model.
 *   - `mcpServers`: MCP server configurations.
 *   - `options.useResponsesAPI`: flag selecting OpenAI's Responses API —
 *     exact semantics not visible here; TODO confirm.
 *   - `logLevel`: logging verbosity as a free-form string.
 */
constructor(options: {
apiKey: string;
model?: string;
baseURL?: string;
tools?: Tool[];
mcpServers?: MCPServerConfig[];
options?: {
useResponsesAPI?: boolean;
};
logLevel?: string;
});
/**
 * Sends a single (non-streaming) chat request and resolves with the
 * unified response.
 */
chat(request: UnifiedChatRequest): Promise<UnifiedChatResponse>;
/**
 * Streams a chat request, yielding unified stream events as they arrive.
 */
stream(request: UnifiedChatRequest): AsyncIterableIterator<UnifiedStreamEventResponse>;
}
//# sourceMappingURL=provider.d.ts.map