@unified-llm/core

Unified LLM interface (in-memory).

import BaseProvider from './providers/base-provider.js';
import { Tool, UnifiedStreamEventResponse, ProviderType } from './types/unified-api.js';
export interface LLMClientConfig {
    id?: string;
    provider: ProviderType;
    apiKey?: string;
    model?: string;
    baseURL?: string;
    deploymentName?: string;
    apiVersion?: string;
    tools?: Tool[];
    generationConfig?: {
        temperature?: number;
        maxTokens?: number;
        topP?: number;
        frequencyPenalty?: number;
        presencePenalty?: number;
        stopSequences?: string[];
        responseFormat?: any;
    };
    systemPrompt?: string;
    instructions?: string;
    logLevel?: string;
}
export declare class LLMClient {
    private baseProvider;
    private tools?;
    private id?;
    private systemPrompt?;
    private provider;
    private defaultModel?;
    constructor(config: LLMClientConfig);
    static create(provider: ProviderType, apiKey: string, model: string): BaseProvider;
    private generateToolDefinitions;
    private executeFunction;
    chat(request: any): Promise<import("./types/unified-api.js").UnifiedChatResponse>;
    stream(request: any): AsyncIterableIterator<UnifiedStreamEventResponse>;
    private generateMessageId;
}
export default LLMClient;
//# sourceMappingURL=llm-client.d.ts.map
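A minimal usage sketch based only on the declarations above. Because `chat()` and `stream()` type their `request` argument as `any`, the request shape shown here (a `messages` array of role/content objects) is an assumption, as are the `'openai'` provider value and the model name; consult the package documentation for the actual contract.

import LLMClient from '@unified-llm/core';

// Sketch only: the request shape is typed as `any` in the declaration,
// so the `messages` field below is an assumed format, not a documented contract.
const client = new LLMClient({
    provider: 'openai' as any,            // assumed to be a valid ProviderType value
    apiKey: process.env.OPENAI_API_KEY,
    model: 'gpt-4o-mini',                 // hypothetical model name
    generationConfig: { temperature: 0.2, maxTokens: 512 },
    systemPrompt: 'You are a helpful assistant.',
});

// One-shot request: chat() resolves to a UnifiedChatResponse.
const response = await client.chat({
    messages: [{ role: 'user', content: 'Hello!' }],
});
console.log(response);

// Streaming: stream() returns an AsyncIterableIterator of UnifiedStreamEventResponse,
// so each event can be consumed with for-await as it arrives.
for await (const event of client.stream({
    messages: [{ role: 'user', content: 'Stream a short reply.' }],
})) {
    console.log(event);
}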