@unified-llm/core
Unified LLM interface (in-memory).
TypeScript
import BaseProvider from './providers/base-provider';
import { Tool } from './types/unified-api';

/** Runtime configuration accepted by the LLMClient constructor. */
export interface LLMClientRuntimeConfig {
    id?: string;
    provider: 'openai' | 'anthropic' | 'google' | 'deepseek' | 'azure';
    apiKey: string;
    model?: string;
    tools?: Tool[];
    /** Optional sampling parameters, shared across all providers. */
    generationConfig?: {
        temperature?: number;
        maxTokens?: number;
        topP?: number;
        frequencyPenalty?: number;
        presencePenalty?: number;
        stopSequences?: string[];
        responseFormat?: any;
    };
    systemPrompt?: string;
    instructions?: string;
}
export type LLMClientConfig = LLMClientRuntimeConfig;

export declare class LLMClient {
    private baseProvider;
    private tools?;
    private id?;
    private systemPrompt?;
    constructor(config: LLMClientRuntimeConfig);
    /** Note: returns the underlying BaseProvider rather than an LLMClient, and its provider union omits 'azure'. */
    static create(provider: 'openai' | 'anthropic' | 'google' | 'deepseek', apiKey: string, model: string): BaseProvider;
    private generateToolDefinitions;
    private executeFunction;
    /** Single-shot completion returning one unified response. */
    chat(request: any): Promise<import("./types/unified-api").UnifiedChatResponse>;
    /** Streaming completion yielding incremental unified responses. */
    stream(request: any): AsyncGenerator<import("./types/unified-api").UnifiedChatResponse, void, any>;
    private generateMessageId;
}
export default LLMClient;
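
Because chat() and stream() take an untyped request, these declarations do not pin down the request shape. The following is a minimal usage sketch, assuming an OpenAI-style messages array in the request; the model id and the ANTHROPIC_API_KEY environment variable are placeholders, not values confirmed by the package.

import LLMClient from '@unified-llm/core';

// Placeholder credentials and model id; substitute your own.
const client = new LLMClient({
    provider: 'anthropic',
    apiKey: process.env.ANTHROPIC_API_KEY ?? '',
    model: 'claude-sonnet-4-20250514', // hypothetical model id
    systemPrompt: 'You are a concise assistant.',
    generationConfig: { temperature: 0.7, maxTokens: 1024 },
});

// Single response. The messages shape is an assumption, since chat() accepts `any`.
const response = await client.chat({
    messages: [{ role: 'user', content: 'Summarize the plot of Hamlet in one sentence.' }],
});
console.log(response);

// Streaming: stream() is an async generator, so each UnifiedChatResponse
// chunk can be consumed with for await as it arrives.
for await (const chunk of client.stream({
    messages: [{ role: 'user', content: 'Now in two sentences.' }],
})) {
    console.log(chunk);
}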