jorel
A unified wrapper for working with LLMs from multiple providers, including streams, images, documents & automatic tool use.
TypeScript
import { LlmCoreProvider, LlmGenerationConfig, LlmMessage, LlmResponse, LlmStreamProviderResponseChunkEvent, LlmStreamResponse, LlmStreamResponseWithToolCalls } from "../providers";
export interface TestProviderConfig {
    name?: string;
    defaultResponse?: string;
    defaultStreamResponse?: string[];
    simulateDelay?: number;
    failOnModels?: string[];
}
export declare class TestProvider implements LlmCoreProvider {
    readonly name: string;
    private defaultResponse;
    private defaultStreamResponse;
    private simulateDelay;
    private failOnModels;
    constructor(config?: TestProviderConfig);
    private delay;
    generateResponse(model: string, messages: LlmMessage[], config?: LlmGenerationConfig): Promise<LlmResponse>;
    generateResponseStream(model: string, messages: LlmMessage[], config?: LlmGenerationConfig): AsyncGenerator<LlmStreamProviderResponseChunkEvent | LlmStreamResponse | LlmStreamResponseWithToolCalls, void, unknown>;
    getAvailableModels(): Promise<string[]>;
    createEmbedding(model: string, text: string): Promise<number[]>;
}
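
For orientation, the sketch below shows how this declaration might be exercised. Only the constructor config and the four declared methods come from the file above; the import path, the { role, content } message shape, and the model names are assumptions made for illustration.

// Minimal usage sketch for TestProvider. The import path and the LlmMessage
// shape are assumptions; adjust them to the package's actual exports.
import { TestProvider } from "jorel/providers";

async function demo() {
  const provider = new TestProvider({
    defaultResponse: "Hello from the test provider",
    simulateDelay: 10,              // artificial delay; units are not specified in the declaration
    failOnModels: ["broken-model"], // models for which generation is presumably made to fail
  });

  // Message shape assumed; the declaration only requires LlmMessage[]
  const messages = [{ role: "user", content: "Say hello" }] as any;

  // Non-streaming generation
  const response = await provider.generateResponse("test-model", messages);
  console.log(response);

  // Streaming generation: iterate the emitted chunk events and final response
  for await (const event of provider.generateResponseStream("test-model", messages)) {
    console.log(event);
  }

  // Other declared capabilities
  console.log(await provider.getAvailableModels());
  console.log(await provider.createEmbedding("test-embedding-model", "hello"));
}

demo().catch(console.error);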