@llumiverse/drivers
LLM driver implementations. Currently supported drivers: openai, huggingface, bedrock, replicate. The declaration below, models.d.ts from the Vertex AI driver, defines the ModelDefinition contract that each supported model implements.
models.d.ts (TypeScript)
import { AIModel, Completion, PromptSegment, ExecutionOptions, CompletionChunkObject } from "@llumiverse/core";
import { VertexAIDriver } from "./index.js";
export interface ModelDefinition<PromptT = any> {
    /** The model this definition applies to. */
    model: AIModel;
    /** Optional list of model versions covered by this definition. */
    versions?: string[];
    /** Builds the provider-specific prompt from generic prompt segments. */
    createPrompt: (driver: VertexAIDriver, segments: PromptSegment[], options: ExecutionOptions) => Promise<PromptT>;
    /** Executes a text completion request for the prepared prompt. */
    requestTextCompletion: (driver: VertexAIDriver, prompt: PromptT, options: ExecutionOptions) => Promise<Completion>;
    /** Executes a streaming completion, yielding chunks as they are produced. */
    requestTextCompletionStream: (driver: VertexAIDriver, prompt: PromptT, options: ExecutionOptions) => Promise<AsyncIterable<CompletionChunkObject>>;
    /** Optional hook to adjust the result and options before validation. */
    preValidationProcessing?(result: Completion, options: ExecutionOptions): {
        result: Completion;
        options: ExecutionOptions;
    };
}
/** Returns the model definition matching the given model name. */
export declare function getModelDefinition(model: string): ModelDefinition;
//# sourceMappingURL=models.d.ts.map
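
For orientation, here is a minimal sketch of what a concrete ModelDefinition could look like, using a plain string as the prompt type. It is illustrative only: the model identifier, the assumption that PromptSegment carries a content field, the placeholder completion values, and the ./models.js import path are hypothetical rather than taken from the package.

TypeScript
import { AIModel, Completion, CompletionChunkObject, ExecutionOptions, PromptSegment } from "@llumiverse/core";
import { VertexAIDriver } from "./index.js";
import { ModelDefinition } from "./models.js";

// Illustrative definition with a plain string prompt type.
const exampleDefinition: ModelDefinition<string> = {
    // Hypothetical model descriptor; the real AIModel fields come from @llumiverse/core.
    model: { id: "example-text-model", name: "Example Text Model" } as unknown as AIModel,

    async createPrompt(driver: VertexAIDriver, segments: PromptSegment[], options: ExecutionOptions): Promise<string> {
        // Assumes each PromptSegment exposes a textual `content` field.
        return segments.map((segment) => (segment as any).content).join("\n");
    },

    async requestTextCompletion(driver: VertexAIDriver, prompt: string, options: ExecutionOptions): Promise<Completion> {
        // A real implementation would call the provider API through the driver
        // and map its response into a Completion.
        return { result: "generated text" } as unknown as Completion;
    },

    async requestTextCompletionStream(driver: VertexAIDriver, prompt: string, options: ExecutionOptions): Promise<AsyncIterable<CompletionChunkObject>> {
        // A real implementation would map the provider's streamed response into
        // CompletionChunkObject values as they arrive.
        async function* chunks() {
            yield { result: "generated text" } as unknown as CompletionChunkObject;
        }
        return chunks();
    },
};

A driver would then resolve such a definition by name through getModelDefinition(model) and call createPrompt followed by one of the completion methods.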