
@llumiverse/drivers


LLM driver implementations. Currently supported providers: openai, huggingface, bedrock, and replicate. The type declaration below covers the HuggingFace Inference Endpoints driver.

import { InferenceClient } from "@huggingface/inference";
import { AIModel, AbstractDriver, CompletionChunkObject, DriverOptions, EmbeddingsResult, ExecutionOptions } from "@llumiverse/core";
import { FetchClient } from "@vertesia/api-fetch-client";

export interface HuggingFaceIEDriverOptions extends DriverOptions {
    apiKey: string;
    endpoint_url: string;
}

export declare class HuggingFaceIEDriver extends AbstractDriver<HuggingFaceIEDriverOptions, string> {
    static PROVIDER: string;
    provider: string;
    service: FetchClient;
    _executor?: InferenceClient;
    constructor(options: HuggingFaceIEDriverOptions);
    getModelURLEndpoint(modelId: string): Promise<{
        url: string;
        status: string;
    }>;
    getExecutor(model: string): Promise<InferenceClient>;
    requestTextCompletionStream(prompt: string, options: ExecutionOptions): Promise<AsyncIterable<CompletionChunkObject>>;
    requestTextCompletion(prompt: string, options: ExecutionOptions): Promise<{
        result: {
            type: "text";
            value: string;
        }[];
        finish_reason: string;
        token_usage: {
            result: number | undefined;
        };
        original_response: import("@huggingface/inference").TextGenerationOutput | undefined;
    }>;
    listModels(): Promise<AIModel[]>;
    validateConnection(): Promise<boolean>;
    generateEmbeddings(): Promise<EmbeddingsResult>;
}
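
Based on the declaration above, a typical workflow is to construct the driver with an API key and the URL of a deployed HuggingFace Inference Endpoint, then request a completion. The sketch below is illustrative only: it assumes the class is re-exported from the @llumiverse/drivers package root, and the model and max_tokens fields passed as ExecutionOptions are assumptions about that type rather than its documented shape.

import { HuggingFaceIEDriver } from "@llumiverse/drivers";
import { ExecutionOptions } from "@llumiverse/core";

async function main() {
    // Credentials and endpoint URL are placeholders; supply your own values.
    const driver = new HuggingFaceIEDriver({
        apiKey: process.env.HF_API_KEY ?? "",
        endpoint_url: "https://example.endpoints.huggingface.cloud",
    });

    // Hypothetical options object: the model and max_tokens fields are assumed,
    // not confirmed against the ExecutionOptions type in @llumiverse/core.
    const options = { model: "my-endpoint-model", max_tokens: 128 } as unknown as ExecutionOptions;

    // One-shot completion: the declaration types the result as an array of
    // { type: "text"; value: string } parts.
    const completion = await driver.requestTextCompletion("Write a haiku about rivers.", options);
    console.log(completion.result.map((part) => part.value).join(""));

    // Streaming variant: iterate over CompletionChunkObject values as they arrive.
    const stream = await driver.requestTextCompletionStream("Write a haiku about rivers.", options);
    for await (const chunk of stream) {
        console.log(chunk);
    }
}

main().catch(console.error);

Judging by their signatures, validateConnection() and getModelURLEndpoint() look suited to checking that the endpoint is reachable before issuing requests.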