@llumiverse/drivers
Version:
LLM driver implementations. Currently supported providers: openai, huggingface, bedrock, replicate.
50 lines • 2.51 kB
TypeScript
import { TokenCredential } from "@azure/identity";
import { AbstractDriver, AIModel, Completion, CompletionChunkObject, DriverOptions, EmbeddingsOptions, EmbeddingsResult, ExecutionOptions, Providers } from "@llumiverse/core";
import { AIProjectClient, ModelDeployment } from '@azure/ai-projects';
import { ChatCompletionMessageParam } from "openai/resources";
import type { ChatRequestMessage } from "@azure-rest/ai-inference";
/**
 * Configuration options for the Azure AI Foundry driver.
 */
export interface AzureFoundryDriverOptions extends DriverOptions {
/**
 * The credentials to use to access Azure AI Foundry.
 */
azureADTokenProvider?: TokenCredential;
/** Endpoint URL of the Azure AI Foundry project to connect to. */
endpoint?: string;
/** Service API version to use; overrides the driver defaults — confirm precedence against the implementation. */
apiVersion?: string;
}
/**
 * Prompt shape for deployments addressed through the
 * `@azure-rest/ai-inference` API (Azure inference message format).
 */
export interface AzureFoundryInferencePrompt {
/** Conversation messages in the Azure AI Inference message format. */
messages: ChatRequestMessage[];
}
/**
 * Prompt shape for OpenAI-compatible deployments
 * (OpenAI chat-completions message format).
 */
export interface AzureFoundryOpenAIPrompt {
/** Conversation messages in the OpenAI chat-completions message format. */
messages: ChatCompletionMessageParam[];
}
/**
 * Union of the two prompt shapes the driver can produce: the Azure
 * inference message format or the OpenAI chat-completions format.
 */
export type AzureFoundryPrompt = AzureFoundryInferencePrompt | AzureFoundryOpenAIPrompt;
/**
 * LLM driver for Azure AI Foundry.
 *
 * The type signatures indicate it handles two deployment families:
 * OpenAI-compatible deployments (OpenAI `ChatCompletionMessageParam`
 * prompts) and Azure inference deployments (`ChatRequestMessage`
 * prompts) — see {@link AzureFoundryPrompt}. Deployment discovery goes
 * through the `@azure/ai-projects` client.
 */
export declare class AzureFoundryDriver extends AbstractDriver<AzureFoundryDriverOptions, ChatCompletionMessageParam[]> {
/** Azure AI Projects client used to reach the Foundry project. */
service: AIProjectClient;
/** Provider identifier reported by this driver. */
readonly provider = Providers.azure_foundry;
/** API version used for OpenAI-compatible calls — confirm default value in the implementation. */
OPENAI_API_VERSION: string;
/** API version used for Azure inference calls — confirm default value in the implementation. */
INFERENCE_API_VERSION: string;
/** Create a driver bound to the given endpoint/credentials. */
constructor(opts: AzureFoundryDriverOptions);
/**
 * Get default authentication for Azure AI Foundry API
 * @returns a provider function that resolves to an access token string.
 */
getDefaultAIFoundryAuth(): () => Promise<string>;
/** Resolve whether the named model/deployment is OpenAI-compatible. */
isOpenAIDeployment(model: string): Promise<boolean>;
/** Whether streaming completions are supported for the given options. */
protected canStream(_options: ExecutionOptions): Promise<boolean>;
/** Execute a non-streaming chat completion request. */
requestTextCompletion(prompt: ChatCompletionMessageParam[], options: ExecutionOptions): Promise<Completion>;
/** Execute a streaming chat completion request, yielding chunks as they arrive. */
requestTextCompletionStream(prompt: ChatCompletionMessageParam[], options: ExecutionOptions): Promise<AsyncIterable<CompletionChunkObject>>;
// Internal helpers (bodies not visible in this declaration file):
// stream-response processing, response data extraction, and mapping of
// service finish reasons to llumiverse finish reasons.
private processStreamResponse;
private extractDataFromResponse;
private convertFinishReason;
/** Check that the configured endpoint/credentials are usable. */
validateConnection(): Promise<boolean>;
/** Generate embeddings; presumably dispatches to text or image variants — confirm in implementation. */
generateEmbeddings(options: EmbeddingsOptions): Promise<EmbeddingsResult>;
/** Generate embeddings for text input. */
generateTextEmbeddings(options: EmbeddingsOptions): Promise<EmbeddingsResult>;
/** Generate embeddings for image input. */
generateImageEmbeddings(options: EmbeddingsOptions): Promise<EmbeddingsResult>;
/** List the models (deployments) available in the Foundry project. */
listModels(): Promise<AIModel[]>;
/** List models, optionally filtered by a predicate over the raw deployment records. */
_listModels(filter?: (m: ModelDeployment) => boolean): Promise<AIModel[]>;
}
/**
 * Split a composite model id into its deployment name and base model
 * name. The separator/encoding of the composite form is defined by the
 * implementation — confirm there before constructing ids by hand.
 * @param compositeId - composite model id (see {@link isCompositeModelId})
 * @returns the deployment name and the underlying base model name
 */
export declare function parseAzureFoundryModelId(compositeId: string): {
deploymentName: string;
baseModel: string;
};
/**
 * Whether the given model id is in the composite form accepted by
 * {@link parseAzureFoundryModelId}.
 */
export declare function isCompositeModelId(modelId: string): boolean;
//# sourceMappingURL=azure_foundry.d.ts.map