@llumiverse/drivers
LLM driver implementations. Currently supported: openai, huggingface, bedrock, replicate.
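All drivers share the common @llumiverse/core interface. The sketch below is a minimal usage example for the Bedrock driver; it assumes BedrockDriver is exported from the package root, that execute() is inherited from the AbstractDriver base class, that ambient AWS credentials are picked up by the SDK's default provider chain when the credentials option is omitted, and that the model id is purely illustrative.

import { PromptRole, PromptSegment } from "@llumiverse/core";
import { BedrockDriver } from "@llumiverse/drivers";

// Construct the driver using the BedrockDriverOptions shown below.
// credentials is optional; ambient AWS credentials are assumed here.
const driver = new BedrockDriver({
    region: "us-east-1",
});

// Prompts are expressed as PromptSegment objects shared by all drivers.
const prompt: PromptSegment[] = [
    { role: PromptRole.user, content: "Say hello in one short sentence." },
];

// execute() is assumed to come from the AbstractDriver base class; it formats
// the prompt for Bedrock and returns a Completion. The model id is an example.
const completion = await driver.execute(prompt, {
    model: "anthropic.claude-3-haiku-20240307-v1:0",
});
console.log(completion);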
TypeScript declarations for the Bedrock driver (index.d.ts):
import { Bedrock, FoundationModelSummary } from "@aws-sdk/client-bedrock";
import { BedrockRuntime, ConverseRequest, ConverseResponse, ConverseStreamOutput } from "@aws-sdk/client-bedrock-runtime";
import { AwsCredentialIdentity, Provider } from "@aws-sdk/types";
import { AbstractDriver, AIModel, Completion, CompletionChunkObject, DataSource, DriverOptions, EmbeddingsOptions, EmbeddingsResult, ExecutionOptions, PromptSegment, TrainingJob, TrainingOptions } from "@llumiverse/core";
import { NovaMessagesPrompt } from "@llumiverse/core/formatters";
import { TwelvelabsPegasusRequest } from "./twelvelabs.js";
export interface BedrockModelCapabilities {
    name: string;
    canStream: boolean;
}
export interface BedrockDriverOptions extends DriverOptions {
    /**
     * The AWS region.
     */
    region: string;
    /**
     * The bucket name to be used for training.
     * It will be created if it does not already exist.
     */
    training_bucket?: string;
    /**
     * The role ARN to be used for training.
     */
    training_role_arn?: string;
    /**
     * The credentials to use to access AWS.
     */
    credentials?: AwsCredentialIdentity | Provider<AwsCredentialIdentity>;
}
export type BedrockPrompt = NovaMessagesPrompt | ConverseRequest | TwelvelabsPegasusRequest;
export declare class BedrockDriver extends AbstractDriver<BedrockDriverOptions, BedrockPrompt> {
    static PROVIDER: string;
    provider: string;
    private _executor?;
    private _service?;
    private _service_region?;
    constructor(options: BedrockDriverOptions);
    getExecutor(): BedrockRuntime;
    getService(region?: string): Bedrock;
    protected formatPrompt(segments: PromptSegment[], opts: ExecutionOptions): Promise<BedrockPrompt>;
    getExtractedExecution(result: ConverseResponse, _prompt?: BedrockPrompt, options?: ExecutionOptions): CompletionChunkObject;
    getExtractedStream(result: ConverseStreamOutput, _prompt?: BedrockPrompt, options?: ExecutionOptions): CompletionChunkObject;
    extractRegion(modelString: string, defaultRegion: string): string;
    private getCanStream;
    protected canStream(options: ExecutionOptions): Promise<boolean>;
    requestTextCompletion(prompt: BedrockPrompt, options: ExecutionOptions): Promise<Completion>;
    private requestTwelvelabsPegasusCompletion;
    private requestTwelvelabsPegasusCompletionStream;
    requestTextCompletionStream(prompt: BedrockPrompt, options: ExecutionOptions): Promise<AsyncIterable<CompletionChunkObject>>;
    preparePayload(prompt: ConverseRequest, options: ExecutionOptions): ConverseRequest;
    requestImageGeneration(prompt: NovaMessagesPrompt, options: ExecutionOptions): Promise<Completion>;
    startTraining(dataset: DataSource, options: TrainingOptions): Promise<TrainingJob>;
    cancelTraining(jobId: string): Promise<TrainingJob>;
    getTrainingJob(jobId: string): Promise<TrainingJob>;
    validateConnection(): Promise<boolean>;
    listTrainableModels(): Promise<AIModel<string>[]>;
    listModels(): Promise<AIModel[]>;
    _listModels(foundationFilter?: (m: FoundationModelSummary) => boolean): Promise<AIModel[]>;
    generateEmbeddings({ text, image, model }: EmbeddingsOptions): Promise<EmbeddingsResult>;
    private generateTwelvelabsMarengoEmbeddings;
}
//# sourceMappingURL=index.d.ts.map
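Beyond text completion, the declaration above exposes connection validation, model listing and embeddings generation. The following is a minimal sketch, again assuming BedrockDriver is exported from the package root and that ambient AWS credentials are available; the embeddings model id is an example and must be an embeddings-capable model enabled in the configured region.

import { BedrockDriver } from "@llumiverse/drivers";

const driver = new BedrockDriver({ region: "us-east-1" });

// Verify that the region and credentials are usable.
const ok = await driver.validateConnection();
console.log("connection ok:", ok);

// List the Bedrock foundation models visible to this account.
const models = await driver.listModels();
console.log(models);

// Generate embeddings for a piece of text. The model id below is an example of
// an embeddings-capable Bedrock model; an image input is also accepted.
const embeddings = await driver.generateEmbeddings({
    text: "A short passage to embed.",
    model: "amazon.titan-embed-text-v2:0",
});
console.log(embeddings);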