jorel

A unified wrapper for working with LLMs from multiple providers, including streams, images, documents & automatic tool use.

import { Content, HarmBlockThreshold, HarmCategory, VertexAI } from "@google-cloud/vertexai";
import {
  LlmCoreProvider,
  LlmGenerationConfig,
  LlmMessage,
  LlmResponse,
  LlmStreamProviderResponseChunkEvent,
  LlmStreamResponseEvent,
} from "../../providers";

export { HarmBlockThreshold as VertexAiHarmBlockThreshold, HarmCategory as VertexAiHarmCategory };

/** Configuration for the Google Vertex AI provider */
export interface GoogleVertexAiConfig {
  project?: string;
  location?: string;
  keyFilename?: string;
  safetySettings?: {
    category: HarmCategory;
    threshold: HarmBlockThreshold;
  }[];
  name?: string;
}

/** Provides access to GoogleVertexAi and other compatible services */
export declare class GoogleVertexAiProvider implements LlmCoreProvider {
  static readonly defaultName = "google-vertex-ai";
  readonly name: string;
  readonly client: VertexAI;
  constructor({ project, location, keyFilename, safetySettings, name }?: GoogleVertexAiConfig);
  /** Generate a complete response for the given messages */
  generateResponse(model: string, messages: LlmMessage[], config?: LlmGenerationConfig): Promise<LlmResponse>;
  /** Generate a response as a stream of chunk and final-response events */
  generateResponseStream(
    model: string,
    messages: LlmMessage[],
    config?: LlmGenerationConfig,
  ): AsyncGenerator<LlmStreamProviderResponseChunkEvent | LlmStreamResponseEvent, void, unknown>;
  /** List the models available to this provider */
  getAvailableModels(): Promise<string[]>;
  /** Count input tokens and characters for the given contents */
  countTokens(model: string, contents: Content[]): Promise<{
    model: string;
    inputTokens: number;
    characterCount: number;
  }>;
  /** Create an embedding vector for the given text */
  createEmbedding(model: string, text: string, abortSignal?: AbortSignal): Promise<number[]>;
}
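
The following is a minimal usage sketch of the declarations above, not code from the package itself. The import path ("jorel/providers"), the { role, content } message shape, the "gemini-1.5-pro" model id, and the GCP project/credential values are all assumptions chosen for illustration; the actual public export path and LlmMessage type may differ.

// Hypothetical usage sketch; import path, message shape, and model id are assumptions.
import { GoogleVertexAiProvider } from "jorel/providers"; // assumed export path

async function main() {
  // All constructor options are optional per GoogleVertexAiConfig.
  const provider = new GoogleVertexAiProvider({
    project: "my-gcp-project",              // placeholder GCP project id
    location: "us-central1",                // placeholder region
    keyFilename: "./service-account.json",  // placeholder credentials file
  });

  // Assuming LlmMessage follows a { role, content } shape.
  const response = await provider.generateResponse("gemini-1.5-pro", [
    { role: "user", content: "Summarize this package in one sentence." },
  ]);
  console.log(response);

  // Streaming variant: iterate the async generator of chunk/response events.
  const stream = provider.generateResponseStream("gemini-1.5-pro", [
    { role: "user", content: "Write a haiku about TypeScript." },
  ]);
  for await (const event of stream) {
    console.log(event);
  }
}

main();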