/**
 * @jackhua/mini-langchain
 * A lightweight TypeScript implementation of LangChain with cost optimization features.
 *
 * Type declarations for the Gemini chat-LLM wrapper.
 */
import { BaseChatLLM } from './base';
import { Message, LLMResult, LLMCallOptions, GenerationChunk } from '../core/types';
/** Constructor configuration for {@link Gemini}. */
interface GeminiConfig {
  /** API key used to authenticate requests against the Gemini API. */
  apiKey: string;
  /** Model identifier; a default is presumably applied when omitted — confirm in gemini.ts. */
  model?: string;
  /** Override for the API base URL (e.g. for proxies); defaults to the official endpoint, presumably. */
  baseURL?: string;
  /** Sampling temperature used when a call does not specify one. */
  defaultTemperature?: number;
  /** Max output tokens used when a call does not specify a limit. */
  defaultMaxTokens?: number;
  /** Nucleus-sampling (top-p) default; omitted means the API default applies. */
  defaultTopP?: number;
  /** Top-k sampling default; omitted means the API default applies. */
  defaultTopK?: number;
}
/**
 * Wire format of a single message as expected by the Gemini API:
 * the role is restricted to 'user' or 'model' (no 'system'/'assistant'),
 * and content is carried as an array of text parts.
 */
interface GeminiMessage {
  /** Gemini uses 'model' where other APIs use 'assistant'. */
  role: 'user' | 'model';
  /** Message content split into parts; only text parts are modeled here. */
  parts: Array<{
    text: string;
  }>;
}
/**
 * Chat-LLM implementation backed by Google's Gemini API.
 *
 * Extends {@link BaseChatLLM}; per-call options override the defaults
 * captured from {@link GeminiConfig} at construction time.
 */
export declare class Gemini extends BaseChatLLM {
  /**
   * Get the identifying parameters of the LLM
   */
  get identifyingParams(): Record<string, any>;
  /**
   * Get the type of LLM
   */
  get llmType(): string;
  // Credentials and model name captured from GeminiConfig.
  private apiKey;
  private model;
  // Underlying HTTP/SDK client; concrete type is private to the implementation.
  private client;
  // Defaults applied by mergeOptions() when a call omits these options.
  protected defaultTemperature: number;
  protected defaultMaxTokens: number;
  private defaultTopP?;
  private defaultTopK?;
  constructor(config: GeminiConfig);
  /**
   * Format messages for Gemini API
   * (maps framework Message objects to the role/parts wire shape).
   */
  protected formatMessages(messages: Message[]): GeminiMessage[];
  /**
   * Merge call options with defaults
   * (call-site options take precedence over constructor defaults).
   */
  protected mergeOptions(options?: LLMCallOptions): LLMCallOptions;
  /** Run a single (non-streaming) completion over the given messages. */
  generate(messages: Message[], options?: LLMCallOptions): Promise<LLMResult>;
  /** Stream a completion, yielding incremental generation chunks. */
  stream(messages: Message[], options?: LLMCallOptions): AsyncGenerator<GenerationChunk>;
}
/**
 * Create a Gemini instance from environment variables.
 *
 * @param config - Optional overrides merged over environment-derived settings.
 *   NOTE(review): the exact env variable names (presumably GEMINI_API_KEY etc.)
 *   are defined in the implementation — verify against gemini.ts.
 * @returns A configured {@link Gemini} instance.
 */
export declare function createGeminiFromEnv(config?: Partial<GeminiConfig>): Gemini;
export {};
//# sourceMappingURL=gemini.d.ts.map