/**
 * Package: @jackhua/mini-langchain
 * A lightweight TypeScript implementation of LangChain with cost optimization features.
 * (File: openai.d.ts — generated type declarations; 37 lines, 1.21 kB, TypeScript.)
 */
import { BaseChatLLM } from './base';
import { Message, LLMResult, LLMCallOptions, GenerationChunk } from '../core/types';
/**
 * Configuration accepted by the {@link OpenAI} constructor.
 * Only `apiKey` is required; all other fields are optional overrides.
 */
interface OpenAIConfig {
/** OpenAI API key used to authenticate requests (required). */
apiKey: string;
/** Model identifier; a default is presumably applied when omitted — see implementation. */
model?: string;
/** Override for the API base URL (e.g. for proxies or Azure-compatible endpoints) — NOTE(review): semantics assumed, confirm against implementation. */
baseURL?: string;
/** Default sampling temperature applied when a call does not specify one. */
defaultTemperature?: number;
/** Default max-token limit applied when a call does not specify one. */
defaultMaxTokens?: number;
/** OpenAI organization ID sent with requests, when set. */
organization?: string;
}
/**
 * Wire-format message shape sent to the OpenAI Chat Completions API.
 * Produced from internal {@link Message} objects by {@link OpenAI.formatMessages}.
 * Uses the legacy `function` role / `function_call` fields (pre-`tool_calls` API).
 */
interface OpenAIMessage {
/** Chat role as defined by the OpenAI API. */
role: 'system' | 'user' | 'assistant' | 'function';
/** Message text content. */
content: string;
/** Name of the function, required by the API when role is 'function' — TODO confirm enforcement in implementation. */
name?: string;
/** Present on assistant messages that request a function invocation. */
function_call?: {
/** Name of the function the model wants to call. */
name: string;
/** JSON-encoded arguments string, as returned by the API (not parsed here). */
arguments: string;
};
}
/**
 * Chat LLM implementation backed by the OpenAI API.
 * Extends {@link BaseChatLLM}; supports one-shot generation and streaming.
 */
export declare class OpenAI extends BaseChatLLM {
/** API key captured from {@link OpenAIConfig.apiKey}. */
private apiKey;
/** Model identifier in use (config value or implementation default). */
private model;
/** Underlying HTTP/SDK client — concrete type not visible in this declaration. */
private client;
/** Optional OpenAI organization ID forwarded with requests. */
private organization?;
/** @param config Connection and default-parameter settings; see {@link OpenAIConfig}. */
constructor(config: OpenAIConfig);
/** Converts internal {@link Message} objects into the OpenAI wire format. */
protected formatMessages(messages: Message[]): OpenAIMessage[];
/**
 * Sends the conversation to the API and resolves with the full result.
 * @param options Per-call overrides (temperature, max tokens, etc. — see LLMCallOptions).
 */
generate(messages: Message[], options?: LLMCallOptions): Promise<LLMResult>;
/** Streams the completion incrementally, yielding one chunk per API delta. */
stream(messages: Message[], options?: LLMCallOptions): AsyncGenerator<GenerationChunk>;
/** Parameters identifying this LLM instance (e.g. for caching/serialization) — NOTE(review): exact contents defined by implementation. */
get identifyingParams(): Record<string, any>;
/** Discriminator string for this LLM provider (used by BaseChatLLM machinery). */
get llmType(): string;
}
/**
 * Helper function to create an {@link OpenAI} instance from environment variables.
 *
 * @param config Optional partial overrides; values given here presumably take
 *   precedence over environment variables — TODO confirm against implementation.
 * @returns A configured {@link OpenAI} client.
 */
export declare function createOpenAIFromEnv(config?: Partial<OpenAIConfig>): OpenAI;
export {};
//# sourceMappingURL=openai.d.ts.map