// llama-flow — the TypeScript-first prompt engineering toolkit for
// working with chat-based LLMs.
import { ChatConfig, ChatRequestOptions, ChatResponse, Message, ModelConfig } from '../types';
/**
 * Contract implemented by every chat-model backend in llama-flow.
 *
 * This is a declaration file: the runtime behavior of `request` and
 * `getTokensFromMessages` lives in the implementing classes.
 */
export interface Model {
// Default model parameters for this backend; presumably merged with the
// per-request `config` override passed to `request` — TODO confirm
// against the implementation.
defaults: ModelConfig;
// Chat-level configuration associated with this model instance.
config: ChatConfig;
/**
 * Sends the conversation to the underlying LLM and resolves with the
 * model's reply.
 *
 * @param messages - Ordered conversation history to send.
 * @param config - Optional per-call overrides of `defaults` (partial,
 *   so only the changed fields need to be supplied).
 * @param opt - Optional request-level options (NOTE(review): semantics
 *   defined by `ChatRequestOptions` in '../types' — not visible here).
 * @returns A promise of the chat response, with a `string` payload.
 */
request(messages: Message[], config?: Partial<ModelConfig>, opt?: ChatRequestOptions): Promise<ChatResponse<string>>;
/**
 * Counts the tokens the given messages would consume for this model.
 * Synchronous — implementations are expected to tokenize locally
 * rather than call the provider (NOTE(review): inferred from the
 * non-Promise return type; confirm in the implementation).
 *
 * @param messages - Messages to measure.
 * @returns Total token count as an integer-like number.
 */
getTokensFromMessages(messages: Message[]): number;
}
//# sourceMappingURL=interface.d.ts.map