catai

Chat UI and Local API for the Llama models

import { ModelSettings } from '../../../storage/app-db.js';
import { ChatContext } from '../chat-context.js';
import { NodeLlamaCppOptions } from './node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-v2.js';

export type CreateChatOptions = NodeLlamaCppOptions & {
    model: string;
};

export default abstract class BaseBindClass<Settings> {
    modelSettings: ModelSettings<Settings>;
    static shortName?: string;
    static description?: string;

    constructor(modelSettings: ModelSettings<Settings>);

    abstract initialize(): Promise<void> | void;
    abstract createChat(overrideSettings?: CreateChatOptions): Promise<ChatContext>;
}
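
Below is a minimal sketch of how a custom binding might extend BaseBindClass. It is illustrative only: the class name EchoBindClass, the EchoSettings shape, and the import path './base-bind-class.js' are assumptions and not part of catai, and a real binding would construct and return an actual ChatContext rather than throwing.

// Hypothetical binding; class name, settings shape, and import paths are
// assumptions for illustration only.
import BaseBindClass, { CreateChatOptions } from './base-bind-class.js';
import { ChatContext } from '../chat-context.js';

type EchoSettings = {
    greeting?: string;
};

export default class EchoBindClass extends BaseBindClass<EchoSettings> {
    static shortName = 'echo';
    static description = 'Illustrative binding that does not load a real model';

    // Load or warm up the underlying model here; nothing to do in this sketch.
    initialize(): void {}

    // A real binding would build and return its ChatContext here, merging
    // this.modelSettings with any overrideSettings it receives.
    async createChat(overrideSettings?: CreateChatOptions): Promise<ChatContext> {
        throw new Error(`Sketch only: createChat not implemented (model: ${overrideSettings?.model ?? 'default'})`);
    }
}

Presumably the surrounding code constructs a binding with a stored ModelSettings record and awaits initialize() before calling createChat(), but that contract is not shown in this declaration file.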