// catai: Chat UI and Local API for the Llama models
import BaseBindClass, { CreateChatOptions } from './binds/base-bind-class.js';
import { ModelSettings } from '../../storage/app-db.js';
import NodeLlamaCppV2 from './binds/node-llama-cpp/node-llama-cpp-v2/node-llama-cpp-v2.js';
import { ChatContext } from './chat-context.js';
import type { LLamaChatPromptOptions } from 'node-llama-cpp';
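/** All available model-binding implementations; currently only the node-llama-cpp v2 bind. */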
export declare const ALL_BINDS: (typeof NodeLlamaCppV2)[];
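/**
 * Resolves the stored settings of a locally installed model.
 * When `modelName` is omitted, this presumably falls back to the
 * currently configured model.
 */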
export declare function findLocalModel(modelName?: string): ModelSettings<{
[settingName: string]: any;
}>;
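/**
 * Returns the cached bind-class instance for the given model,
 * or `null` when no bind has been instantiated yet.
 */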
export declare function getCacheBindClass(modelDetails?: ModelSettings<any>): BaseBindClass<unknown> | null;
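/**
 * Creates a new chat session backed by the configured local model,
 * resolving to a `ChatContext` whose prompt options come from node-llama-cpp.
 */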
export default function createChat(options?: CreateChatOptions): Promise<ChatContext<LLamaChatPromptOptions>>;
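/** Returns the on-disk path of the model file registered under `name`. */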
export declare function getModelPath(name: string): string;
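/*
 * Minimal usage sketch (hypothetical consumer code, assuming these symbols
 * are re-exported from the `catai` package entry and that `ChatContext`
 * exposes a `prompt` method, as its `LLamaChatPromptOptions` type
 * parameter suggests):
 *
 *   import createChat from 'catai';
 *
 *   const chat = await createChat();
 *   const answer = await chat.prompt('Hello!');
 *   console.log(answer);
 */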