node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
7 lines (6 loc) • 344 B
TypeScript
/**
 * Prompts the user with a question on the console and resolves with their answer.
 *
 * NOTE(review): declaration only — the implementation is not visible here, so the
 * semantics below are inferred from the signature; confirm against the implementation.
 *
 * @param question - The question text to display to the user.
 * @param options - Optional prompt settings.
 * @param options.validate - Validates the user's input. Presumably returns an error
 *   message string to reject the input, or `null` to accept it; may be async — confirm.
 * @param options.renderSummaryOnExit - Renders a summary string from the final item
 *   (`item` may be `null`, presumably when no answer was given) — TODO confirm when
 *   this is invoked.
 * @param options.exitOnCtrlC - Presumably controls whether pressing Ctrl+C exits the
 *   process instead of just cancelling the prompt — confirm.
 * @param options.defaultValue - Presumably the value used when the user submits empty
 *   input — confirm.
 * @returns The user's answer, or `null` — presumably when the prompt is cancelled or
 *   exited without an answer; confirm against the implementation.
 */
export declare function consolePromptQuestion(question: string, { validate, renderSummaryOnExit, exitOnCtrlC, defaultValue }?: {
    validate?: (input: string) => string | null | Promise<string | null>;
    renderSummaryOnExit?: (item: string | null) => string;
    exitOnCtrlC?: boolean;
    defaultValue?: string;
}): Promise<string | null>;