UNPKG

node-llama-cpp

Version:

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level

8 lines (7 loc) 259 B
// CommandModule is only referenced in type positions, so use a type-only
// import: it is guaranteed to be fully erased and can never emit a runtime
// require/import of "yargs" from this declaration.
import type { CommandModule } from "yargs";

/** The debug routines selectable via the command's `function` argument. */
declare const debugFunctions: readonly ["vram", "cmakeOptions"];

/** Parsed arguments accepted by the debug command. */
type DebugCommand = {
    /** Which debug routine to run — one of `debugFunctions` ("vram" | "cmakeOptions"). */
    function: (typeof debugFunctions)[number];
};

/** yargs command module wiring the debug routines into the CLI. */
export declare const DebugCommand: CommandModule<object, DebugCommand>;

export {};