node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

import { CommandModule } from "yargs";

type PullCommand = {
    urls: string[];
    header?: string[];
    override: boolean;
    noProgress: boolean;
    noTempFile: boolean;
    directory: string;
    filename?: string;
    parallel?: number;
};
export declare const PullCommand: CommandModule<object, PullCommand>;
export {};
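
The declaration above only describes the option shape of the pull command; it does not show how the command is implemented or wired up. The following is a minimal, hypothetical sketch (not the actual node-llama-cpp source) of how a yargs CommandModule with this option shape could be defined and registered in a CLI entry point. The name examplePullCommand, the command description strings, and the default values are assumptions made purely for illustration.

import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import type { Argv, CommandModule } from "yargs";

type PullCommandArgs = {
    urls: string[];
    header?: string[];
    override: boolean;
    noProgress: boolean;
    noTempFile: boolean;
    directory: string;
    filename?: string;
    parallel?: number;
};

// Hypothetical command module; the real implementation sits behind the
// `export declare const PullCommand` above and may differ in every detail.
const examplePullCommand: CommandModule<object, PullCommandArgs> = {
    command: "pull [urls..]",
    describe: "Download model files (illustrative description)",
    builder(commandYargs) {
        // The final cast keeps the sketch short; in real code the option
        // types are usually inferred from the chained calls.
        return commandYargs
            .positional("urls", {type: "string", array: true, default: [] as string[]})
            .option("header", {type: "string", array: true})
            .option("override", {type: "boolean", default: false})
            .option("noProgress", {type: "boolean", default: false})
            .option("noTempFile", {type: "boolean", default: false})
            .option("directory", {type: "string", default: process.cwd()})
            .option("filename", {type: "string"})
            .option("parallel", {type: "number"}) as unknown as Argv<PullCommandArgs>;
    },
    async handler({urls, directory, parallel}) {
        // A real handler would download each URL into `directory`,
        // optionally limiting concurrency with `parallel`; this stub only logs.
        console.log(`Would download ${urls.length} file(s) into ${directory}`, {parallel});
    }
};

yargs(hideBin(process.argv))
    .command(examplePullCommand)
    .parseAsync();

At the command line, a module like this would be invoked along the lines of: node-llama-cpp pull --directory ./models <model-url>. The exact flag names and defaults accepted by the real CLI should be taken from the package's own documentation rather than from this sketch.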