node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

export declare function spawnCommand(command: string, args: string[], cwd: string, env?: NodeJS.ProcessEnv, progressLogs?: boolean): Promise<{
    stdout: string;
    stderr: string;
    combinedStd: string;
}>;
export declare class SpawnError extends Error {
    readonly stdout: string;
    readonly stderr: string;
    readonly combinedStd: string;
    constructor(message: string, stdout: string, stderr: string, combinedStd: string);
}
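Going by the declaration above, spawnCommand runs a child process and resolves with its captured stdout, stderr, and combined output; SpawnError carries the same streams, presumably when the command fails. The sketch below is a minimal usage example based only on that declaration: the import specifier, and the assumption that a failed command rejects with a SpawnError, are guesses, since this is an internal utility of node-llama-cpp rather than part of its documented public API.

// Usage sketch (assumptions: import path and rejection-with-SpawnError behavior).
import {spawnCommand, SpawnError} from "node-llama-cpp/dist/utils/spawnCommand.js";

try {
    // Run `npm install` in the current working directory, passing through the
    // current environment and enabling progress logs.
    const {stdout, stderr, combinedStd} = await spawnCommand(
        "npm", ["install"], process.cwd(), process.env, true
    );
    console.log(combinedStd);
} catch (err) {
    // Assumed: on failure the promise rejects with a SpawnError that exposes
    // the captured stdout, stderr, and combined output of the child process.
    if (err instanceof SpawnError)
        console.error(err.combinedStd);
    else
        throw err;
}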