node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
7 lines (6 loc) • 397 B
TypeScript
/**
 * Returns the current value of the module-level documentation-mode flag.
 *
 * NOTE(review): the flag's effect is not visible in this declaration file —
 * presumably it alters behavior while docs are being generated; confirm in the implementation.
 */
export declare function getIsInDocumentationMode(): boolean;
/** Sets the documentation-mode flag read by {@link getIsInDocumentationMode}. */
export declare function setIsInDocumentationMode(value: boolean): void;
/**
 * Returns the current value of the module-level running-from-CLI flag.
 *
 * NOTE(review): how this flag changes behavior is not visible here —
 * presumably it marks that the library was invoked via its CLI entry point; verify against callers.
 */
export declare function getIsRunningFromCLI(): boolean;
/** Sets the running-from-CLI flag read by {@link getIsRunningFromCLI}. */
export declare function setIsRunningFromCLI(value: boolean): void;
/**
 * Returns the current value of the module-level force-show-console-log-prefix flag.
 *
 * NOTE(review): the prefix text and where it is applied are not visible here —
 * presumably console log lines gain a library prefix when this is true; confirm in the implementation.
 */
export declare function getForceShowConsoleLogPrefix(): boolean;
/** Sets the force-show-console-log-prefix flag read by {@link getForceShowConsoleLogPrefix}. */
export declare function setForceShowConsoleLogPrefix(value: boolean): void;