node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
22 lines • 610 B
JavaScript
// Process-wide runtime flags for the package, kept in a single private
// state object. Callers mutate them only through the exported setters.
const runtimeState = {
    documentationMode: false,
    runningFromCLI: false,
    forceShowConsoleLogPrefix: false
};

/**
 * @returns {boolean} Whether documentation mode was enabled via
 * `setIsInDocumentationMode`. Defaults to `false`.
 */
export function getIsInDocumentationMode() {
    return runtimeState.documentationMode;
}

/**
 * @param {boolean} value - New documentation-mode flag.
 */
export function setIsInDocumentationMode(value) {
    runtimeState.documentationMode = value;
}

/**
 * @returns {boolean} Whether the package was marked as running from the
 * CLI via `setIsRunningFromCLI`. Defaults to `false`.
 */
export function getIsRunningFromCLI() {
    return runtimeState.runningFromCLI;
}

/**
 * @param {boolean} value - New running-from-CLI flag.
 */
export function setIsRunningFromCLI(value) {
    runtimeState.runningFromCLI = value;
}

/**
 * @returns {boolean} Whether the console-log prefix should always be
 * shown, as set via `setForceShowConsoleLogPrefix`. Defaults to `false`.
 */
export function getForceShowConsoleLogPrefix() {
    return runtimeState.forceShowConsoleLogPrefix;
}

/**
 * @param {boolean} value - New force-show-console-log-prefix flag.
 */
export function setForceShowConsoleLogPrefix(value) {
    runtimeState.forceShowConsoleLogPrefix = value;
}
//# sourceMappingURL=state.js.map