node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
consts.js (JavaScript)
import isUnicodeSupported from "is-unicode-supported";
// Whether the current terminal can render Unicode glyphs
const unicodeSupported = isUnicodeSupported();
// How many recent tokens the detokenizer keeps as context when decoding streamed output
export const maxRecentDetokenizerTokens = 3;
// Unicode replacement character, used for bytes that don't form a valid character
export const UNKNOWN_UNICODE_CHAR = "\ufffd";
// Clock glyph, with an ASCII fallback for terminals without Unicode support
export const clockChar = unicodeSupported
    ? "\u25f7"
    : "+";
// Prompt arrow glyph, with an ASCII fallback
export const arrowChar = unicodeSupported
    ? "\u276f"
    : ">";
//# sourceMappingURL=consts.js.map
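The JSON-schema enforcement mentioned in the description can be used roughly as follows. This is a minimal sketch assuming the v3 getLlama / createGrammarForJsonSchema API; the model path, prompt, and schema are placeholders.

import {getLlama, LlamaChatSession} from "node-llama-cpp";

const llama = await getLlama();
const model = await llama.loadModel({
    modelPath: "path/to/model.gguf" // placeholder path to a local GGUF model
});
const context = await model.createContext();
const session = new LlamaChatSession({
    contextSequence: context.getSequence()
});

// Build a grammar from a JSON schema; generation is then constrained to match it
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        answer: {type: "string"}
    }
});

const response = await session.prompt("Give a short answer as JSON.", {grammar});
const parsed = grammar.parse(response); // parsed object conforming to the schema
console.log(parsed.answer);

Because the schema is enforced while tokens are generated rather than checked afterwards, the model cannot produce output that fails to parse against it.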