
node-llama-cpp


Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

tokenizeInput.js
import { isLlamaText } from "./LlamaText.js";
import { isToken } from "./isToken.js";

export function tokenizeInput(input, tokenizer, options, clone = false) {
    // Plain strings go straight to the tokenizer function.
    if (typeof input === "string")
        return tokenizer(input, false, options);
    // LlamaText values know how to tokenize themselves.
    else if (isLlamaText(input))
        return input.tokenize(tokenizer, options);
    // A single token is wrapped in a one-element array.
    else if (isToken(input))
        return [input];

    // Otherwise the input is already an array of tokens:
    // return a shallow copy when requested, or the array as-is.
    if (clone)
        return input.slice();
    return input;
}
//# sourceMappingURL=tokenizeInput.js.map
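
tokenizeInput() normalizes the different input shapes the library accepts (a plain string, a LlamaText value, a single token, or an array of tokens) into a flat token array, so downstream code only has to deal with token arrays. Below is a minimal usage sketch of that dispatch. The mock tokenizer is an assumption made purely for illustration (a real tokenizer would come from a loaded model and is assumed to have the same call shape), and the relative import assumes the example sits next to the file above; the utility is not necessarily re-exported from the package's public entry point.

import { tokenizeInput } from "./tokenizeInput.js";

// Stand-in tokenizer for illustration only: maps each character to its
// character code. Assumed call shape: (text, specialTokens, options) => tokens.
const mockTokenizer = (text, _specialTokens, _options) =>
    Array.from(text, (ch) => ch.charCodeAt(0));

// String input is passed straight to the tokenizer.
console.log(tokenizeInput("hi", mockTokenizer)); // [104, 105]

// A single token is wrapped in an array
// (assuming isToken() treats plain numbers as tokens).
console.log(tokenizeInput(104, mockTokenizer)); // [104]

// A token array is returned as-is, or shallow-copied when clone is true.
const tokens = [1, 2, 3];
console.log(tokenizeInput(tokens, mockTokenizer) === tokens); // true
console.log(tokenizeInput(tokens, mockTokenizer, undefined, true) === tokens); // false

Normalizing at this one boundary lets the rest of the library treat every prompt or prefix as a token array, regardless of which form the caller provided.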