
node-llama-cpp


Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
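
As a quick illustration of those two claims (local inference plus schema-constrained output), here is a minimal sketch assuming the node-llama-cpp v3 API (getLlama, LlamaChatSession, createGrammarForJsonSchema); the model path and the schema are placeholders, not part of this file.

import path from "path";
import { fileURLToPath } from "url";
import { getLlama, LlamaChatSession } from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Load a local GGUF model and open a chat session (model path is a placeholder).
const llama = await getLlama();
const model = await llama.loadModel({
    modelPath: path.join(__dirname, "models", "model.gguf")
});
const context = await model.createContext();
const session = new LlamaChatSession({ contextSequence: context.getSequence() });

// Turn a JSON schema into a grammar so generation can only produce matching JSON.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        summary: { type: "string" }
    }
});

const response = await session.prompt("Summarize llama.cpp in one sentence.", { grammar });
console.log(grammar.parse(response)); // object matching the schema above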

The declaration file's contents (4 lines, 3 loc, 254 B):
import { Token, Tokenizer } from "../types.js";
import { LlamaText } from "./LlamaText.js";

export declare function tokenizeInput(input: Token | Token[] | string | LlamaText, tokenizer: Tokenizer, options?: "trimLeadingSpace", clone?: boolean): Token[];
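
Going by the signature alone, the helper normalizes whatever form of prompt the caller passes (a raw string, a single token, a token array, or a LlamaText) into a plain Token[], and the clone flag suggests the result can be copied rather than shared. A hypothetical call-site sketch follows; how a Tokenizer instance is obtained is an assumption and not shown by this file.

import { Token, Tokenizer } from "../types.js";
import { tokenizeInput } from "./tokenizeInput.js";

// Stand-in for a real tokenizer, e.g. one belonging to a loaded model (assumption).
declare const tokenizer: Tokenizer;

// A string gets tokenized; an existing Token[] comes back as tokens again (cloned when requested).
const fromString: Token[] = tokenizeInput("Hello, world!", tokenizer, "trimLeadingSpace");
const copied: Token[] = tokenizeInput(fromString, tokenizer, undefined, true);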