
node-llama-cpp


Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

import { GbnfTerminal } from "../GbnfTerminal.js"; import { GbnfJsonScopeState } from "../utils/GbnfJsonScopeState.js"; export declare class GbnfWhitespace extends GbnfTerminal { readonly scopeState: GbnfJsonScopeState; readonly newLine: "before" | "after" | false; constructor(scopeState: GbnfJsonScopeState, { newLine }?: { newLine?: "before" | "after" | false; }); getGrammar(): string; protected getGrammarFromResolve(): string; private _getGrammar; protected getRuleName(): string; }