node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
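For context, the declaration below is part of the grammar machinery behind that JSON schema enforcement. A minimal sketch of how the feature is used from the public API, assuming node-llama-cpp v3's documented entry points and a locally downloaded GGUF model path:

import path from "path";
import { fileURLToPath } from "url";
import { getLlama, LlamaChatSession } from "node-llama-cpp";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

const llama = await getLlama();
const model = await llama.loadModel({
    // Assumed local model path; point this at any GGUF model you have on disk.
    modelPath: path.join(__dirname, "models", "model.gguf")
});
const context = await model.createContext();
const session = new LlamaChatSession({ contextSequence: context.getSequence() });

// Build a grammar from a JSON schema; generation is then constrained to match it.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        answer: { type: "string" },
        confidence: { type: "number" }
    }
} as const);

const response = await session.prompt("Is the sky blue?", { grammar });
const parsed = grammar.parse(response); // typed object matching the schema
console.log(parsed.answer, parsed.confidence);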

import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";

export declare class GbnfOr extends GbnfTerminal {
    readonly values: readonly GbnfTerminal[];
    readonly useRawGrammar: boolean;
    constructor(values: readonly GbnfTerminal[], useRawGrammar?: boolean);
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
    resolve(grammarGenerator: GbnfGrammarGenerator): string;
}
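Reading this declaration (an assumption, not documented behavior): GbnfOr appears to take a list of terminals and emit a GBNF alternation between them, e.g. a fragment along the lines of ("yes" | "no"). A hypothetical sketch of combining two existing terminals, written as if it lived in a sibling module of GbnfOr; the terminal instances and the generator are declared rather than constructed because their concrete types and constructors are internal to the package:

import { GbnfOr } from "./GbnfOr.js";
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";

// Hypothetical existing terminals, assumed to resolve to the GBNF rules `"yes"` and `"no"`.
declare const yesTerminal: GbnfTerminal;
declare const noTerminal: GbnfTerminal;
declare const generator: GbnfGrammarGenerator;

const either = new GbnfOr([yesTerminal, noTerminal]);

// Presumably yields an alternation fragment such as ("yes" | "no"),
// while resolve() likely registers it as a named rule on the generator instead.
const fragment = either.getGrammar(generator);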