node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines (15 loc) • 613 B
TypeScript
import { GbnfTerminal } from "../GbnfTerminal.js";
import { GbnfGrammarGenerator } from "../GbnfGrammarGenerator.js";
/**
 * GBNF terminal that represents a repetition of another terminal.
 *
 * Models `value` repeated between `minRepetitions` and `maxRepetitions`
 * times, optionally with a `separator` terminal between consecutive
 * repetitions.
 */
export declare class GbnfRepetition extends GbnfTerminal {
    /** The terminal to be repeated. */
    readonly value: GbnfTerminal;
    /** Terminal placed between consecutive repetitions, if provided. */
    readonly separator?: GbnfTerminal;
    /**
     * Minimum number of repetitions.
     * NOTE(review): optional in the constructor but required here, so the
     * implementation presumably applies a default (likely 0) — confirm
     * against the implementation file.
     */
    readonly minRepetitions: number;
    /**
     * Maximum number of repetitions; `null` presumably means unbounded —
     * confirm against the implementation file.
     */
    readonly maxRepetitions: number | null;
    /**
     * @param options.value - terminal to repeat
     * @param options.separator - optional terminal between repetitions
     * @param options.minRepetitions - minimum repetition count (defaulted when omitted)
     * @param options.maxRepetitions - maximum repetition count, or `null`
     */
    constructor({ value, separator, minRepetitions, maxRepetitions }: {
        value: GbnfTerminal;
        separator?: GbnfTerminal;
        minRepetitions?: number;
        maxRepetitions?: number | null;
    });
    /** Renders this repetition as a GBNF grammar fragment using the given generator. */
    getGrammar(grammarGenerator: GbnfGrammarGenerator): string;
}