node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
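A minimal usage sketch of the schema-enforcement feature, assuming the v3 public API surface (getLlama, createGrammarForJsonSchema, LlamaChatSession) and a hypothetical local model path; check the project docs for the exact signatures.

import {getLlama, LlamaChatSession} from "node-llama-cpp";

// Hypothetical model path; any local GGUF model works here.
const llama = await getLlama();
const model = await llama.loadModel({modelPath: "models/model.gguf"});
const context = await model.createContext();
const session = new LlamaChatSession({contextSequence: context.getSequence()});

// The JSON schema is compiled into a GBNF grammar that constrains token generation.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        answer: {type: "string"},
        confidence: {type: "number"}
    }
});

const response = await session.prompt("How reliable is this source?", {grammar});
console.log(grammar.parse(response)); // object matching the schema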
TypeScript declaration for the GbnfGrammarGenerator class, which assembles GBNF grammar files:
export declare class GbnfGrammarGenerator {
    /** Registered grammar rules, keyed by rule name */
    rules: Map<string, string | null>;
    /** Maps rule content to an existing rule name so identical rules can be reused */
    ruleContentToRuleName: Map<string, string>;
    /** Rule names already generated for specific literal values */
    literalValueRuleNames: Map<string | number, string>;
    private ruleId;
    private valueRuleId;
    /** Generates a new, unique rule name */
    generateRuleName(): string;
    /** Returns the rule name used for the given literal value, generating one if needed */
    generateRuleNameForLiteralValue(value: string | number): string;
    /** Serializes the collected rules into a complete GBNF grammar file, with rootGrammar as the root rule */
    generateGbnfFile(rootGrammar: string): string;
    /** Length of the rule name that would be proposed for the next literal value */
    getProposedLiteralValueRuleNameLength(): number;
}
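A speculative sketch of how these members might be combined, assuming rules maps rule names to their GBNF content and generateGbnfFile() serializes the collected rules around the given root expression; the class is internal, so the import path is omitted.

// Speculative internal usage; member semantics inferred from the declaration above.
const generator = new GbnfGrammarGenerator();

// Reuse (or create) a named rule for a literal JSON value.
const yesRule = generator.generateRuleNameForLiteralValue("yes");
generator.rules.set(yesRule, '"\\"yes\\""');

// A fresh, unique name for a composite rule.
const answerRule = generator.generateRuleName();
generator.rules.set(answerRule, `${yesRule} | "\\"no\\""`);

// Emit the complete GBNF file with the given expression as the root rule.
const gbnfText = generator.generateGbnfFile(answerRule);
console.log(gbnfText);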