node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
import { GbnfTerminal } from "../GbnfTerminal.js";

// GBNF terminal that matches a single literal number value.
export class GbnfNumberValue extends GbnfTerminal {
    value;

    constructor(value) {
        super();
        this.value = value;
    }

    // Render the number as a quoted GBNF string literal, e.g. 42 -> "42".
    getGrammar() {
        return '"' + JSON.stringify(this.value) + '"';
    }

    // Inline the literal when it is no longer than the rule name that would
    // replace it; otherwise fall back to the base class, which emits a named rule.
    resolve(grammarGenerator) {
        const grammar = this.getGrammar();
        if (grammar.length <= grammarGenerator.getProposedLiteralValueRuleNameLength())
            return grammar;

        return super.resolve(grammarGenerator);
    }

    generateRuleName(grammarGenerator) {
        return grammarGenerator.generateRuleNameForLiteralValue(this.value);
    }
}
//# sourceMappingURL=GbnfNumberValue.js.map
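
As a rough usage sketch (this class is an internal terminal used while generating GBNF grammars, not part of the library's public API), the terminal turns a JavaScript number into a quoted GBNF literal. The import path and direct instantiation below are illustrative assumptions; in practice the grammar generator drives these terminals internally.

JavaScript
// Hypothetical standalone usage of the terminal shown above.
import { GbnfNumberValue } from "./GbnfNumberValue.js";

const terminal = new GbnfNumberValue(42);

// getGrammar() wraps JSON.stringify(42) in double quotes,
// producing the GBNF string literal "42".
console.log(terminal.getGrammar()); // -> "42"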