node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
import { GbnfTerminal } from "../GbnfTerminal.js";

export class GbnfStringValue extends GbnfTerminal {
    value;

    constructor(value) {
        super();
        this.value = value;
    }

    getGrammar() {
        // Build a GBNF string literal that matches the JSON-encoded form of `value`,
        // including the surrounding double quotes. Backslashes, tabs, carriage returns,
        // newlines and quotes are escaped twice: once for the JSON encoding the model
        // must produce, and once more for the GBNF string-literal syntax itself.
        return [
            '"',
            '\\"',
            this.value
                .replaceAll("\\", "\\\\\\\\")
                .replaceAll("\t", "\\\\t")
                .replaceAll("\r", "\\\\r")
                .replaceAll("\n", "\\\\n")
                .replaceAll('"', "\\\\" + '\\"'),
            '\\"',
            '"'
        ].join("");
    }

    generateRuleName(grammarGenerator) {
        // Delegate rule naming to the grammar generator, which derives a rule name
        // from the literal value.
        return grammarGenerator.generateRuleNameForLiteralValue(this.value);
    }
}
//# sourceMappingURL=GbnfStringValue.js.map
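For illustration, here is a minimal usage sketch. It is not part of the file above: the import path and direct instantiation are assumptions for demonstration, as this class is normally used internally by the grammar generator rather than called directly.

// A hypothetical sketch: what getGrammar() returns for a value containing
// a quote and a newline (import path assumed for illustration).
import { GbnfStringValue } from "./GbnfStringValue.js";

const terminal = new GbnfStringValue('line "one"\nline two');
console.log(terminal.getGrammar());
// Prints: "\"line \\\"one\\\"\\nline two\""
// As a GBNF terminal, this matches exactly the JSON-encoded string
// "line \"one\"\nline two", quotes included, so the model can only
// generate that literal value at this position in the schema.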