node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
import { GbnfTerminal } from "../GbnfTerminal.js";
import { reservedRuleNames } from "./gbnfConsts.js";

// GBNF terminal matching a single character inside a JSON string:
// either an unescaped character or a JSON escape sequence.
export class GbnfInsideStringChar extends GbnfTerminal {
    getGrammar() {
        return [
            // any character except a quote, a backslash, DEL, or a control character
            negatedCharacterSet([
                '"',
                "\\\\",
                "\\x7F",
                "\\x00-\\x1F"
            ]),
            // escape sequences
            '"\\\\" ["\\\\/bfnrt]',
            '"\\\\u" [0-9a-fA-F]{4}'
        ].join(" | ");
    }
    getRuleName() {
        return reservedRuleNames.stringChar;
    }
}

// Builds a GBNF negated character set, e.g. [^"\\...]
function negatedCharacterSet(characterDefinitions) {
    return "[^" + characterDefinitions.join("") + "]";
}
//# sourceMappingURL=GbnfInsideStringChar.js.map
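For reference, here is a minimal sketch of the GBNF fragment this terminal produces. It assumes GbnfTerminal can be constructed with no arguments (the actual base-class constructor may differ); the logged string is simply the result of the join in getGrammar() above.

// Sketch only: instantiate the terminal and inspect its grammar text.
import { GbnfInsideStringChar } from "./GbnfInsideStringChar.js";

const grammar = new GbnfInsideStringChar().getGrammar();
console.log(grammar);
// [^"\\\x7F\x00-\x1F] | "\\" ["\\/bfnrt] | "\\u" [0-9a-fA-F]{4}

Each alternative corresponds to one valid JSON string character: an unescaped character (excluding quotes, backslashes, DEL, and control characters), a single-character escape such as \n or \", or a \uXXXX Unicode escape.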