node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines • 749 B
JavaScript
// Matches "probability,threshold" where each value is either an integer
// ("1") or a decimal with an optional integer part (".5", "0.5", "12.25").
// Both capture groups use the same pattern so the two values accept
// identical formats (the original threshold group was asymmetric and also
// matched the empty string).
const xtcArgFormat = /^(\d+|\d*\.\d+),(\d+|\d*\.\d+)$/;

/**
 * Parses an XTC sampling CLI argument of the form `"probability,threshold"`.
 *
 * @param {string | null | undefined} xtcString - the raw argument value
 * @returns {{probability: number, threshold: number} | undefined} the parsed
 *     values, or `undefined` when the argument is absent (`null`/`undefined`/empty string)
 * @throws {Error} when the string is malformed or either value is outside `[0, 1]`
 */
export function parseXtcArg(xtcString) {
    // `== null` intentionally matches both null and undefined
    if (xtcString == null || xtcString === "")
        return undefined;

    const match = xtcString.match(xtcArgFormat);
    if (match != null) {
        const probability = parseFloat(match[1]);
        const threshold = parseFloat(match[2]);

        // Both sampler parameters are constrained to the unit interval
        if (probability >= 0 && probability <= 1 && threshold >= 0 && threshold <= 1) {
            return { probability, threshold };
        }
    }

    throw new Error(`Invalid xtc argument: ${xtcString}. ` +
        'Expected format: "probability,threshold" where probability and threshold are numbers between 0 and 1.');
}
//# sourceMappingURL=parseXtcArg.js.map