node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp, and enforce a JSON schema on the model output at the generation level.
15 lines • 499 B
JavaScript
/**
 * Scans the options produced by `generator` and returns the one with the
 * highest score, as judged by the `score` callback.
 *
 * Short-circuits: an option scoring `Infinity` is returned immediately
 * without consuming the rest of the generator. Options whose score is
 * `null` or `undefined` are skipped entirely.
 *
 * @param {object} params
 * @param {() => Iterable<any>} params.generator - produces candidate options
 * @param {(option: any) => number | null | undefined} params.score - rates an option; nullish means "not eligible"
 * @returns {any} the best-scoring option, or `null` when no option received a score
 */
export function findBestOption({ generator, score }) {
    let winner = null;
    let winnerScore = null;
    for (const candidate of generator()) {
        const candidateScore = score(candidate);
        // A perfect score ends the search right away.
        if (candidateScore === Infinity)
            return candidate;
        // Nullish scores mark the candidate as ineligible — skip it.
        if (candidateScore == null)
            continue;
        if (winnerScore == null || candidateScore > winnerScore) {
            winner = candidate;
            winnerScore = candidateScore;
        }
    }
    return winner;
}
//# sourceMappingURL=findBestOption.js.map