node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines • 553 B
JavaScript
/**
 * Greedily distributes a budget of `size` tokens across `items`,
 * favoring items with a higher `evaluationPriority` (ties keep their
 * original relative order, since `Array.prototype.sort` is stable).
 *
 * The input array is not mutated; sorting happens on a copy.
 *
 * @param {{items: Array<{tokens: {length: number}, evaluationPriority: number}>, size: number}} options
 * @returns {Array<{item: object, processAmount: number}>} one allocation per
 *     visited item; iteration stops as soon as the budget reaches zero, so
 *     lower-priority items past that point are omitted entirely.
 */
export function firstInFirstOutStrategy({ items, size }) {
    // Highest evaluationPriority first; copy so callers' array is untouched.
    const byPriorityDesc = [...items].sort(
        (first, second) => second.evaluationPriority - first.evaluationPriority
    );
    let remainingTokens = size;
    const allocations = [];
    for (const item of byPriorityDesc) {
        // Give each item as many tokens as it wants, capped by what's left.
        const processAmount = Math.min(item.tokens.length, remainingTokens);
        allocations.push({ item, processAmount });
        remainingTokens -= processAmount;
        if (remainingTokens === 0)
            break;
    }
    return allocations;
}
//# sourceMappingURL=firstInFirstOutStrategy.js.map