
node-llama-cpp


Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
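As a rough sketch of what generation-level JSON schema enforcement can look like with node-llama-cpp (assuming the v3-style API with getLlama, createGrammarForJsonSchema, and LlamaChatSession; the model path and schema are placeholders, not taken from this file):

import { getLlama, LlamaChatSession } from "node-llama-cpp";

// Load a local GGUF model; "model.gguf" is a placeholder path.
const llama = await getLlama();
const model = await llama.loadModel({ modelPath: "model.gguf" });
const context = await model.createContext();
const session = new LlamaChatSession({ contextSequence: context.getSequence() });

// Build a grammar from a JSON schema so the model can only generate matching output.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        answer: { type: "string" }
    }
});

const response = await session.prompt("Reply with a JSON object.", { grammar });
console.log(JSON.parse(response));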

asyncEvery.js
import { getConsoleLogPrefix } from "../../utils/getConsoleLogPrefix.js";

/**
 * Returns a promise that resolves to true if every promise in the array resolves to true, otherwise false.
 * Note that this function will not throw on error and instead will log the error to the console.
 */
export async function asyncEvery(promises) {
    try {
        return (await Promise.all(promises)).every(Boolean);
    } catch (err) {
        console.error(getConsoleLogPrefix(false, false), err);
        return false;
    }
}
//# sourceMappingURL=asyncEvery.js.map
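A hypothetical usage sketch, assuming an ESM module with top-level await; the file names and fs.access checks are illustrative only and do not appear in this package:

import fs from "node:fs/promises";
import { asyncEvery } from "./asyncEvery.js";

// Each entry is a promise resolving to a boolean; asyncEvery resolves to true
// only if all of them resolve to true, and to false if any of them rejects.
const allReadable = await asyncEvery([
    fs.access("model.gguf").then(() => true, () => false),
    fs.access("grammar.gbnf").then(() => true, () => false)
]);
console.log(allReadable);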