node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
6 lines (5 loc) • 298 B
TypeScript
/**
 * Returns a promise that resolves to `true` if every promise in the array
 * resolves to `true`, otherwise `false`.
 *
 * Note that this function will not throw on error and instead will log the
 * error to the console.
 *
 * @param promises - The promises whose boolean results are combined with a
 * logical AND. Declared `readonly` since the function only reads the array;
 * mutable arrays remain assignable, so this is backward-compatible.
 * @returns A promise resolving to `true` only when all input promises
 * resolve to `true`.
 */
export declare function asyncEvery(promises: readonly Promise<boolean>[]): Promise<boolean>;