node-llama-cpp
Run AI models locally on your machine with node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
const safeCallbackSymbol = Symbol("safeCallback");

// Wraps an event callback so that errors it throws, or rejections from a
// promise it returns, are logged to the console instead of propagating to
// the caller. Returns undefined when no callback is given.
export function safeEventCallback(callback, message) {
    if (callback == null)
        return undefined;

    // do not wrap the callback if it's already wrapped
    if (callback?.[safeCallbackSymbol] === true)
        return callback;

    const res = (...args) => {
        try {
            const res = callback(...args);

            // async callback: log the rejection instead of leaving it unhandled
            if (res instanceof Promise)
                res.catch((error) => {
                    if (message != null)
                        console.error(message, error);
                    else
                        console.error(error);
                });
        }
        catch (error) {
            // sync callback: log the thrown error instead of rethrowing it
            if (message != null)
                console.error(message, error);
            else
                console.error(error);
        }
    };

    // mark the wrapper so it won't be wrapped again
    res[safeCallbackSymbol] = true;

    return res;
}
//# sourceMappingURL=safeEventCallback.js.map
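
For context, here is a minimal usage sketch. It is not taken from the library's documented API: the import path, the onProgress handler, and the message string are illustrative assumptions. safeEventCallback wraps an optional event handler so that a synchronous throw or a rejected promise is logged via console.error (prefixed with the optional message) instead of propagating to whatever invokes the callback, and it passes a missing callback through as undefined.

import {safeEventCallback} from "./safeEventCallback.js";

// Illustrative handler (not part of the library): wrap it so a failure
// inside the handler cannot break the code that invokes it.
const onProgress = safeEventCallback(async (progress) => {
    if (progress > 1)
        throw new Error("progress out of range");

    console.log(`download progress: ${Math.round(progress * 100)}%`);
}, "onProgress handler failed");

// The wrapped callback never throws; the rejection above is logged to the
// console, prefixed with "onProgress handler failed".
onProgress(1.5);

// A missing callback passes through as undefined, so optional callbacks
// can be wrapped unconditionally.
const noCallback = safeEventCallback(undefined); // -> undefined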