node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
9 lines • 346 B
JavaScript
import { defaultLlamaCppGitHubRepo, defaultLlamaCppGpuSupport, defaultLlamaCppRelease } from "../config.js";
/**
 * Returns the default llama.cpp build settings used when no explicit
 * options are provided: the GitHub repo to fetch from, the release to
 * build, and the GPU support mode (values come from `../config.js`).
 * @returns {Promise<{repo: *, release: *, gpuSupport: *}>} the default
 *   build configuration — exact value types are defined in the config
 *   module, not visible here.
 */
export async function getBuildDefaults() {
    const repo = defaultLlamaCppGitHubRepo;
    const release = defaultLlamaCppRelease;
    const gpuSupport = defaultLlamaCppGpuSupport;

    return { repo, release, gpuSupport };
}
//# sourceMappingURL=getBuildDefaults.js.map