node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

getCanUsePrebuiltBinaries.js
import { builtinLlamaCppGitHubRepo, builtinLlamaCppRelease } from "../../config.js";
import { getClonedLlamaCppRepoReleaseInfo } from "./cloneLlamaCppRepo.js";

// Prebuilt binaries can be used when no llama.cpp repo has been cloned locally,
// or when the cloned repo's release tag and GitHub repo match the ones the
// bundled prebuilt binaries were built against.
export async function getCanUsePrebuiltBinaries() {
    const clonedLlamaCppRepoReleaseInfo = await getClonedLlamaCppRepoReleaseInfo();

    return clonedLlamaCppRepoReleaseInfo == null || (
        clonedLlamaCppRepoReleaseInfo.tag === builtinLlamaCppRelease &&
        clonedLlamaCppRepoReleaseInfo.llamaCppGithubRepo === builtinLlamaCppGitHubRepo
    );
}
//# sourceMappingURL=getCanUsePrebuiltBinaries.js.map
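
For context, a minimal usage sketch of how a caller might consume this check. Only getCanUsePrebuiltBinaries comes from the file above; the resolveBinaries wrapper and its return values are hypothetical illustrations, not part of node-llama-cpp.

import { getCanUsePrebuiltBinaries } from "./getCanUsePrebuiltBinaries.js";

// Hypothetical caller: decide whether the prebuilt binaries shipped with the
// package can be reused, or whether llama.cpp must be built from the cloned source.
async function resolveBinaries() {
    if (await getCanUsePrebuiltBinaries()) {
        // No cloned repo, or the cloned repo matches the builtin release/repo,
        // so the prebuilt binaries are compatible.
        return "prebuilt";
    }

    // A different llama.cpp repo or release was cloned, so a local build is needed.
    return "source";
}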