node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model's output at the generation level.

type ClonedLlamaCppRepoTagFile = {
    tag: string;
    llamaCppGithubRepo: string;
};
export declare function cloneLlamaCppRepo(githubOwner: string, githubRepo: string, tag: string, useBundles?: boolean, progressLogs?: boolean, recursive?: boolean): Promise<void>;
export declare function getClonedLlamaCppRepoReleaseInfo(): Promise<ClonedLlamaCppRepoTagFile | null>;
export declare function isLlamaCppRepoCloned(waitForLock?: boolean): Promise<boolean>;
export declare function ensureLlamaCppRepoIsCloned({ progressLogs }?: {
    progressLogs?: boolean;
}): Promise<void>;
export {};
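
A minimal usage sketch against these declarations, assuming the functions can be imported from this module inside the package; the import path shown is hypothetical, and this file does not confirm whether these functions are re-exported from the package's public entry point:

// Sketch only: the import path below is an assumption, not a documented entry point.
import {
    isLlamaCppRepoCloned,
    ensureLlamaCppRepoIsCloned,
    getClonedLlamaCppRepoReleaseInfo
} from "node-llama-cpp/dist/utils/cloneLlamaCppRepo.js"; // hypothetical path

async function reportClonedRepo() {
    // Make sure a llama.cpp checkout exists, logging progress while it works
    await ensureLlamaCppRepoIsCloned({progressLogs: true});

    if (await isLlamaCppRepoCloned()) {
        // Read the tag info recorded when the repo was cloned (may be null)
        const releaseInfo = await getClonedLlamaCppRepoReleaseInfo();
        console.log("Cloned repo:", releaseInfo?.llamaCppGithubRepo, "tag:", releaseInfo?.tag);
    }
}

await reportClonedRepo();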