node-llama-cpp

Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.

/** Checks whether `fileToSearch` can be found in the search path (optionally extended with `additionalSearchPaths`). */
export declare function hasFileInPath(fileToSearch: string, additionalSearchPaths?: (string | null | undefined)[]): Promise<boolean>;

/** Resolves every location in the search path (optionally extended with `additionalSearchPaths`) where `fileToSearch` is found. */
export declare function resolveFileLocationInPath(fileToSearch: string, additionalSearchPaths?: (string | null | undefined)[]): Promise<string[]>;
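The declarations above only describe the contract. A minimal sketch of what functions with these signatures might do, assuming "in path" refers to the directories listed in the PATH environment variable plus the optional extra search paths; the actual implementation inside node-llama-cpp may differ:

import path from "path";
import fs from "fs/promises";

// Illustrative sketch only, not node-llama-cpp's implementation.
export async function resolveFileLocationInPath(
    fileToSearch: string,
    additionalSearchPaths: (string | null | undefined)[] = []
): Promise<string[]> {
    // Directories from the PATH environment variable, plus any extra search paths,
    // with null/undefined/empty entries filtered out.
    const pathDirs = (process.env.PATH ?? "").split(path.delimiter);
    const searchDirs = [...pathDirs, ...additionalSearchPaths]
        .filter((dir): dir is string => dir != null && dir !== "");

    const foundLocations: string[] = [];
    for (const dir of searchDirs) {
        const candidate = path.join(dir, fileToSearch);
        try {
            await fs.access(candidate); // throws if the file is not accessible
            foundLocations.push(candidate);
        } catch {
            // not found in this directory; keep searching
        }
    }
    return foundLocations;
}

export async function hasFileInPath(
    fileToSearch: string,
    additionalSearchPaths?: (string | null | undefined)[]
): Promise<boolean> {
    return (await resolveFileLocationInPath(fileToSearch, additionalSearchPaths)).length > 0;
}

Under these assumptions, `hasFileInPath("llama-server")` would report whether the binary appears anywhere on PATH, while `resolveFileLocationInPath` returns every matching location so the caller can pick one.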