inference-server
Libraries and a server for building AI applications. Adapters to various native bindings enable local inference. Integrate it into your application, or run it as a microservice.
TypeScript
interface ResolveModelFileLocationArgs {
url?: string;
filePath?: string;
modelsCachePath: string;
}
/**
* Resolve a model file/url to an absolute path to either a file or directory.
* @param url - Optional URL to the model file. Location will be derived from it.
* @param filePath - Optional relative (to modelsCachePath) or absolute file path that short-circuits resolution.
* @param modelsCachePath - The path to the models cache directory.
 * @returns The absolute file path on the local filesystem.
* @throws If the model location could not be resolved.
*/
export declare function resolveModelFileLocation({ url, filePath, modelsCachePath }: ResolveModelFileLocationArgs): string;
export {};
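
A minimal usage sketch follows. Only the resolveModelFileLocation signature comes from the declaration above; the import path "inference-server", the model URL, and the cache directory are assumptions made for illustration.

import { resolveModelFileLocation } from "inference-server";

// Resolve a remote model URL to the location it would occupy inside the models cache.
// The exact cache layout is decided by the library; this only shows the call shape.
const fromUrl = resolveModelFileLocation({
  url: "https://example.com/models/model.gguf", // hypothetical URL
  modelsCachePath: "/var/cache/models",
});

// A filePath (relative to modelsCachePath, or absolute) short-circuits URL-based resolution.
const fromFilePath = resolveModelFileLocation({
  filePath: "my-model/model.gguf", // hypothetical relative path
  modelsCachePath: "/var/cache/models",
});

// Both results are absolute paths; the call throws if the location cannot be resolved.
console.log(fromUrl, fromFilePath);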