inference-server
Libraries and a server for building AI applications, with adapters to various native bindings that enable local inference. Integrate it into your application, or run it as a microservice.
TypeScript
/** Fetch the contents of a remote URL into a Buffer. */
export declare function fetchBuffer(url: string): Promise<Buffer>;
/** Check whether a file exists at the given remote URL. */
export declare function remoteFileExists(url: string): Promise<boolean>;
/** Model id and branch extracted from a Hugging Face URL. */
interface HuggingfaceModelInfo {
    modelId: string;
    branch: string;
}
/** Parse the model id and branch out of a Hugging Face URL. */
export declare function parseHuggingfaceModelIdAndBranch(url: string): HuggingfaceModelInfo;
/** Normalize a Transformers.js class reference given either the class itself or its string name, using `fallback` when provided. */
export declare function normalizeTransformersJsClass<T>(classLike: T | string | undefined, fallback?: T): T;
export {};
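For orientation, here is a minimal usage sketch of the declared utilities. It assumes they are exported from the package entry point (the import path is an assumption, not confirmed by this declaration file), and the Hugging Face URL is purely illustrative.

import {
    fetchBuffer,
    remoteFileExists,
    parseHuggingfaceModelIdAndBranch,
} from "inference-server"; // assumed export path

async function main() {
    // Hypothetical Hugging Face file URL for illustration only.
    const url = "https://huggingface.co/org/model/resolve/main/config.json";

    // Check that the remote file exists before downloading it into memory.
    if (await remoteFileExists(url)) {
        const buffer = await fetchBuffer(url);
        console.log(`Downloaded ${buffer.byteLength} bytes`);
    }

    // Extract the model id and branch from the URL.
    const { modelId, branch } = parseHuggingfaceModelIdAndBranch(url);
    console.log(modelId, branch);
}

main();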