inference-server
Version:
Libraries and a server for building AI applications, with adapters to various native bindings that enable local inference. Integrate it with your application, or run it as a microservice.
34 lines (33 loc) • 856 B
TypeScript
import { ModelOptions } from '../../types/index.js';
/**
 * Fields shared by every entry in the model-cache file tree.
 */
interface FileTreeEntryBase {
    /** Base name of the entry (no path separators). */
    name: string;
    /** Absolute filesystem path of the entry. */
    absPath: string;
    /** Path relative to the indexed cache root directory. */
    relPath: string;
    /** Size in bytes. For directories this is presumably the aggregate of children — TODO confirm against implementation. */
    size: number;
    /** Human-readable rendering of `size`, e.g. "856 B". */
    sizeFormatted: string;
    /** Whether this entry corresponds to a known model location — NOTE(review): presumably derived from `usedModels`; verify against the indexer. */
    isModelLocation: boolean;
}
/**
 * A regular file in the model cache.
 */
interface File extends FileTreeEntryBase {
    /** Discriminant for narrowing a `FileTreeItem`. */
    type: 'file';
}
/**
 * A directory in the model cache, including its contents.
 */
interface Directory extends FileTreeEntryBase {
    /** Discriminant for narrowing a `FileTreeItem`. */
    type: 'directory';
    /** Immediate child entries of this directory. */
    children: FileTreeItem[];
    /** Number of files contained — presumably counted recursively; TODO confirm. */
    fileCount: number;
}
/** A node in the model-cache tree: discriminate on `type`. */
export type FileTreeItem = File | Directory;
/**
 * Result of indexing a model cache directory.
 */
export interface ModelCacheInfo {
    /** Root-level entries of the indexed cache (directories carry their children). */
    fileTree: FileTreeItem[];
    /** Total number of files found — presumably counted across the whole tree; TODO confirm against implementation. */
    fileCount: number;
}
/**
 * Options controlling how `indexModelCache` scans a cache directory.
 * Defaults are not visible from this declaration file — confirm against the implementation.
 */
interface IndexModelCacheOptions {
    /** Include individual file entries (not just directories) in the resulting tree. */
    includeFiles?: boolean;
    /** Include entries not referenced by `usedModels` — presumably; verify against the indexer. */
    includeUnused?: boolean;
    /** Known models to match cache entries against — NOTE(review): keys look like model identifiers; confirm with callers. */
    usedModels?: Record<string, ModelOptions>;
}
/**
 * Index a model cache directory on disk and build a file tree describing its contents.
 *
 * @param dir - Root directory of the model cache to scan.
 * @param opts - Optional scan controls (file inclusion, unused entries, known models).
 * @returns The cache's file tree and file count.
 */
export declare function indexModelCache(dir: string, opts?: IndexModelCacheOptions): Promise<ModelCacheInfo>;
export {};