node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
16 lines (15 loc) • 468 B
TypeScript
/**
 * Parses a model file name into its component parts.
 *
 * Most fields are `string | undefined`, i.e. absent when the corresponding
 * segment is not found in the filename; `subType` and `otherInfo` are always
 * present.
 *
 * NOTE(review): this is a declaration only — the exact filename grammar
 * (segment order, separators) is defined by the implementation; the notes
 * below are inferred from the field names and should be confirmed there.
 *
 * @param filename - the model file name to parse (e.g. a GGUF file name —
 *   presumably something like `model-name-7B.Q4_K_M.gguf`; verify against
 *   the implementation)
 * @returns an object with the parsed segments:
 *   - `name` — base model name, if detected
 *   - `subType` — model sub-type segment (always a string, possibly empty)
 *   - `quantization` — quantization label, if present
 *   - `fileType` — file extension/type, if present
 *   - `version` — model version segment, if present
 *   - `contextSize` — context-size segment, if present
 *   - `parameters` — parameter count formatted as `"<number>B"`, if present
 *   - `parts` — multi-part file info (`part` = this part's index text,
 *     `parts` = total parts text), or `undefined` for single-file models
 *   - `otherInfo` — any remaining unrecognized segments
 */
export declare function parseModelFileName(filename: string): {
name: string | undefined;
subType: string;
quantization: string | undefined;
fileType: string | undefined;
version: string | undefined;
contextSize: string | undefined;
parameters: `${number}B` | undefined;
parts: {
part: string;
parts: string;
} | undefined;
otherInfo: string[];
};
/**
 * Checks whether the given text denotes a file-part segment.
 *
 * NOTE(review): declaration only — presumably this tests for a multi-part
 * marker such as `"00001-of-00005"` (matching the `parts` field returned by
 * {@link parseModelFileName}); confirm the exact pattern in the implementation.
 *
 * @param text - the candidate segment; may be omitted (an omitted/undefined
 *   value presumably yields `false` — verify)
 * @returns `true` if `text` is recognized as a file-part segment
 */
export declare function isFilePartText(text?: string): boolean;