node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
TypeScript
/**
* Convert a GGUF file type number to its corresponding type name
*/
export declare function getGgufFileTypeName(fileType?: number): "ALL_F32" | "MOSTLY_F16" | "MOSTLY_Q4_0" | "MOSTLY_Q4_1" | "MOSTLY_Q4_1_SOME_F16" | "MOSTLY_Q4_2" | "MOSTLY_Q4_3" | "MOSTLY_Q8_0" | "MOSTLY_Q5_0" | "MOSTLY_Q5_1" | "MOSTLY_Q2_K" | "MOSTLY_Q3_K_S" | "MOSTLY_Q3_K_M" | "MOSTLY_Q3_K_L" | "MOSTLY_Q4_K_S" | "MOSTLY_Q4_K_M" | "MOSTLY_Q5_K_S" | "MOSTLY_Q5_K_M" | "MOSTLY_Q6_K" | "MOSTLY_IQ2_XXS" | "MOSTLY_IQ2_XS" | "MOSTLY_Q2_K_S" | "MOSTLY_IQ3_XS" | "MOSTLY_IQ3_XXS" | "MOSTLY_IQ1_S" | "MOSTLY_IQ4_NL" | "MOSTLY_IQ3_S" | "MOSTLY_IQ3_M" | "MOSTLY_IQ2_S" | "MOSTLY_IQ2_M" | "MOSTLY_IQ4_XS" | "MOSTLY_IQ1_M" | "MOSTLY_BF16" | "MOSTLY_Q4_0_4_4" | "MOSTLY_Q4_0_4_8" | "MOSTLY_Q4_0_8_8" | "MOSTLY_TQ1_0" | "MOSTLY_TQ2_0" | undefined;
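A minimal usage sketch, assuming getGgufFileTypeName is importable from the package root and that the numeric values follow llama.cpp's file type enumeration (e.g. 0 for ALL_F32, 1 for MOSTLY_F16); both are assumptions, not confirmed by this declaration file:

import {getGgufFileTypeName} from "node-llama-cpp";

// Map a GGUF file type number to its name.
// The value 1 is assumed here to correspond to "MOSTLY_F16" per llama.cpp's enum.
const typeName = getGgufFileTypeName(1);
console.log(typeName); // expected: "MOSTLY_F16" (assumption)

// An omitted or unrecognized number yields undefined.
console.log(getGgufFileTypeName());     // undefined
console.log(getGgufFileTypeName(9999)); // undefined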