@react-native-ai/mlc
Version: (not captured in this extract)
MLC LLM provider for Vercel AI SDK
29 lines (28 loc) • 1.03 kB
JSON
{
"model_list": [
{
"model_id": "Llama-3.2-1B-Instruct",
"model_lib": "llama_q4f16_0_ab8405c00238688ffdecfe094f4e736b",
"model_url": "https://huggingface.co/mlc-ai/Llama-3.2-1B-Instruct-q4f16_0-MLC",
"estimated_vram_bytes": 1200000000
},
{
"model_id": "Llama-3.2-3B-Instruct",
"model_lib": "llama_q4f16_0_1e226c5589ddce6135180794ca768b8f",
"model_url": "https://huggingface.co/mlc-ai/Llama-3.2-3B-Instruct-q4f16_0-MLC",
"estimated_vram_bytes": 2000000000
},
{
"model_id": "Phi-3.5-mini-instruct",
"model_lib": "phi3_q4f16_0_5bff41345ebf5c96d70378aaa01354ae",
"model_url": "https://huggingface.co/mlc-ai/Phi-3.5-mini-instruct-q4f16_0-MLC",
"estimated_vram_bytes": 2300000000
},
{
"model_id": "Qwen2-1.5B-Instruct",
"model_lib": "qwen2_q4f16_0_06d1594f7e74c6af42b2f3b27806e6a9",
"model_url": "https://huggingface.co/mlc-ai/Qwen2-1.5B-Instruct-q4f16_0-MLC",
"estimated_vram_bytes": 600000000
}
]
}