{
"model_list": [
{
"model_id": "Llama-3.2-1B-Instruct",
"model_lib": "llama_q4f16_1_422de0f41b1e664268ece4b3d06d0d1e",
"model_url": "https://huggingface.co/mlc-ai/Llama-3.2-1B-Instruct-q4f16_1-MLC",
"estimated_vram_bytes": 1200000000
},
{
"model_id": "Llama-3.2-3B-Instruct",
"model_lib": "llama_q4f16_1_0f09c7bb461e9d68d8bb190e77c978a9",
"model_url": "https://huggingface.co/mlc-ai/Llama-3.2-3B-Instruct-q4f16_1-MLC",
"estimated_vram_bytes": 2000000000
},
{
"model_id": "Phi-3.5-mini-instruct",
"model_lib": "phi3_q4f16_1_22ce7b9d58d896caf0838eb723dc2ce6",
"model_url": "https://huggingface.co/mlc-ai/Phi-3.5-mini-instruct-q4f16_1-MLC",
"estimated_vram_bytes": 2300000000
},
{
"model_id": "Qwen2.5-0.5B-Instruct",
"model_lib": "qwen2_q4f16_1_835c3ba663789f49d3cd9435ae8d4eb5",
"model_url": "https://huggingface.co/mlc-ai/Qwen2.5-0.5B-Instruct-q4f16_1-MLC",
"estimated_vram_bytes": 600000000
}
]
}