@huggingface/inference
TypeScript client for the Hugging Face Inference Providers and Inference Endpoints
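// Inference providers supported by the client; "hf-inference" refers to
// Hugging Face's own serverless Inference API.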
export const INFERENCE_PROVIDERS = [
"black-forest-labs",
"cerebras",
"cohere",
"fal-ai",
"featherless-ai",
"fireworks-ai",
"groq",
"hf-inference",
"hyperbolic",
"nebius",
"novita",
"nscale",
"openai",
"ovhcloud",
"replicate",
"sambanova",
"together",
];
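// Values accepted by the client's `provider` option: any provider above,
// or the "auto" policy, which leaves provider selection to Hugging Face.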
export const PROVIDERS_OR_POLICIES = [...INFERENCE_PROVIDERS, "auto"];
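These constants back the client's `provider` option. Below is a minimal usage sketch, assuming the `InferenceClient` class and `chatCompletion` method exposed by recent releases (older releases expose `HfInference` instead); the model id and the HF_TOKEN environment variable are illustrative:

import { InferenceClient } from "@huggingface/inference";

// Authenticate with a Hugging Face access token (assumed to be in HF_TOKEN).
const client = new InferenceClient(process.env.HF_TOKEN);

// `provider` accepts any entry from INFERENCE_PROVIDERS, or "auto" to let
// Hugging Face route the request to an available provider for the model.
const response = await client.chatCompletion({
  model: "meta-llama/Llama-3.1-8B-Instruct", // illustrative model id
  provider: "together",
  messages: [{ role: "user", content: "Hello!" }],
});

console.log(response.choices[0].message.content);

PROVIDERS_OR_POLICIES can also be used to validate a user-supplied provider string before making a request.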