llm-info
Information on LLM models: context window token limits, output token limits, pricing, and more
declare enum ModelEnum {
'gpt-4' = "gpt-4",
'gpt-4-turbo' = "gpt-4-turbo",
'gpt-4o' = "gpt-4o",
'gpt-4o-64k-output-alpha' = "gpt-4o-64k-output-alpha",
'gpt-4o-mini' = "gpt-4o-mini",
'gpt-4o-2024-08-06' = "gpt-4o-2024-08-06",
'gpt-4.1' = "gpt-4.1",
'gpt-4.1-mini' = "gpt-4.1-mini",
'gpt-4.1-nano' = "gpt-4.1-nano",
'gpt-5' = "gpt-5",
'gpt-5-mini' = "gpt-5-mini",
'gpt-5-nano' = "gpt-5-nano",
'o1-preview' = "o1-preview",
'o1-mini' = "o1-mini",
'o1' = "o1",
'o3' = "o3",
'o3-mini' = "o3-mini",
'o4-mini' = "o4-mini",
'claude-3-5-sonnet-20240620' = "claude-3-5-sonnet-20240620",
'claude-3-5-sonnet-20241022' = "claude-3-5-sonnet-20241022",
'claude-3-5-haiku-20241022' = "claude-3-5-haiku-20241022",
'claude-3-7-sonnet-20250219' = "claude-3-7-sonnet-20250219",
'claude-opus-4-20250514' = "claude-opus-4-20250514",
'claude-opus-4-1-20250805' = "claude-opus-4-1-20250805",
'claude-sonnet-4-20250514' = "claude-sonnet-4-20250514",
'deepseek-chat' = "deepseek-chat",
'deepseek-reasoner' = "deepseek-reasoner",
'gemini-2.5-pro-exp-03-25' = "gemini-2.5-pro-exp-03-25",
'gemini-2.5-pro-preview-03-25' = "gemini-2.5-pro-preview-03-25",
'gemini-2.5-pro-preview-05-06' = "gemini-2.5-pro-preview-05-06",
'gemini-2.5-pro-preview-06-05' = "gemini-2.5-pro-preview-06-05",
'gemini-2.5-pro' = "gemini-2.5-pro",
'gemini-2.5-flash-preview-04-17' = "gemini-2.5-flash-preview-04-17",
'gemini-2.5-flash-preview-05-20' = "gemini-2.5-flash-preview-05-20",
'gemini-2.5-flash' = "gemini-2.5-flash",
'grok-4' = "grok-4",
'grok-code-fast-1' = "grok-code-fast-1"
}
declare enum NonModelEnum {
'chatgpt' = "chatgpt"
}
declare const AllModels: ModelEnum[];
declare const AllModelLikes: (ModelEnum | NonModelEnum.chatgpt)[];
type ModelLike = ModelEnum | NonModelEnum;
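/*
 * Usage sketch for the model identifier exports: list every known model ID and
 * narrow an arbitrary string to a ModelLike. A minimal sketch; the 'llm-info'
 * import path is assumed from the package name.
 *
 *   import { AllModels, AllModelLikes, ModelEnum, type ModelLike } from 'llm-info';
 *
 *   // Every ModelEnum value, e.g. "gpt-4o", "claude-sonnet-4-20250514", ...
 *   for (const model of AllModels) {
 *     console.log(model);
 *   }
 *
 *   // Narrow a plain string to a ModelLike before passing it further.
 *   function asModelLike(id: string): ModelLike | undefined {
 *     return (AllModelLikes as string[]).includes(id) ? (id as ModelLike) : undefined;
 *   }
 *
 *   asModelLike(ModelEnum['gpt-4o']); // -> "gpt-4o"
 */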
declare const AI_PROVIDERS: {
readonly OPENAI: "openai";
readonly ANTHROPIC: "anthropic";
readonly AZURE_OPENAI: "azure-openai";
readonly DEEPSEEK: "deepseek";
readonly OPENROUTER: "openrouter";
readonly GOOGLE: "google";
readonly GOOGLE_VERTEX_AI: "google-vertex-ai";
readonly FIREWORKS: "fireworks";
readonly XAI: "xai";
};
type AI_PROVIDER_TYPE = (typeof AI_PROVIDERS)[keyof typeof AI_PROVIDERS];
type FIRST_PARTY_SDK_AI_PROVIDER_TYPE = {
name: string;
firstParty: true;
thirdParty: false;
nativeSDK: true;
baseURL?: string;
website?: string;
apiKeysPage?: string;
};
type FIRST_PARTY_BASE_URL_AI_PROVIDER_TYPE = {
name: string;
firstParty: true;
thirdParty: false;
baseURL: string;
nativeSDK: false;
website?: string;
apiKeysPage?: string;
};
type THIRD_PARTY_AI_PROVIDER_TYPE = {
name: string;
firstParty: false;
thirdParty: true;
nativeSDK: false;
baseURL: string;
website: string;
apiKeysPage?: string;
};
type AI_PROVIDER_CONFIG_TYPE = FIRST_PARTY_SDK_AI_PROVIDER_TYPE | FIRST_PARTY_BASE_URL_AI_PROVIDER_TYPE | THIRD_PARTY_AI_PROVIDER_TYPE;
declare const AI_PROVIDER_CONFIG: Record<AI_PROVIDER_TYPE, AI_PROVIDER_CONFIG_TYPE>;
declare const AI_PROVIDER_NAME_MAP: Record<AI_PROVIDER_TYPE, string>;
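/*
 * Usage sketch for the provider tables: look up a provider's display name and
 * connection details. The union members are distinguished by the firstParty /
 * thirdParty / nativeSDK flags, so a simple property check narrows the config.
 * The 'llm-info' import path is assumed.
 *
 *   import { AI_PROVIDERS, AI_PROVIDER_CONFIG, AI_PROVIDER_NAME_MAP } from 'llm-info';
 *
 *   const config = AI_PROVIDER_CONFIG[AI_PROVIDERS.OPENROUTER];
 *   console.log(AI_PROVIDER_NAME_MAP[AI_PROVIDERS.OPENROUTER]);
 *
 *   // Third-party providers always carry a baseURL; first-party SDK providers may not.
 *   if (config.thirdParty) {
 *     console.log(config.baseURL, config.website);
 *   } else if (!config.nativeSDK) {
 *     console.log(config.baseURL); // first-party provider reached via a base URL
 *   }
 */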
type ModelInfoCurrent = {
name: string;
provider: AI_PROVIDER_TYPE;
id: string;
contextWindowTokenLimit: number;
outputTokenLimit: number | null;
pricePerMillionInputTokens: number | null;
pricePerMillionOutputTokens: number | null;
tokenizerId: string | null;
notes?: string;
notesUrl?: string;
legacy: false;
alpha?: boolean;
small?: boolean;
reasoning?: boolean;
recommendedForCoding?: boolean;
recommendedForWriting?: boolean;
supportsImageInput?: boolean;
openRouterModelId?: string;
releaseDate?: string;
};
type ModelInfoLegacy = {
name: string;
provider: AI_PROVIDER_TYPE;
id: string;
contextWindowTokenLimit: number;
outputTokenLimit: number | null;
pricePerMillionInputTokens: number | null;
pricePerMillionOutputTokens: number | null;
tokenizerId: string | null;
notes?: string;
notesUrl?: string;
legacy: true;
legacyReason: string;
alpha?: boolean;
small?: boolean;
reasoning?: boolean;
recommendedForCoding?: boolean;
recommendedForWriting?: boolean;
supportsImageInput?: boolean;
openRouterModelId?: string;
releaseDate?: string;
};
type ModelInfoDeprecated = {
name: string;
provider: AI_PROVIDER_TYPE;
id: ModelEnum;
contextWindowTokenLimit: number;
outputTokenLimit: number | null;
pricePerMillionInputTokens: number | null;
pricePerMillionOutputTokens: number | null;
tokenizerId: string | null;
notes?: string;
notesUrl?: string;
legacy: true;
legacyReason: string;
deprecated: true;
alpha?: boolean;
small?: boolean;
reasoning?: boolean;
recommendedForCoding?: boolean;
recommendedForWriting?: boolean;
supportsImageInput?: boolean;
openRouterModelId?: string;
releaseDate: string;
};
type ModelInfo = ModelInfoCurrent | ModelInfoLegacy | ModelInfoDeprecated;
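/*
 * Usage sketch for the ModelInfo union: `legacy` acts as the discriminant, and
 * deprecated entries additionally carry `deprecated: true`. A minimal sketch
 * using getModelInfoWithId (declared further down in this file).
 *
 *   import { getModelInfoWithId, ModelEnum, type ModelInfo } from 'llm-info';
 *
 *   function describe(info: ModelInfo): string {
 *     if (info.legacy) {
 *       // ModelInfoLegacy or ModelInfoDeprecated: both expose legacyReason.
 *       const suffix = 'deprecated' in info ? ' (deprecated)' : '';
 *       return `${info.name}${suffix}: ${info.legacyReason}`;
 *     }
 *     // ModelInfoCurrent
 *     return `${info.name}: ${info.contextWindowTokenLimit} context tokens`;
 *   }
 *
 *   describe(getModelInfoWithId(ModelEnum['gpt-4']));
 */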
type OpenRouterModelResponse = {
data: Array<{
id: string;
name: string;
created: number;
description: string;
architecture: {
input_modalities: string[];
output_modalities: string[];
tokenizer: string;
};
top_provider: {
is_moderated: boolean;
};
pricing: {
prompt: string;
completion: string;
image: string;
request: string;
input_cache_read: string;
input_cache_write: string;
web_search: string;
internal_reasoning: string;
};
context_length: number;
per_request_limits: Record<string, string>;
}>;
};
/**
* Convert OpenRouter model to ModelInfo
* @param model OpenRouter model
* @returns ModelInfo object
*/
declare function convertOpenRouterModelToModelInfo(model: OpenRouterModelResponse['data'][0]): ModelInfo;
/**
* Get all available models from OpenRouter API
* @returns Promise with the OpenRouter API response
*/
declare function getOpenRouterModels(): Promise<OpenRouterModelResponse>;
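/*
 * Usage sketch for the OpenRouter helpers: fetch the live model list and
 * normalize one entry into a ModelInfo. Network access to the OpenRouter API
 * is assumed.
 *
 *   import { getOpenRouterModels, convertOpenRouterModelToModelInfo } from 'llm-info';
 *
 *   async function firstOpenRouterModel() {
 *     const response = await getOpenRouterModels();
 *     const [first] = response.data;
 *     if (!first) return undefined;
 *     const info = convertOpenRouterModelToModelInfo(first);
 *     console.log(info.name, info.contextWindowTokenLimit, info.pricePerMillionInputTokens);
 *     return info;
 *   }
 */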
/**
* Get all models from a specific provider
* @param provider The AI provider to get models from
* @returns Promise with an array of ModelInfo objects for the specified provider
*/
declare function getModelsByProvider(provider: AI_PROVIDER_TYPE): Promise<ModelInfo[]>;
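/*
 * Usage sketch for getModelsByProvider: list the Anthropic models known to the
 * package, largest context window first. Top-level await (ESM) and the
 * 'llm-info' import path are assumed.
 *
 *   import { AI_PROVIDERS, getModelsByProvider } from 'llm-info';
 *
 *   const anthropicModels = await getModelsByProvider(AI_PROVIDERS.ANTHROPIC);
 *   anthropicModels
 *     .sort((a, b) => b.contextWindowTokenLimit - a.contextWindowTokenLimit)
 *     .forEach((m) => console.log(m.id, m.contextWindowTokenLimit));
 */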
/**
* Get all models that share the same API model ID
* This is useful for models like DeepSeek V3 and V3.1 that use the same API endpoint
* @param apiModelId The API model ID to search for
* @returns Array of ModelInfo objects that share the same API model ID
*/
declare function getModelsByApiId(apiModelId: string): ModelInfo[];
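/*
 * Usage sketch for getModelsByApiId: several catalog entries can share one API
 * model ID (the JSDoc above mentions DeepSeek V3 and V3.1 sharing an endpoint),
 * so the lookup returns an array. 'deepseek-chat' is used here purely as an
 * illustrative ID.
 *
 *   import { getModelsByApiId } from 'llm-info';
 *
 *   const sharedEndpointModels = getModelsByApiId('deepseek-chat');
 *   for (const model of sharedEndpointModels) {
 *     console.log(model.name, model.releaseDate ?? 'release date unknown');
 *   }
 */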
declare const ModelInfoMap: Record<ModelLike, Omit<ModelInfoCurrent, 'id'> | Omit<ModelInfoLegacy, 'id'>>;
declare function getModelInfoWithId(id: ModelLike): ModelInfo;
declare function getAllModelsWithIds(): ModelInfo[];
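/*
 * Usage sketch for the static lookups: ModelInfoMap is keyed by ModelLike and
 * omits the `id` field, while getModelInfoWithId / getAllModelsWithIds return
 * full ModelInfo records with the id attached. Pricing fields may be null.
 *
 *   import { ModelEnum, ModelInfoMap, getModelInfoWithId, getAllModelsWithIds } from 'llm-info';
 *
 *   const gpt4o = ModelInfoMap[ModelEnum['gpt-4o']];
 *   console.log(gpt4o.contextWindowTokenLimit, gpt4o.pricePerMillionInputTokens);
 *
 *   const withId = getModelInfoWithId(ModelEnum['gpt-4o']);
 *   console.log(withId.id);
 *
 *   // Models flagged as recommended for coding, if any carry that flag.
 *   const codingModels = getAllModelsWithIds().filter((m) => m.recommendedForCoding);
 */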
interface DeprecatedModelMapping {
modelId: ModelEnum;
deprecatedName: string;
deprecatedReleaseDate?: string;
transitionDate?: string;
notes?: string;
deprecatedModelInfo: ModelInfoDeprecated;
}
declare const DEPRECATED_MODEL_MAPPINGS: DeprecatedModelMapping[];
declare function getDeprecatedMappingByModelId(modelId: string): DeprecatedModelMapping[];
declare function getAllDeprecatedMappings(): DeprecatedModelMapping[];
declare function getAllDeprecatedModelsInfo(): ModelInfoDeprecated[];
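/*
 * Usage sketch for the deprecation helpers: check whether a model ID has
 * deprecated predecessors and, if so, when the transition happened. 'gpt-4' is
 * only an illustrative ID; whether it appears in DEPRECATED_MODEL_MAPPINGS
 * depends on the data shipped with the package.
 *
 *   import { getDeprecatedMappingByModelId, getAllDeprecatedModelsInfo } from 'llm-info';
 *
 *   for (const mapping of getDeprecatedMappingByModelId('gpt-4')) {
 *     console.log(mapping.deprecatedName, mapping.transitionDate ?? 'transition date unknown');
 *   }
 *
 *   // All deprecated entries across the catalog.
 *   const deprecated = getAllDeprecatedModelsInfo();
 */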
export { AI_PROVIDERS, AI_PROVIDER_CONFIG, type AI_PROVIDER_CONFIG_TYPE, AI_PROVIDER_NAME_MAP, type AI_PROVIDER_TYPE, AllModelLikes, AllModels, DEPRECATED_MODEL_MAPPINGS, type DeprecatedModelMapping, type FIRST_PARTY_BASE_URL_AI_PROVIDER_TYPE, type FIRST_PARTY_SDK_AI_PROVIDER_TYPE, ModelEnum, type ModelInfo, type ModelInfoCurrent, type ModelInfoDeprecated, type ModelInfoLegacy, ModelInfoMap, type ModelLike, NonModelEnum, type OpenRouterModelResponse, type THIRD_PARTY_AI_PROVIDER_TYPE, convertOpenRouterModelToModelInfo, getAllDeprecatedMappings, getAllDeprecatedModelsInfo, getAllModelsWithIds, getDeprecatedMappingByModelId, getModelInfoWithId, getModelsByApiId, getModelsByProvider, getOpenRouterModels };