
llm-info

Information on LLM models: context window token limits, output token limits, pricing, and more

import { AI_PROVIDER_TYPE } from './provider';

export type ModelInfoCurrent = {
  name: string;
  provider: AI_PROVIDER_TYPE;
  id: string;
  contextWindowTokenLimit: number;
  outputTokenLimit: number | null;
  pricePerMillionInputTokens: number | null;
  pricePerMillionOutputTokens: number | null;
  tokenizerId: string | null;
  notes?: string;
  notesUrl?: string;
  legacy: false;
  alpha?: boolean;
  small?: boolean;
  reasoning?: boolean;
  recommendedForCoding?: boolean;
  recommendedForWriting?: boolean;
  supportsImageInput?: boolean;
  openRouterModelId?: string;
};

export type ModelInfoLegacy = {
  name: string;
  provider: AI_PROVIDER_TYPE;
  id: string;
  contextWindowTokenLimit: number;
  outputTokenLimit: number | null;
  pricePerMillionInputTokens: number | null;
  pricePerMillionOutputTokens: number | null;
  tokenizerId: string | null;
  notes?: string;
  notesUrl?: string;
  legacy: true;
  legacyReason: string;
  alpha?: boolean;
  small?: boolean;
  reasoning?: boolean;
  recommendedForCoding?: boolean;
  recommendedForWriting?: boolean;
  supportsImageInput?: boolean;
  openRouterModelId?: string;
};

export type ModelInfo = ModelInfoCurrent | ModelInfoLegacy;
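
ModelInfo is a discriminated union on the literal-typed legacy field: checking model.legacy narrows a value to ModelInfoCurrent or ModelInfoLegacy, which is what makes legacyReason safely accessible only on the legacy branch, and the nullable pricing fields force callers to handle models with unknown prices. The following consumer sketch illustrates this; the import path and the helper names (describeModel, estimateCostUSD) are assumptions for illustration, not part of the package's documented API.

import type { ModelInfo } from './model'; // path is an assumption

// Narrowing on the `legacy` discriminant exposes `legacyReason`
// only when `legacy` is true.
export function describeModel(model: ModelInfo): string {
  return model.legacy
    ? `${model.name} (legacy: ${model.legacyReason})`
    : model.name;
}

// Estimate the cost of a request in USD from the per-million-token prices.
// Returns null when the model's pricing is unknown.
export function estimateCostUSD(
  model: ModelInfo,
  inputTokens: number,
  outputTokens: number
): number | null {
  if (
    model.pricePerMillionInputTokens === null ||
    model.pricePerMillionOutputTokens === null
  ) {
    return null;
  }
  return (
    (inputTokens / 1_000_000) * model.pricePerMillionInputTokens +
    (outputTokens / 1_000_000) * model.pricePerMillionOutputTokens
  );
}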