llm-info
Version: 1.0.69
Information on LLM models, context window token limit, output token limit, pricing and more
{
  "name": "llm-info",
  "version": "1.0.69",
  "description": "Information on LLM models, context window token limit, output token limit, pricing and more",
  "homepage": "https://eval.16x.engineer/",
  "repository": {
    "type": "git",
    "url": "git+ssh://git@github.com/paradite/llm-info.git"
  },
  "main": "dist/index.js",
  "type": "module",
  "module": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "require": "./dist/index.cjs",
      "import": "./dist/index.js"
    }
  },
  "scripts": {
    "compile": "tsup src/index.ts --dts --format esm,cjs",
    "prepublishOnly": "rm -rf ./dist && npm run compile",
    "test": "NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest",
    "test:openai": "jest test/openai.test.ts --runInBand",
    "test:anthropic": "jest test/anthropic.test.ts --runInBand"
  },
  "author": "paradite",
  "license": "MIT",
  "keywords": [
    "llm",
    "language model",
    "model",
    "info",
    "eval",
    "prompt",
    "token",
    "context-window",
    "pricing",
    "token-limit",
    "provider",
    "gpt",
    "claude",
    "gemini",
    "deepseek",
    "openai",
    "anthropic",
    "azure-openai",
    "openrouter",
    "google",
    "fireworks"
  ],
  "devDependencies": {
    "@anthropic-ai/sdk": "^0.71.2",
    "@google/genai": "^0.13.0",
    "@types/jest": "^29.5.12",
    "@xenova/transformers": "^2.17.2",
    "jest": "^29.7.0",
    "openai": "^4.82.0",
    "ts-jest": "^29.2.3",
    "ts-node": "^10.9.2",
    "tsup": "^8.2.0",
    "typescript": "^5.5.3"
  }
}
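
For context, a minimal consumer sketch (not part of the package itself) showing how the dual ESM/CJS "exports" map above resolves: ESM importers receive ./dist/index.js, while require() callers receive ./dist/index.cjs, with type declarations taken from ./dist/index.d.ts. The namespace import below deliberately avoids assuming any specific named exports of llm-info.

// ESM consumer (resolved via the "import" condition to dist/index.js).
// A namespace import is used so no particular named export is assumed.
import * as llmInfo from 'llm-info';

// Inspect whatever the package actually exposes at runtime.
console.log(Object.keys(llmInfo));

// CommonJS consumer (resolved via the "require" condition to dist/index.cjs):
//   const llmInfo = require('llm-info');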