
inference-server

Version: 1.0.0-beta.31

Libraries and server to build AI applications. Adapters to various native bindings allowing local inference. Integrate it with your application, or use as a microservice.
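The engine bindings are not bundled with the package; they are declared as optional peer dependencies in the manifest below, so a consuming project installs only the backends it needs. A minimal consumer package.json might look like the following sketch, assuming node-llama-cpp is the chosen engine; the version ranges simply satisfy the peer dependency constraints listed further down.

{
  "dependencies": {
    "inference-server": "^1.0.0-beta.31",
    "node-llama-cpp": "^3.0.0"
  }
}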

{ "name": "inference-server", "version": "1.0.0-beta.31", "description": "Libraries and server to build AI applications. Adapters to various native bindings allowing local inference. Integrate it with your application, or use as a microservice.", "main": "dist/index.js", "source": "src/index.ts", "types": "dist/index.d.ts", "type": "module", "license": "MIT", "bin": { "infs": "./dist/cli/index.js", "inference-server": "./dist/cli/index.js" }, "repository": "github:iimez/inference-server", "bugs": { "url": "https://github.com/iimez/inference-server/issues" }, "scripts": { "upgrade": "npx npm-check-updates -i", "reinstall": "rimraf node_modules && npm install", "clean": "rimraf dist", "download-testmodels": "node ./dist/cli/index.js prepare ./tests/testmodels.config.js", "prebuild": "npm run clean", "build": "tsc -p tsconfig.release.json && tsc-alias -p tsconfig.release.json", "test": "vitest --run", "test:pool": "vitest tests/pool.test.ts", "test:openai": "vitest tests/openai.test.ts", "test:gpt4all": "vitest tests/engines/gpt4all.test.ts", "test:llama": "vitest tests/engines/node-llama-cpp.test.ts", "test:transformers": "vitest tests/engines/transformers.test.ts", "test:sd": "vitest tests/engines/stable-diffusion.test.ts", "test:experiments": "vitest tests/engines/experiments.test.ts", "test:server": "vitest tests/server.test.ts", "dev": "tsc -p tsconfig.release.json && node ./dist/standalone.js --enable-source-maps", "watch": "concurrently -k -n \"tsc,srv\" \"tsc -p tsconfig.release.json --watch --preserveWatchOutput --assumeChangesOnlyAffectDirectDependencies\" \"node --watch ./dist/standalone.js --enable-source-maps\"", "start": "cross-env NODE_ENV=production node dist/standalone.js" }, "keywords": [ "local ai", "inference server", "model pool", "gpt4all", "node-llama-cpp", "transformers.js", "llama.cpp", "chatbot", "bot", "llm", "ai", "nlp", "openai api" ], "engines": { "node": ">=18.16.0" }, "imports": { "#package/*": "./dist/*" }, "peerDependencies": { "@huggingface/transformers": ">=3.0.0", "@lmagder/node-stable-diffusion-cpp": ">=0.1.6", "gpt4all": ">=4.0.0", "node-llama-cpp": ">=3.0.0" }, "peerDependenciesMeta": { "node-llama-cpp": { "optional": true }, "gpt4all": { "optional": true }, "@huggingface/transformers": { "optional": true }, "@lmagder/node-stable-diffusion-cpp": { "optional": true } }, "dependencies": { "@alexanderolsen/libsamplerate-js": "^2.1.2", "@huggingface/gguf": "^0.1.12", "ajv": "^8.17.1", "audio-decode": "^2.2.2", "chalk": "^5.4.1", "cors": "^2.8.5", "express": "^4.21.2", "ipull": "^3.9.2", "micromatch": "^4.0.8", "nanoid": "^5.1.2", "p-queue": "^8.1.0", "pretty-bytes": "^6.1.1", "pretty-ms": "^9.2.0", "proper-lockfile": "^4.1.2", "sharp": "^0.33.5", "wavefile": "^11.0.0", "yargs": "^17.7.2" }, "devDependencies": { "@huggingface/transformers": "^3.3.3", "@lmagder/node-stable-diffusion-cpp": "^0.1.7", "@types/cors": "^2.8.17", "@types/express": "^5.0.0", "@types/micromatch": "^4.0.9", "@types/node": "^22.13.5", "@types/proper-lockfile": "^4.1.4", "@types/supertest": "^6.0.2", "@types/yargs": "^17.0.33", "concurrently": "^9.1.2", "cross-env": "^7.0.3", "gpt4all": "^4.0.0", "node-llama-cpp": "^3.6.0", "openai": "^4.85.4", "supertest": "^7.0.0", "tsc-alias": "^1.8.10", "typescript": "^5.7.3", "vite-tsconfig-paths": "^5.1.4", "vitest": "^3.0.6" } }