llm-emulator
Version: 0.5.1
Enterprise-grade LLM mock server for local and CI: scenarios, faults, latency, contracts, VCR. Supports standalone server and Express middleware.
48 lines (47 loc) • 1.07 kB
JSON
{
"name": "llm-emulator",
"version": "0.5.1",
"description": "Enterprise-grade LLM mock server for local and CI: scenarios, faults, latency, contracts, VCR. Supports standalone server and Express middleware.",
"type": "module",
"bin": {
"llm-emulator": "./bin/llm-emulator.js"
},
"exports": {
".": {
"import": "./src/index.js"
}
},
"scripts": {
"start": "node ./bin/llm-emulator.js ./examples/config.mjs",
"dev": "node ./bin/llm-emulator.js ./examples/config.mjs --env local --scenario checkout-graph",
"test": "node --test --experimental-test-coverage"
},
"keywords": [
"llm",
"mock",
"llm-mock",
"mock-llm",
"testing",
"openai",
"anthropic",
"gemini",
"ollama",
"scenarios",
"faults",
"vcr",
"middleware",
"express"
],
"author": "mapredux@gmail.com",
"license": "MIT",
"engines": {
"node": ">=18.17"
},
"dependencies": {
"ajv": "^8.17.1",
"cors": "^2.8.5",
"express": "^4.19.2",
"minimist": "^1.2.8",
"uuid": "^9.0.1"
}
}