UNPKG

inference-server

Version:

Libraries and a server for building AI applications. Adapters for various native bindings allow local inference. Integrate it with your application, or run it as a microservice.

14 lines 724 B
import { createChatCompletionHandler } from './handlers/chat.js';
import { createCompletionHandler } from './handlers/completions.js';
import { createModelsHandler } from './handlers/models.js';
import { createEmbeddingsHandler } from './handlers/embeddings.js';

// See OpenAI API specs at https://github.com/openai/openai-openapi/blob/master/openapi.yaml

/**
 * Builds the set of OpenAI-compatible request handlers, each bound to the
 * given inference server.
 *
 * @param {object} inferenceServer - The server instance each handler factory is invoked with.
 * @returns {{ chatCompletions: Function, completions: Function, models: Function, embeddings: Function }}
 *   Handlers keyed by OpenAI endpoint name.
 */
export function createOpenAIRequestHandlers(inferenceServer) {
  // Map each endpoint name to its handler factory, then instantiate all of
  // them against the same server. Object.entries preserves insertion order,
  // so the returned object has the same key order as before.
  const factories = {
    chatCompletions: createChatCompletionHandler,
    completions: createCompletionHandler,
    models: createModelsHandler,
    embeddings: createEmbeddingsHandler,
  };
  const handlers = {};
  for (const [endpoint, makeHandler] of Object.entries(factories)) {
    handlers[endpoint] = makeHandler(inferenceServer);
  }
  return handlers;
}
//# sourceMappingURL=index.js.map