@knn_labs/conduit-core-client

Official Node.js client library for Conduit Core API - OpenAI-compatible multi-provider LLM gateway
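A minimal quick-start sketch based on the exports in the bundle below. The CONDUIT_API_KEY environment variable name and the model id are illustrative, and the response is assumed to follow the OpenAI-compatible chat-completion shape implied by the /v1/chat/completions endpoint:

const { ConduitCoreClient } = require('@knn_labs/conduit-core-client');

async function main() {
  // fromApiKey is a convenience factory; baseURL defaults to https://api.conduit.ai
  const client = ConduitCoreClient.fromApiKey(process.env.CONDUIT_API_KEY);

  const completion = await client.chat.completions.create({
    model: 'gpt-4', // illustrative model id
    messages: [{ role: 'user', content: 'Hello, Conduit!' }],
  });

  // Assumed OpenAI-compatible response shape.
  console.log(completion.choices[0].message.content);
}

main().catch(console.error);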

'use strict';

var axios = require('axios');

function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }

var axios__default = /*#__PURE__*/_interopDefault(axios);

// src/client/BaseClient.ts

// src/utils/errors.ts
var ConduitError = class _ConduitError extends Error {
  constructor(message, statusCode, code, type, param) {
    super(message);
    this.name = "ConduitError";
    this.statusCode = statusCode;
    this.code = code;
    this.type = type;
    this.param = param;
    Object.setPrototypeOf(this, _ConduitError.prototype);
  }
  static fromErrorResponse(response, statusCode) {
    return new _ConduitError(
      response.error.message,
      statusCode,
      response.error.code || void 0,
      response.error.type,
      response.error.param || void 0
    );
  }
};
var AuthenticationError = class _AuthenticationError extends ConduitError {
  constructor(message = "Authentication failed") {
    super(message, 401, "authentication_error", "invalid_request_error");
    this.name = "AuthenticationError";
    Object.setPrototypeOf(this, _AuthenticationError.prototype);
  }
};
var RateLimitError = class _RateLimitError extends ConduitError {
  constructor(message = "Rate limit exceeded", retryAfter) {
    super(message, 429, "rate_limit_error", "rate_limit_error");
    this.name = "RateLimitError";
    this.retryAfter = retryAfter;
    Object.setPrototypeOf(this, _RateLimitError.prototype);
  }
};
var ValidationError = class _ValidationError extends ConduitError {
  constructor(message, param) {
    super(message, 400, "validation_error", "invalid_request_error", param);
    this.name = "ValidationError";
    Object.setPrototypeOf(this, _ValidationError.prototype);
  }
};
var NetworkError = class _NetworkError extends ConduitError {
  constructor(message = "Network request failed") {
    super(message);
    this.name = "NetworkError";
    Object.setPrototypeOf(this, _NetworkError.prototype);
  }
};
var StreamError = class _StreamError extends ConduitError {
  constructor(message = "Stream processing failed") {
    super(message);
    this.name = "StreamError";
    Object.setPrototypeOf(this, _StreamError.prototype);
  }
};

// src/client/BaseClient.ts
var BaseClient = class {
  constructor(config) {
    this.config = {
      apiKey: config.apiKey,
      baseURL: config.baseURL || "https://api.conduit.ai",
      timeout: config.timeout || 6e4,
      maxRetries: config.maxRetries || 3,
      headers: config.headers || {},
      debug: config.debug || false
    };
    this.retryConfig = {
      maxRetries: this.config.maxRetries,
      initialDelay: 1e3,
      maxDelay: 3e4,
      factor: 2
    };
    this.client = axios__default.default.create({
      baseURL: this.config.baseURL,
      timeout: this.config.timeout,
      headers: {
        "Authorization": `Bearer ${this.config.apiKey}`,
        "Content-Type": "application/json",
        "User-Agent": "@conduit/core/0.1.0",
        ...this.config.headers
      }
    });
    this.setupInterceptors();
  }
  setupInterceptors() {
    this.client.interceptors.request.use(
      (config) => {
        if (this.config.debug) {
          console.debug(`[Conduit] ${config.method?.toUpperCase()} ${config.url}`);
        }
        return config;
      },
      (error) => {
        if (this.config.debug) {
          console.error("[Conduit] Request error:", error);
        }
        return Promise.reject(error);
      }
    );
    this.client.interceptors.response.use(
      (response) => {
        if (this.config.debug) {
          console.debug(`[Conduit] Response ${response.status} from ${response.config.url}`);
        }
        return response;
      },
      (error) => {
        if (this.config.debug) {
          console.error("[Conduit] Response error:", error);
        }
        return Promise.reject(error);
      }
    );
  }
  async request(config, options) {
    const requestConfig = {
      ...config,
      signal: options?.signal,
      timeout: options?.timeout || this.config.timeout,
      headers: {
        ...config.headers,
        ...options?.headers,
        ...options?.correlationId && { "X-Correlation-Id": options.correlationId }
      }
    };
    return this.executeWithRetry(requestConfig);
  }
  async executeWithRetry(config, attempt = 1) {
    try {
      const response = await this.client.request(config);
      return response.data;
    } catch (error) {
      if (attempt >= this.retryConfig.maxRetries) {
        throw this.handleError(error);
      }
      if (this.shouldRetry(error)) {
        const delay = this.calculateDelay(attempt);
        if (this.config.debug) {
          console.debug(`[Conduit] Retrying request (attempt ${attempt + 1}) after ${delay}ms`);
        }
        await this.sleep(delay);
        return this.executeWithRetry(config, attempt + 1);
      }
      throw this.handleError(error);
    }
  }
  shouldRetry(error) {
    if (error instanceof axios.AxiosError) {
      const status = error.response?.status;
      if (status === 429 || status === 503 || status === 504) {
        return true;
      }
      if (!error.response && error.code === "ECONNABORTED") {
        return true;
      }
    }
    return false;
  }
  calculateDelay(attempt) {
    const delay = Math.min(
      this.retryConfig.initialDelay * Math.pow(this.retryConfig.factor, attempt - 1),
      this.retryConfig.maxDelay
    );
    return delay + Math.random() * 1e3;
  }
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
  handleError(error) {
    if (error instanceof axios.AxiosError) {
      const status = error.response?.status;
      const data = error.response?.data;
      if (data && this.isErrorResponse(data)) {
        const errorData = data;
        if (status === 401) {
          return new AuthenticationError(errorData.error.message);
        }
        if (status === 429) {
          const retryAfter = error.response?.headers["retry-after"];
          return new RateLimitError(
            errorData.error.message,
            retryAfter ? parseInt(retryAfter, 10) : void 0
          );
        }
        return ConduitError.fromErrorResponse(errorData, status);
      }
      if (!error.response) {
        return new NetworkError(error.message || "Network request failed");
      }
      return new ConduitError(
        error.message || "Request failed",
        status,
        error.code
      );
    }
    if (error instanceof Error) {
      return error;
    }
    return new ConduitError("An unknown error occurred");
  }
  isErrorResponse(data) {
    return !!data && typeof data === "object" && "error" in data;
  }
};

// src/models/images.ts
var IMAGE_MODELS = {
  DALL_E_2: "dall-e-2",
  DALL_E_3: "dall-e-3",
  MINIMAX_IMAGE: "minimax-image"
};
var IMAGE_MODEL_CAPABILITIES = {
  [IMAGE_MODELS.DALL_E_2]: {
    maxPromptLength: 1e3,
    supportedSizes: ["256x256", "512x512", "1024x1024"],
    supportedQualities: ["standard"],
    supportedStyles: [],
    maxImages: 10,
    supportsEdit: true,
    supportsVariation: true
  },
  [IMAGE_MODELS.DALL_E_3]: {
    maxPromptLength: 4e3,
    supportedSizes: ["1024x1024", "1792x1024", "1024x1792"],
    supportedQualities: ["standard", "hd"],
    supportedStyles: ["vivid", "natural"],
    maxImages: 1,
    supportsEdit: false,
    supportsVariation: false
  },
  [IMAGE_MODELS.MINIMAX_IMAGE]: {
    maxPromptLength: 2e3,
    supportedSizes: ["1024x1024", "1792x1024", "1024x1792"],
    supportedQualities: ["standard", "hd"],
    supportedStyles: ["vivid", "natural"],
    maxImages: 4,
    supportsEdit: false,
    supportsVariation: false
  }
};
var IMAGE_DEFAULTS = {
  model: IMAGE_MODELS.DALL_E_3,
  n: 1,
  quality: "standard",
  response_format: "url",
  size: "1024x1024",
  style: "vivid"
};

// src/utils/validation.ts
function validateChatCompletionRequest(request) {
  if (!request.model) {
    throw new ValidationError("Model is required", "model");
  }
  if (!request.messages || !Array.isArray(request.messages)) {
    throw new ValidationError("Messages must be an array", "messages");
  }
  if (request.messages.length === 0) {
    throw new ValidationError("Messages array cannot be empty", "messages");
  }
  for (let i = 0; i < request.messages.length; i++) {
    const message = request.messages[i];
    if (!message.role) {
      throw new ValidationError(`Message at index ${i} must have a role`, "messages");
    }
    const validRoles = ["system", "user", "assistant", "tool"];
    if (!validRoles.includes(message.role)) {
      throw new ValidationError(
        `Invalid role '${message.role}' at index ${i}. Must be one of: ${validRoles.join(", ")}`,
        "messages"
      );
    }
    if (message.content === null && !message.tool_calls) {
      throw new ValidationError(
        `Message at index ${i} must have content or tool_calls`,
        "messages"
      );
    }
    if (message.role === "tool" && !message.tool_call_id) {
      throw new ValidationError(
        `Tool message at index ${i} must have tool_call_id`,
        "messages"
      );
    }
  }
  if (request.temperature !== void 0) {
    if (request.temperature < 0 || request.temperature > 2) {
      throw new ValidationError("Temperature must be between 0 and 2", "temperature");
    }
  }
  if (request.top_p !== void 0) {
    if (request.top_p < 0 || request.top_p > 1) {
      throw new ValidationError("top_p must be between 0 and 1", "top_p");
    }
  }
  if (request.frequency_penalty !== void 0) {
    if (request.frequency_penalty < -2 || request.frequency_penalty > 2) {
      throw new ValidationError("frequency_penalty must be between -2 and 2", "frequency_penalty");
    }
  }
  if (request.presence_penalty !== void 0) {
    if (request.presence_penalty < -2 || request.presence_penalty > 2) {
      throw new ValidationError("presence_penalty must be between -2 and 2", "presence_penalty");
    }
  }
  if (request.n !== void 0 && request.n < 1) {
    throw new ValidationError("n must be at least 1", "n");
  }
  if (request.max_tokens !== void 0 && request.max_tokens < 1) {
    throw new ValidationError("max_tokens must be at least 1", "max_tokens");
  }
}
function validateImageGenerationRequest(request) {
  if (!request.prompt) {
    throw new ValidationError("Prompt is required", "prompt");
  }
  if (request.prompt.trim().length === 0) {
    throw new ValidationError("Prompt cannot be empty", "prompt");
  }
  if (request.model && IMAGE_MODEL_CAPABILITIES[request.model]) {
    const capabilities = IMAGE_MODEL_CAPABILITIES[request.model];
    if (request.prompt.length > capabilities.maxPromptLength) {
      throw new ValidationError(
        `Prompt exceeds maximum length of ${capabilities.maxPromptLength} characters for model ${request.model}`,
        "prompt"
      );
    }
    if (request.n !== void 0 && request.n > capabilities.maxImages) {
      throw new ValidationError(
        `Number of images (${request.n}) exceeds maximum of ${capabilities.maxImages} for model ${request.model}`,
        "n"
      );
    }
    if (request.size && !capabilities.supportedSizes.includes(request.size)) {
      throw new ValidationError(
        `Size '${request.size}' is not supported for model ${request.model}. Supported sizes: ${capabilities.supportedSizes.join(", ")}`,
        "size"
      );
    }
    if (request.quality && !capabilities.supportedQualities.includes(request.quality)) {
      throw new ValidationError(
        `Quality '${request.quality}' is not supported for model ${request.model}. Supported qualities: ${capabilities.supportedQualities.join(", ")}`,
        "quality"
      );
    }
    if (request.style && capabilities.supportedStyles.length > 0 && !capabilities.supportedStyles.includes(request.style)) {
      throw new ValidationError(
        `Style '${request.style}' is not supported for model ${request.model}. Supported styles: ${capabilities.supportedStyles.join(", ")}`,
        "style"
      );
    }
  }
  if (request.n !== void 0 && (request.n < 1 || request.n > 10)) {
    throw new ValidationError("Number of images must be between 1 and 10", "n");
  }
  if (request.response_format && !["url", "b64_json"].includes(request.response_format)) {
    throw new ValidationError('response_format must be either "url" or "b64_json"', "response_format");
  }
  if (request.quality && !["standard", "hd"].includes(request.quality)) {
    throw new ValidationError('quality must be either "standard" or "hd"', "quality");
  }
  if (request.style && !["vivid", "natural"].includes(request.style)) {
    throw new ValidationError('style must be either "vivid" or "natural"', "style");
  }
  const validSizes = ["256x256", "512x512", "1024x1024", "1792x1024", "1024x1792"];
  if (request.size && !validSizes.includes(request.size)) {
    throw new ValidationError(
      `size must be one of: ${validSizes.join(", ")}`,
      "size"
    );
  }
}

// src/utils/streaming.ts
async function* streamAsyncIterator(stream) {
  let buffer = "";
  for await (const chunk of stream) {
    buffer += chunk.toString();
    const lines = buffer.split("\n");
    buffer = lines.pop() || "";
    for (const line of lines) {
      const trimmedLine = line.trim();
      if (trimmedLine === "" || trimmedLine.startsWith(":")) {
        continue;
      }
      if (trimmedLine.startsWith("data: ")) {
        const data = trimmedLine.slice(6);
        if (data === "[DONE]") {
          return;
        }
        try {
          const event = JSON.parse(data);
          yield event;
        } catch (error) {
          console.error("Failed to parse SSE data:", data);
          throw new StreamError(`Failed to parse stream event: ${data}`);
        }
      }
    }
  }
  if (buffer.trim()) {
    console.warn("Unprocessed data in buffer:", buffer);
  }
}

// src/services/ChatService.ts
var ChatService = class {
  constructor(client) {
    this.client = client;
  }
  async create(request, options) {
    validateChatCompletionRequest(request);
    if (request.stream === true) {
      return this.createStream(request, options);
    }
    return this.createCompletion(request, options);
  }
  async createCompletion(request, options) {
    return this.client["request"](
      {
        method: "POST",
        url: "/v1/chat/completions",
        data: request
      },
      options
    );
  }
  async createStream(request, options) {
    const response = await this.client["client"].post("/v1/chat/completions", request, {
      responseType: "stream",
      signal: options?.signal,
      timeout: 0,
      headers: {
        ...options?.headers,
        ...options?.correlationId && { "X-Correlation-Id": options.correlationId }
      }
    });
    return streamAsyncIterator(response.data);
  }
};

// src/services/ModelsService.ts
var ModelsService = class {
  // 5 minutes
  constructor(client) {
    this.client = client;
    this.cacheTTL = 5 * 60 * 1e3;
  }
  async list(options) {
    if (options?.useCache !== false && this.isCacheValid()) {
      return this.cachedModels;
    }
    const response = await this.client["request"](
      {
        method: "GET",
        url: "/v1/models"
      },
      options
    );
    this.cachedModels = response.data;
    this.cacheExpiry = Date.now() + this.cacheTTL;
    return response.data;
  }
  async get(modelId, options) {
    const models = await this.list(options);
    return models.find((model) => model.id === modelId) || null;
  }
  async exists(modelId, options) {
    const model = await this.get(modelId, options);
    return model !== null;
  }
  clearCache() {
    this.cachedModels = void 0;
    this.cacheExpiry = void 0;
  }
  isCacheValid() {
    return !!(this.cachedModels && this.cacheExpiry && Date.now() < this.cacheExpiry);
  }
};

// src/services/ImagesService.ts
var ImagesService = class {
  constructor(client) {
    this.client = client;
  }
  /**
   * Creates an image given a text prompt.
   * @param request The image generation request
   * @param options Optional request options
   * @returns Promise resolving to image generation response
   */
  async generate(request, options) {
    validateImageGenerationRequest(request);
    return this.client["request"](
      {
        method: "POST",
        url: "/v1/images/generations",
        data: request
      },
      options
    );
  }
  /**
   * Creates an edited or extended image given an original image and a prompt.
   * @param request The image edit request
   * @param options Optional request options
   * @returns Promise resolving to image edit response
   */
  async edit(request, options) {
    const formData = new FormData();
    formData.append("image", request.image);
    formData.append("prompt", request.prompt);
    if (request.mask) { formData.append("mask", request.mask); }
    if (request.model) { formData.append("model", request.model); }
    if (request.n !== void 0) { formData.append("n", request.n.toString()); }
    if (request.response_format) { formData.append("response_format", request.response_format); }
    if (request.size) { formData.append("size", request.size); }
    if (request.user) { formData.append("user", request.user); }
    return this.client["request"](
      {
        method: "POST",
        url: "/v1/images/edits",
        data: formData,
        headers: { "Content-Type": "multipart/form-data" }
      },
      options
    );
  }
  /**
   * Creates a variation of a given image.
   * @param request The image variation request
   * @param options Optional request options
   * @returns Promise resolving to image variation response
   */
  async createVariation(request, options) {
    const formData = new FormData();
    formData.append("image", request.image);
    if (request.model) { formData.append("model", request.model); }
    if (request.n !== void 0) { formData.append("n", request.n.toString()); }
    if (request.response_format) { formData.append("response_format", request.response_format); }
    if (request.size) { formData.append("size", request.size); }
    if (request.user) { formData.append("user", request.user); }
    return this.client["request"](
      {
        method: "POST",
        url: "/v1/images/variations",
        data: formData,
        headers: { "Content-Type": "multipart/form-data" }
      },
      options
    );
  }
};

// src/client/ConduitCoreClient.ts
var ConduitCoreClient = class _ConduitCoreClient extends BaseClient {
  constructor(config) {
    super(config);
    this.chat = {
      completions: new ChatService(this)
    };
    this.images = new ImagesService(this);
    this.models = new ModelsService(this);
  }
  static fromApiKey(apiKey, baseURL) {
    return new _ConduitCoreClient({ apiKey, baseURL });
  }
};

// src/utils/capabilities.ts
var CoreModelCapability = /* @__PURE__ */ ((CoreModelCapability2) => {
  CoreModelCapability2["CHAT"] = "chat";
  CoreModelCapability2["VISION"] = "vision";
  CoreModelCapability2["IMAGE_GENERATION"] = "image-generation";
  CoreModelCapability2["IMAGE_EDIT"] = "image-edit";
  CoreModelCapability2["IMAGE_VARIATION"] = "image-variation";
  return CoreModelCapability2;
})(CoreModelCapability || {});
function modelSupportsCapability(modelId, capability) {
  if (modelId in IMAGE_MODEL_CAPABILITIES) {
    const imageCapabilities = IMAGE_MODEL_CAPABILITIES[modelId];
    switch (capability) {
      case "image-generation" /* IMAGE_GENERATION */:
        return true;
      // All models in IMAGE_MODEL_CAPABILITIES support generation
      case "image-edit" /* IMAGE_EDIT */:
        return imageCapabilities.supportsEdit;
      case "image-variation" /* IMAGE_VARIATION */:
        return imageCapabilities.supportsVariation;
      case "vision" /* VISION */:
      case "chat" /* CHAT */:
        return false;
      // Image generation models don't support chat/vision
      default:
        return false;
    }
  }
  const lowerModelId = modelId.toLowerCase();
  switch (capability) {
    case "chat" /* CHAT */:
      return !lowerModelId.includes("dall-e") && !lowerModelId.includes("image") && !lowerModelId.includes("stable-diffusion");
    case "vision" /* VISION */:
      return lowerModelId.includes("vision") || lowerModelId.includes("gpt-4") || lowerModelId.includes("claude-3");
    case "image-generation" /* IMAGE_GENERATION */:
      return lowerModelId.includes("dall-e") || lowerModelId.includes("image") || lowerModelId.includes("stable-diffusion") || lowerModelId.includes("minimax-image");
    default:
      return false;
  }
}
function getModelCapabilities(modelId) {
  const capabilities = [];
  Object.values(CoreModelCapability).forEach((capability) => {
    if (modelSupportsCapability(modelId, capability)) {
      capabilities.push(capability);
    }
  });
  return capabilities;
}
function validateModelCompatibility(modelId, requestType) {
  const errors = [];
  const suggestions = [];
  const capabilityMap = {
    "chat": "chat" /* CHAT */,
    "image-generation": "image-generation" /* IMAGE_GENERATION */,
    "image-edit": "image-edit" /* IMAGE_EDIT */,
    "image-variation": "image-variation" /* IMAGE_VARIATION */
  };
  const requiredCapability = capabilityMap[requestType];
  if (!modelSupportsCapability(modelId, requiredCapability)) {
    errors.push(`Model '${modelId}' does not support ${requestType}`);
    switch (requestType) {
      case "image-generation":
        suggestions.push("Try using models like: dall-e-3, dall-e-2, or minimax-image");
        break;
      case "image-edit":
      case "image-variation":
        suggestions.push("Try using dall-e-2 for image editing and variations");
        break;
      case "chat":
        suggestions.push("Try using models like: gpt-4, gpt-3.5-turbo, or claude-3");
        break;
    }
  }
  return {
    isValid: errors.length === 0,
    errors,
    suggestions: suggestions.length > 0 ? suggestions : void 0
  };
}
function getRecommendedModels(capability, preferences) {
  const { prioritizeQuality, prioritizeSpeed, prioritizeCost } = preferences || {};
  switch (capability) {
    case "chat" /* CHAT */:
      if (prioritizeQuality) {
        return ["gpt-4", "claude-3-sonnet", "gpt-3.5-turbo"];
      }
      if (prioritizeSpeed) {
        return ["gpt-3.5-turbo", "gpt-4", "claude-3-haiku"];
      }
      if (prioritizeCost) {
        return ["gpt-3.5-turbo", "claude-3-haiku", "gpt-4"];
      }
      return ["gpt-4", "gpt-3.5-turbo", "claude-3-sonnet"];
    case "vision" /* VISION */:
      if (prioritizeQuality) {
        return ["gpt-4-vision-preview", "claude-3-sonnet", "gpt-4"];
      }
      return ["gpt-4-vision-preview", "claude-3-sonnet"];
    case "image-generation" /* IMAGE_GENERATION */:
      if (prioritizeQuality) {
        return ["dall-e-3", "minimax-image", "dall-e-2"];
      }
      if (prioritizeSpeed) {
        return ["dall-e-2", "minimax-image", "dall-e-3"];
      }
      if (prioritizeCost) {
        return ["dall-e-2", "minimax-image", "dall-e-3"];
      }
      return ["dall-e-3", "dall-e-2", "minimax-image"];
    case "image-edit" /* IMAGE_EDIT */:
    case "image-variation" /* IMAGE_VARIATION */:
      return ["dall-e-2"];
    // Currently only DALL-E 2 supports these
    default:
      return [];
  }
}
function getCapabilityDisplayName(capability) {
  switch (capability) {
    case "chat" /* CHAT */:
      return "Chat Completion";
    case "vision" /* VISION */:
      return "Vision (Image Understanding)";
    case "image-generation" /* IMAGE_GENERATION */:
      return "Image Generation";
    case "image-edit" /* IMAGE_EDIT */:
      return "Image Editing";
    case "image-variation" /* IMAGE_VARIATION */:
      return "Image Variation";
    default:
      return capability;
  }
}
function areModelsEquivalent(modelA, modelB, capability) {
  if (!modelSupportsCapability(modelA, capability) || !modelSupportsCapability(modelB, capability)) {
    return false;
  }
  if (capability === "image-generation" /* IMAGE_GENERATION */) {
    const capabilitiesA = IMAGE_MODEL_CAPABILITIES[modelA];
    const capabilitiesB = IMAGE_MODEL_CAPABILITIES[modelB];
    if (capabilitiesA && capabilitiesB) {
      return capabilitiesA.maxImages === capabilitiesB.maxImages && JSON.stringify(capabilitiesA.supportedSizes) === JSON.stringify(capabilitiesB.supportedSizes);
    }
  }
  const normalizeModel = (model) => model.toLowerCase().replace(/[^a-z0-9]/g, "");
  return normalizeModel(modelA) === normalizeModel(modelB);
}

exports.AuthenticationError = AuthenticationError;
exports.ConduitCoreClient = ConduitCoreClient;
exports.ConduitError = ConduitError;
exports.CoreModelCapability = CoreModelCapability;
exports.IMAGE_DEFAULTS = IMAGE_DEFAULTS;
exports.IMAGE_MODELS = IMAGE_MODELS;
exports.IMAGE_MODEL_CAPABILITIES = IMAGE_MODEL_CAPABILITIES;
exports.NetworkError = NetworkError;
exports.RateLimitError = RateLimitError;
exports.StreamError = StreamError;
exports.ValidationError = ValidationError;
exports.areModelsEquivalent = areModelsEquivalent;
exports.getCapabilityDisplayName = getCapabilityDisplayName;
exports.getModelCapabilities = getModelCapabilities;
exports.getRecommendedModels = getRecommendedModels;
exports.modelSupportsCapability = modelSupportsCapability;
exports.validateModelCompatibility = validateModelCompatibility;
//# sourceMappingURL=index.js.map
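
Streaming sketch: with stream: true, ChatService.create() resolves to the async iterator produced by streamAsyncIterator, which yields one parsed event per SSE "data:" line and stops at [DONE]. The chunk shape below (choices[0].delta) is an assumption of OpenAI compatibility, not something defined in this bundle:

const { ConduitCoreClient } = require('@knn_labs/conduit-core-client');

async function streamChat() {
  const client = ConduitCoreClient.fromApiKey(process.env.CONDUIT_API_KEY);

  // create() forwards to createStream(), which POSTs /v1/chat/completions
  // with responseType: "stream" and no timeout, then wraps the body in
  // streamAsyncIterator.
  const stream = await client.chat.completions.create({
    model: 'gpt-4', // illustrative model id
    messages: [{ role: 'user', content: 'Write a haiku about gateways.' }],
    stream: true,
  });

  for await (const chunk of stream) {
    // Assumed OpenAI-compatible chunk shape (choices[0].delta.content).
    process.stdout.write(chunk.choices?.[0]?.delta?.content ?? '');
  }
}

streamChat().catch(console.error);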
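
Error-handling sketch: BaseClient already retries 429/503/504 responses and connection timeouts with exponential backoff up to maxRetries, then maps the failure onto the exported error classes. robustCall below is a hypothetical wrapper showing how callers can branch on those classes; retryAfter is the Retry-After header parsed as seconds:

const {
  ConduitCoreClient,
  AuthenticationError,
  RateLimitError,
  ValidationError,
  NetworkError,
} = require('@knn_labs/conduit-core-client');

const client = new ConduitCoreClient({
  apiKey: process.env.CONDUIT_API_KEY, // illustrative env var name
  maxRetries: 5,   // 429/503/504 and ECONNABORTED timeouts are retried with backoff
  timeout: 30000,  // per-request timeout in milliseconds
  debug: true,     // logs requests and responses via console.debug
});

async function robustCall(request) {
  try {
    return await client.chat.completions.create(request);
  } catch (error) {
    if (error instanceof ValidationError) {
      console.error(`Invalid request parameter '${error.param}': ${error.message}`);
    } else if (error instanceof RateLimitError) {
      console.error(`Rate limited; retry after ${error.retryAfter ?? 'unknown'} seconds`);
    } else if (error instanceof AuthenticationError) {
      console.error('Authentication failed; check the API key passed to the client');
    } else if (error instanceof NetworkError) {
      console.error(`Network failure: ${error.message}`);
    }
    throw error;
  }
}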
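
Image-generation sketch: generate() validates the request against IMAGE_MODEL_CAPABILITIES before POSTing /v1/images/generations, and validateModelCompatibility can be used as an optional pre-flight check. The prompt is illustrative, and the data[0].url response shape is an assumption of OpenAI compatibility:

const {
  ConduitCoreClient,
  IMAGE_MODELS,
  IMAGE_DEFAULTS,
  validateModelCompatibility,
} = require('@knn_labs/conduit-core-client');

async function makeImage() {
  const client = ConduitCoreClient.fromApiKey(process.env.CONDUIT_API_KEY);

  // Optional pre-flight check; generate() also throws ValidationError when
  // size/quality/style/n conflict with IMAGE_MODEL_CAPABILITIES.
  const check = validateModelCompatibility(IMAGE_MODELS.DALL_E_3, 'image-generation');
  if (!check.isValid) {
    console.warn(check.errors, check.suggestions);
    return;
  }

  const result = await client.images.generate({
    ...IMAGE_DEFAULTS,  // dall-e-3, n: 1, 1024x1024, standard quality, vivid, url format
    prompt: 'A lighthouse at dusk, watercolor style',
    quality: 'hd',      // dall-e-3 supports "standard" and "hd"
  });

  // Assumed OpenAI-compatible response: data[0].url or data[0].b64_json.
  console.log(result.data?.[0]?.url);
}

makeImage().catch(console.error);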
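
Model-listing sketch: ModelsService caches GET /v1/models for five minutes and exposes useCache: false to bypass it, while modelSupportsCapability applies the id-based heuristic above. Model objects are assumed to carry an id field, as ModelsService.get() expects:

const {
  ConduitCoreClient,
  CoreModelCapability,
  modelSupportsCapability,
} = require('@knn_labs/conduit-core-client');

async function listChatModels() {
  const client = ConduitCoreClient.fromApiKey(process.env.CONDUIT_API_KEY);

  // First call hits GET /v1/models; subsequent calls within 5 minutes use the cache.
  const models = await client.models.list();

  // Heuristic capability check based on the model id.
  const chatModels = models.filter((m) => modelSupportsCapability(m.id, CoreModelCapability.CHAT));
  console.log(chatModels.map((m) => m.id));

  // Force a fresh listing when needed.
  await client.models.list({ useCache: false });

  // get()/exists() reuse the same cached listing.
  console.log(await client.models.exists('gpt-4'));
}

listChatModels().catch(console.error);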