antarys

High-performance Node.js client for Antarys vector database with HTTP/2, connection pooling, and intelligent caching
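A minimal usage sketch, assuming the package entry point re-exports createClient from this module; the server URL and collection name are placeholders, and the id and metadata record fields are illustrative (only the values/vector field is checked by this client). The full compiled client module follows below.

const { createClient } = require("antarys");

async function main() {
    // Placeholder URL; point this at a running Antarys server.
    const client = createClient("http://localhost:8080", { debug: true });

    await client.createCollection({ name: "documents", dimensions: 384 });

    // `values` (or `vector`) is the field the client validates against the
    // collection's dimensions; `id` and `metadata` are assumed record fields.
    await client.upsert("documents", [
        { id: "doc-1", values: new Array(384).fill(0.1), metadata: { title: "hello" } }
    ]);

    console.log(await client.health());
    await client.close();
}

main().catch(console.error);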

"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); exports.Client = void 0; exports.createClient = createClient; const https = __importStar(require("https")); const http = __importStar(require("http")); const url_1 = require("url"); const util_1 = require("util"); const zlib_1 = require("zlib"); const perf_hooks_1 = require("perf_hooks"); const events_1 = require("events"); const os_1 = require("os"); const vector_ops_1 = require("./vector_ops"); const caching_1 = require("./caching"); const gzipAsync = (0, util_1.promisify)(zlib_1.gzip); const gunzipAsync = (0, util_1.promisify)(zlib_1.gunzip); const brotliCompressAsync = (0, util_1.promisify)(zlib_1.brotliCompress); const brotliDecompressAsync = (0, util_1.promisify)(zlib_1.brotliDecompress); class ModuleLogger { constructor(enabled = false) { this.enabled = enabled; } debug(message, ...args) { if (this.enabled) console.debug(`[DEBUG] ${message}`, ...args); } info(message, ...args) { if (this.enabled) console.info(`[INFO] ${message}`, ...args); } warn(message, ...args) { if (this.enabled) console.warn(`[WARN] ${message}`, ...args); } error(message, ...args) { if (this.enabled) console.error(`[ERROR] ${message}`, ...args); } } const globalClients = new Set(); let globalSigintHandlerInstalled = false; function installGlobalSigintHandler() { if (globalSigintHandlerInstalled) return; globalSigintHandlerInstalled = true; const cleanup = async () => { const cleanupPromises = Array.from(globalClients).map(async (client) => { try { await client.close(); } catch (error) { } }); try { await Promise.race([ Promise.all(cleanupPromises), new Promise(resolve => setTimeout(resolve, 2000)) ]); } catch (error) { } process.exit(0); }; process.on('SIGINT', cleanup); process.on('SIGTERM', cleanup); } class Client extends events_1.EventEmitter { constructor(host, config = {}) { super(); this.collectionCache = new Map(); this.requestCount = 0; this.requestTimes = []; this.startTime = Date.now(); this.isDestroyed = false; this.pendingRequests = new Set(); this.host = host.replace(/\/$/, ''); const cpuCount = (0, os_1.cpus)().length; this.config = { timeout: config.timeout ?? 120, connectionPoolSize: config.connectionPoolSize ?? Math.max(cpuCount * 5, 20), retryAttempts: config.retryAttempts ?? 5, compression: config.compression ?? 
true, debug: config.debug ?? false, cacheSize: config.cacheSize ?? 1000, threadPoolSize: config.threadPoolSize ?? Math.max(cpuCount * 2, 8), cacheTtl: config.cacheTtl ?? 300 }; this.logger = new ModuleLogger(this.config.debug); this.queryCache = new caching_1.QueryCache(this.config.cacheSize, this.config.cacheTtl * 1000); this.bufferPool = new caching_1.BufferPool(50); this.cleanupInterval = setInterval(() => this.cleanup(), 30000); globalClients.add(this); installGlobalSigintHandler(); this.on('error', (error) => { this.logger.error('Client error:', error); }); } async createCollection(params) { const payload = { name: params.name, dimensions: params.dimensions, enable_hnsw: params.enableHnsw ?? true, shards: params.shards ?? 16, m: params.m ?? 16, ef_construction: params.efConstruction ?? 200 }; const response = await this.request({ method: 'POST', path: '/collections', body: JSON.stringify(payload) }); if (response.success) { this.collectionCache.set(params.name, { name: params.name, dimensions: params.dimensions, enableHnsw: params.enableHnsw ?? true, shards: params.shards ?? 16, m: params.m ?? 16, efConstruction: params.efConstruction ?? 200 }); } return response; } async getCollectionDimensions(collectionName) { const cached = this.collectionCache.get(collectionName); if (cached?.dimensions) { return cached.dimensions; } try { const info = await this.request({ method: 'GET', path: `/collections/${collectionName}` }); if (info.dimensions) { this.collectionCache.set(collectionName, info); return info.dimensions; } } catch (error) { this.logger.warn(`Could not get dimensions for collection ${collectionName}:`, error); } return undefined; } async validateVectorDimensions(collectionName, vector) { const expectedDims = await this.getCollectionDimensions(collectionName); if (expectedDims === undefined) return true; if (vector.length !== expectedDims) { throw new Error(`Vector dimension mismatch: got ${vector.length}, expected ${expectedDims} for collection '${collectionName}'`); } return true; } async upsert(collectionName, records, options = {}) { if (!records.length) { return { upserted_count: 0 }; } const { validateDimensions = true } = options; if (validateDimensions && records.length > 0) { const expectedDims = await this.getCollectionDimensions(collectionName); if (expectedDims !== undefined) { for (let i = 0; i < Math.min(records.length, 10); i++) { const vector = records[i].values || records[i].vector || []; if (vector.length !== expectedDims) { throw new Error(`Vector dimension mismatch in record ${i}: got ${vector.length}, expected ${expectedDims} for collection '${collectionName}'`); } } } } const vectorOps = this.vectorOperations(collectionName); return vectorOps.upsert(records, options); } vectorOperations(collectionName = 'default') { return new vector_ops_1.VectorOperations(this.host, collectionName, { request: this.request.bind(this), queryCache: this.queryCache, bufferPool: this.bufferPool, logger: this.logger, collectionCache: this.collectionCache, getCollectionDimensions: this.getCollectionDimensions.bind(this), validateVectorDimensions: this.validateVectorDimensions.bind(this) }); } vector_operations(collectionName = 'default') { return this.vectorOperations(collectionName); } async commit() { return this.request({ method: 'POST', path: '/admin/commit' }); } async health() { return this.request({ method: 'GET', path: '/health', timeout: 10 }); } async info() { const cacheKey = 'server_info'; const cached = this.queryCache.get(cacheKey); if (cached?.matches) { return 
cached.matches; } return this.request({ method: 'GET', path: '/info', timeout: 10 }); } async clearCache() { this.queryCache.clear(); this.collectionCache.clear(); return { success: true, message: 'All caches cleared' }; } async request(options) { if (this.isDestroyed) { throw new Error('Client has been destroyed'); } const { method, path, headers = {}, body, timeout = this.config.timeout } = options; let lastError; for (let attempt = 0; attempt < this.config.retryAttempts; attempt++) { const controller = new AbortController(); this.pendingRequests.add(controller); try { const startTime = perf_hooks_1.performance.now(); const result = await this.performRequest({ method, path, headers: { 'Content-Type': 'application/json', 'Accept': 'application/json', ...(this.config.compression && { 'Accept-Encoding': 'gzip, br' }), ...headers }, body, timeout: timeout * 1000 }); const duration = perf_hooks_1.performance.now() - startTime; this.requestCount++; this.requestTimes.push(duration); if (this.requestTimes.length > 1000) { this.requestTimes = this.requestTimes.slice(-500); } this.pendingRequests.delete(controller); return result; } catch (error) { this.pendingRequests.delete(controller); lastError = error; this.logger.debug(`Request attempt ${attempt + 1} failed:`, error); if (attempt < this.config.retryAttempts - 1) { const delay = Math.min(1000 * Math.pow(2, attempt) + Math.random() * 100, 10000); await new Promise(resolve => setTimeout(resolve, delay)); } } } throw lastError; } async performRequest(options) { const url = new url_1.URL(this.host + options.path); const requestModule = url.protocol === 'https:' ? https : http; return new Promise((resolve, reject) => { const timeoutId = setTimeout(() => { reject(new Error(`Request timeout after ${options.timeout}ms`)); }, options.timeout); const req = requestModule.request({ hostname: url.hostname, port: url.port, path: url.pathname + url.search, method: options.method, headers: options.headers, timeout: options.timeout }, async (res) => { clearTimeout(timeoutId); let responseData = Buffer.alloc(0); res.on('data', (chunk) => { responseData = Buffer.concat([responseData, chunk]); }); res.on('end', async () => { try { let finalData = responseData; const contentEncoding = res.headers['content-encoding']; if (contentEncoding === 'gzip') { finalData = await gunzipAsync(responseData); } else if (contentEncoding === 'br') { finalData = await brotliDecompressAsync(responseData); } if (res.statusCode >= 400) { let errorMsg = `HTTP ${res.statusCode}`; try { const errorData = JSON.parse(finalData.toString()); if (errorData.error) { errorMsg += ` - ${errorData.error}`; } } catch { errorMsg += ` - ${finalData.toString().substring(0, 100)}`; } reject(new Error(errorMsg)); return; } const result = JSON.parse(finalData.toString()); resolve(result); } catch (parseError) { reject(parseError); } }); }); req.on('error', (error) => { clearTimeout(timeoutId); reject(error); }); req.on('timeout', () => { clearTimeout(timeoutId); req.destroy(); reject(new Error('Request timeout')); }); if (options.body) { req.write(options.body); } req.end(); }); } cleanup() { if (this.isDestroyed) return; this.emit('metrics', { requestCount: this.requestCount, avgRequestTime: this.requestTimes.length > 0 ? this.requestTimes.reduce((a, b) => a + b, 0) / this.requestTimes.length : 0, cacheStats: this.queryCache.getStats(), uptime: Date.now() - this.startTime }); } getStats() { const now = Date.now(); return { requests: { total: this.requestCount, avgTime: this.requestTimes.length > 0 ? 
this.requestTimes.reduce((a, b) => a + b, 0) / this.requestTimes.length : 0, recentTimes: this.requestTimes.slice(-10) }, cache: this.queryCache.getStats(), bufferPool: this.bufferPool.getStats(), uptime: now - this.startTime, config: this.config }; } async close() { if (this.isDestroyed) return; this.isDestroyed = true; globalClients.delete(this); for (const controller of this.pendingRequests) { try { controller.abort(); } catch (error) { } } this.pendingRequests.clear(); if (this.cleanupInterval) { clearInterval(this.cleanupInterval); this.cleanupInterval = undefined; } try { this.queryCache.destroy(); this.bufferPool.clear(); this.collectionCache.clear(); } catch (error) { } this.removeAllListeners(); } } exports.Client = Client; function createClient(host, config = {}) { return new Client(host, config); }
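A sketch of the configuration options accepted by the constructor above; every option is optional, the URL is a placeholder, and the comments restate the defaults visible in the constructor (pool and thread sizes assume an 8-core machine). Time values are given in seconds and converted to milliseconds internally.

const client = createClient("http://localhost:8080", {
    timeout: 120,            // per-request timeout, seconds
    connectionPoolSize: 40,  // default: max(cpuCount * 5, 20)
    retryAttempts: 5,        // retries back off as min(1000 * 2^attempt + jitter, 10000) ms
    compression: true,       // sends Accept-Encoding: gzip, br
    debug: false,            // enables ModuleLogger output
    cacheSize: 1000,         // QueryCache entry limit
    threadPoolSize: 16,      // default: max(cpuCount * 2, 8)
    cacheTtl: 300            // QueryCache TTL, seconds
});

// The client also emits a 'metrics' event every 30 seconds (see cleanup() above):
client.on('metrics', (m) => console.log(m));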