@neureus/sdk
Version:
Neureus Platform SDK - AI-native, edge-first application platform
1 line • 12.5 kB
Source Map (JSON)
{"version":3,"sources":["../src/ai.ts"],"names":["ky"],"mappings":";;;;;;;;;AAwHO,IAAM,WAAN,MAAe;AAAA,EACZ,IAAA;AAAA,EACA,MAAA;AAAA,EAER,YAAY,MAAA,EAAwB;AAClC,IAAA,IAAA,CAAK,MAAA,GAAS;AAAA,MACZ,QAAQ,MAAA,CAAO,MAAA;AAAA,MACf,OAAA,EAAS,OAAO,OAAA,IAAW,wBAAA;AAAA,MAC3B,OAAA,EAAS,OAAO,OAAA,IAAW,GAAA;AAAA,MAC3B,OAAA,EAAS,OAAO,OAAA,IAAW,CAAA;AAAA,MAC3B,MAAA,EAAQ,OAAO,MAAA,IAAU,EAAA;AAAA,MACzB,MAAA,EAAQ,OAAO,MAAA,IAAU;AAAA,KAC3B;AAEA,IAAA,IAAA,CAAK,IAAA,GAAOA,oBAAG,MAAA,CAAO;AAAA,MACpB,SAAA,EAAW,KAAK,MAAA,CAAO,OAAA;AAAA,MACvB,OAAA,EAAS,KAAK,MAAA,CAAO,OAAA;AAAA,MACrB,KAAA,EAAO;AAAA,QACL,KAAA,EAAO,KAAK,MAAA,CAAO,OAAA;AAAA,QACnB,OAAA,EAAS,CAAC,KAAA,EAAO,MAAM,CAAA;AAAA,QACvB,WAAA,EAAa,CAAC,GAAA,EAAK,GAAA,EAAK,KAAK,GAAA,EAAK,GAAA,EAAK,KAAK,GAAG;AAAA,OACjD;AAAA,MACA,KAAA,EAAO;AAAA,QACL,aAAA,EAAe;AAAA,UACb,CAAC,OAAA,KAAY;AACX,YAAA,OAAA,CAAQ,QAAQ,GAAA,CAAI,eAAA,EAAiB,UAAU,IAAA,CAAK,MAAA,CAAO,MAAM,CAAA,CAAE,CAAA;AACnE,YAAA,OAAA,CAAQ,OAAA,CAAQ,GAAA,CAAI,cAAA,EAAgB,kBAAkB,CAAA;AACtD,YAAA,OAAA,CAAQ,OAAA,CAAQ,GAAA,CAAI,YAAA,EAAc,mBAAmB,CAAA;AAAA,UACvD;AAAA;AACF;AACF,KACD,CAAA;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKO,IAAA,GAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAkBZ,MAAA,EAAQ,OACN,QAAA,EACA,OAAA,KACoC;AACpC,MAAA,MAAM,OAAA,GAAiC;AAAA,QACrC,KAAA,EAAO,SAAS,KAAA,IAAS,eAAA;AAAA,QACzB,QAAA;AAAA,QACA,WAAA,EAAa,SAAS,WAAA,IAAe,GAAA;AAAA,QACrC,WAAW,OAAA,EAAS,SAAA;AAAA,QACpB,MAAM,OAAA,EAAS,IAAA;AAAA,QACf,kBAAkB,OAAA,EAAS,gBAAA;AAAA,QAC3B,iBAAiB,OAAA,EAAS,eAAA;AAAA,QAC1B,MAAM,OAAA,EAAS,IAAA;AAAA,QACf,MAAA,EAAQ,KAAA;AAAA,QACR,KAAA,EAAO,SAAS,KAAA,IAAS,IAAA;AAAA,QACzB,UAAU,OAAA,EAAS,QAAA;AAAA,QACnB,UAAU,OAAA,EAAS,QAAA;AAAA,QACnB,MAAA,EAAQ,OAAA,EAAS,MAAA,IAAU,IAAA,CAAK,OAAO,MAAA,IAAU,MAAA;AAAA,QACjD,MAAA,EAAQ,OAAA,EAAS,MAAA,IAAU,IAAA,CAAK,OAAO,MAAA,IAAU;AAAA,OACnD;AAEA,MAAA,OAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,qBAAA,EAAuB;AAAA,QAC3C,IAAA,EAAM;AAAA,OACP,EAAE,IAAA,EAA6B;AAAA,IAClC,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoBA,MAAA,EAAQ,OACN,QAAA,EACA,OAAA,KACsD;AACtD,MAAA,MAAM,OAAA,GAAiC;AAAA,QACrC,KAAA,EAAO,SAAS,KAAA,IAAS,eAAA;AAAA,QACzB,QAAA;AAAA,QACA,WAAA,EAAa,SAAS,WAAA,IAAe,GAAA;AAAA,QACrC,WAAW,OAAA,EAAS,SAAA;AAAA,QACpB,MAAM,OAAA,EAAS,IAAA;AAAA,QACf,kBAAkB,OAAA,EAAS,gBAAA;AAAA,QAC3B,iBAAiB,OAAA,EAAS,eAAA;AAAA,QAC1B,MAAM,OAAA,EAAS,IAAA;AAAA,QACf,MAAA,EAAQ,IAAA;AAAA,QACR,KAAA,EAAO,SAAS,KAAA,IAAS,IAAA;AAAA,QACzB,UAAU,OAAA,EAAS,QAAA;AAAA,QACnB,UAAU,OAAA,EAAS,QAAA;AAAA,QACnB,MAAA,EAAQ,OAAA,EAAS,MAAA,IAAU,IAAA,CAAK,OAAO,MAAA,IAAU,MAAA;AAAA,QACjD,MAAA,EAAQ,OAAA,EAAS,MAAA,IAAU,IAAA,CAAK,OAAO,MAAA,IAAU;AAAA,OACnD;AAEA,MAAA,MAAM,QAAA,GAAW,MAAM,IAAA,CAAK,IAAA,CAAK,KAAK,qBAAA,EAAuB;AAAA,QAC3D,IAAA,EAAM;AAAA,OACP,CAAA;AAED,MAAA,OAAO,IAAA,CAAK,cAAA,CAAe,QAAA,CAAS,IAAK,CAAA;AAAA,IAC3C;AAAA,GACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWO,MAAA,GAAS;AAAA,IACd,MAAM,YAAoC;AACxC,MAAA,OAAO,IAAA,CAAK,IAAA,CAAK,GAAA,CAAI,WAAW,EAAE,IAAA,EAAoB;AAAA,IACxD;AAAA,GACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,eAAe,IAAA,EAA4E;AACxG,IAAA,MAAM,MAAA,GAAS,KAAK,SAAA,EAAU;AAC9B,IAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY;AAChC,IAAA,IAAI,MAAA,GAAS,EAAA;AAEb,IAAA,IAAI;AACF,MAAA,OAAO,IAAA,EAAM;AACX,QAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAE1C,QAAA,IAAI,IAAA,EAAM;AAEV,QAAA,MAAA,IAAU,QAAQ,MAAA,CAAO,KAAA,EAAO,EAAE,MAAA,EAAQ,MAAM,CAAA;AAChD,QAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,KAAA,CAAM,IAAI,CAAA;AAC/B,QAAA,MAAA,GAAS,KAAA,CAAM,KAAI,IAAK,EAAA;AAExB,QAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,UAAA,MAAM,OAAA,GAAU,KAAK,IAAA,EAAK;AAE1B,UAAA,IAAI,YAAY,EAAA,EAAI;AACpB,UAAA
,IAAI,OAAA,CAAQ,UAAA,CAAW,GAAG,CAAA,EAAG;AAC7B,UAAA,IAAI,YAAY,cAAA,EAAgB;AAEhC,UAAA,IAAI,OAAA,CAAQ,UAAA,CAAW,QAAQ,CAAA,EAAG;AAChC,YAAA,MAAM,IAAA,GAAO,OAAA,CAAQ,KAAA,CAAM,CAAC,CAAA;AAC5B,YAAA,IAAI;AACF,cAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,IAAI,CAAA;AAC7B,cAAA,MAAM,KAAA;AAAA,YACR,SAAS,KAAA,EAAO;AACd,cAAA,OAAA,CAAQ,KAAA,CAAM,2BAAA,EAA6B,IAAA,EAAM,KAAK,CAAA;AAAA,YACxD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAA,SAAE;AACA,MAAA,MAAA,CAAO,WAAA,EAAY;AAAA,IACrB;AAAA,EACF;AACF;AAcO,SAAS,eAAe,MAAA,EAAkC;AAC/D,EAAA,OAAO,IAAI,SAAS,MAAM,CAAA;AAC5B","file":"ai.cjs","sourcesContent":["/**\n * Neureus AI Gateway SDK Client\n *\n * Provides a simple interface to interact with the Neureus AI Gateway,\n * supporting multiple LLM providers with automatic fallback, caching,\n * and cost optimization.\n *\n * @example\n * ```typescript\n * import { AIClient } from '@neureus/sdk/ai';\n *\n * const ai = new AIClient({\n * apiKey: 'nru_...',\n * baseUrl: 'https://api.neureus.ai'\n * });\n *\n * // Non-streaming chat completion\n * const response = await ai.chat.create({\n * model: 'gpt-4',\n * messages: [\n * { role: 'user', content: 'What is Neureus?' }\n * ]\n * });\n *\n * // Streaming chat completion\n * const stream = await ai.chat.stream({\n * model: 'gpt-4',\n * messages: [\n * { role: 'user', content: 'Tell me a story' }\n * ]\n * });\n *\n * for await (const chunk of stream) {\n * process.stdout.write(chunk.choices[0]?.delta?.content || '');\n * }\n * ```\n */\n\nimport ky, { type KyInstance } from 'ky';\nimport type {\n ChatCompletionRequest,\n ChatCompletionResponse,\n ChatCompletionStreamChunk,\n ChatMessage,\n ModelConfig,\n} from '@neureus/ai-gateway';\n\n// Re-export types from ai-gateway for convenience\nexport type {\n ChatCompletionRequest,\n ChatCompletionResponse,\n ChatCompletionStreamChunk,\n ChatMessage,\n ModelConfig,\n AIProvider,\n ProviderConfig,\n ProviderCapabilities,\n RoutingStrategy,\n CacheConfig,\n AIGatewayError,\n ProviderError,\n RateLimitError,\n AuthenticationError,\n} from '@neureus/ai-gateway';\n\n/**\n * Configuration options for AIClient\n */\nexport interface AIClientConfig {\n /**\n * Neureus API key (required)\n * Get your API key from https://app.neureus.ai/settings/api-keys\n */\n apiKey: string;\n\n /**\n * Base URL for the Neureus API\n * @default 'https://api.neureus.ai'\n */\n baseUrl?: string;\n\n /**\n * Request timeout in milliseconds\n * @default 60000 (60 seconds)\n */\n timeout?: number;\n\n /**\n * Number of retry attempts for failed requests\n * @default 3\n */\n retries?: number;\n\n /**\n * User ID for usage tracking (optional)\n * @default ''\n */\n userId?: string;\n\n /**\n * Team ID for usage tracking (optional)\n * @default ''\n */\n teamId?: string;\n}\n\n/**\n * Request options for chat completion\n */\nexport interface ChatCompletionOptions extends Omit<ChatCompletionRequest, 'messages' | 'model'> {\n /**\n * Model to use for completion\n * @default 'gpt-3.5-turbo'\n */\n model?: string;\n}\n\n/**\n * Main AI client class\n */\nexport class AIClient {\n private http: KyInstance;\n private config: Required<AIClientConfig>;\n\n constructor(config: AIClientConfig) {\n this.config = {\n apiKey: config.apiKey,\n baseUrl: config.baseUrl || 'https://api.neureus.ai',\n timeout: config.timeout || 60000,\n retries: config.retries || 3,\n userId: config.userId || '',\n teamId: config.teamId || '',\n };\n\n this.http = ky.create({\n prefixUrl: this.config.baseUrl,\n timeout: this.config.timeout,\n retry: {\n limit: this.config.retries,\n methods: ['get', 
'post'],\n statusCodes: [408, 413, 429, 500, 502, 503, 504],\n },\n hooks: {\n beforeRequest: [\n (request) => {\n request.headers.set('Authorization', `Bearer ${this.config.apiKey}`);\n request.headers.set('Content-Type', 'application/json');\n request.headers.set('User-Agent', 'Neureus-SDK/0.2.0');\n },\n ],\n },\n });\n }\n\n /**\n * Chat completion API\n */\n public chat = {\n /**\n * Create a non-streaming chat completion\n *\n * @example\n * ```typescript\n * const response = await ai.chat.create({\n * model: 'gpt-4',\n * messages: [\n * { role: 'system', content: 'You are a helpful assistant.' },\n * { role: 'user', content: 'What is the capital of France?' }\n * ],\n * temperature: 0.7\n * });\n *\n * console.log(response.choices[0].message.content);\n * ```\n */\n create: async (\n messages: ChatMessage[],\n options?: ChatCompletionOptions\n ): Promise<ChatCompletionResponse> => {\n const request: ChatCompletionRequest = {\n model: options?.model || 'gpt-3.5-turbo',\n messages,\n temperature: options?.temperature ?? 0.7,\n maxTokens: options?.maxTokens,\n topP: options?.topP,\n frequencyPenalty: options?.frequencyPenalty,\n presencePenalty: options?.presencePenalty,\n stop: options?.stop,\n stream: false,\n cache: options?.cache ?? true,\n fallback: options?.fallback,\n metadata: options?.metadata,\n userId: options?.userId || this.config.userId || undefined,\n teamId: options?.teamId || this.config.teamId || undefined,\n };\n\n return this.http.post('ai/chat/completions', {\n json: request,\n }).json<ChatCompletionResponse>();\n },\n\n /**\n * Create a streaming chat completion\n *\n * @example\n * ```typescript\n * const stream = await ai.chat.stream({\n * model: 'gpt-4',\n * messages: [{ role: 'user', content: 'Tell me a story' }]\n * });\n *\n * for await (const chunk of stream) {\n * const content = chunk.choices[0]?.delta?.content;\n * if (content) {\n * process.stdout.write(content);\n * }\n * }\n * ```\n */\n stream: async (\n messages: ChatMessage[],\n options?: ChatCompletionOptions\n ): Promise<AsyncIterable<ChatCompletionStreamChunk>> => {\n const request: ChatCompletionRequest = {\n model: options?.model || 'gpt-3.5-turbo',\n messages,\n temperature: options?.temperature ?? 0.7,\n maxTokens: options?.maxTokens,\n topP: options?.topP,\n frequencyPenalty: options?.frequencyPenalty,\n presencePenalty: options?.presencePenalty,\n stop: options?.stop,\n stream: true,\n cache: options?.cache ?? true,\n fallback: options?.fallback,\n metadata: options?.metadata,\n userId: options?.userId || this.config.userId || undefined,\n teamId: options?.teamId || this.config.teamId || undefined,\n };\n\n const response = await this.http.post('ai/chat/completions', {\n json: request,\n });\n\n return this.parseSSEStream(response.body!);\n },\n };\n\n /**\n * List available models\n *\n * @example\n * ```typescript\n * const models = await ai.models.list();\n * console.log(models); // [{ name: 'gpt-4', provider: 'openai', ... 
}]\n * ```\n */\n public models = {\n list: async (): Promise<ModelConfig[]> => {\n return this.http.get('ai/models').json<ModelConfig[]>();\n },\n };\n\n /**\n * Parse Server-Sent Events stream into async iterable\n */\n private async *parseSSEStream(body: ReadableStream<Uint8Array>): AsyncIterable<ChatCompletionStreamChunk> {\n const reader = body.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) break;\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n const trimmed = line.trim();\n\n if (trimmed === '') continue;\n if (trimmed.startsWith(':')) continue; // Comment\n if (trimmed === 'data: [DONE]') return;\n\n if (trimmed.startsWith('data: ')) {\n const data = trimmed.slice(6);\n try {\n const chunk = JSON.parse(data) as ChatCompletionStreamChunk;\n yield chunk;\n } catch (error) {\n console.error('Failed to parse SSE data:', data, error);\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n }\n}\n\n/**\n * Create an AI client instance\n *\n * @example\n * ```typescript\n * import { createAIClient } from '@neureus/sdk/ai';\n *\n * const ai = createAIClient({\n * apiKey: process.env.NEUREUS_API_KEY\n * });\n * ```\n */\nexport function createAIClient(config: AIClientConfig): AIClient {\n return new AIClient(config);\n}\n"]}
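The inline @example snippets cover the happy path but not how failures surface to callers. Because the client delegates HTTP to ky, a reasonable assumption is that non-2xx responses reject with ky's HTTPError (after the configured retries, 3 by default) and that stalled requests reject with TimeoutError after the 60-second default timeout. The sketch below is an illustrative usage pattern under those assumptions, not part of the SDK; the gateway's own error classes (re-exported above as types) may carry richer detail server-side.

```typescript
import { createAIClient } from '@neureus/sdk/ai';
import { HTTPError, TimeoutError } from 'ky';

const ai = createAIClient({ apiKey: process.env.NEUREUS_API_KEY ?? '' });

try {
  // Two-argument form: messages array first, per-request options second.
  const response = await ai.chat.create(
    [{ role: 'user', content: 'Summarize the Neureus SDK in one sentence.' }],
    { model: 'gpt-4', maxTokens: 100 }
  );
  console.log(response.choices[0]?.message?.content);
} catch (error) {
  if (error instanceof HTTPError) {
    // ky throws HTTPError for non-2xx responses once retries are exhausted.
    console.error('Gateway request failed with status', error.response.status);
  } else if (error instanceof TimeoutError) {
    // Requests abort after the configured timeout (60000 ms by default).
    console.error('Gateway request timed out');
  } else {
    throw error;
  }
}
```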