@ai-foundry/llm-sdk

This is just another SDK for common LLM API providers.

llm-api.service.js (43 lines, 1.7 kB):
export class LLMApiService {
  async createAssistantMessage(messages, tools, options) {
    const url = this.getURL();
    // Merge the default headers with any provider-specific headers passed via options.
    const additionalHeaders = options?.providerOptions?.headers || {};
    const headers = {
      'Content-Type': 'application/json',
      ...this.getHeaders(),
      ...additionalHeaders
    };
    // Build the request body: formatted messages, plus tools and options when provided.
    const body = {
      messages: messages.map(this.formatMessagePayload),
      ...(tools && tools.length > 0
        ? { tools: tools.map(this.formatToolCallPayload) }
        : {}),
      ...(options ? this.formatOptionsPayload(options) : {})
    };
    const response = await fetch(url, {
      method: 'POST',
      headers,
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      const errorResponse = await response.json();
      console.error('Error response:', errorResponse);
      throw new Error(`Failed to generate text: ${response.statusText}`);
    }
    const data = await response.json();
    const parsedResponse = this.parseAssistantResponse(data);
    // Filter out sensitive headers before echoing the request back to the caller.
    const sensitiveHeaders = ['authorization', 'api-key'];
    const filteredHeaders = Object.fromEntries(
      Object.entries(headers).filter(([key]) => !sensitiveHeaders.includes(key.toLowerCase()))
    );
    return {
      request: { body, headers: filteredHeaders },
      response: {
        body: data,
        headers: Object.fromEntries(response.headers.entries())
      },
      ...parsedResponse
    };
  }
}
//# sourceMappingURL=llm-api.service.js.map
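
The file above only defines createAssistantMessage; the provider-specific helpers it calls (getURL, getHeaders, formatMessagePayload, formatToolCallPayload, formatOptionsPayload, parseAssistantResponse) are expected to be supplied elsewhere in the package. As a rough sketch of how the service could be wired up, the hypothetical subclass below provides minimal implementations targeting an OpenAI-style chat completions endpoint and then calls createAssistantMessage. The class name, endpoint, option names, and payload shapes are assumptions for illustration, not part of the SDK.

// Hypothetical subclass; the endpoint, option names, and payload shapes below
// are assumptions, not part of @ai-foundry/llm-sdk.
class OpenAIStyleService extends LLMApiService {
  constructor(apiKey) {
    super();
    this.apiKey = apiKey;
  }
  getURL() {
    return 'https://api.openai.com/v1/chat/completions';
  }
  getHeaders() {
    // Filtered out of the returned request.headers by createAssistantMessage.
    return { Authorization: `Bearer ${this.apiKey}` };
  }
  // Pass messages, tools, and options through mostly unchanged in this sketch.
  formatMessagePayload(message) {
    return { role: message.role, content: message.content };
  }
  formatToolCallPayload(tool) {
    return tool;
  }
  formatOptionsPayload(options) {
    // Spread into the request body by createAssistantMessage.
    return { model: options.model };
  }
  parseAssistantResponse(data) {
    return { content: data.choices?.[0]?.message?.content ?? '' };
  }
}

// Usage: send one user message and read the parsed content.
const service = new OpenAIStyleService(process.env.OPENAI_API_KEY);
const result = await service.createAssistantMessage(
  [{ role: 'user', content: 'Hello!' }],
  undefined,
  { model: 'gpt-4o-mini' }
);
console.log(result.content);

Note that createAssistantMessage returns the raw request and response (with authorization and api-key headers stripped from the echoed request) alongside whatever parseAssistantResponse extracts, so the parsed fields are available directly on the result object.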