UNPKG

@robota-sdk/openai

Version:

OpenAI integration for Robota SDK - GPT-4, GPT-3.5, function calling, and tool integration with OpenAI's API

1 line, 6.23 kB
import OpenAI from 'openai';
import { BaseAIProvider, SilentLogger } from '@robota-sdk/agents';

/**
 * Translates raw OpenAI chat-completion payloads into the SDK's internal
 * assistant-message shape. Used by OpenAIProvider for both non-streaming
 * responses and streaming deltas.
 */
class OpenAIResponseParser {
  logger;

  /**
   * @param {object} [logger] - Logger with an `error(msg, meta)` method;
   *   falls back to SilentLogger when omitted.
   */
  constructor(logger) {
    this.logger = logger || SilentLogger;
  }

  /**
   * Convert a complete (non-streaming) chat-completion response into an
   * assistant message. Optional fields (toolCalls, usage) are attached only
   * when present in the payload.
   *
   * @throws {Error} when the response has no choices or parsing fails;
   *   the failure is logged before rethrowing with a wrapped message.
   */
  parseResponse(response) {
    try {
      const firstChoice = response.choices?.[0];
      if (!firstChoice) {
        throw new Error("No choices found in OpenAI response");
      }

      const message = firstChoice.message;
      const content = message.content || "";

      // Normalize OpenAI tool_calls (snake_case) to the SDK's camelCase shape.
      const toolCalls =
        message.tool_calls?.map((call) => ({
          id: call.id,
          type: "function",
          function: {
            name: call.function.name,
            arguments: call.function.arguments,
          },
        })) || [];

      const usage = response.usage
        ? {
            promptTokens: response.usage.prompt_tokens,
            completionTokens: response.usage.completion_tokens,
            totalTokens: response.usage.total_tokens,
          }
        : void 0;

      return {
        role: "assistant",
        content,
        timestamp: new Date(),
        ...(toolCalls.length > 0 && { toolCalls }),
        ...(usage && { usage }),
        metadata: { finishReason: firstChoice.finish_reason || void 0 },
      };
    } catch (err) {
      const detail = err instanceof Error ? err.message : "OpenAI response parsing failed";
      this.logger.error("Response parsing failed", { error: detail });
      throw new Error(`OpenAI response parsing failed: ${detail}`);
    }
  }

  /**
   * Convert a single streaming chunk into an assistant-message fragment.
   * Returns null for chunks without choices. A chunk that carries tool-call
   * deltas is emitted with empty content; otherwise the text delta is passed
   * through. `metadata.isComplete` is true on "stop" / "tool_calls" finishes.
   *
   * @throws {Error} when parsing fails; logged before rethrowing.
   */
  parseStreamingChunk(chunk) {
    try {
      const choice = chunk.choices?.[0];
      if (!choice) {
        return null;
      }

      const delta = choice.delta;
      const finishReason = choice.finish_reason;
      const isComplete = finishReason === "stop" || finishReason === "tool_calls";

      if (delta.tool_calls) {
        const toolCalls = delta.tool_calls.map((call) => ({
          // Streaming deltas may omit pieces; default every field defensively.
          id: call.id || "",
          type: "function",
          function: {
            name: call.function?.name || "",
            arguments: call.function?.arguments || "",
          },
        }));
        return {
          role: "assistant",
          content: "",
          timestamp: new Date(),
          toolCalls,
          metadata: { isStreamChunk: true, isComplete },
        };
      }

      return {
        role: "assistant",
        content: delta.content || "",
        timestamp: new Date(),
        metadata: { isStreamChunk: true, isComplete },
      };
    } catch (err) {
      const detail = err instanceof Error ? err.message : "OpenAI chunk parsing failed";
      this.logger.error("Chunk parsing failed", { error: detail });
      throw new Error(`OpenAI chunk parsing failed: ${detail}`);
    }
  }
}

/**
 * OpenAI implementation of the Robota SDK provider interface.
 * Wraps the official `openai` client for chat and streaming chat, with
 * optional payload logging and tool (function-calling) support.
 */
class OpenAIProvider extends BaseAIProvider {
  name = "openai";
  version = "1.0.0";
  client;
  options;
  payloadLogger;
  responseParser;
  logger;

  /**
   * @param {object} options - Either `options.client` (a pre-built OpenAI
   *   client) or `options.apiKey` (plus optional organization/timeout/baseURL)
   *   must be supplied.
   * @throws {Error} when neither a client nor an apiKey is provided.
   */
  constructor(options) {
    super();
    this.options = options;

    if (options.client) {
      this.client = options.client;
    } else if (options.apiKey) {
      this.client = new OpenAI({
        apiKey: options.apiKey,
        ...(options.organization && { organization: options.organization }),
        ...(options.timeout && { timeout: options.timeout }),
        ...(options.baseURL && { baseURL: options.baseURL }),
      });
    } else {
      throw new Error("Either OpenAI client or apiKey is required");
    }

    this.logger = options.logger || SilentLogger;
    this.responseParser = new OpenAIResponseParser(this.logger);
    this.payloadLogger = this.initializePayloadLogger(options) ?? void 0;
  }

  // Hook for subclasses; by default the payload logger comes from options.
  initializePayloadLogger(options) {
    return options.payloadLogger;
  }

  /**
   * Run a single (non-streaming) chat completion.
   *
   * @param {Array} messages - Universal messages; converted to OpenAI format.
   * @param {object} chatOptions - Must include `model`; may include
   *   temperature, maxTokens, and tools.
   * @returns {Promise<object>} the parsed assistant message.
   * @throws {Error} wrapped as "OpenAI chat failed: ..." on any failure.
   */
  async chat(messages, chatOptions) {
    this.validateMessages(messages);
    try {
      const openaiMessages = this.convertToOpenAIMessages(messages);

      if (!chatOptions?.model) {
        throw new Error("Model is required in ChatOptions. Please specify a model in defaultModel configuration.");
      }

      const request = {
        model: chatOptions.model,
        messages: openaiMessages,
        // temperature may legitimately be 0, so spread on !== undefined.
        ...(chatOptions?.temperature !== void 0 && { temperature: chatOptions.temperature }),
        ...(chatOptions?.maxTokens && { max_tokens: chatOptions.maxTokens }),
        ...(chatOptions?.tools && {
          tools: this.convertToOpenAITools(chatOptions.tools),
          tool_choice: "auto",
        }),
      };

      if (this.payloadLogger?.isEnabled()) {
        const summary = {
          model: request.model,
          messagesCount: openaiMessages.length,
          hasTools: !!request.tools,
          temperature: request.temperature ?? void 0,
          maxTokens: request.max_tokens ?? void 0,
          timestamp: new Date().toISOString(),
        };
        await this.payloadLogger.logPayload(summary, "chat");
      }

      const response = await this.client.chat.completions.create(request);
      return this.responseParser.parseResponse(response);
    } catch (err) {
      const detail = err.message || "OpenAI API request failed";
      throw new Error(`OpenAI chat failed: ${detail}`);
    }
  }

  /**
   * Run a streaming chat completion, yielding one parsed fragment per chunk.
   * Chunks that parse to null (no choices) are skipped.
   *
   * @throws {Error} wrapped as "OpenAI stream failed: ..." on any failure.
   */
  async *chatStream(messages, chatOptions) {
    this.validateMessages(messages);
    try {
      const openaiMessages = this.convertToOpenAIMessages(messages);

      if (!chatOptions?.model) {
        throw new Error("Model is required in ChatOptions. Please specify a model in defaultModel configuration.");
      }

      const request = {
        model: chatOptions.model,
        messages: openaiMessages,
        stream: true,
        ...(chatOptions?.temperature !== void 0 && { temperature: chatOptions.temperature }),
        ...(chatOptions?.maxTokens && { max_tokens: chatOptions.maxTokens }),
        ...(chatOptions?.tools && {
          tools: this.convertToOpenAITools(chatOptions.tools),
          tool_choice: "auto",
        }),
      };

      if (this.payloadLogger?.isEnabled()) {
        const summary = {
          model: request.model,
          messagesCount: openaiMessages.length,
          hasTools: !!request.tools,
          temperature: request.temperature ?? void 0,
          maxTokens: request.max_tokens ?? void 0,
          timestamp: new Date().toISOString(),
        };
        await this.payloadLogger.logPayload(summary, "stream");
      }

      const stream = await this.client.chat.completions.create(request);
      for await (const chunk of stream) {
        const fragment = this.responseParser.parseStreamingChunk(chunk);
        if (fragment) {
          yield fragment;
        }
      }
    } catch (err) {
      const detail = err.message || "OpenAI API request failed";
      throw new Error(`OpenAI stream failed: ${detail}`);
    }
  }

  // This provider always supports OpenAI function calling.
  supportsTools() {
    return true;
  }

  // Minimal sanity check: a client and options object both exist.
  validateConfig() {
    return !!this.client && !!this.options;
  }

  // No resources to release; present to satisfy the provider interface.
  async dispose() {}

  /**
   * Map universal messages to the OpenAI wire format. Assistant messages with
   * tool calls get `content: null` (OpenAI's convention for pure tool-call
   * turns) plus a `tool_calls` array.
   *
   * @throws {Error} on an unsupported role.
   */
  convertToOpenAIMessages(messages) {
    return messages.map((message) => {
      switch (message.role) {
        case "user":
          return { role: "user", content: message.content || "" };
        case "assistant": {
          const assistant = message;
          if (assistant.toolCalls && assistant.toolCalls.length > 0) {
            return {
              role: "assistant",
              content: assistant.content === "" ? null : assistant.content || null,
              tool_calls: assistant.toolCalls.map((call) => ({
                id: call.id,
                type: "function",
                function: {
                  name: call.function.name,
                  arguments: call.function.arguments,
                },
              })),
            };
          }
          return { role: "assistant", content: message.content || "" };
        }
        case "system":
          return { role: "system", content: message.content || "" };
        case "tool":
          return {
            role: "tool",
            content: message.content || "",
            tool_call_id: message.toolCallId || "",
          };
        default:
          throw new Error(`Unsupported message role: ${message.role}`);
      }
    });
  }

  // Map SDK tool schemas to OpenAI's `function` tool declaration format.
  convertToOpenAITools(tools) {
    return tools.map((tool) => ({
      type: "function",
      function: {
        name: tool.name,
        description: tool.description,
        parameters: tool.parameters,
      },
    }));
  }

  /**
   * Delegates to the base validation, then walks assistant messages.
   * NOTE(review): the trailing `continue` makes this loop a no-op — the
   * shipped bundle contains it as-is, presumably residue of validation logic
   * stripped during build; preserved verbatim to keep behavior identical.
   */
  validateMessages(messages) {
    super.validateMessages(messages);
    for (const message of messages) {
      if (message.role === "assistant") {
        const assistant = message;
        if (assistant.toolCalls && assistant.toolCalls.length > 0 && assistant.content === "") continue;
      }
    }
  }
}

/**
 * Static helpers that adapt universal conversation messages to OpenAI's chat
 * format, independent of any provider instance.
 */
class OpenAIConversationAdapter {
  /**
   * Keep user/assistant/system messages; keep tool messages only when they
   * carry a usable toolCallId (non-empty, non-whitespace, not "unknown").
   */
  static filterMessagesForOpenAI(messages) {
    return messages.filter((message) => {
      if (message.role === "user" || message.role === "assistant" || message.role === "system") {
        return true;
      }
      if (message.role === "tool") {
        const tool = message;
        return !!(tool.toolCallId && tool.toolCallId.trim() !== "" && tool.toolCallId !== "unknown");
      }
      return false;
    });
  }

  // Filter then convert each surviving message to OpenAI format.
  static toOpenAIFormat(messages) {
    return this.filterMessagesForOpenAI(messages).map((message) => this.convertMessage(message));
  }

  /**
   * Convert one universal message to OpenAI's wire format.
   *
   * @throws {Error} when a tool message lacks a toolCallId, or on an
   *   unsupported role.
   */
  static convertMessage(message) {
    const role = message.role;

    if (role === "user") {
      return { role: "user", content: message.content || "" };
    }

    if (role === "assistant") {
      const assistant = message;
      if (assistant.toolCalls && assistant.toolCalls.length > 0) {
        return {
          role: "assistant",
          content: assistant.content === "" ? null : assistant.content || null,
          tool_calls: assistant.toolCalls.map((call) => ({
            id: call.id,
            type: "function",
            function: {
              name: call.function.name,
              arguments: call.function.arguments,
            },
          })),
        };
      }
      return {
        role: "assistant",
        content: assistant.content === null || assistant.content === "" ? null : assistant.content || "",
      };
    }

    if (role === "system") {
      return { role: "system", content: message.content || "" };
    }

    if (role === "tool") {
      const tool = message;
      if (!tool.toolCallId || tool.toolCallId.trim() === "") {
        throw new Error(`Tool message missing toolCallId: ${JSON.stringify(tool)}`);
      }
      return {
        role: "tool",
        content: tool.content || "",
        tool_call_id: tool.toolCallId,
      };
    }

    throw new Error(`Unsupported message role: ${message.role}`);
  }

  /**
   * Prepend a system message when a prompt is given and none exists yet;
   * otherwise return the input array unchanged.
   */
  static addSystemPromptIfNeeded(messages, systemPrompt) {
    if (!systemPrompt || messages.some((entry) => entry.role === "system")) {
      return messages;
    }
    return [{ role: "system", content: systemPrompt }, ...messages];
  }
}

export { OpenAIConversationAdapter, OpenAIProvider };