UNPKG

llm-polyglot

Version:

A universal LLM client - provides adapters for various LLM providers to adhere to a universal interface - the openai sdk - allows you to use providers like anthropic using the same openai interface and transforms the responses in the same way - this allow

13 lines (11 loc) 11.6 kB
var d=class{constructor(s,e="info"){this._transports=[];this._prefix=s??"",this._severity=e}log(s,e){switch(s){case"debug":this._severity==="debug"&&this._logToTransports("debug",this._prefix,e);break;case"info":(this._severity==="debug"||this._severity==="info")&&this._logToTransports("info",this._prefix,e);break;case"warn":(this._severity==="debug"||this._severity==="info"||this._severity==="warn")&&this._logToTransports("warn",this._prefix,e);break;case"error":this._logToTransports("error",this._prefix,e);break}}debug(s){this._logToTransports("debug",this._prefix,s)}info(s){this._logToTransports("info",this._prefix,s)}warn(s){this._logToTransports("warn",this._prefix,s)}error(s){this._logToTransports("error",this._prefix,s)}addTransport(s){return this._transports.push(s),()=>{this._transports.splice(this._transports.indexOf(s),1)}}_logToTransports(s,e,t){let o=new Date().toTimeString().split(" ")[0];for(let n of this._transports)n(s,t,o,e)}};var m=(g,s,e,t)=>{let o={debug:console.debug,info:console.info,warn:console.warn,error:console.error}[g];o(`LLM-CLIENT--${t} ${e}: ${s}`)};import P from"@anthropic-ai/sdk";var u=class extends P{constructor(e){let t=e?.apiKey??process.env?.ANTHROPIC_API_KEY??null;if(!t)throw new Error("API key is required for AnthropicProvider - please provide it in the constructor or set it as an environment variable named ANTHROPIC_API_KEY.");super({apiKey:t});this.logLevel=process.env?.LOG_LEVEL??"info";this.chat={completions:{create:this.create.bind(this)}};this.logLevel=e?.logLevel??this.logLevel,this.apiKey=t,this.logger=new d("GEMINI-CLIENT"),this.logger.addTransport(m)}async transformResponse(e,{stream:t}={}){if(!e.id)throw new Error("Response id is undefined");this.logger.log("debug",`Response: ${e}`),e.content.forEach(n=>{n.type==="tool_use"?this.logger.log("debug",`JSON Summary: ${JSON.stringify(n.input,null,2)}`):this.logger.log("debug",`No JSON summary found in the response. ${JSON.stringify(n,null,2)}`)});let o={id:e.id,originResponse:e,model:e.model,usage:{prompt_tokens:e.usage.input_tokens,completion_tokens:e.usage.output_tokens,total_tokens:e.usage.input_tokens+e.usage.output_tokens}};if(!t){let n=e.content.filter(c=>c.type==="tool_use"),a=e.content.filter(c=>c.type==="text"),i=n.map(c=>({id:c.id,type:"function",function:{name:c.name,arguments:JSON.stringify(c.input)}})),l=a.map(c=>c.text).join("");return{...o,object:"chat.completion",choices:[{message:{role:"assistant",content:l,...i?.length?{tool_calls:i}:{}},finish_reason:i?.length?"tool_calls":"stop",index:0,logprobs:null}]}}return o}transformParamsRegular(e){let t=[],o=e.messages.filter(i=>i.role==="system"),n=e.messages.filter(i=>i.role==="user"||i.role==="assistant"),a=o?.length?o.map(i=>i.content).join(` `):"";if(o.length&&console.warn("Anthropic does not support system messages - concatenating them all into a single 'system' property."),!e.max_tokens)throw new Error("max_tokens is required");return"tools"in e&&Array.isArray(e.tools)&&e.tools.length>0&&(t=e.tools.map(i=>({name:i.function.name??"",description:i.function.description??"",input_schema:{type:"object",...i.function.parameters}}))),{model:e.model,tools:t,system:a?.length?a:void 0,messages:n,max_tokens:e.max_tokens,stop_sequences:e.stop?Array.isArray(e.stop)?e.stop:[e.stop]:void 0,temperature:e.temperature??void 0,top_p:e.top_p??void 0,top_k:e.n??void 0,stream:e?.stream??!1,tool_choice:"tool_choice"in e&&typeof e.tool_choice=="object"&&"function"in e.tool_choice?{type:"tool",name:e.tool_choice.function.name}:void 0}}async*streamChatCompletion(e){let t=null;for await(let o of e)switch(o.type){case"message_start":this.logger.log("debug",`Message start: ${o}`),t={id:o.message.id,object:"chat.completion.chunk",created:Date.now(),model:o.message.model,choices:[{index:0,delta:{role:"assistant"},finish_reason:null}],usage:{prompt_tokens:o.message.usage.input_tokens,completion_tokens:0,total_tokens:o.message.usage.input_tokens},originResponse:o.message},yield t;break;case"content_block_start":this.logger.log("debug",`Content block start: ${o}`);break;case"content_block_delta":t&&t.choices&&(o.delta.type==="text_delta"?t.choices[0].delta={content:o.delta.text,role:"assistant"}:o.delta.type==="input_json_delta"&&(t.choices[0].delta={content:o.delta.partial_json,role:"assistant"}),yield t);break;case"content_block_stop":this.logger.log("debug",`Content block stop: ${o}`);break;case"message_delta":t&&t.usage&&(t.usage.completion_tokens=o.usage?.output_tokens||0,t.usage.total_tokens=t.usage.prompt_tokens+t.usage.completion_tokens);break;case"message_stop":this.logger.log("debug",`Message stop: ${o}`),t&&t.choices&&(t.choices[0].finish_reason="stop",t.choices[0].delta={content:null,role:"assistant"},yield t);break;default:this.logger.log("warn",`Unknown event type: ${o}`)}}async create(e){try{let t=this.transformParamsRegular(e);if(e.stream){this.logger.log("debug","Starting streaming completion response");let o=await this.messages.stream({...t});return this.streamChatCompletion(o)}else{let o=await this.messages.create({...t,stream:!1});return await this.transformResponse(o)}}catch(t){throw this.logger.error(new Error("Error in Anthropic API request:",{cause:t})),t}}};import{DynamicRetrievalMode as k,FunctionCallingMode as x,GoogleGenerativeAI as A,GoogleGenerativeAIError as L}from"@google/generative-ai";import{GoogleAICacheManager as E}from"@google/generative-ai/server";var f=class extends A{constructor(e){let t=e?.apiKey??process.env?.GEMINI_API_KEY??null;if(!t)throw new Error("API key is required for GeminiProvider - please provide it in the constructor or set it as an environment variable named GEMINI_API_KEY.");super(t);this.logLevel=process.env?.LOG_LEVEL??"info";this.activeChatSessions=new Map;this.chat={completions:{create:this.create.bind(this)}};this.cacheManager={create:this.createCacheManager.bind(this),get:async e=>await this.googleCacheManager.get(e),list:async()=>await this.googleCacheManager.list(),update:async(e,t)=>{let o=this.transformHistory(t.messages);return await this.googleCacheManager.update(e,{cachedContent:{contents:o}})},delete:async e=>await this.googleCacheManager.delete(e)};this.logLevel=e?.logLevel??this.logLevel,this.apiKey=t,this.googleCacheManager=new E(t),this.logger=new d("GEMINI-CLIENT"),this.logger.addTransport(m)}cleanSchema(e){let{additionalProperties:t,_additionalProperties:o,...n}=e;return this.logger.log(this.logLevel,`Removing unsupported 'additionalProperties' from schema - ${JSON.stringify(t??{})}`),n.properties&&typeof n.properties=="object"&&(n.properties=Object.entries(n.properties).reduce((a,[i,l])=>(a[i]=typeof l=="object"&&l!==null?this.cleanSchema(l):l,a),{})),n.items&&typeof n.items=="object"&&n.items!==null&&(n.items=this.cleanSchema(n.items)),n}createGenerationConfig(e){return{temperature:e.temperature??void 0,topP:e.top_p??void 0,topK:e.n??void 0,maxOutputTokens:e.max_tokens??void 0,stopSequences:e.stop?Array.isArray(e.stop)?e.stop:[e.stop]:void 0,candidateCount:e.n??void 0}}transformHistory(e){return e.map(t=>({role:t.role==="assistant"?"model":"user",parts:[{text:t.content.toString()}]}))}getModelConfig(e){let t=e.additionalProperties,o={model:e?.model,safetySettings:t?.safetySettings,generationConfig:t?.modelGenerationConfig};return e.groundingThreshold!==void 0&&(o.tools=[{googleSearchRetrieval:{dynamicRetrievalConfig:{mode:k.MODE_DYNAMIC,dynamicThreshold:e.groundingThreshold}}}]),e.systemInstruction&&(o.systemInstruction=e.systemInstruction),o}async getChatSession(e){let t=e.additionalProperties,o=t?.sessionId;if(o&&this.activeChatSessions.has(o))return this.activeChatSessions.get(o);let n=this.createGenerationConfig(e),a=this.transformHistory(e.messages),i;if(t?.cacheName){let r=await this.googleCacheManager.get(t.cacheName);i=this.getGenerativeModelFromCachedContent(r)}else i=this.getGenerativeModel(this.getModelConfig(e));let l={generationConfig:n,history:a};if(e.tools?.length){let r=e.tools.map(p=>({name:p.function.name??"",description:p.function.description??"",parameters:{type:"object",...p.function.parameters?this.cleanSchema(p.function.parameters):{}}})),h=e.tool_choice;l.tools=[{functionDeclarations:r}],h?.type==="function"&&(l.toolConfig={functionCallingConfig:{mode:x.ANY,allowedFunctionNames:[h.function.name]}})}let c=i.startChat(l);return o&&this.activeChatSessions.set(o,c),c}transformResponse(e,t){let o=e.text(),n=e.candidates?.[0]?.content?.parts?.flatMap(r=>r.functionCall?[{index:0,id:`call_${Math.random().toString(36).slice(2)}`,function:{name:r.functionCall.name,arguments:JSON.stringify(r.functionCall.args)},type:"function"}]:[])??[],a=e.candidates?.[0]?.groundingMetadata,i=o,l=[];a&&(l=a.groundingChunks?.map(r=>({url:r.web?.uri??"",title:r.web?.title??""}))??[],a.groundingSupports?.length&&(i+=` **Grounded Segments** `,a.groundingSupports.forEach(r=>{let p=r.groundingChunkIndices.map(y=>l[y]?.title).filter(Boolean).join(", "),v=r.confidenceScores.reduce((y,b)=>y+b,0)/r.confidenceScores.length;i+=`> "${r.segment.text}" `,i+=`> Sources: ${p} (Confidence: ${(v*100).toFixed(1)}%) `})),l.length>0&&(i+=` **Grounding Sources** `,l.forEach(r=>{i+=`- [${r.title}](${r.url}) `})));let c={id:`chatcmpl-${Math.random().toString(36).slice(2)}`,object:t.stream?"chat.completion.chunk":"chat.completion",created:Date.now(),model:t.model,system_fingerprint:void 0,originResponse:e,choices:[{index:0,message:{role:"assistant",content:n.length?"":i,...n.length?{tool_calls:n}:{}},...a?{grounding_metadata:{search_queries:a.webSearchQueries,sources:l,search_suggestion_html:a.searchEntryPoint?.renderedContent,supports:a.groundingSupports?.map(r=>({text:r.segment.text,sources:r.groundingChunkIndices.map(h=>l[h]),confidence:r.confidenceScores}))}}:{},finish_reason:e.candidates?.[0]?.finishReason?.toLowerCase()??null,logprobs:null}]};return t.stream?{...c,choices:[{...c.choices[0],delta:c.choices[0].message}]}:c}async*streamChatCompletion(e,t){for await(let o of e)yield this.transformResponse(o,t)}async create(e){try{if(!e?.model||!e?.messages?.length)throw new Error("model and messages are required");let t=await this.getChatSession(e),o=e.messages[e.messages.length-1];if(e?.stream){this.logger.log(this.logLevel,"Starting streaming completion response");let n=await t.sendMessageStream(o.content.toString());return this.streamChatCompletion(n.stream,e)}else{let n=await t.sendMessage(o.content.toString());if(!n?.response)throw new Error("Chat response failed");let a=this.transformResponse(n.response,e);return a.model=e.model,a}}catch(t){throw this.logger.log(this.logLevel,new Error("Error in Google API request:",{cause:t})),t}}async createCacheManager(e){let t=this.transformHistory(e.messages);try{return await this.googleCacheManager.create({ttlSeconds:e.ttlSeconds,model:e.model,contents:t})}catch(o){let n=o;throw o instanceof L&&(n=new Error("Failed to create Gemini cache manager, ensure your API key supports caching (i.e. pay-as-you-go)"),n.stack=o.stack),this.logger.log(this.logLevel,n),o}}};import{OpenAI as I}from"openai";var C=class extends I{constructor(s){super(s)}};var _=class{constructor(s){switch(s?.provider){case"anthropic":this.providerInstance=new u(s);break;case"google":this.providerInstance=new f(s);break;case"openai":default:this.providerInstance=new C(s)}let e={get:(t,o,n)=>{if(o in t)return Reflect.get(t,o,n)}};this.providerInstance=new Proxy(this.providerInstance,e)}getProviderInstance(){return this.providerInstance}};function ne(g={provider:"openai"}){return new _(g).getProviderInstance()}export{_ as LLMClient,ne as createLLMClient}; //# sourceMappingURL=index.js.map