UNPKG

llm-polyglot

Version:

A universal LLM client - provides adapters for various LLM providers to adhere to a universal interface - the openai sdk - allows you to use providers like anthropic using the same openai interface and transforms the responses in the same way - this allows you to swap providers without changing application code.

13 lines (11 loc) 15.6 kB
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var d=class{constructor(s,e="info"){this._transports=[];this._prefix=_nullishCoalesce(s, () => ("")),this._severity=e}log(s,e){switch(s){case"debug":this._severity==="debug"&&this._logToTransports("debug",this._prefix,e);break;case"info":(this._severity==="debug"||this._severity==="info")&&this._logToTransports("info",this._prefix,e);break;case"warn":(this._severity==="debug"||this._severity==="info"||this._severity==="warn")&&this._logToTransports("warn",this._prefix,e);break;case"error":this._logToTransports("error",this._prefix,e);break}}debug(s){this._logToTransports("debug",this._prefix,s)}info(s){this._logToTransports("info",this._prefix,s)}warn(s){this._logToTransports("warn",this._prefix,s)}error(s){this._logToTransports("error",this._prefix,s)}addTransport(s){return this._transports.push(s),()=>{this._transports.splice(this._transports.indexOf(s),1)}}_logToTransports(s,e,t){let o=new Date().toTimeString().split(" ")[0];for(let n of this._transports)n(s,t,o,e)}};var m=(g,s,e,t)=>{let o={debug:console.debug,info:console.info,warn:console.warn,error:console.error}[g];o(`LLM-CLIENT--${t} ${e}: ${s}`)};var _sdk = require('@anthropic-ai/sdk'); var _sdk2 = _interopRequireDefault(_sdk);var 
u=class extends _sdk2.default{constructor(e){let t=_nullishCoalesce(_nullishCoalesce(_optionalChain([e, 'optionalAccess', _2 => _2.apiKey]), () => (_optionalChain([process, 'access', _3 => _3.env, 'optionalAccess', _4 => _4.ANTHROPIC_API_KEY]))), () => (null));if(!t)throw new Error("API key is required for AnthropicProvider - please provide it in the constructor or set it as an environment variable named ANTHROPIC_API_KEY.");super({apiKey:t});this.logLevel=_nullishCoalesce(_optionalChain([process, 'access', _5 => _5.env, 'optionalAccess', _6 => _6.LOG_LEVEL]), () => ("info"));this.chat={completions:{create:this.create.bind(this)}};this.logLevel=_nullishCoalesce(_optionalChain([e, 'optionalAccess', _7 => _7.logLevel]), () => (this.logLevel)),this.apiKey=t,this.logger=new d("GEMINI-CLIENT"),this.logger.addTransport(m)}async transformResponse(e,{stream:t}={}){if(!e.id)throw new Error("Response id is undefined");this.logger.log("debug",`Response: ${e}`),e.content.forEach(n=>{n.type==="tool_use"?this.logger.log("debug",`JSON Summary: ${JSON.stringify(n.input,null,2)}`):this.logger.log("debug",`No JSON summary found in the response. 
${JSON.stringify(n,null,2)}`)});let o={id:e.id,originResponse:e,model:e.model,usage:{prompt_tokens:e.usage.input_tokens,completion_tokens:e.usage.output_tokens,total_tokens:e.usage.input_tokens+e.usage.output_tokens}};if(!t){let n=e.content.filter(c=>c.type==="tool_use"),a=e.content.filter(c=>c.type==="text"),i=n.map(c=>({id:c.id,type:"function",function:{name:c.name,arguments:JSON.stringify(c.input)}})),l=a.map(c=>c.text).join("");return{...o,object:"chat.completion",choices:[{message:{role:"assistant",content:l,..._optionalChain([i, 'optionalAccess', _8 => _8.length])?{tool_calls:i}:{}},finish_reason:_optionalChain([i, 'optionalAccess', _9 => _9.length])?"tool_calls":"stop",index:0,logprobs:null}]}}return o}transformParamsRegular(e){let t=[],o=e.messages.filter(i=>i.role==="system"),n=e.messages.filter(i=>i.role==="user"||i.role==="assistant"),a=_optionalChain([o, 'optionalAccess', _10 => _10.length])?o.map(i=>i.content).join(` `):"";if(o.length&&console.warn("Anthropic does not support system messages - concatenating them all into a single 'system' property."),!e.max_tokens)throw new Error("max_tokens is required");return"tools"in e&&Array.isArray(e.tools)&&e.tools.length>0&&(t=e.tools.map(i=>({name:_nullishCoalesce(i.function.name, () => ("")),description:_nullishCoalesce(i.function.description, () => ("")),input_schema:{type:"object",...i.function.parameters}}))),{model:e.model,tools:t,system:_optionalChain([a, 'optionalAccess', _11 => _11.length])?a:void 0,messages:n,max_tokens:e.max_tokens,stop_sequences:e.stop?Array.isArray(e.stop)?e.stop:[e.stop]:void 0,temperature:_nullishCoalesce(e.temperature, () => (void 0)),top_p:_nullishCoalesce(e.top_p, () => (void 0)),top_k:_nullishCoalesce(e.n, () => (void 0)),stream:_nullishCoalesce(_optionalChain([e, 'optionalAccess', _12 => _12.stream]), () => (!1)),tool_choice:"tool_choice"in e&&typeof e.tool_choice=="object"&&"function"in e.tool_choice?{type:"tool",name:e.tool_choice.function.name}:void 
0}}async*streamChatCompletion(e){let t=null;for await(let o of e)switch(o.type){case"message_start":this.logger.log("debug",`Message start: ${o}`),t={id:o.message.id,object:"chat.completion.chunk",created:Date.now(),model:o.message.model,choices:[{index:0,delta:{role:"assistant"},finish_reason:null}],usage:{prompt_tokens:o.message.usage.input_tokens,completion_tokens:0,total_tokens:o.message.usage.input_tokens},originResponse:o.message},yield t;break;case"content_block_start":this.logger.log("debug",`Content block start: ${o}`);break;case"content_block_delta":t&&t.choices&&(o.delta.type==="text_delta"?t.choices[0].delta={content:o.delta.text,role:"assistant"}:o.delta.type==="input_json_delta"&&(t.choices[0].delta={content:o.delta.partial_json,role:"assistant"}),yield t);break;case"content_block_stop":this.logger.log("debug",`Content block stop: ${o}`);break;case"message_delta":t&&t.usage&&(t.usage.completion_tokens=_optionalChain([o, 'access', _13 => _13.usage, 'optionalAccess', _14 => _14.output_tokens])||0,t.usage.total_tokens=t.usage.prompt_tokens+t.usage.completion_tokens);break;case"message_stop":this.logger.log("debug",`Message stop: ${o}`),t&&t.choices&&(t.choices[0].finish_reason="stop",t.choices[0].delta={content:null,role:"assistant"},yield t);break;default:this.logger.log("warn",`Unknown event type: ${o}`)}}async create(e){try{let t=this.transformParamsRegular(e);if(e.stream){this.logger.log("debug","Starting streaming completion response");let o=await this.messages.stream({...t});return this.streamChatCompletion(o)}else{let o=await this.messages.create({...t,stream:!1});return await this.transformResponse(o)}}catch(t){throw this.logger.error(new Error("Error in Anthropic API request:",{cause:t})),t}}};var _generativeai = require('@google/generative-ai');var _server = require('@google/generative-ai/server');var f=class extends _generativeai.GoogleGenerativeAI{constructor(e){let t=_nullishCoalesce(_nullishCoalesce(_optionalChain([e, 'optionalAccess', _15 
=> _15.apiKey]), () => (_optionalChain([process, 'access', _16 => _16.env, 'optionalAccess', _17 => _17.GEMINI_API_KEY]))), () => (null));if(!t)throw new Error("API key is required for GeminiProvider - please provide it in the constructor or set it as an environment variable named GEMINI_API_KEY.");super(t);this.logLevel=_nullishCoalesce(_optionalChain([process, 'access', _18 => _18.env, 'optionalAccess', _19 => _19.LOG_LEVEL]), () => ("info"));this.activeChatSessions=new Map;this.chat={completions:{create:this.create.bind(this)}};this.cacheManager={create:this.createCacheManager.bind(this),get:async e=>await this.googleCacheManager.get(e),list:async()=>await this.googleCacheManager.list(),update:async(e,t)=>{let o=this.transformHistory(t.messages);return await this.googleCacheManager.update(e,{cachedContent:{contents:o}})},delete:async e=>await this.googleCacheManager.delete(e)};this.logLevel=_nullishCoalesce(_optionalChain([e, 'optionalAccess', _20 => _20.logLevel]), () => (this.logLevel)),this.apiKey=t,this.googleCacheManager=new (0, _server.GoogleAICacheManager)(t),this.logger=new d("GEMINI-CLIENT"),this.logger.addTransport(m)}cleanSchema(e){let{additionalProperties:t,_additionalProperties:o,...n}=e;return this.logger.log(this.logLevel,`Removing unsupported 'additionalProperties' from schema - ${JSON.stringify(_nullishCoalesce(t, () => ({})))}`),n.properties&&typeof n.properties=="object"&&(n.properties=Object.entries(n.properties).reduce((a,[i,l])=>(a[i]=typeof l=="object"&&l!==null?this.cleanSchema(l):l,a),{})),n.items&&typeof n.items=="object"&&n.items!==null&&(n.items=this.cleanSchema(n.items)),n}createGenerationConfig(e){return{temperature:_nullishCoalesce(e.temperature, () => (void 0)),topP:_nullishCoalesce(e.top_p, () => (void 0)),topK:_nullishCoalesce(e.n, () => (void 0)),maxOutputTokens:_nullishCoalesce(e.max_tokens, () => (void 0)),stopSequences:e.stop?Array.isArray(e.stop)?e.stop:[e.stop]:void 0,candidateCount:_nullishCoalesce(e.n, () => (void 
0))}}transformHistory(e){return e.map(t=>({role:t.role==="assistant"?"model":"user",parts:[{text:t.content.toString()}]}))}getModelConfig(e){let t=e.additionalProperties,o={model:_optionalChain([e, 'optionalAccess', _21 => _21.model]),safetySettings:_optionalChain([t, 'optionalAccess', _22 => _22.safetySettings]),generationConfig:_optionalChain([t, 'optionalAccess', _23 => _23.modelGenerationConfig])};return e.groundingThreshold!==void 0&&(o.tools=[{googleSearchRetrieval:{dynamicRetrievalConfig:{mode:_generativeai.DynamicRetrievalMode.MODE_DYNAMIC,dynamicThreshold:e.groundingThreshold}}}]),e.systemInstruction&&(o.systemInstruction=e.systemInstruction),o}async getChatSession(e){let t=e.additionalProperties,o=_optionalChain([t, 'optionalAccess', _24 => _24.sessionId]);if(o&&this.activeChatSessions.has(o))return this.activeChatSessions.get(o);let n=this.createGenerationConfig(e),a=this.transformHistory(e.messages),i;if(_optionalChain([t, 'optionalAccess', _25 => _25.cacheName])){let r=await this.googleCacheManager.get(t.cacheName);i=this.getGenerativeModelFromCachedContent(r)}else i=this.getGenerativeModel(this.getModelConfig(e));let l={generationConfig:n,history:a};if(_optionalChain([e, 'access', _26 => _26.tools, 'optionalAccess', _27 => _27.length])){let r=e.tools.map(p=>({name:_nullishCoalesce(p.function.name, () => ("")),description:_nullishCoalesce(p.function.description, () => ("")),parameters:{type:"object",...p.function.parameters?this.cleanSchema(p.function.parameters):{}}})),h=e.tool_choice;l.tools=[{functionDeclarations:r}],_optionalChain([h, 'optionalAccess', _28 => _28.type])==="function"&&(l.toolConfig={functionCallingConfig:{mode:_generativeai.FunctionCallingMode.ANY,allowedFunctionNames:[h.function.name]}})}let c=i.startChat(l);return o&&this.activeChatSessions.set(o,c),c}transformResponse(e,t){let o=e.text(),n=_nullishCoalesce(_optionalChain([e, 'access', _29 => _29.candidates, 'optionalAccess', _30 => _30[0], 'optionalAccess', _31 => _31.content, 
'optionalAccess', _32 => _32.parts, 'optionalAccess', _33 => _33.flatMap, 'call', _34 => _34(r=>r.functionCall?[{index:0,id:`call_${Math.random().toString(36).slice(2)}`,function:{name:r.functionCall.name,arguments:JSON.stringify(r.functionCall.args)},type:"function"}]:[])]), () => ([])),a=_optionalChain([e, 'access', _35 => _35.candidates, 'optionalAccess', _36 => _36[0], 'optionalAccess', _37 => _37.groundingMetadata]),i=o,l=[];a&&(l=_nullishCoalesce(_optionalChain([a, 'access', _38 => _38.groundingChunks, 'optionalAccess', _39 => _39.map, 'call', _40 => _40(r=>({url:_nullishCoalesce(_optionalChain([r, 'access', _41 => _41.web, 'optionalAccess', _42 => _42.uri]), () => ("")),title:_nullishCoalesce(_optionalChain([r, 'access', _43 => _43.web, 'optionalAccess', _44 => _44.title]), () => (""))}))]), () => ([])),_optionalChain([a, 'access', _45 => _45.groundingSupports, 'optionalAccess', _46 => _46.length])&&(i+=` **Grounded Segments** `,a.groundingSupports.forEach(r=>{let p=r.groundingChunkIndices.map(y=>_optionalChain([l, 'access', _47 => _47[y], 'optionalAccess', _48 => _48.title])).filter(Boolean).join(", "),v=r.confidenceScores.reduce((y,b)=>y+b,0)/r.confidenceScores.length;i+=`> "${r.segment.text}" `,i+=`> Sources: ${p} (Confidence: ${(v*100).toFixed(1)}%) `})),l.length>0&&(i+=` **Grounding Sources** `,l.forEach(r=>{i+=`- [${r.title}](${r.url}) `})));let c={id:`chatcmpl-${Math.random().toString(36).slice(2)}`,object:t.stream?"chat.completion.chunk":"chat.completion",created:Date.now(),model:t.model,system_fingerprint:void 0,originResponse:e,choices:[{index:0,message:{role:"assistant",content:n.length?"":i,...n.length?{tool_calls:n}:{}},...a?{grounding_metadata:{search_queries:a.webSearchQueries,sources:l,search_suggestion_html:_optionalChain([a, 'access', _49 => _49.searchEntryPoint, 'optionalAccess', _50 => _50.renderedContent]),supports:_optionalChain([a, 'access', _51 => _51.groundingSupports, 'optionalAccess', _52 => _52.map, 'call', _53 => 
_53(r=>({text:r.segment.text,sources:r.groundingChunkIndices.map(h=>l[h]),confidence:r.confidenceScores}))])}}:{},finish_reason:_nullishCoalesce(_optionalChain([e, 'access', _54 => _54.candidates, 'optionalAccess', _55 => _55[0], 'optionalAccess', _56 => _56.finishReason, 'optionalAccess', _57 => _57.toLowerCase, 'call', _58 => _58()]), () => (null)),logprobs:null}]};return t.stream?{...c,choices:[{...c.choices[0],delta:c.choices[0].message}]}:c}async*streamChatCompletion(e,t){for await(let o of e)yield this.transformResponse(o,t)}async create(e){try{if(!_optionalChain([e, 'optionalAccess', _59 => _59.model])||!_optionalChain([e, 'optionalAccess', _60 => _60.messages, 'optionalAccess', _61 => _61.length]))throw new Error("model and messages are required");let t=await this.getChatSession(e),o=e.messages[e.messages.length-1];if(_optionalChain([e, 'optionalAccess', _62 => _62.stream])){this.logger.log(this.logLevel,"Starting streaming completion response");let n=await t.sendMessageStream(o.content.toString());return this.streamChatCompletion(n.stream,e)}else{let n=await t.sendMessage(o.content.toString());if(!_optionalChain([n, 'optionalAccess', _63 => _63.response]))throw new Error("Chat response failed");let a=this.transformResponse(n.response,e);return a.model=e.model,a}}catch(t){throw this.logger.log(this.logLevel,new Error("Error in Google API request:",{cause:t})),t}}async createCacheManager(e){let t=this.transformHistory(e.messages);try{return await this.googleCacheManager.create({ttlSeconds:e.ttlSeconds,model:e.model,contents:t})}catch(o){let n=o;throw o instanceof _generativeai.GoogleGenerativeAIError&&(n=new Error("Failed to create Gemini cache manager, ensure your API key supports caching (i.e. 
pay-as-you-go)"),n.stack=o.stack),this.logger.log(this.logLevel,n),o}}};var _openai = require('openai');var C=class extends _openai.OpenAI{constructor(s){super(s)}};var _=class{constructor(s){switch(_optionalChain([s, 'optionalAccess', _64 => _64.provider])){case"anthropic":this.providerInstance=new u(s);break;case"google":this.providerInstance=new f(s);break;case"openai":default:this.providerInstance=new C(s)}let e={get:(t,o,n)=>{if(o in t)return Reflect.get(t,o,n)}};this.providerInstance=new Proxy(this.providerInstance,e)}getProviderInstance(){return this.providerInstance}};function ne(g={provider:"openai"}){return new _(g).getProviderInstance()}exports.LLMClient = _; exports.createLLMClient = ne; //# sourceMappingURL=index.cjs.map