@adaline/openai
Adaline OpenAI
JavaScript
'use strict';
var zod = require('zod');
var provider = require('@adaline/provider');
var types = require('@adaline/types');
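// Overview (inferred from the readable identifiers in the minified body below): this bundle
// defines OpenAI chat/embedding config builders, a USD pricing table keyed by model name,
// a provider class (default base URL https://api.openai.com/v1) exposing chatModel() and
// embeddingModel() factories, a base chat-model class that builds /chat/completions request
// bodies and parses complete and streaming responses, per-model schemas (gpt-3.5-turbo
// through gpt-5 and the o1/o3/o4 series), and an embedding-model base class.
//
// Minimal usage sketch, kept as a comment so the bundle itself is unchanged. The `OpenAI`
// export name is assumed from the package name; the option names ({ modelName, apiKey,
// baseUrl?, organization?, ... }) and the (config, messages, tools) argument order come
// from the zod schemas and method signatures visible below. Run inside an async function.
//
//   const { OpenAI } = require('@adaline/openai');
//   const openai = new OpenAI();
//   const gpt4o = openai.chatModel({
//     modelName: 'gpt-4o',
//     apiKey: process.env.OPENAI_API_KEY,
//   });
//   // config, messages, and tools are Adaline types (types.Config / types.Message / types.Tool)
//   const body = await gpt4o.getCompleteChatData(config, messages, tools); // POST body for /chat/completions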
var Nn=Object.defineProperty,zn=Object.defineProperties;var Bn=Object.getOwnPropertyDescriptors;var yt=Object.getOwnPropertySymbols;var Un=Object.prototype.hasOwnProperty,jn=Object.prototype.propertyIsEnumerable;var X=(n,e)=>(e=Symbol[n])?e:Symbol.for("Symbol."+n),Fn=n=>{throw TypeError(n)};var Ot=(n,e,t)=>e in n?Nn(n,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):n[e]=t,P=(n,e)=>{for(var t in e||(e={}))Un.call(e,t)&&Ot(n,t,e[t]);if(yt)for(var t of yt(e))jn.call(e,t)&&Ot(n,t,e[t]);return n},q=(n,e)=>zn(n,Bn(e));var A=(n,e,t)=>new Promise((o,s)=>{var a=l=>{try{r(t.next(l));}catch(g){s(g);}},m=l=>{try{r(t.throw(l));}catch(g){s(g);}},r=l=>l.done?o(l.value):Promise.resolve(l.value).then(a,m);r((t=t.apply(n,e)).next());}),Ct=function(n,e){this[0]=n,this[1]=e;},co=(n,e,t)=>{var o=(m,r,l,g)=>{try{var T=t[m](r),b=(r=T.value)instanceof Ct,O=T.done;Promise.resolve(b?r[0]:r).then(C=>b?o(m==="return"?m:"next",r[1]?{done:C.done,value:C.value}:C,l,g):l({value:C,done:O})).catch(C=>o("throw",C,l,g));}catch(C){g(C);}},s=m=>a[m]=r=>new Promise((l,g)=>o(m,r,l,g)),a={};return t=t.apply(n,e),a[X("asyncIterator")]=()=>a,s("next"),s("throw"),s("return"),a},bt=n=>{var e=n[X("asyncIterator")],t=!1,o,s={};return e==null?(e=n[X("iterator")](),o=a=>s[a]=m=>e[a](m)):(e=e.call(n),o=a=>s[a]=m=>{if(t){if(t=!1,a==="throw")throw m;return m}return t=!0,{done:!1,value:new Ct(new Promise(r=>{var l=e[a](m);l instanceof Object||Fn("Object expected"),r(l);}),1)}}),s[X("iterator")]=()=>s,o("next"),"throw"in e?o("throw"):s.throw=a=>{throw a},"return"in e&&o("return"),s};var uo=provider.RangeConfigItem({param:"temperature",title:provider.CHAT_CONFIG.TEMPERATURE.title,description:provider.CHAT_CONFIG.TEMPERATURE.description,min:0,max:2,step:.01,default:1}),ho=n=>provider.RangeConfigItem({param:"max_completion_tokens",title:provider.CHAT_CONFIG.MAX_TOKENS.title,description:provider.CHAT_CONFIG.MAX_TOKENS.description,min:0,max:n,step:1,default:0}),fo=n=>provider.MultiStringConfigItem({param:"stop",title:provider.CHAT_CONFIG.STOP(n).title,description:provider.CHAT_CONFIG.STOP(n).description,max:n}),_o=provider.RangeConfigItem({param:"top_p",title:provider.CHAT_CONFIG.TOP_P.title,description:provider.CHAT_CONFIG.TOP_P.description,min:0,max:1,step:.01,default:1}),To=provider.RangeConfigItem({param:"frequency_penalty",title:provider.CHAT_CONFIG.FREQUENCY_PENALTY.title,description:provider.CHAT_CONFIG.FREQUENCY_PENALTY.description,min:-2,max:2,step:.01,default:0}),go=provider.RangeConfigItem({param:"presence_penalty",title:provider.CHAT_CONFIG.PRESENCE_PENALTY.title,description:provider.CHAT_CONFIG.PRESENCE_PENALTY.description,min:-2,max:2,step:.01,default:0}),Mo=provider.RangeConfigItem({param:"seed",title:provider.CHAT_CONFIG.SEED.title,description:provider.CHAT_CONFIG.SEED.description,min:0,max:1e6,step:1,default:0}),yo=provider.SelectBooleanConfigItem({param:"logprobs",title:provider.CHAT_CONFIG.LOG_PROBS.title,description:provider.CHAT_CONFIG.LOG_PROBS.description,default:!1}),Oo=provider.RangeConfigItem({param:"top_logprobs",title:provider.CHAT_CONFIG.TOP_LOG_PROBS.title,description:provider.CHAT_CONFIG.TOP_LOG_PROBS.description,min:0,max:20,step:1,default:0}),Co=provider.SelectStringConfigItem({param:"tool_choice",title:"Tool choice",description:"Controls which (if any) tool is called by the model. 'none' means the model will not call a function. 
'auto' means the model can pick between generating a message or calling a tool.",default:"auto",choices:["auto","required","none"]});var F=(n,e)=>zod.z.object({temperature:uo.schema,maxTokens:ho(n).schema,stop:fo(e).schema,topP:_o.schema,frequencyPenalty:To.schema,presencePenalty:go.schema,seed:Mo.schema.transform(t=>t===0?void 0:t),logProbs:yo.schema,topLogProbs:Oo.schema,toolChoice:Co.schema}),$=(n,e)=>({temperature:uo.def,maxTokens:ho(n).def,stop:fo(e).def,topP:_o.def,frequencyPenalty:To.def,presencePenalty:go.def,seed:Mo.def,logProbs:yo.def,topLogProbs:Oo.def,toolChoice:Co.def});var Pt=provider.ObjectSchemaConfigItem({param:"response_schema",title:provider.CHAT_CONFIG.RESPONSE_SCHEMA.title,description:provider.CHAT_CONFIG.RESPONSE_SCHEMA.description,objectSchema:types.ResponseSchema}),St=provider.SelectStringConfigItem({param:"response_format",title:provider.CHAT_CONFIG.RESPONSE_FORMAT_WITH_SCHEMA.title,description:provider.CHAT_CONFIG.RESPONSE_FORMAT_WITH_SCHEMA.description,default:"text",choices:["text","json_object","json_schema"]}),ee=(n,e)=>q(P({},$(n,e)),{responseFormat:St.def,responseSchema:Pt.def}),oe=(n,e)=>F(n,e).extend({responseFormat:St.schema,responseSchema:Pt.schema});var xt=provider.RangeConfigItem({param:"temperature",title:provider.CHAT_CONFIG.TEMPERATURE.title,description:provider.CHAT_CONFIG.TEMPERATURE.description,min:1,max:1,step:.01,default:1}),Rt=provider.SelectStringConfigItem({param:"reasoning_effort",title:"Reasoning Effort",description:"Constrains effort on reasoning for reasoning models. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.",default:"medium",choices:["low","medium","high"]}),At=(n,e)=>q(P({},ee(n,e)),{temperature:xt.def,reasoningEffort:Rt.def}),Et=(n,e)=>oe(n,e).extend({temperature:xt.schema,reasoningEffort:Rt.schema});var kt=provider.SelectStringConfigItem({param:"response_format",title:provider.CHAT_CONFIG.RESPONSE_FORMAT.title,description:provider.CHAT_CONFIG.RESPONSE_FORMAT.description,default:"text",choices:["text","json_object"]}),wt=(n,e)=>q(P({},$(n,e)),{responseFormat:kt.def}),vt=(n,e)=>F(n,e).extend({responseFormat:kt.schema});var bo=provider.SelectStringConfigItem({param:"encoding_format",title:"Encoding format",description:"Select the encoding format for the word embedding.",default:"float",choices:["float","base64"]}),Po=n=>provider.RangeConfigItem({param:"dimensions",title:"Dimensions",description:"Select the number of dimensions for the word embedding.",min:1,max:n,step:1,default:n});var te=()=>zod.z.object({encodingFormat:bo.schema}),ne=()=>({encodingFormat:bo.def});var Lt=n=>te().extend({dimensions:Po(n).schema}),Dt=n=>q(P({},ne()),{dimensions:Po(n).def});var i={base:(n,e)=>({def:$(n,e),schema:F(n,e)}),responseFormat:(n,e)=>({def:wt(n,e),schema:vt(n,e)}),responseSchema:(n,e)=>({def:ee(n,e),schema:oe(n,e)}),oSeries:(n,e)=>({def:At(n,e),schema:Et(n,e)})},v={base:()=>({def:ne(),schema:te()}),dimensions:n=>({def:Dt(n),schema:Lt(n)})};var 
_={"gpt-3.5-turbo-0125":{modelName:"gpt-3.5-turbo-0125",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-3.5-turbo-1106":{modelName:"gpt-3.5-turbo-1106",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-3.5-turbo":{modelName:"gpt-3.5-turbo",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-4-0125-preview":{modelName:"gpt-4-0125-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-0613":{modelName:"gpt-4-0613",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-1106-preview":{modelName:"gpt-4-1106-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-turbo-2024-04-09":{modelName:"gpt-4-turbo-2024-04-09",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4-turbo-preview":{modelName:"gpt-4-turbo-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4-turbo":{modelName:"gpt-4-turbo",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4":{modelName:"gpt-4",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4o-2024-05-13":{modelName:"gpt-4o-2024-05-13",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:5,outputPricePerMillion:20}}}]},"gpt-4o-2024-08-06":{modelName:"gpt-4o-2024-08-06",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:2.5,outputPricePerMillion:10}}}]},"gpt-4o-mini-2024-07-18":{modelName:"gpt-4o-mini-2024-07-18",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.6,outputPricePerMillion:2.4}}}]},"gpt-4o-mini":{modelName:"gpt-4o-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.6,outputPricePerMillion:2.4}}}]},"gpt-4o":{modelName:"gpt-4o",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:5,outputPricePerMillion:20}}}]},"o1-2024-12-17":{modelName:"o1-2024-12-17",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:15,outputPricePerMillion:60}}}]},o1:{modelName:"o1",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:15,outputPricePerMillion:60}}}]},"o3-mini-2025-01-31":{modelName:"o3-mini-2025-01-31",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o3-mini":{modelName:"o3-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o3-2025-04-16":{modelName:"o3-2025-04-16",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:40}}}]},o3:{modelName:"o3",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:40}}}]},"o4-mini-2025-04-16":{model
Name:"o4-mini-2025-04-16",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o4-mini":{modelName:"o4-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"gpt-4.1":{modelName:"gpt-4.1",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:2,outputPricePerMillion:8}}}]},"gpt-4.1-mini":{modelName:"gpt-4.1-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.4,outputPricePerMillion:1.6}}}]},"gpt-4.1-nano":{modelName:"gpt-4.1-nano",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.1,outputPricePerMillion:.4}}}]},"gpt-5":{modelName:"gpt-5",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.25,outputPricePerMillion:10}}}]},"gpt-5-mini":{modelName:"gpt-5-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.25,outputPricePerMillion:2}}}]},"gpt-5-nano":{modelName:"gpt-5-nano",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.05,outputPricePerMillion:.4}}}]},"gpt-5-chat-latest":{modelName:"gpt-5-chat-latest",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.25,outputPricePerMillion:10}}}]}};var ss="openai",B=class{constructor(){this.version="v1";this.name=ss;this.chatModelFactories={[pe]:{model:le,modelOptions:Bt,modelSchema:xo},[se]:{model:ie,modelOptions:Nt,modelSchema:So},[ae]:{model:re,modelOptions:zt,modelSchema:Io},[me]:{model:de,modelOptions:Ut,modelSchema:Ro},[ce]:{model:ue,modelOptions:jt,modelSchema:Ao},[he]:{model:fe,modelOptions:Ft,modelSchema:Eo},[_e]:{model:Te,modelOptions:$t,modelSchema:Go},[ge]:{model:Me,modelOptions:Ht,modelSchema:ko},[ye]:{model:Oe,modelOptions:Vt,modelSchema:wo},[Ce]:{model:be,modelOptions:Kt,modelSchema:vo},[Pe]:{model:Se,modelOptions:Jt,modelSchema:Lo},[Ie]:{model:xe,modelOptions:Wt,modelSchema:Do},[Re]:{model:Ae,modelOptions:Yt,modelSchema:qo},[Ee]:{model:Ge,modelOptions:Qt,modelSchema:No},[ke]:{model:we,modelOptions:Xt,modelSchema:zo},[ve]:{model:Le,modelOptions:Zt,modelSchema:Bo},[De]:{model:qe,modelOptions:en,modelSchema:Uo},[Be]:{model:Ue,modelOptions:tn,modelSchema:Fo},[$e]:{model:He,modelOptions:sn,modelSchema:Ho},[Ve]:{model:Ke,modelOptions:an,modelSchema:Vo},[je]:{model:Fe,modelOptions:nn,modelSchema:$o},[Ne]:{model:ze,modelOptions:on,modelSchema:jo},[Ye]:{model:Qe,modelOptions:pn,modelSchema:Jo},[Je]:{model:We,modelOptions:rn,modelSchema:Ko},[ot]:{model:oo,modelOptions:cn,modelSchema:tt},[Zo]:{model:eo,modelOptions:dn,modelSchema:et},[Wo]:{model:Xe,modelOptions:ln,modelSchema:Yo},[Qo]:{model:Ze,modelOptions:mn,modelSchema:Xo},[nt]:{model:to,modelOptions:un,modelSchema:st},[it]:{model:no,modelOptions:hn,modelSchema:at}};this.embeddingModelFactories={[rt]:{model:so,modelOptions:fn,modelSchema:pt},[lt]:{model:io,modelOptions:_n,modelSchema:mt},[dt]:{model:ao,modelOptions:Tn,modelSchema:ct}};}chatModelLiterals(){return Object.keys(this.chatModelFactories)}chatModelSchemas(){return Object.keys(this.chatModelFactories).reduce((e,t)=>(e[t]=this.chatModelFactories[t].modelSchema,e),{})}chatModel(e){let t=e.modelName;if(!(t in this.chatModelFactories))throw new provider.ProviderError({info:`OpenAI chat model: ${t} not found`,cause:new Error(`OpenAI chat model: ${t} not found, available chat models:
[${this.chatModelLiterals().join(", ")}]`)});let o=this.chatModelFactories[t].model,s=this.chatModelFactories[t].modelOptions.parse(e);return new o(s)}embeddingModelLiterals(){return Object.keys(this.embeddingModelFactories)}embeddingModelSchemas(){return Object.keys(this.embeddingModelFactories).reduce((e,t)=>(e[t]=this.embeddingModelFactories[t].modelSchema,e),{})}embeddingModel(e){let t=e.modelName;if(!(t in this.embeddingModelFactories))throw new provider.ProviderError({info:`OpenAI embedding model: ${t} not found`,cause:new Error(`OpenAI embedding model: ${t} not found, available embedding models:
[${this.embeddingModelLiterals().join(", ")}]`)});let o=this.embeddingModelFactories[t].model,s=this.embeddingModelFactories[t].modelOptions.parse(e);return new o(s)}};B.baseUrl="https://api.openai.com/v1";var h=zod.z.enum([types.SystemRoleLiteral,types.UserRoleLiteral,types.AssistantRoleLiteral,types.ToolRoleLiteral]),f={system:types.SystemRoleLiteral,user:types.UserRoleLiteral,assistant:types.AssistantRoleLiteral,tool:types.ToolRoleLiteral};var M=[types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],y=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]),Ja=[types.TextModalityLiteral],Wa=zod.z.enum([types.TextModalityLiteral]),I=[types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],x=zod.z.enum([types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]);var lo=zod.z.object({token:zod.z.string(),logprob:zod.z.number(),bytes:zod.z.array(zod.z.number()).nullable()}),bn=zod.z.object({content:zod.z.array(lo.extend({top_logprobs:zod.z.array(lo)})).nullable().optional(),refusal:zod.z.array(lo.extend({top_logprobs:zod.z.array(lo)})).nullable().optional()}).nullable(),as=zod.z.array(zod.z.object({id:zod.z.string().min(1),type:zod.z.enum(["function"]),function:zod.z.object({name:zod.z.string(),arguments:zod.z.string()})})),Pn=zod.z.object({id:zod.z.string(),object:zod.z.literal("chat.completion"),created:zod.z.number(),model:zod.z.string(),system_fingerprint:zod.z.string().nullable(),choices:zod.z.array(zod.z.object({index:zod.z.number(),message:zod.z.object({role:zod.z.string(),content:zod.z.string().nullable().optional(),tool_calls:as.optional(),refusal:zod.z.string().nullable().optional()}),logprobs:bn.optional(),finish_reason:zod.z.string()})),usage:zod.z.object({prompt_tokens:zod.z.number(),completion_tokens:zod.z.number(),total_tokens:zod.z.number()})}),rs=zod.z.array(zod.z.object({index:zod.z.number().int(),id:zod.z.string().min(1).optional(),type:zod.z.enum(["function"]).optional(),function:zod.z.object({name:zod.z.string().min(1).optional(),arguments:zod.z.string().optional()}).optional()})),Sn=zod.z.object({id:zod.z.string(),object:zod.z.string(),created:zod.z.number(),model:zod.z.string(),system_fingerprint:zod.z.string().nullable().optional(),choices:zod.z.array(zod.z.object({index:zod.z.number(),delta:zod.z.object({content:zod.z.string().nullable().optional(),tool_calls:rs.optional(),refusal:zod.z.string().nullable().optional()}).or(zod.z.object({})),logprobs:bn.optional(),finish_reason:zod.z.string().nullable()})),usage:zod.z.object({prompt_tokens:zod.z.number(),completion_tokens:zod.z.number(),total_tokens:zod.z.number()}).nullable().optional()});var 
ps=zod.z.object({type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1).optional(),strict:zod.z.boolean().optional(),parameters:zod.z.any()})}),ls=zod.z.enum(["none","auto","required"]),ms=zod.z.object({type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1)})}),ds=zod.z.object({type:zod.z.enum(["text","json_object"])}).or(zod.z.object({type:zod.z.literal("json_schema"),json_schema:zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1).optional(),strict:zod.z.boolean().optional(),schema:zod.z.any()})})),ht=zod.z.object({text:zod.z.string().min(1),type:zod.z.literal("text")}),cs=zod.z.object({type:zod.z.literal("image_url"),image_url:zod.z.object({url:zod.z.string().url().min(1),detail:zod.z.enum(["low","high","auto"]).optional()})}),us=zod.z.object({id:zod.z.string().min(1),type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1),arguments:zod.z.string().min(1)})}),hs=zod.z.object({role:zod.z.literal("system"),content:zod.z.string().min(1).or(zod.z.array(ht).min(1))}),fs=zod.z.object({role:zod.z.literal("user"),content:zod.z.string().min(1).or(zod.z.array(zod.z.union([ht,cs])).min(1))}),_s=zod.z.object({role:zod.z.literal("assistant"),content:zod.z.string().min(1).or(zod.z.array(ht).min(1)).optional(),tool_calls:zod.z.array(us).min(1).optional()}),Ts=zod.z.object({role:zod.z.literal("tool"),tool_call_id:zod.z.string().min(1),content:zod.z.string().min(1)}),gs=zod.z.union([hs,fs,_s,Ts]),In=zod.z.object({model:zod.z.string().min(1).optional(),messages:zod.z.array(gs).min(1),frequency_penalty:zod.z.number().min(-2).max(2).nullable().optional(),logprobs:zod.z.boolean().nullable().optional(),top_logprobs:zod.z.number().min(0).max(20).nullable().optional(),max_completion_tokens:zod.z.number().min(0).nullable().optional(),presence_penalty:zod.z.number().min(-2).max(2).nullable().optional(),response_format:ds.optional(),seed:zod.z.number().nullable().optional(),stop:zod.z.string().or(zod.z.array(zod.z.string()).max(4)).nullable().optional(),temperature:zod.z.number().min(0).max(2).nullable().optional(),top_p:zod.z.number().min(0).max(1).nullable().optional(),tools:zod.z.array(ps).optional(),tool_choice:ls.or(ms).optional()});var u=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),completeChatUrl:zod.z.string().url().optional(),streamChatUrl:zod.z.string().url().optional(),organization:zod.z.string().optional()}),d=class{constructor(e,t){this.version="v1";let o=u.parse(t);this.modelSchema=e,this.modelName=o.modelName,this.apiKey=o.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash(o.baseUrl||B.baseUrl),this.streamChatUrl=provider.urlWithoutTrailingSlash(o.streamChatUrl||`${this.baseUrl}/chat/completions`),this.completeChatUrl=provider.urlWithoutTrailingSlash(o.completeChatUrl||`${this.baseUrl}/chat/completions`),this.organization=o.organization;}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return P({Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"},this.organization?{"OpenAI-Organization":this.organization}:{})}getDefaultParams(){return {model:this.modelName}}getRetryDelay(e){let t=r=>{let l=/(\d+)(h|m|s|ms)/g,g={h:36e5,m:6e4,s:1e3,ms:1},T,b=0;for(;(T=l.exec(r))!==null;){let O=parseInt(T[1]),C=T[2];b+=O*g[C];}return b},o=0,s=0,a=!0;e["x-ratelimit-reset-requests"]&&(o=t(e["x-ratelimit-reset-requests"])),e["x-ratelimit-reset-tokens"]&&(s=t(e["x-ratelimit-reset-tokens"]));let 
m=Math.max(o,s);return {shouldRetry:a,delayMs:m}}getTokenCount(e){return e.reduce((t,o)=>t+o.content.map(s=>s.modality==="text"?s.value:"").join(" ").length,0)}transformModelRequest(e){let t=In.safeParse(e);if(!t.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:t.error});let o=t.data,s=o.model;if(o.tool_choice&&(!o.tools||o.tools.length===0))throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'tool_choice' is specified")});let a={};o.response_format&&(a.responseFormat=o.response_format.type,o.response_format.type==="json_schema"&&(a.responseSchema={name:o.response_format.json_schema.name,description:o.response_format.json_schema.description||"",strict:o.response_format.json_schema.strict,schema:o.response_format.json_schema.schema})),o.tool_choice&&(typeof o.tool_choice=="string"?a.toolChoice=o.tool_choice:a.toolChoice=o.tool_choice.function.name),a.seed=o.seed,a.maxTokens=o.max_completion_tokens,a.temperature=o.temperature,a.topP=o.top_p,a.presencePenalty=o.presence_penalty,a.frequencyPenalty=o.frequency_penalty,a.stop=o.stop,a.logProbs=o.logprobs,a.topLogProbs=o.top_logprobs;let m=types.Config().parse(provider.removeUndefinedEntries(a)),r=[],l={};o.messages.forEach(T=>{let b=T.role;switch(b){case"system":{let O=T.content;if(typeof O=="string")r.push({role:b,content:[{modality:types.TextModalityLiteral,value:O}]});else {let C=O.map(S=>({modality:types.TextModalityLiteral,value:S.text}));r.push({role:b,content:C});}}break;case"user":{let O=T.content;if(typeof O=="string")r.push({role:b,content:[{modality:types.TextModalityLiteral,value:O}]});else {let C=O.map(S=>S.type==="text"?{modality:types.TextModalityLiteral,value:S.text}:S.image_url.url.startsWith("data:")?{modality:types.ImageModalityLiteral,detail:S.image_url.detail||"auto",value:{type:types.Base64ImageContentTypeLiteral,base64:S.image_url.url,mediaType:provider.getMimeTypeFromBase64(S.image_url.url)}}:{modality:types.ImageModalityLiteral,detail:S.image_url.detail||"auto",value:{type:types.UrlImageContentTypeLiteral,url:S.image_url.url}});r.push({role:b,content:C});}}break;case"assistant":{let O=[];if(!T.content&&!T.tool_calls)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("one of'content' or 'tool_calls' must be provided")});if(T.content){let C=T.content;typeof C=="string"?O.push({modality:types.TextModalityLiteral,value:C}):C.forEach(S=>{O.push({modality:types.TextModalityLiteral,value:S.text});});}T.tool_calls&&T.tool_calls.forEach((S,E)=>{let D={modality:types.ToolCallModalityLiteral,id:S.id,index:E,name:S.function.name,arguments:S.function.arguments};O.push(D),l[D.id]=D;}),r.push({role:b,content:O});}break;case"tool":{let O=T;r.push({role:b,content:[{modality:types.ToolResponseModalityLiteral,id:O.tool_call_id,index:l[O.tool_call_id].index,name:l[O.tool_call_id].name,data:O.content}]});}break}});let g=[];return o.tools&&o.tools.forEach(T=>{g.push({type:"function",definition:{schema:{name:T.function.name,description:T.function.description||"",strict:T.function.strict,parameters:T.function.parameters}}});}),{modelName:s,config:m,messages:r,tools:g.length>0?g:void 0}}transformConfig(e,t,o){let s=e.toolChoice;delete e.toolChoice;let a=this.modelSchema.config.schema.safeParse(e);if(!a.success)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:a.error});let m=a.data;s!==void 
0&&(m.toolChoice=s),Object.keys(m).forEach(l=>{if(!(l in this.modelSchema.config.def))throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${l}',
available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})});let r=Object.keys(m).reduce((l,g)=>{let T=this.modelSchema.config.def[g],b=T.param,O=m[g];return b==="max_completion_tokens"&&T.type==="range"&&O===0?l[b]=T.max:l[b]=O,l},{});if(r.top_logprobs&&!r.logprobs)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'logprobs' must be 'true' when 'top_logprobs' is specified")});if("tool_choice"in r&&r.tool_choice!==void 0){let l=r.tool_choice;if(!o||o&&o.length===0)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'toolChoice' is specified")});if(o&&o.length>0){let g=this.modelSchema.config.def.toolChoice;if(!g.choices.includes(l))if(o.map(T=>T.definition.schema.name).includes(l))r.tool_choice={type:"function",function:{name:l}};else throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`toolChoice : '${l}' is not part of provided 'tools' names or
one of [${g.choices.join(", ")}]`)})}}if("response_format"in r&&r.response_format!==void 0){let l=r.response_format;if(l==="json_schema")if("response_schema"in r)r.response_format={type:"json_schema",json_schema:r.response_schema},delete r.response_schema;else throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'responseSchema' is required in config when 'responseFormat' is 'json_schema'")});else r.response_format={type:l};}return r}transformMessages(e){if(!e||e&&e.length===0)return {messages:[]};let t=e.map(s=>{let a=types.Message().safeParse(s);if(!a.success)throw new provider.InvalidMessagesError({info:"Invalid messages",cause:a.error});return a.data});return t.forEach(s=>{s.content.forEach(a=>{if(!this.modelSchema.modalities.includes(a.modality))throw new provider.InvalidMessagesError({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support modality : '${a.modality}',
available modalities : [${this.modelSchema.modalities.join(", ")}]`)})});}),t.forEach(s=>{if(!Object.keys(this.modelSchema.roles).includes(s.role))throw new provider.InvalidMessagesError({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support role : '${s.role}',
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}),{messages:t.map(s=>{switch(s.role){case types.SystemRoleLiteral:{let a=[];return s.content.forEach(m=>{if(m.modality===types.TextModalityLiteral)a.push({type:"text",text:m.value});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${s.role}' cannot have content with modality : '${m.modality}'`)})}),{role:this.modelSchema.roles[s.role],content:a}}case types.AssistantRoleLiteral:{let a=[],m=[];return s.content.forEach(r=>{if(r.modality===types.TextModalityLiteral)a.push({type:"text",text:r.value});else if(r.modality===types.ToolCallModalityLiteral)m.push({id:r.id,type:"function",function:{name:r.name,arguments:r.arguments}});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${s.role}' cannot have content with modality : '${r.modality}'`)})}),P({role:this.modelSchema.roles[s.role],content:a},m.length>0?{tool_calls:m}:{})}case types.UserRoleLiteral:{let a=[],m=[];s.content.forEach(l=>{if(l.modality===types.TextModalityLiteral)a.push({type:"text",text:l.value});else if(l.modality===types.ImageModalityLiteral)m.push({type:"image_url",image_url:{url:l.value.type==="url"?l.value.url:l.value.base64,detail:l.detail}});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${s.role}' cannot have content with modality : '${l.modality}'`)})});let r=[...a,...m];return {role:this.modelSchema.roles[s.role],content:r}}case types.ToolRoleLiteral:{if(s.content.length!==1)throw new provider.InvalidMessagesError({info:`Invalid message for role : '${s.role}'`,cause:new Error(`role : '${s.role}' must have exactly one content item`)});if(s.content[0].modality!==types.ToolResponseModalityLiteral)throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${s.role}' must have content with modality : '${types.ToolResponseModalityLiteral}'`)});let a=s.content[0];return {role:this.modelSchema.roles[s.role],tool_call_id:a.id,content:a.data}}default:throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`role : '${s.role}' is not supported,
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}})}}transformTools(e){if(!this.modelSchema.modalities.includes(types.ToolCallModalityLiteral))throw new provider.InvalidToolsError({info:`Invalid tool 'modality' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' does not support tool modality : '${types.ToolCallModalityLiteral}'`)});return !e||e&&e.length===0?{tools:[]}:{tools:e.map(s=>{let a=types.Tool().safeParse(s);if(!a.success)throw new provider.InvalidToolsError({info:"Invalid tools",cause:a.error});return a.data}).map(s=>({type:"function",function:s.definition.schema}))}}getCompleteChatUrl(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.completeChatUrl);})})}getCompleteChatHeaders(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getCompleteChatData(e,t,o){return A(this,null,function*(){let s=this.transformConfig(e,t,o),a=this.transformMessages(t);if(a.messages&&a.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let m=o?this.transformTools(o):{};return new Promise(r=>{r(P(P(P(P({},this.getDefaultParams()),s),a),m));})})}transformCompleteChatResponse(e){let t=Pn.safeParse(e);if(t.success){if(t.data.choices.length===0)throw new provider.ModelResponseError({info:"Invalid response from model",cause:new Error(`No choices in response : ${JSON.stringify(t.data)}`)});let o=t.data,s=[{role:types.AssistantRoleLiteral,content:[]}],a=o.choices[0].message;a.content&&s[0].content.push(types.createTextContent(a.content)),a.refusal&&s[0].content.push(types.createTextContent(a.refusal)),a.tool_calls&&a.tool_calls.forEach((g,T)=>{s[0].content.push(types.createToolCallContent(T,g.id,g.function.name,g.function.arguments));});let m={promptTokens:o.usage.prompt_tokens,completionTokens:o.usage.completion_tokens,totalTokens:o.usage.total_tokens},r=[],l=o.choices[0].logprobs;return l&&(l.content&&r.push(...l.content.map(g=>({token:g.token,logProb:g.logprob,bytes:g.bytes,topLogProbs:g.top_logprobs.map(T=>({token:T.token,logProb:T.logprob,bytes:T.bytes}))}))),l.refusal&&r.push(...l.refusal.map(g=>({token:g.token,logProb:g.logprob,bytes:g.bytes,topLogProbs:g.top_logprobs.map(T=>({token:T.token,logProb:T.logprob,bytes:T.bytes}))})))),{messages:s,usage:m,logProbs:r}}throw new provider.ModelResponseError({info:"Invalid response from model",cause:t.error})}getStreamChatUrl(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.streamChatUrl);})})}getStreamChatHeaders(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getStreamChatData(e,t,o){return A(this,null,function*(){let s=this.transformConfig(e,t,o),a=this.transformMessages(t);if(a.messages&&a.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let m=o?this.transformTools(o):{};return new Promise(r=>{r(P(P(P(P({stream:!0,stream_options:{include_usage:!0}},this.getDefaultParams()),s),a),m));})})}transformStreamChatResponseChunk(e,t){return co(this,null,function*(){var r,l;let o=t+e,s=[],a="",m=0;for(;m<o.length;){let g=o.indexOf(`
`,m);if(g===-1){a=o.substring(m);break}else {let T=o.substring(m,g).trim();T&&s.push(T),m=g+1;}}for(let g of s){if(g==="data: [DONE]")return;if(g.startsWith("data: ")){let T=g.substring(6);try{let b=JSON.parse(T),O=Sn.safeParse(b);if(O.success){let C={partialMessages:[]},S=O.data;if(S.choices.length>0){let E=S.choices[0].delta;if(E!==void 0&&Object.keys(E).length!==0){if("content"in E&&E.content!==null)C.partialMessages.push(types.createPartialTextMessage(types.AssistantRoleLiteral,E.content));else if("refusal"in E&&E.refusal!==null)C.partialMessages.push(types.createPartialTextMessage(types.AssistantRoleLiteral,E.refusal));else if("tool_calls"in E&&E.tool_calls!==void 0){let D=E.tool_calls.at(0);C.partialMessages.push(types.createPartialToolCallMessage(types.AssistantRoleLiteral,D.index,D.id,(r=D.function)==null?void 0:r.name,(l=D.function)==null?void 0:l.arguments));}}}S.usage&&(C.usage={promptTokens:S.usage.prompt_tokens,completionTokens:S.usage.completion_tokens,totalTokens:S.usage.total_tokens}),yield {partialResponse:C,buffer:a};}else throw new provider.ModelResponseError({info:"Invalid response from model",cause:O.error})}catch(b){throw new provider.ModelResponseError({info:`Malformed JSON received in stream: ${T}`,cause:b})}}}yield {partialResponse:{partialMessages:[]},buffer:a};})}transformProxyStreamChatResponseChunk(e,t,o,s,a){return co(this,null,function*(){yield*bt(this.transformStreamChatResponseChunk(e,t));})}getProxyStreamChatUrl(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.streamChatUrl);})})}getProxyCompleteChatUrl(e,t,o){return A(this,null,function*(){return new Promise(s=>{s(this.completeChatUrl);})})}getProxyCompleteChatHeaders(e,t,o){return A(this,null,function*(){if(!t)return {};let s=P({},t);return delete s.host,delete s["content-length"],s})}getProxyStreamChatHeaders(e,t,o){return A(this,null,function*(){return yield this.getProxyCompleteChatHeaders(e,t,o)})}getModelPricing(){if(!(this.modelName in _))throw new provider.ModelResponseError({info:`Invalid model pricing for model : '${this.modelName}'`,cause:new Error(`No pricing configuration found for model "${this.modelName}"`)});return _[this.modelName]}};var se="gpt-3.5-turbo-0125",ks="The latest GPT-3.5 Turbo model with higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls. Training data up to Sept 2021.",So=provider.ChatModelSchema(h,x).parse({name:se,description:ks,maxInputTokens:4092,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.responseFormat(4092,4).def,schema:i.responseFormat(4092,4).schema},price:_[se]}),Nt=u,ie=class extends d{constructor(e){super(So,e);}};var ae="gpt-3.5-turbo-1106",vs="The latest GPT-3.5 Turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling, and more. Returns a maximum of 4,096 output tokens. Training data up to Sept 2021.",Io=provider.ChatModelSchema(h,x).parse({name:ae,description:vs,maxInputTokens:4092,maxOutputTokens:16385,roles:f,modalities:I,config:{def:i.responseFormat(16385,4).def,schema:i.responseFormat(16385,4).schema},price:_[ae]}),zt=u,re=class extends d{constructor(e){super(Io,e);}};var pe="gpt-3.5-turbo",Ds="Currently points to gpt-3.5-turbo-0125. 
Training data up to Sept 2021.",xo=provider.ChatModelSchema(h,x).parse({name:pe,description:Ds,maxInputTokens:4092,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.responseFormat(4092,4).def,schema:i.responseFormat(4092,4).schema},price:_[pe]}),Bt=u,le=class extends d{constructor(e){super(xo,e);}};var me="gpt-4-0125-preview",Ns="The latest GPT-4 model intended to reduce cases of \u201Claziness\u201D where the model doesn\u2019t complete a task. Training data up to Apr 2023.",Ro=provider.ChatModelSchema(h,x).parse({name:me,description:Ns,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.base(4092,4).def,schema:i.base(4092,4).schema},price:_[me]}),Ut=u,de=class extends d{constructor(e){super(Ro,e);}};var ce="gpt-4-0613",Bs="Snapshot of gpt-4 from June 13th 2023 with improved function calling support. Training data up to Sept 2021.",Ao=provider.ChatModelSchema(h,x).parse({name:ce,description:Bs,maxInputTokens:8192,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.base(4092,4).def,schema:i.base(4092,4).schema},price:_[ce]}),jt=u,ue=class extends d{constructor(e){super(Ao,e);}};var he="gpt-4-1106-preview",js="GPT-4 Turbo model featuring improved instruction following, JSON mode, reproducible outputs, parallel function calling, and more. Returns a maximum of 4,096 output tokens. This preview model is not yet suited for production traffic. Training data up to Apr 2023.",Eo=provider.ChatModelSchema(h,x).parse({name:he,description:js,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.base(4092,4).def,schema:i.base(4092,4).schema},price:_[he]}),Ft=u,fe=class extends d{constructor(e){super(Eo,e);}};var _e="gpt-4.1",$s="Flagship model for complex tasks. It is well suited for problem solving across domains. Training data up to May 2024.",Go=provider.ChatModelSchema(h,y).parse({name:_e,description:$s,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:i.responseSchema(32768,4).def,schema:i.responseSchema(32768,4).schema},price:_[_e]}),$t=u,Te=class extends d{constructor(e){super(Go,e);}};var ge="gpt-4.1-mini",Vs="Provides a balance between intelligence, speed, and cost that makes it an attractive model for many use cases. Training data up to May 2024.",ko=provider.ChatModelSchema(h,y).parse({name:ge,description:Vs,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:i.responseSchema(32768,4).def,schema:i.responseSchema(32768,4).schema},price:_[ge]}),Ht=u,Me=class extends d{constructor(e){super(ko,e);}};var ye="gpt-4.1-nano",Js="Fastest, most cost-effective GPT-4.1 model. Training data up to May 2024.",wo=provider.ChatModelSchema(h,y).parse({name:ye,description:Js,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:i.responseSchema(32768,4).def,schema:i.responseSchema(32768,4).schema},price:_[ye]}),Vt=u,Oe=class extends d{constructor(e){super(wo,e);}};var Ce="gpt-5",Ys="Most advanced GPT-5 model for complex reasoning and problem-solving tasks. Training data up to October 2024.",vo=provider.ChatModelSchema(h,y).parse({name:Ce,description:Ys,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:i.responseSchema(131072,4).def,schema:i.responseSchema(131072,4).schema},price:_[Ce]}),Kt=u,be=class extends d{constructor(e){super(vo,e);}};var Pe="gpt-5-mini",Xs="Faster, more cost-effective GPT-5 model that balances intelligence and efficiency. 
Training data up to October 2024.",Lo=provider.ChatModelSchema(h,y).parse({name:Pe,description:Xs,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:i.responseSchema(131072,4).def,schema:i.responseSchema(131072,4).schema},price:_[Pe]}),Jt=u,Se=class extends d{constructor(e){super(Lo,e);}};var Ie="gpt-5-nano",ei="Most cost-effective GPT-5 model optimized for speed and efficiency. Training data up to October 2024.",Do=provider.ChatModelSchema(h,y).parse({name:Ie,description:ei,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:i.responseSchema(131072,4).def,schema:i.responseSchema(131072,4).schema},price:_[Ie]}),Wt=u,xe=class extends d{constructor(e){super(Do,e);}};var Re="gpt-5-chat-latest",ni="Latest GPT-5 model optimized for conversational use. Does not support function calling or structured outputs. Training data up to October 2024.",si=[types.TextModalityLiteral,types.ImageModalityLiteral],ii=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral]),qo=provider.ChatModelSchema(h,ii).parse({name:Re,description:ni,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:si,config:{def:i.base(131072,4).def,schema:i.base(131072,4).schema},price:_[Re]}),Yt=u,Ae=class extends d{constructor(e){super(qo,e);}};var Ee="gpt-4-turbo-2024-04-09",ri="GPT-4 Turbo with Vision model. Vision requests can now use JSON mode and function calling. gpt-4-turbo currently points to this version. Training data up to Dec 2023.",No=provider.ChatModelSchema(h,y).parse({name:Ee,description:ri,maxInputTokens:128e3,maxOutputTokens:4096,roles:f,modalities:M,config:{def:i.responseFormat(4096,4).def,schema:i.responseFormat(4096,4).schema},price:_[Ee]}),Qt=u,Ge=class extends d{constructor(e){super(No,e);}};var ke="gpt-4-turbo-preview",li="Currently points to gpt-4-0125-preview. Training data up to Apr 2023.",zo=provider.ChatModelSchema(h,x).parse({name:ke,description:li,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.responseFormat(4092,4).def,schema:i.responseFormat(4092,4).schema},price:_[ke]}),Xt=u,we=class extends d{constructor(e){super(zo,e);}};var ve="gpt-4-turbo",di="The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling. Currently points to gpt-4-turbo-2024-04-09. Training data up to Dec 2023.",Bo=provider.ChatModelSchema(h,y).parse({name:ve,description:di,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseFormat(4092,4).def,schema:i.responseFormat(4092,4).schema},price:_[ve]}),Zt=u,Le=class extends d{constructor(e){super(Bo,e);}};var De="gpt-4",ui="Currently points to gpt-4-0613. Training data up to Sept 2021.",Uo=provider.ChatModelSchema(h,x).parse({name:De,description:ui,maxInputTokens:8192,maxOutputTokens:4092,roles:f,modalities:I,config:{def:i.base(4092,4).def,schema:i.base(4092,4).schema},price:_[De]}),en=u,qe=class extends d{constructor(e){super(Uo,e);}};var Ne="gpt-4o-2024-05-13",fi="Latest snapshot of gpt-4o that supports Structured Outputs. Training data up to Oct 2023.",jo=provider.ChatModelSchema(h,y).parse({name:Ne,description:fi,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseSchema(4092,4).def,schema:i.responseSchema(4092,4).schema},price:_[Ne]}),on=u,ze=class extends d{constructor(e){super(jo,e);}};var Be="gpt-4o-2024-08-06",Ti="Latest snapshot of gpt-4o that supports Structured Outputs. 
Training data up to Oct 2023.",Fo=provider.ChatModelSchema(h,y).parse({name:Be,description:Ti,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseSchema(4092,4).def,schema:i.responseSchema(4092,4).schema},price:_[Be]}),tn=u,Ue=class extends d{constructor(e){super(Fo,e);}};var je="gpt-4o-mini-2024-07-18",Mi="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. Training data up to Oct 2023.",$o=provider.ChatModelSchema(h,y).parse({name:je,description:Mi,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseSchema(4092,4).def,schema:i.responseSchema(4092,4).schema},price:_[je]}),nn=u,Fe=class extends d{constructor(e){super($o,e);}};var $e="gpt-4o-mini",Oi="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. Training data up to Oct 2023.",Ho=provider.ChatModelSchema(h,y).parse({name:$e,description:Oi,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseSchema(4092,4).def,schema:i.responseSchema(4092,4).schema},price:_[$e]}),sn=u,He=class extends d{constructor(e){super(Ho,e);}};var Ve="gpt-4o",bi="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. Training data up to Oct 2023.",Vo=provider.ChatModelSchema(h,y).parse({name:Ve,description:bi,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:i.responseSchema(4092,4).def,schema:i.responseSchema(4092,4).schema},price:_[Ve]}),an=u,Ke=class extends d{constructor(e){super(Vo,e);}};var Je="o1-2024-12-17",Si="A stable release model for production use, offering robust performance and advanced features. Training data up to December 2024.",Ko=provider.ChatModelSchema(h,y).parse({name:Je,description:Si,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema},price:_[Je]}),rn=u,We=class extends d{constructor(e){super(Ko,e);}};var Ye="o1",xi="Highly capable general-purpose reasoning model with advanced capabilities in language, coding, and reasoning. Training data up to Oct 2023.",Jo=provider.ChatModelSchema(h,y).parse({name:Ye,description:xi,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema},price:_[Ye]}),pn=u,Qe=class extends d{constructor(e){super(Jo,e);}};var Wo="o3-2025-04-16",Ai="A new standard for math, science, coding, and visual reasoning tasks. Training data up to Jun 2024.",Yo=provider.ChatModelSchema(h,y).parse({name:Wo,description:Ai,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),ln=u,Xe=class extends d{constructor(e){super(Yo,e);}};var Qo="o3",Gi="A new standard for math, science, coding, and visual reasoning tasks. Training data up to Jun 2024.",Xo=provider.ChatModelSchema(h,y).parse({name:Qo,description:Gi,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),mn=u,Ze=class extends d{constructor(e){super(Xo,e);}};var Zo="o3-mini",wi="o3-mini is the newest small reasoning model, providing high intelligence at the same cost and latency targets of o1-mini. 
Training data up to Sep 2023.",et=provider.ChatModelSchema(h,x).parse({name:Zo,description:wi,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:I,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),dn=u,eo=class extends d{constructor(e){super(et,e);}};var ot="o3-mini-2025-01-31",Li="o3-mini is the newest small reasoning model, providing high intelligence at the same cost and latency targets of o1-mini. Training data up to Sep 2023.",tt=provider.ChatModelSchema(h,x).parse({name:ot,description:Li,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:I,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),cn=u,oo=class extends d{constructor(e){super(tt,e);}};var nt="o4-mini-2025-04-16",qi="Optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks. Training data up to Jun 2024.",st=provider.ChatModelSchema(h,y).parse({name:nt,description:qi,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),un=u,to=class extends d{constructor(e){super(st,e);}};var it="o4-mini",zi="Optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks. Training data up to Jun 2024.",at=provider.ChatModelSchema(h,y).parse({name:it,description:zi,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:i.oSeries(1e5,4).def,schema:i.oSeries(1e5,4).schema}}),hn=u,no=class extends d{constructor(e){super(at,e);}};var K=[types.EmbeddingTextModalityLiteral,types.EmbeddingTokenModalityLiteral],J=zod.z.enum([types.EmbeddingTextModalityLiteral,types.EmbeddingTokenModalityLiteral]);var vn=zod.z.object({object:zod.z.literal("list"),model:zod.z.string(),data:zod.z.array(zod.z.object({index:zod.z.number(),object:zod.z.literal("embedding"),embedding:zod.z.array(zod.z.number()).or(zod.z.string().base64())})),usage:zod.z.object({prompt_tokens:zod.z.number().nonnegative(),total_tokens:zod.z.number().nonnegative()})});var Ui=zod.z.string().min(1).or(zod.z.array(zod.z.string().min(1)).min(1)).or(zod.z.array(zod.z.number().int().nonnegative()).min(1)).or(zod.z.array(zod.z.array(zod.z.number().int().nonnegative()).min(1)).min(1)),Ln=zod.z.object({model:zod.z.string().min(1).optional(),input:Ui,encoding_format:zod.z.enum(["float","base64"]).optional(),dimensions:zod.z.number().int().min(1).optional()});var j=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),getEmbeddingsUrl:zod.z.string().url().optional()}),N=class{constructor(e,t){this.version="v1";let o=j.parse(t);this.modelSchema=e,this.modelName=o.modelName,this.apiKey=o.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash(o.baseUrl||B.baseUrl),this.getEmbeddingsUrl=provider.urlWithoutTrailingSlash(o.getEmbeddingsUrl||`${this.baseUrl}/embeddings`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"}}getDefaultParams(){return {model:this.modelSchema.name}}getRetryDelay(e){let t=r=>{let l=/(\d+)(h|m|s|ms)/g,g={h:36e5,m:6e4,s:1e3,ms:1},T,b=0;for(;(T=l.exec(r))!==null;){let O=parseInt(T[1]),C=T[2];b+=O*g[C];}return b},o=0,s=0,a=!0;e["x-ratelimit-reset-requests"]&&(o=t(e["x-ratelimit-reset-requests"])),e["x-ratelimit-reset-tokens"]&&(s=t(e["x-ratelimit-reset-tokens"]));let m=Math.max(o,s);return {shouldRetry:a,delayMs:m}}getTokenCount(e){return e.requests.reduce((t,o)=>t+o.length,0)}transformModelRequest(e){let 
t=Ln.safeParse(e);if(!t.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:t.error});let o=t.data,s=o.model,a={encodingFormat:o.encoding_format,dimensions:o.dimensions},m=types.Config().parse(provider.removeUndefine