intellinode
Version:
Create AI agents using the latest models, including ChatGPT, Llama, Diffusion, Cohere, Gemini, and Hugging Face.
1 lines • 179 kB
JavaScript
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.IntelliNode=f()}})(function(){var define,module,exports;return function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r}()({1:[function(require,module,exports){module.exports={url:{intellicloud:{base:"https://ue8sdr9bij.execute-api.us-east-2.amazonaws.com/v1",semantic_search:"/semantic_search/"},openai:{base:"https://api.openai.com",completions:"/v1/completions",chatgpt:"/v1/chat/completions",responses:"/v1/responses",imagegenerate:"/v1/images/generations",embeddings:"/v1/embeddings",audiotranscriptions:"/v1/audio/transcriptions",audiospeech:"/v1/audio/speech",files:"/v1/files",finetuning:"/v1/fine_tuning/jobs",organization:null},azure_openai:{base:"https://{resource-name}.openai.azure.com/openai",completions:"/deployments/{deployment-id}/completions?api-version={api-version}",chatgpt:"/deployments/{deployment-id}/chat/completions?api-version={api-version}",responses:"/deployments/{deployment-id}/responses?api-version={api-version}",imagegenerate:"/images/generations:submit?api-version={api-version}",embeddings:"/deployments/{deployment-id}/embeddings?api-version={api-version}",audiotranscriptions:"/deployments/{deployment-id}/audio/transcriptions?api-version={api-version}",audiospeech:"/deployments/{deployment-id}/audio/speech?api-version={api-version}",files:"/files?api-version={api-version}",finetuning:"/fine_tuning/jobs?api-version={api-version}"},cohere:{base:"https://api.cohere.ai",completions:"/generate",embed:"/v1/embed",version:"2022-12-06"},google:{base:"https://{1}.googleapis.com/v1/",speech:{prefix:"texttospeech",synthesize:{postfix:"text:synthesize"}}},stability:{base:"https://api.stability.ai",text_to_image:"/v1/generation/{1}/text-to-image",upscale:"/v1/generation/{1}/image-to-image/upscale",image_to_image:"/v1/generation/{1}/image-to-image",inpaint:"/v2beta/stable-image/edit/inpaint",outpaint:"/v2beta/stable-image/edit/outpaint",image_to_video:"/v2beta/image-to-video",fetch_video:"/v2beta/image-to-video/result/",control_sketch:"/v2beta/stable-image/control/sketch",control_structure:"/v2beta/stable-image/control/structure",control_style:"/v2beta/stable-image/control/style"},huggingface:{base:"https://api-inference.huggingface.co/models"},replicate:{base:"https://api.replicate.com",predictions:"/v1/predictions"},mistral:{base:"https://api.mistral.ai",completions:"/v1/chat/completions",embed:"/v1/embeddings"},gemini:{base:"https://generativelanguage.googleapis.com/v1beta/models/",contentEndpoint:"gemini-pro:generateContent",visionEndpoint:"gemini-pro-vision:generateContent",embeddingEndpoint:"embedding-001:embedContent",batchEmbeddingEndpoint:"embedding-001:batchEmbedContents"},anthropic:{base:"https://api.anthropic.com",messages:"/v1/messages",version:"2023-06-01"}},nvidia:{base:"https://integrate.api.nvidia.com",chat:"/v1/chat/completions",retrieval:"/v1/retrieval",version:"v1"},models:{replicate:{llama:{"70b":"70b-chat","13b":"13b-chat","70b-chat":"70b-chat","13b-chat":"13b-chat","34b-code":"34b-code","34b-python":"34b-python","13b-code-instruct":"13b-code-instruct","llama-2-13b-embeddings":"llama-2-13b-embeddings","70b-chat-version":"02e509c789964a7ea8736978a43525956ef40397be9033abf9fd2badfe68c9e3","13b-chat-version":"f4e2de70d66816a838a89eeeb621910adffb0dd0baba3976c96980970978018d","34b-code-version":"efbd2ef6feefb242f359030fa6fe08ce32bfced18f3868b2915db41d41251b46","34b-python-version":"482ba325daab209d121f45a0030f2f3ed942df98b185d41635ab3f19165a3547","13b-code-instruct-version":"ca8c51bf3c1aaf181f9df6f10f31768f065c9dddce4407438adc5975a59ce530","llama-2-13b-embeddings-version":"7115a4c65b86815e31412e53de1211c520164c190945a84c425b59dccbc47148"}}}}},{}],2:[function(require,module,exports){const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const CohereAIWrapper=require("../wrappers/CohereAIWrapper");const ReplicateWrapper=require("../wrappers/ReplicateWrapper");const GeminiAIWrapper=require("../wrappers/GeminiAIWrapper");const EmbedInput=require("../model/input/EmbedInput");const VLLMWrapper=require("../wrappers/VLLMWrapper");const SupportedEmbedModels={OPENAI:"openai",COHERE:"cohere",REPLICATE:"replicate",GEMINI:"gemini",NVIDIA:"nvidia",VLLM:"vllm"};class RemoteEmbedModel{constructor(keyValue,provider,customProxyHelper=null){if(!provider){provider=SupportedEmbedModels.OPENAI}const supportedModels=this.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider,customProxyHelper)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,keyType,customProxyHelper=null){this.keyType=keyType;if(keyType===SupportedEmbedModels.OPENAI){this.openaiWrapper=new OpenAIWrapper(keyValue,customProxyHelper)}else if(keyType===SupportedEmbedModels.COHERE){this.cohereWrapper=new CohereAIWrapper(keyValue)}else if(keyType===SupportedEmbedModels.REPLICATE){this.replicateWrapper=new ReplicateWrapper(keyValue)}else if(keyType===SupportedEmbedModels.GEMINI){this.geminiWrapper=new GeminiAIWrapper(keyValue)}else if(keyType===SupportedEmbedModels.NVIDIA){this.nvidiaWrapper=new NvidiaWrapper(keyValue,customProxyHelper)}else if(keyType===SupportedEmbedModels.VLLM){const baseUrl=customProxyHelper.baseUrl;this.vllmWrapper=new VLLMWrapper(baseUrl)}else{throw new Error("Invalid provider name")}}getSupportedModels(){return Object.values(SupportedEmbedModels)}async getEmbeddings(embedInput){let inputs;if(embedInput instanceof EmbedInput){if(this.keyType===SupportedEmbedModels.OPENAI){inputs=embedInput.getOpenAIInputs()}else if(this.keyType===SupportedEmbedModels.COHERE){inputs=embedInput.getCohereInputs()}else if(this.keyType===SupportedEmbedModels.REPLICATE){inputs=embedInput.getLlamaReplicateInput()}else if(this.keyType===SupportedEmbedModels.GEMINI){inputs=embedInput.getGeminiInputs()}else if(this.keyType===SupportedEmbedModels.NVIDIA){inputs=embedInput.getNvidiaInputs()}else if(this.keyType===SupportedEmbedModels.VLLM){inputs=embedInput.getVLLMInputs()}else{throw new Error("The keyType is not supported")}}else if(typeof embedInput==="object"){inputs=embedInput}else{throw new Error("Invalid input: Must be an instance of EmbedInput or a dictionary")}if(this.keyType===SupportedEmbedModels.OPENAI){const results=await this.openaiWrapper.getEmbeddings(inputs);return results.data}else if(this.keyType===SupportedEmbedModels.COHERE){const results=await this.cohereWrapper.getEmbeddings(inputs);let embeddings=results.embeddings;embeddings=embeddings.map((embedding,index)=>({object:"embedding",index:index,embedding:embedding}));return embeddings}else if(this.keyType===SupportedEmbedModels.REPLICATE){const prediction=await this.replicateWrapper.predict("replicate",inputs);return new Promise((resolve,reject)=>{const poll=setInterval(async()=>{try{const status=await this.replicateWrapper.getPredictionStatus(prediction.id);if(status.status==="succeeded"||status.status==="failed"){clearInterval(poll);if(status.status==="succeeded"){let embeddings=status.output;embeddings=embeddings.map((embedding,index)=>({object:"embedding",index:index,embedding:embedding}));resolve(embeddings)}else{reject(new Error("Replicate prediction failed: "+status.error))}}}catch(error){clearInterval(poll);reject(new Error("Error while polling for Replicate prediction status: "+error.message))}},1e3)})}else if(this.keyType===SupportedEmbedModels.GEMINI){return await this.geminiWrapper.getEmbeddings(inputs)}else if(this.keyType===SupportedEmbedModels.NVIDIA){const result=await this.nvidiaWrapper.generateRetrieval(inputs);return Array.isArray(result)?result:[]}else if(this.keyType===SupportedEmbedModels.VLLM){const results=await this.vllmWrapper.getEmbeddings(inputs.texts);return results.embeddings.map((embedding,index)=>({object:"embedding",index:index,embedding:embedding}))}else{throw new Error("The keyType is not supported")}}}module.exports={RemoteEmbedModel:RemoteEmbedModel,SupportedEmbedModels:SupportedEmbedModels}},{"../model/input/EmbedInput":14,"../wrappers/CohereAIWrapper":43,"../wrappers/GeminiAIWrapper":44,"../wrappers/OpenAIWrapper":50,"../wrappers/ReplicateWrapper":51,"../wrappers/VLLMWrapper":53}],3:[function(require,module,exports){const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const FineTuneInput=require("../model/input/FineTuneInput");const SupportedFineTuneModels={OPENAI:"openAi"};class RemoteFineTuneModel{constructor(keyValue,provider){if(!provider){provider=SupportedFineTuneModels.OPENAI}const supportedModels=this.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,keyType){this.keyType=keyType;if(keyType===SupportedFineTuneModels.OPENAI){this.openAIWrapper=new OpenAIWrapper(keyValue)}else{throw new Error("Invalid provider name")}}getSupportedModels(){return Object.values(SupportedFineTuneModels)}async generateFineTune(input){if(this.keyType===SupportedFineTuneModels.OPENAI){let params;if(input instanceof FineTuneInput){params=input.getOpenAIInput()}else if(typeof input==="object"){params=input}else{throw new Error("Invalid input: Must be an instance of FineTuneInput or a dictionary")}const response=await this.openAIWrapper.storeFineTuningData(params);return response}else{throw new Error("The keyType is not supported")}}async listFineTune(input){if(this.keyType===SupportedFineTuneModels.OPENAI){const response=await this.openAIWrapper.listFineTuningData(input);return response}else{throw new Error("The keyType is not supported")}}async uploadFile(filePayload){if(this.keyType===SupportedFineTuneModels.OPENAI){return await this.openAIWrapper.uploadFile(filePayload)}else{throw new Error("The keyType is not supported")}}}module.exports={RemoteFineTuneModel:RemoteFineTuneModel,SupportedFineTuneModels:SupportedFineTuneModels}},{"../model/input/FineTuneInput":15,"../wrappers/OpenAIWrapper":50}],4:[function(require,module,exports){const SupportedImageModels={OPENAI:"openai",STABILITY:"stability"};const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const StabilityAIWrapper=require("../wrappers/StabilityAIWrapper");const ImageModelInput=require("../model/input/ImageModelInput");class RemoteImageModel{constructor(keyValue,provider){if(!provider){provider=SupportedImageModels.OPENAI}const supportedModels=RemoteImageModel.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,keyType){this.keyType=keyType;if(keyType===SupportedImageModels.OPENAI){this.openaiWrapper=new OpenAIWrapper(keyValue)}else if(keyType===SupportedImageModels.STABILITY){this.stabilityWrapper=new StabilityAIWrapper(keyValue)}else{throw new Error("Invalid provider name")}}static getSupportedModels(){return Object.values(SupportedImageModels)}async generateImages(imageInput){let inputs;if(imageInput instanceof ImageModelInput){if(this.keyType===SupportedImageModels.OPENAI){inputs=imageInput.getOpenAIInputs()}else if(this.keyType===SupportedImageModels.STABILITY){inputs=imageInput.getStabilityInputs()}else{throw new Error("The keyType is not supported")}}else if(typeof imageInput==="object"){inputs=imageInput}else{throw new Error("Invalid input: Must be an instance of ImageModelInput or a dictionary")}if(this.keyType===SupportedImageModels.OPENAI){const results=await this.openaiWrapper.generateImages(inputs);return results.data.map(data=>{if(data.url){return data.url}else if(data.b64_json){return data.b64_json}else{throw new Error("Unexpected image data format")}})}else if(this.keyType===SupportedImageModels.STABILITY){const results=await this.stabilityWrapper.generateImageDispatcher(inputs);return results.artifacts.map(imageObj=>imageObj.base64)}else{throw new Error(`This version supports ${SupportedImageModels.OPENAI} keyType only`)}}}module.exports={RemoteImageModel:RemoteImageModel,SupportedImageModels:SupportedImageModels}},{"../model/input/ImageModelInput":17,"../wrappers/OpenAIWrapper":50,"../wrappers/StabilityAIWrapper":52}],5:[function(require,module,exports){const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const CohereAIWrapper=require("../wrappers/CohereAIWrapper");const LanguageModelInput=require("../model/input/LanguageModelInput");const SupportedLangModels={OPENAI:"openai",COHERE:"cohere"};class RemoteLanguageModel{constructor(keyValue,provider){if(!provider){provider=SupportedLangModels.OPENAI}const supportedModels=RemoteLanguageModel.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,keyType){this.keyType=keyType;if(keyType===SupportedLangModels.OPENAI){this.openaiWrapper=new OpenAIWrapper(keyValue)}else if(keyType===SupportedLangModels.COHERE){this.cohereWrapper=new CohereAIWrapper(keyValue)}else{throw new Error("Invalid provider name")}}static getSupportedModels(){return Object.values(SupportedLangModels)}async generateText(langInput){let inputs;if(langInput instanceof LanguageModelInput){if(this.keyType===SupportedLangModels.OPENAI){inputs=langInput.getOpenAIInputs()}else if(this.keyType===SupportedLangModels.COHERE){inputs=langInput.getCohereInputs()}else{throw new Error("The keyType is not supported")}}else if(typeof langInput==="object"){inputs=langInput}else{throw new Error("Invalid input: Must be an instance of LanguageModelInput or a dictionary")}if(this.keyType===SupportedLangModels.OPENAI){const results=await this.openaiWrapper.generateText(inputs);return results.choices.map(choice=>choice.text)}else if(this.keyType===SupportedLangModels.COHERE){const results=await this.cohereWrapper.generateText(inputs);return results.generations.map(generation=>generation.text)}else{throw new Error("The keyType is not supported")}}}module.exports={RemoteLanguageModel:RemoteLanguageModel,SupportedLangModels:SupportedLangModels}},{"../model/input/LanguageModelInput":18,"../wrappers/CohereAIWrapper":43,"../wrappers/OpenAIWrapper":50}],6:[function(require,module,exports){const GoogleAIWrapper=require("../wrappers/GoogleAIWrapper");const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const Text2SpeechInput=require("../model/input/Text2SpeechInput");const SupportedSpeechModels={GOOGLE:"google",OPENAI:"openAi"};class RemoteSpeechModel{constructor(keyValue,provider){if(!provider){provider=SupportedSpeechModels.GOOGLE}const supportedModels=this.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,keyType){this.keyType=keyType;if(keyType===SupportedSpeechModels.GOOGLE){this.googleWrapper=new GoogleAIWrapper(keyValue)}else if(keyType===SupportedSpeechModels.OPENAI){this.openAIWrapper=new OpenAIWrapper(keyValue)}else{throw new Error("Invalid provider name")}}getSupportedModels(){return Object.values(SupportedSpeechModels)}async generateSpeech(input){if(this.keyType===SupportedSpeechModels.GOOGLE){let params;if(input instanceof Text2SpeechInput){params=input.getGoogleInput()}else if(typeof input==="object"){params=input}else{throw new Error("Invalid input: Must be an instance of Text2SpeechInput or a dictionary")}const response=await this.googleWrapper.generateSpeech(params);return response.audioContent}else if(this.keyType===SupportedSpeechModels.OPENAI){let params;if(input instanceof Text2SpeechInput){params=input.getOpenAIInput()}else if(typeof input==="object"){params=input}else{throw new Error("Invalid input: Must be an instance of Text2SpeechInput or a dictionary")}const response=await this.openAIWrapper.textToSpeech(params);return response}else{throw new Error("The keyType is not supported")}}}module.exports={RemoteSpeechModel:RemoteSpeechModel,SupportedSpeechModels:SupportedSpeechModels}},{"../model/input/Text2SpeechInput":19,"../wrappers/GoogleAIWrapper":45,"../wrappers/OpenAIWrapper":50}],7:[function(require,module,exports){const OpenAIWrapper=require("../wrappers/OpenAIWrapper");const ReplicateWrapper=require("../wrappers/ReplicateWrapper");const AWSEndpointWrapper=require("../wrappers/AWSEndpointWrapper");const{GPTStreamParser,CohereStreamParser,VLLMStreamParser}=require("../utils/StreamParser");const CohereAIWrapper=require("../wrappers/CohereAIWrapper");const IntellicloudWrapper=require("../wrappers/IntellicloudWrapper");const MistralAIWrapper=require("../wrappers/MistralAIWrapper");const GeminiAIWrapper=require("../wrappers/GeminiAIWrapper");const AnthropicWrapper=require("../wrappers/AnthropicWrapper");const SystemHelper=require("../utils/SystemHelper");const NvidiaWrapper=require("../wrappers/NvidiaWrapper");const VLLMWrapper=require("../wrappers/VLLMWrapper");const{ChatGPTInput,ChatModelInput,ChatGPTMessage,ChatLLamaInput,LLamaReplicateInput,CohereInput,LLamaSageInput,MistralInput,GeminiInput,AnthropicInput,NvidiaInput,VLLMInput}=require("../model/input/ChatModelInput");const SupportedChatModels={OPENAI:"openai",REPLICATE:"replicate",SAGEMAKER:"sagemaker",COHERE:"cohere",MISTRAL:"mistral",GEMINI:"gemini",ANTHROPIC:"anthropic",NVIDIA:"nvidia",VLLM:"vllm"};class Chatbot{constructor(keyValue,provider=SupportedChatModels.OPENAI,customProxyHelper=null,options={}){const supportedModels=this.getSupportedModels();if(supportedModels.includes(provider)){this.initiate(keyValue,provider,customProxyHelper,options)}else{const models=supportedModels.join(" - ");throw new Error(`The received keyValue is not supported. Send any model from: ${models}`)}}initiate(keyValue,provider,customProxyHelper=null,options={}){this.provider=provider;if(provider===SupportedChatModels.OPENAI){this.openaiWrapper=new OpenAIWrapper(keyValue,customProxyHelper)}else if(provider===SupportedChatModels.REPLICATE){this.replicateWrapper=new ReplicateWrapper(keyValue)}else if(provider===SupportedChatModels.SAGEMAKER){this.sagemakerWrapper=new AWSEndpointWrapper(customProxyHelper.url,keyValue)}else if(provider===SupportedChatModels.COHERE){this.cohereWrapper=new CohereAIWrapper(keyValue)}else if(provider===SupportedChatModels.MISTRAL){this.mistralWrapper=new MistralAIWrapper(keyValue)}else if(provider===SupportedChatModels.GEMINI){this.geminiWrapper=new GeminiAIWrapper(keyValue)}else if(provider===SupportedChatModels.ANTHROPIC){this.anthropicWrapper=new AnthropicWrapper(keyValue)}else if(provider===SupportedChatModels.NVIDIA){const my_options=options||{};const baseUrl=my_options.nvidiaOptions&&my_options.nvidiaOptions.baseUrl||my_options.baseUrl;if(baseUrl){this.nvidiaWrapper=new NvidiaWrapper(keyValue,{baseUrl:baseUrl})}else{this.nvidiaWrapper=new NvidiaWrapper(keyValue)}}else if(provider===SupportedChatModels.VLLM){const baseUrl=options.baseUrl;if(!baseUrl)throw new Error("VLLM requires 'baseUrl' in options.");this.vllmWrapper=new VLLMWrapper(baseUrl)}else{throw new Error("Invalid provider name")}if(options&&options.oneKey){const apiBase=options.intelliBase?options.intelliBase:null;this.extendedController=options.oneKey.startsWith("in")?new IntellicloudWrapper(options.oneKey,apiBase):null}}getSupportedModels(){return Object.values(SupportedChatModels)}async chat(modelInput,functions=null,function_call=null,debugMode=true){let references=await this.getSemanticSearchContext(modelInput);if(this.provider!=SupportedChatModels.OPENAI&&(functions!=null||function_call!=null)){throw new Error("The functions and function_call are supported for chatGPT models only.")}if(this.provider===SupportedChatModels.OPENAI){const result=await this._chatGPT(modelInput,functions,function_call);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.REPLICATE){const result=await this._chatReplicateLLama(modelInput,debugMode);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.SAGEMAKER){const result=await this._chatSageMaker(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.COHERE){const result=await this._chatCohere(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.MISTRAL){const result=await this._chatMistral(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.GEMINI){const result=await this._chatGemini(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.ANTHROPIC){const result=await this._chatAnthropic(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.NVIDIA){let result=await this._chatNvidia(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else if(this.provider===SupportedChatModels.VLLM){let result=await this._chatVLLM(modelInput);return modelInput.attachReference?{result:result,references:references}:result}else{throw new Error("The provider is not supported")}}async*stream(modelInput){await this.getSemanticSearchContext(modelInput);if(this.provider===SupportedChatModels.OPENAI){yield*this._chatGPTStream(modelInput)}else if(this.provider===SupportedChatModels.COHERE){yield*this._streamCohere(modelInput)}else if(this.provider===SupportedChatModels.NVIDIA){yield*this._streamNvidia(modelInput)}else if(this.provider===SupportedChatModels.VLLM){yield*this._streamVLLM(modelInput)}else{throw new Error("The stream function support only chatGPT, for other providers use chat function.")}}async*_streamVLLM(modelInput){let params=modelInput instanceof VLLMInput?modelInput.getChatInput():modelInput;params.stream=true;const completionOnlyModels=["google/gemma-2-2b-it"];const isCompletionOnly=completionOnlyModels.includes(params.model);let stream;if(isCompletionOnly){const promptMessages=params.messages.map(msg=>`${msg.role.charAt(0).toUpperCase()+msg.role.slice(1)}: ${msg.content}`).join("\n")+"\nAssistant:";const completionParams={model:params.model,prompt:promptMessages,max_tokens:params.max_tokens||100,temperature:params.temperature||.7,stream:true};stream=await this.vllmWrapper.generateText(completionParams)}else{stream=await this.vllmWrapper.generateChatText(params)}const streamParser=new VLLMStreamParser;for await(const chunk of stream){const chunkText=chunk.toString("utf8");yield*streamParser.feed(chunkText)}}async getSemanticSearchContext(modelInput){let references={};if(!this.extendedController){return references}let messages,lastMessage;if(modelInput instanceof ChatLLamaInput&&typeof modelInput.prompt==="string"){messages=modelInput.prompt.split("\n").map(line=>{const role=line.startsWith("User:")?"user":"assistant";const content=line.replace(/^(User|Assistant): /,"");return{role:role,content:content}})}else if(modelInput instanceof GeminiInput){messages=modelInput.messages.map(message=>{const role=message.role;const content=message.parts.map(part=>part.text).join(" ");return{role:role,content:content}})}else if(Array.isArray(modelInput.messages)){messages=modelInput.messages}else{console.log("The input format does not support augmented search.");return references}lastMessage=messages[messages.length-1];if(lastMessage&&lastMessage.role==="user"){const semanticResult=await this.extendedController.semanticSearch(lastMessage.content,modelInput.searchK);if(semanticResult&&semanticResult.length>0){references=semanticResult.reduce((acc,doc)=>{if(!acc[doc.document_name]){acc[doc.document_name]={pages:[]}}return acc},{});let contextData=semanticResult.map(doc=>doc.data.map(dataItem=>dataItem.text).join("\n")).join("\n").trim();const templateWrapper=(new SystemHelper).loadStaticPrompt("augmented_chatbot");const augmentedMessage=templateWrapper.replace("${semantic_search}",contextData).replace("${user_query}",lastMessage.content);if(modelInput instanceof ChatLLamaInput&&modelInput.prompt){const promptLines=modelInput.prompt.trim().split("\n");promptLines.pop();promptLines.push(`User: ${augmentedMessage}`);modelInput.prompt=promptLines.join("\n")}else if(modelInput instanceof ChatModelInput){modelInput.deleteLastMessage(lastMessage);modelInput.addUserMessage(augmentedMessage)}else if(typeof modelInput==="object"&&Array.isArray(modelInput.messages)&&messages.length>0){if(lastMessage.content){lastMessage.content=augmentedMessage}}}}return references}async _chatVLLM(modelInput){let params=modelInput instanceof ChatModelInput?modelInput.getChatInput():modelInput;const completionOnlyModels=["google/gemma-2-2b-it"];const isCompletionOnly=completionOnlyModels.includes(params.model);if(isCompletionOnly){const promptMessages=params.messages.map(msg=>`${msg.role.charAt(0).toUpperCase()+msg.role.slice(1)}: ${msg.content}`).join("\n")+"\nAssistant:";const completionParams={model:params.model,prompt:promptMessages,max_tokens:params.max_tokens||100,temperature:params.temperature||.7};const result=await this.vllmWrapper.generateText(completionParams);return result.choices.map(c=>c.text.trim())}else{const result=await this.vllmWrapper.generateChatText(params);return result.choices.map(c=>c.message.content)}}async*_chatGPTStream(modelInput){let params;if(modelInput instanceof ChatModelInput){params=modelInput.getChatInput();params.stream=true}else if(typeof modelInput==="object"){params=modelInput;params.stream=true}else{throw new Error("Invalid input: Must be an instance of ChatGPTInput or a dictionary")}const isGPT5=params.model&¶ms.model.toLowerCase().includes("gpt-5");if(isGPT5){throw new Error("GPT-5 streaming is not yet supported. Please use the chat() method instead.")}const streamParser=new GPTStreamParser;const stream=await this.openaiWrapper.generateChatText(params);for await(const chunk of stream){const chunkText=chunk.toString("utf8");yield*streamParser.feed(chunkText)}}async _chatGPT(modelInput,functions=null,function_call=null){let params;if(modelInput instanceof ChatModelInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of ChatGPTInput or a dictionary")}const isGPT5=params.model&¶ms.model.toLowerCase().includes("gpt-5");if(isGPT5){const results=await this.openaiWrapper.generateGPT5Response(params);if(results.output&&Array.isArray(results.output)){const messageObjects=results.output.filter(item=>item.type==="message");const responses=messageObjects.map(msg=>{if(msg.content&&Array.isArray(msg.content)){return msg.content.map(c=>c.text||c).join("")}return msg.content||""});return responses.length>0?responses:[""]}else if(results.choices&&results.choices.length>0){return results.choices.map(choice=>choice.output||choice.text||choice.message?.content)}return[""]}else{const results=await this.openaiWrapper.generateChatText(params,functions,function_call);return results.choices.map(choice=>{if(choice.finish_reason==="function_call"&&choice.message.function_call){return{content:choice.message.content,function_call:choice.message.function_call}}else{return choice.message.content}})}}async _chatReplicateLLama(modelInput,debugMode){let params;const waitTime=2500,maxIterate=200;let iteration=0;if(modelInput instanceof ChatModelInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of ChatLLamaInput or a dictionary")}try{const modelName=params.model;const inputData=params.inputData;const prediction=await this.replicateWrapper.predict(modelName,inputData);return new Promise((resolve,reject)=>{const poll=setInterval(async()=>{const status=await this.replicateWrapper.getPredictionStatus(prediction.id);if(debugMode){console.log("The current status:",status.status)}if(status.status==="succeeded"||status.status==="failed"){clearInterval(poll);if(status.status==="succeeded"){resolve([status.output.join("")])}else{console.error("LLama prediction failed:",status.error);reject(new Error("LLama prediction failed."))}}if(iteration>maxIterate){reject(new Error("Replicate taking too long to process the input, try again later!"))}iteration+=1},waitTime)})}catch(error){console.error("LLama Error:",error);throw error}}async _chatSageMaker(modelInput){let params;if(modelInput instanceof LLamaSageInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of LLamaSageInput or a dictionary")}const results=await this.sagemakerWrapper.predict(params);return results.map(result=>result.generation?result.generation.content:result)}async _chatCohere(modelInput){let params;if(modelInput instanceof CohereInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of ChatGPTInput or an object")}const results=await this.cohereWrapper.generateChatText(params);const responseText=results.text;return[responseText]}async*_streamCohere(modelInput){let params;if(modelInput instanceof CohereInput){params=modelInput.getChatInput();params.stream=true}else if(typeof modelInput==="object"){params=modelInput;params.stream=true}else{throw new Error("Invalid input: Must be an instance of ChatGPTInput or a dictionary")}const streamParser=new CohereStreamParser;const stream=await this.cohereWrapper.generateChatText(params);for await(const chunk of stream){const chunkText=chunk.toString("utf8");yield*streamParser.feed(chunkText)}}async _chatMistral(modelInput){let params;if(modelInput instanceof MistralInput){params=modelInput.getChatInput()}if(modelInput instanceof ChatGPTInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of MistralInput or an object")}const results=await this.mistralWrapper.generateText(params);return results.choices.map(choice=>choice.message.content)}async _chatGemini(modelInput){let params;if(modelInput instanceof GeminiInput){params=modelInput.getChatInput()}else if(typeof modelInput==="object"){params=modelInput}else{throw new Error("Invalid input: Must be an instance of GeminiInput")}const result=await this.geminiWrapper.generateContent(params);if(!Array.isArray(result.candidates)||result.candidates.length===0){throw new Error("Invalid response from Gemini API: Expected 'candidates' array with content")}const responses=result.candidates.map(candidate=>{return candidate.content.parts.map(part=>part.text).join(" ")});return responses}async _chatAnthropic(modelInput){let params;if(modelInput instanceof AnthropicInput){params=modelInput.getChatInput()}else{throw new Error("Invalid input: Must be an instance of AnthropicInput")}const results=await this.anthropicWrapper.generateText(params);return results.content.map(choice=>choice.text)}async _chatNvidia(modelInput){let params=modelInput instanceof NvidiaInput?modelInput.getChatInput():modelInput;if(params.stream)throw new Error("Use stream() for NVIDIA streaming.");let resp=await this.nvidiaWrapper.generateText(params);return resp.choices.map(c=>c.message.content)}async*_streamNvidia(modelInput){let params=modelInput instanceof NvidiaInput?modelInput.getChatInput():modelInput;params.stream=true;const stream=await this.nvidiaWrapper.generateTextStream(params);let buffer="";for await(const chunk of stream){const lines=chunk.toString("utf8").split("\n");for(let line of lines){line=line.trim();if(!line)continue;if(line.startsWith("data: [DONE]")){yield buffer;return}if(line.startsWith("data: ")){try{let parsed=JSON.parse(line.replace("data: ",""));let content=parsed.choices?.[0]?.delta?.content||"";buffer+=content;yield content}catch(e){}}}}}}module.exports={Chatbot:Chatbot,SupportedChatModels:SupportedChatModels}},{"../model/input/ChatModelInput":13,"../utils/StreamParser":39,"../utils/SystemHelper":40,"../wrappers/AWSEndpointWrapper":41,"../wrappers/AnthropicWrapper":42,"../wrappers/CohereAIWrapper":43,"../wrappers/GeminiAIWrapper":44,"../wrappers/IntellicloudWrapper":47,"../wrappers/MistralAIWrapper":48,"../wrappers/NvidiaWrapper":49,"../wrappers/OpenAIWrapper":50,"../wrappers/ReplicateWrapper":51,"../wrappers/VLLMWrapper":53}],8:[function(require,module,exports){(function(Buffer){(function(){const{RemoteLanguageModel}=require("../controller/RemoteLanguageModel");const{RemoteImageModel,SupportedImageModels}=require("../controller/RemoteImageModel");const{RemoteSpeechModel}=require("../controller/RemoteSpeechModel");const LanguageModelInput=require("../model/input/LanguageModelInput");const ImageModelInput=require("../model/input/ImageModelInput");const Text2SpeechInput=require("../model/input/Text2SpeechInput");const{Chatbot,SupportedChatModels}=require("../function/Chatbot");const{ChatGPTInput,ChatGPTMessage,NvidiaInput}=require("../model/input/ChatModelInput");const{SupportedLangModels}=require("../controller/RemoteLanguageModel");const SystemHelper=require("../utils/SystemHelper");const Prompt=require("../utils/Prompt");const FileHelper=require("../utils/FileHelper");const path=require("path");function stripThinking(text){return text.replace(/<think>[\s\S]*?<\/think>/,"").trim()}class Gen{static async get_marketing_desc(promptString,apiKey,provider=SupportedLangModels.OPENAI,customProxyHelper=null){if(provider===SupportedLangModels.OPENAI){const chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);const input=new ChatGPTInput("generate marketing description",{maxTokens:800});input.addUserMessage(`Create a marketing description for the following: ${promptString}`);const responses=await chatbot.chat(input);return responses[0].trim()}else if(provider===SupportedLangModels.COHERE){const langInput=new LanguageModelInput({prompt:`Create a marketing description for the following: ${promptString}`});langInput.setDefaultValues(SupportedLangModels.COHERE,400);const cohereLanguageModel=new RemoteLanguageModel(apiKey,provider);const responses=await cohereLanguageModel.generateText(langInput);return responses[0].trim()}else if(provider===SupportedChatModels.NVIDIA){const chatbot=new Chatbot(apiKey,SupportedChatModels.NVIDIA,customProxyHelper);const input=new NvidiaInput("generate marketing description",{maxTokens:800,model:"deepseek-ai/deepseek-r1",temperature:.6});input.addUserMessage(`Create a marketing description for the following: ${promptString}`);const responses=await chatbot.chat(input);let text=responses[0].trim();return stripThinking(text)}else{const supported=RemoteLanguageModel.getSupportedModels().join(" - ");throw new Error(`Unsupported provider. Use one of: ${supported}, ${SupportedChatModels.NVIDIA}`)}}static async get_blog_post(promptString,apiKey,provider=SupportedLangModels.OPENAI,customProxyHelper=null){if(provider===SupportedLangModels.OPENAI){const chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);const input=new ChatGPTInput("generate blog post",{maxTokens:1200});input.addUserMessage(`Write a blog post about ${promptString}`);const responses=await chatbot.chat(input);return responses[0].trim()}else if(provider===SupportedLangModels.COHERE){const langInput=new LanguageModelInput({prompt:`Write a blog post with section titles about ${promptString}`});langInput.setDefaultValues(SupportedLangModels.COHERE,1200);const cohereLanguageModel=new RemoteLanguageModel(apiKey,provider);const responses=await cohereLanguageModel.generateText(langInput);return responses[0].trim()}else if(provider===SupportedChatModels.NVIDIA){const chatbot=new Chatbot(apiKey,SupportedChatModels.NVIDIA,customProxyHelper);const input=new NvidiaInput("generate blog post",{maxTokens:1200,model:"deepseek-ai/deepseek-r1",temperature:.6});input.addUserMessage(`Write a blog post about ${promptString}`);const responses=await chatbot.chat(input);let text=responses[0].trim();return stripThinking(text)}else{const supported=RemoteLanguageModel.getSupportedModels().join(" - ");throw new Error(`Unsupported provider. Use one of: ${supported}, ${SupportedChatModels.NVIDIA}`)}}static async getImageDescription(promptString,apiKey,customProxyHelper=null){const chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);const input=new ChatGPTInput("Generate image description",{});input.addUserMessage(`Generate image description from the following text: ${promptString}`);const responses=await chatbot.chat(input);return responses[0].trim()}static async generate_image_from_desc(promptString,openaiKey,imageApiKey,is_base64=true,width=1024,height=1024,provider=SupportedImageModels.STABILITY,customProxyHelper=null){const imageDescription=await Gen.getImageDescription(promptString,openaiKey,customProxyHelper);const imgModel=new RemoteImageModel(imageApiKey,provider);const images=await imgModel.generateImages(new ImageModelInput({prompt:imageDescription,numberOfImages:1,width:width,height:height,responseFormat:"b64_json"}));return is_base64?images[0]:Buffer.from(images[0],"base64")}static async generate_speech_synthesis(text,googleKey){const speechModel=new RemoteSpeechModel(googleKey,"google");const input=new Text2SpeechInput({text:text,language:"en-gb"});return await speechModel.generateSpeech(input)}static async generate_html_page(text,apiKey,model_name="gpt-4o",provider=SupportedLangModels.OPENAI,customProxyHelper=null){const template=(new SystemHelper).loadPrompt("html_page");const promptTemp=new Prompt(template);let tokenSize=8e3;if(model_name.includes("-16k")){tokenSize=8e3}else if(model_name.includes("gpt-4o")){tokenSize=12e3}else if(model_name.includes("gpt-4")){tokenSize=4e3}else if(model_name.includes("deepseek")){tokenSize=15e3}let chatbot,input;if(provider===SupportedLangModels.OPENAI){chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);input=new ChatGPTInput('generate html, css and javascript. Follow this template: {"html": "<code>", "message":"<text>"}',{maxTokens:tokenSize,model:model_name,temperature:.8})}else if(provider===SupportedChatModels.NVIDIA){chatbot=new Chatbot(apiKey,SupportedChatModels.NVIDIA,customProxyHelper);input=new NvidiaInput('generate html, css and javascript. Follow this template: {"html": "<code>", "message":"<text>"}',{maxTokens:tokenSize,model:"deepseek-ai/deepseek-r1",temperature:.8})}else{throw new Error("Unsupported provider for generate_html_page.")}input.addUserMessage(promptTemp.format({text:text}));const responses=await chatbot.chat(input);let cleaned=responses[0].trim().replace(/```json/g,"").replace(/```/g,"");if(provider===SupportedChatModels.NVIDIA){cleaned=stripThinking(cleaned)}return JSON.parse(cleaned)}static async save_html_page(text,folder,file_name,apiKey,model_name="gpt-4o",provider=SupportedLangModels.OPENAI,customProxyHelper=null){const htmlCode=await Gen.generate_html_page(text,apiKey,model_name,provider,customProxyHelper);const folderPath=path.join(folder,file_name+".html");FileHelper.writeDataToFile(folderPath,htmlCode["html"]);return true}static async generate_dashboard(csvStrData,topic,apiKey,model_name="gpt-4o",num_graphs=1,provider=SupportedLangModels.OPENAI,customProxyHelper=null){if(num_graphs<1||num_graphs>4){throw new Error("num_graphs must be between 1 and 4.")}const template=(new SystemHelper).loadPrompt("graph_dashboard");const promptTemp=new Prompt(template);let tokenSize=2100;if(model_name.includes("-16k")){tokenSize=8e3}else if(model_name.includes("gpt-4o")){tokenSize=12e3}else if(model_name.includes("gpt-4")){tokenSize=3900}else if(model_name.includes("deepseek")){tokenSize=15e3}let chatbot,input;if(provider===SupportedLangModels.OPENAI){chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);input=new ChatGPTInput("Generate HTML graphs from CSV data. Response must be valid JSON with full HTML code.",{maxTokens:tokenSize,model:model_name,temperature:.3})}else if(provider===SupportedChatModels.NVIDIA){chatbot=new Chatbot(apiKey,SupportedChatModels.NVIDIA,customProxyHelper);input=new NvidiaInput("Generate HTML graphs from CSV data. Response must be valid JSON with full HTML code.",{maxTokens:tokenSize,model:"deepseek-ai/deepseek-r1",temperature:.3})}else{throw new Error("Unsupported provider for generate_dashboard.")}input.addUserMessage(promptTemp.format({count:num_graphs,topic:topic,text:csvStrData}));const responses=await chatbot.chat(input);let cleaned=responses[0].trim().replace(/```json/g,"").replace(/```/g,"");if(provider===SupportedChatModels.NVIDIA){cleaned=stripThinking(cleaned)}return JSON.parse(cleaned)[0]}static async instructUpdate(modelOutput,userInstruction,type="",apiKey,model_name="gpt-4o",provider=SupportedLangModels.OPENAI,customProxyHelper=null){const template=(new SystemHelper).loadPrompt("instruct_update");const promptTemp=new Prompt(template);let tokenSize=2e3;if(model_name.includes("gpt-4")){tokenSize=3900}let chatbot,input;if(provider===SupportedLangModels.OPENAI){chatbot=new Chatbot(apiKey,SupportedChatModels.OPENAI,customProxyHelper);input=new ChatGPTInput("Update the model message based on user feedback while maintaining format.",{maxTokens:tokenSize,model:model_name,temperature:.2})}else if(provider===SupportedChatModels.NVIDIA){chatbot=new Chatbot(apiKey,SupportedChatModels.NVIDIA,customProxyHelper);input=new NvidiaInput("Update the model message based on user feedback while maintaining format.",{maxTokens:tokenSize,model:"deepseek-ai/deepseek-r1",temperature:.2})}else{throw new Error("Unsupported provider for instructUpdate.")}input.addUserMessage(promptTemp.format({model_output:modelOutput,user_instruction:userInstruction,type:type}));const responses=await chatbot.chat(input);let text=responses[0].trim();if(provider===SupportedChatModels.NVIDIA){text=stripThinking(text)}return text}}module.exports={Gen:Gen}}).call(this)}).call(this,require("buffer").Buffer)},{"../controller/RemoteImageModel":4,"../controller/RemoteLanguageModel":5,"../controller/RemoteSpeechModel":6,"../function/Chatbot":7,"../model/input/ChatModelInput":13,"../model/input/ImageModelInput":17,"../model/input/LanguageModelInput":18,"../model/input/Text2SpeechInput":19,"../utils/FileHelper":32,"../utils/Prompt":37,"../utils/SystemHelper":40,buffer:22,path:26}],9:[function(require,module,exports){const{RemoteEmbedModel,SupportedEmbedModels}=require("../controller/RemoteEmbedModel");const EmbedInput=require("../model/input/EmbedInput");const MatchHelpers=require("../utils/MatchHelpers");class SemanticSearch{constructor(keyValue,provider=SupportedEmbedModels.OPENAI,customProxyHelper=null){this.keyValue=keyValue;this.provider=provider;this.remoteEmbedModel=new RemoteEmbedModel(keyValue,provider,customProxyHelper)}async getTopMatches(pivotItem,searchArray,numberOfMatches,modelName=null){if(numberOfMatches>searchArray.length){throw new Error("numberOfMatches should not be greater than the searchArray")}const embedInput=new EmbedInput({texts:[pivotItem,...searchArray],model:modelName});if(modelName==null){embedInput.setDefaultValues(this.provider)}const embeddingsResponse=await this.remoteEmbedModel.getEmbeddings(embedInput);let embeddings;if(this.provider===SupportedEmbedModels.OPENAI){embeddings=embeddingsResponse.map(item=>item.embedding)}else if(this.provider===SupportedEmbedModels.COHERE){embeddings=embeddingsResponse.map(item=>item.embedding)}else{throw new Error("Invalid provider name")}const pivotEmbedding=embeddings[0];const searchEmbeddings=embeddings.slice(1);return this.getTopMatchesFromEmbeddings(pivotEmbedding,searchEmbeddings,numberOfMatches)}getTopVectorMatches(pivotEmbedding,searchEmbeddings,numberOfMatches){if(numberOfMatches>=searchEmbeddings.length){throw new Error("numberOfMatches should be less than the length of the searchEmbeddings")}return this.getTopMatchesFromEmbeddings(pivotEmbedding,searchEmbeddings,numberOfMatches)}getTopMatchesFromEmbeddings(pivotEmbedding,searchEmbeddings,numberOfMatches){const similarities=searchEmbeddings.map(embedding=>MatchHelpers.cosineSimilarity(pivotEmbedding,embedding));const sortedIndices=this.argsort(similarities).reverse();const topMatchesIndices=sortedIndices.slice(0,numberOfMatches);return topMatchesIndices.map(index=>({index:index,similarity:similarities[index]}))}argsort(array){const arrayObject=array.map((value,index)=>({value:value,index:index}));arrayObject.sort((a,b)=>a.value-b.value);return arrayObject.map(item=>item.index)}filterTopMatches(searchResults,originalArray){return searchResults.map(result=>originalArray[result.index])}}module.exports={SemanticSearch:SemanticSearch}},{"../controller/RemoteEmbedModel":2,"../model/input/EmbedInput":14,"../utils/MatchHelpers":35}],10:[function(require,module,exports){const{SemanticSearch}=require("./SemanticSearch");class SemanticSearchPaging extends SemanticSearch{constructor(keyValue,provider,pivotItem,numberOfMatches){super(keyValue,provider);this.pivotItem=pivotItem;this.numberOfMatches=numberOfMatches;this.textAndMatches=[];this.topMatches=[]}async addNewData(newSearchItems){const newMatches=await super.getTopMatches(this.pivotItem,newSearchItems,newSearchItems.length);const newMatchesWithText=newMatches.map(match=>({text:newSearchItems[match.index],score:match.similarity}));this.topMatches=[...this.topMatches,...newMatchesWithText].sort((a,b)=>b.score-a.score).slice(0,this.numberOfMatches)}getCurrentTopMatches(){return this.topMatches}clean(){this.topMatches=[]}}module.exports={SemanticSearchPaging:SemanticSearchPaging}},{"./SemanticSearch":9}],11:[function(require,module,exports){const{RemoteLanguageModel,SupportedLangModels}=require("../controller/RemoteLanguageModel");const LanguageModelInput=require("../model/input/LanguageModelInput");const SystemHelper=require("../utils/SystemHelper");class TextAnalyzer{constructor(keyValue,provider=SupportedLangModels.OPENAI){if(!Object.values(SupportedLangModels).includes(provider)){throw new Error(`The specified provider '${provider}' is not supported. Supported providers are: ${Object.values(SupportedLangModels).join(", ")}`)}this.provider=provider;this.remoteLanguageModel=new RemoteLanguageModel(keyValue,provider);this.systemHelper=new SystemHelper}async summarize(text,options={}){const summaryPromptTemplate=this.systemHelper.loadPrompt("summary");const prompt=summaryPromptTemplate.replace("${text}",text);const modelInput=new LanguageModelInput({prompt:prompt,maxTokens:options.maxTokens||null,temperature:options.temperature||.5});modelInput.setDefaultModels(this.provider);const[summary]=await this.remoteLanguageModel.generateText(modelInput);return summary.trim()}async sentimentAnalysis(text,options={}){const mode=this.systemHelper.loadPrompt("sentiment");const prompt=`${mode}\n\nAnalyze the sentiment of the following text: ${text}\n\nSentiment: `;const modelInput=new LanguageModelInput({prompt:prompt,maxTokens:options.maxTokens||60,temperature:options.temperature||0});modelInput.setDefaultModels(this.provider);const[sentiment]=await this.remoteLanguageModel.generateText(modelInput);const sentiment_output=JSON.parse(sentiment.trim());return sentiment_output}}module.exports={TextAnalyzer:TextAnalyzer}},{"../controller/RemoteLanguageModel":5,"../model/input/LanguageModelInput":18,"../utils/SystemHelper":40}],12:[function(require,module,exports){const{RemoteLanguageModel,SupportedLangModels}=require("./controller/RemoteLanguageModel");const{RemoteImageModel,SupportedImageModels}=require("./controller/RemoteImageModel");const{RemoteFineTuneModel,SupportedFineTuneModels}=require("./controller/RemoteFineTuneModel");const{RemoteSpeechModel,SupportedSpeechModels}=require("./controller/RemoteSpeechModel");const{RemoteEmbedModel,SupportedEmbedModels}=require("./controller/RemoteEmbedModel");const{Chatbot,SupportedChatModels}=require("./function/Chatbot");const{SemanticSearch}=require("./function/SemanticSearch");const{SemanticSearchPaging}=require("./function/SemanticSearchPaging");const{TextAnalyzer}=require("./function/TextAnalyzer");const{Gen}=require("./function/Gen");const LanguageModelInput=require("./model/input/LanguageModelInput");const ImageModelInput=require("./model/input/ImageModelInput");const Text2SpeechInput=require("./model/input/Text2SpeechInput");const{ChatGPTInput,ChatLLamaInput,LLamaReplicateInput,ChatGPTMessage,LLamaSageInput,CohereInput,MistralInput,GeminiInput,AnthropicInput,NvidiaInput,VLLMInput}=require("./model/input/ChatModelInput");const FunctionModelInput=require("./model/input/FunctionModelInput");const EmbedInput=require("./model/input/EmbedInput");const FineTuneInput=require("./model/input/FineTuneInput");const CohereAIWrapper=require("./wrappers/CohereAIWrapper");const GoogleAIWrapper=require("./wrappers/GoogleAIWrapper");const OpenAIWrapper=require("./wrappers/OpenAIWrapper");const StabilityAIWrapper=require("./wrappers/StabilityAIWrapper");const HuggingWrapper=require("./wrappers/HuggingWrapper");const ReplicateWrapper=require("./wrappers/ReplicateWrapper");const AWSEndpoi