uniai
To unify AI models!
// interface/Enum.ts
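// The functions below are runtime shims emitted by the SWC compiler (note the
// "@swc/helpers - typeof" marker in _type_of): array spread/destructuring helpers,
// async/await desugaring, class construction checks, object spread, and a
// TypeScript-style generator state machine. The package's own code begins at
// ChatModelProvider further below.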
function _array_like_to_array(arr, len) {
if (len == null || len > arr.length) len = arr.length;
for(var i = 0, arr2 = new Array(len); i < len; i++)arr2[i] = arr[i];
return arr2;
}
function _array_with_holes(arr) {
if (Array.isArray(arr)) return arr;
}
function _array_without_holes(arr) {
if (Array.isArray(arr)) return _array_like_to_array(arr);
}
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
try {
var info = gen[key](arg);
var value = info.value;
} catch (error) {
reject(error);
return;
}
if (info.done) {
resolve(value);
} else {
Promise.resolve(value).then(_next, _throw);
}
}
function _async_to_generator(fn) {
return function() {
var self = this, args = arguments;
return new Promise(function(resolve, reject) {
var gen = fn.apply(self, args);
function _next(value) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
}
function _throw(err) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
}
_next(undefined);
});
};
}
function _class_call_check(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
function _defineProperties(target, props) {
for(var i = 0; i < props.length; i++){
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _create_class(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
function _define_property(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function _instanceof(left, right) {
if (right != null && typeof Symbol !== "undefined" && right[Symbol.hasInstance]) {
return !!right[Symbol.hasInstance](left);
} else {
return left instanceof right;
}
}
function _iterable_to_array(iter) {
if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
}
function _iterable_to_array_limit(arr, i) {
var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"];
if (_i == null) return;
var _arr = [];
var _n = true;
var _d = false;
var _s, _e;
try {
for(_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true){
_arr.push(_s.value);
if (i && _arr.length === i) break;
}
} catch (err) {
_d = true;
_e = err;
} finally{
try {
if (!_n && _i["return"] != null) _i["return"]();
} finally{
if (_d) throw _e;
}
}
return _arr;
}
function _non_iterable_rest() {
throw new TypeError("Invalid attempt to destructure non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
function _non_iterable_spread() {
throw new TypeError("Invalid attempt to spread non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
function _object_spread(target) {
for(var i = 1; i < arguments.length; i++){
var source = arguments[i] != null ? arguments[i] : {};
var ownKeys = Object.keys(source);
if (typeof Object.getOwnPropertySymbols === "function") {
ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function(sym) {
return Object.getOwnPropertyDescriptor(source, sym).enumerable;
}));
}
ownKeys.forEach(function(key) {
_define_property(target, key, source[key]);
});
}
return target;
}
function _sliced_to_array(arr, i) {
return _array_with_holes(arr) || _iterable_to_array_limit(arr, i) || _unsupported_iterable_to_array(arr, i) || _non_iterable_rest();
}
function _to_consumable_array(arr) {
return _array_without_holes(arr) || _iterable_to_array(arr) || _unsupported_iterable_to_array(arr) || _non_iterable_spread();
}
function _type_of(obj) {
"@swc/helpers - typeof";
return obj && typeof Symbol !== "undefined" && obj.constructor === Symbol ? "symbol" : typeof obj;
}
function _unsupported_iterable_to_array(o, minLen) {
if (!o) return;
if (typeof o === "string") return _array_like_to_array(o, minLen);
var n = Object.prototype.toString.call(o).slice(8, -1);
if (n === "Object" && o.constructor) n = o.constructor.name;
if (n === "Map" || n === "Set") return Array.from(n);
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _array_like_to_array(o, minLen);
}
function _ts_generator(thisArg, body) {
var f, y, t, g, _ = {
label: 0,
sent: function() {
if (t[0] & 1) throw t[1];
return t[1];
},
trys: [],
ops: []
};
return g = {
next: verb(0),
"throw": verb(1),
"return": verb(2)
}, typeof Symbol === "function" && (g[Symbol.iterator] = function() {
return this;
}), g;
function verb(n) {
return function(v) {
return step([
n,
v
]);
};
}
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while(_)try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [
op[0] & 2,
t.value
];
switch(op[0]){
case 0:
case 1:
t = op;
break;
case 4:
_.label++;
return {
value: op[1],
done: false
};
case 5:
_.label++;
y = op[1];
op = [
0
];
continue;
case 7:
op = _.ops.pop();
_.trys.pop();
continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
_ = 0;
continue;
}
if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
_.label = op[1];
break;
}
if (op[0] === 6 && _.label < t[1]) {
_.label = t[1];
t = op;
break;
}
if (t && _.label < t[2]) {
_.label = t[2];
_.ops.push(op);
break;
}
if (t[2]) _.ops.pop();
_.trys.pop();
continue;
}
op = body.call(thisArg, _);
} catch (e) {
op = [
6,
e
];
y = 0;
} finally{
f = t = 0;
}
if (op[0] & 5) throw op[1];
return {
value: op[0] ? op[1] : void 0,
done: true
};
}
}
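// Illustration (added for readability, not part of the published bundle): the two
// helpers above are how SWC desugars async/await. A hypothetical source function
// `async function addOne(p) { return (await p) + 1 }` compiles to the same pattern
// used by util_default.get/post later in this file, where opcode [4, x] means
// "await x" and [2, v] means "return v":
function _example_compiled_add_one(p) {
    return _async_to_generator(function() {
        return _ts_generator(this, function(_state) {
            switch(_state.label){
                case 0:
                    // opcode 4: suspend until `p` resolves
                    return [4, p];
                case 1:
                    // opcode 2: return the awaited value plus one
                    return [2, _state.sent() + 1];
            }
        });
    })();
}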
var ChatModelProvider = /* @__PURE__ */ function(ChatModelProvider2) {
ChatModelProvider2["OpenAI"] = "openai";
ChatModelProvider2["Anthropic"] = "anthropic";
ChatModelProvider2["DeepSeek"] = "deepseek";
ChatModelProvider2["IFlyTek"] = "iflytek";
ChatModelProvider2["Baidu"] = "baidu";
ChatModelProvider2["Google"] = "google";
ChatModelProvider2["GLM"] = "glm";
ChatModelProvider2["MoonShot"] = "moonshot";
ChatModelProvider2["AliYun"] = "aliyun";
ChatModelProvider2["XAI"] = "xai";
ChatModelProvider2["Other"] = "other";
return ChatModelProvider2;
}(ChatModelProvider || {});
var EmbedModelProvider = /* @__PURE__ */ function(EmbedModelProvider2) {
EmbedModelProvider2["OpenAI"] = "openai";
EmbedModelProvider2["Google"] = "google";
EmbedModelProvider2["GLM"] = "glm";
EmbedModelProvider2["AliYun"] = "aliyun";
EmbedModelProvider2["Other"] = "other";
return EmbedModelProvider2;
}(EmbedModelProvider || {});
var ImagineModelProvider = /* @__PURE__ */ function(ImagineModelProvider2) {
ImagineModelProvider2["OpenAI"] = "openai";
ImagineModelProvider2["MidJourney"] = "midjourney";
ImagineModelProvider2["StabilityAI"] = "stability.ai";
ImagineModelProvider2["IFlyTek"] = "iflytek";
return ImagineModelProvider2;
}(ImagineModelProvider || {});
var ModelProvider = _object_spread({}, ChatModelProvider, EmbedModelProvider, ImagineModelProvider);
var OpenAIEmbedModel = /* @__PURE__ */ function(OpenAIEmbedModel2) {
OpenAIEmbedModel2["ADA"] = "text-embedding-ada-002";
OpenAIEmbedModel2["LARGE"] = "text-embedding-3-large";
OpenAIEmbedModel2["SMALL"] = "text-embedding-3-small";
return OpenAIEmbedModel2;
}(OpenAIEmbedModel || {});
var OtherEmbedModel = /* @__PURE__ */ function(OtherEmbedModel2) {
OtherEmbedModel2["BGE_M3"] = "bge-m3";
OtherEmbedModel2["BASE_CHN"] = "text2vec-base-chinese";
OtherEmbedModel2["LARGE_CHN"] = "text2vec-large-chinese";
OtherEmbedModel2["BASE_CHN_PARAPH"] = "text2vec-base-chinese-paraphrase";
OtherEmbedModel2["BASE_CHN_SENTENCE"] = "text2vec-base-chinese-sentence";
OtherEmbedModel2["BASE_MUL"] = "text2vec-base-multilingual";
OtherEmbedModel2["PARAPH_MUL_MINI"] = "paraphrase-multilingual-MiniLM-L12-v2";
return OtherEmbedModel2;
}(OtherEmbedModel || {});
var GLMEmbedModel = /* @__PURE__ */ function(GLMEmbedModel2) {
GLMEmbedModel2["EMBED_2"] = "embedding-2";
GLMEmbedModel2["EMBED_3"] = "embedding-3";
return GLMEmbedModel2;
}(GLMEmbedModel || {});
var GoogleEmbedModel = /* @__PURE__ */ function(GoogleEmbedModel2) {
GoogleEmbedModel2["GEM_EMBED"] = "gemini-embedding-exp";
return GoogleEmbedModel2;
}(GoogleEmbedModel || {});
var AliEmbedModel = /* @__PURE__ */ function(AliEmbedModel2) {
AliEmbedModel2["ALI_V3"] = "text-embedding-v3";
AliEmbedModel2["ALI_V2"] = "text-embedding-v2";
AliEmbedModel2["ALI_V1"] = "text-embedding-v1";
AliEmbedModel2["ALI_ASYNC_V2"] = "text-embedding-async-v2";
AliEmbedModel2["ALI_ASYNC_V1"] = "text-embedding-async-v1";
return AliEmbedModel2;
}(AliEmbedModel || {});
var EmbedModel = _object_spread({}, OpenAIEmbedModel, OtherEmbedModel, GLMEmbedModel, GoogleEmbedModel, AliEmbedModel);
var OpenAIChatModel = /* @__PURE__ */ function(OpenAIChatModel2) {
OpenAIChatModel2["GPT3"] = "gpt-3.5-turbo";
OpenAIChatModel2["GPT4"] = "gpt-4";
OpenAIChatModel2["GPT4_TURBO"] = "gpt-4-turbo";
OpenAIChatModel2["GPT_4O_MINI"] = "gpt-4o-mini";
OpenAIChatModel2["GPT_4_1_MINI"] = "gpt-4.1-mini";
OpenAIChatModel2["GPT_4_1_NANO"] = "gpt-4.1-nano";
OpenAIChatModel2["GPT_4_1"] = "gpt-4.1";
OpenAIChatModel2["CHAT_GPT_4O"] = "chatgpt-4o-latest";
OpenAIChatModel2["GPT_4O"] = "gpt-4o";
OpenAIChatModel2["GPT_4O_AUDIO"] = "gpt-4o-audio-preview";
OpenAIChatModel2["O1"] = "o1";
OpenAIChatModel2["O1_MINI"] = "o1-mini";
OpenAIChatModel2["O1_PRO"] = "o1-pro";
OpenAIChatModel2["O3_MINI"] = "o3-mini";
return OpenAIChatModel2;
}(OpenAIChatModel || {});
var AnthropicChatModel = /* @__PURE__ */ function(AnthropicChatModel2) {
AnthropicChatModel2["CLAUDE_4_SONNET"] = "claude-sonnet-4-20250514";
AnthropicChatModel2["CLAUDE_4_OPUS"] = "claude-opus-4-20250514";
AnthropicChatModel2["CLAUDE_3_7_SONNET"] = "claude-3-7-sonnet-20250219";
AnthropicChatModel2["CLAUDE_3_5_SONNET"] = "claude-3-5-sonnet-20241022";
AnthropicChatModel2["CLAUDE_3_5_HAIKU"] = "claude-3-5-haiku-20241022";
AnthropicChatModel2["CLAUDE_3_OPUS"] = "claude-3-opus-20240229";
AnthropicChatModel2["CLAUDE_3_SONNET"] = "claude-3-sonnet-20240229";
AnthropicChatModel2["CLAUDE_3_HAIKU"] = "claude-3-haiku-20240307";
return AnthropicChatModel2;
}(AnthropicChatModel || {});
var DeepSeekChatModel = /* @__PURE__ */ function(DeepSeekChatModel2) {
DeepSeekChatModel2["DEEPSEEK_V3"] = "deepseek-chat";
DeepSeekChatModel2["DEEPSEEK_R1"] = "deepseek-reasoner";
return DeepSeekChatModel2;
}(DeepSeekChatModel || {});
var GoogleChatModel = /* @__PURE__ */ function(GoogleChatModel2) {
GoogleChatModel2["GEM_PRO_1_5"] = "gemini-1.5-pro";
GoogleChatModel2["GEM_FLASH_1_5"] = "gemini-1.5-flash";
GoogleChatModel2["GEM_FLASH_1_5_8B"] = "gemini-1.5-flash-8b";
GoogleChatModel2["GEM_FLASH_2"] = "gemini-2.0-flash";
GoogleChatModel2["GEM_FLASH_2_LITE"] = "gemini-2.0-flash-lite";
GoogleChatModel2["GEM_PRO_2_5"] = "gemini-2.5-pro";
GoogleChatModel2["GEM_FLASH_2_5"] = "gemini-2.5-flash";
GoogleChatModel2["GEM_FLASH_2_5_LITE"] = "gemini-2.5-flash-lite";
return GoogleChatModel2;
}(GoogleChatModel || {});
var GLMChatModel = /* @__PURE__ */ function(GLMChatModel2) {
GLMChatModel2["GLM_3_TURBO"] = "glm-3-turbo";
GLMChatModel2["GLM_4"] = "glm-4";
GLMChatModel2["GLM_4_AIR"] = "glm-4-air";
GLMChatModel2["GLM_4_AIRX"] = "glm-4-airx";
GLMChatModel2["GLM_4_FLASH"] = "glm-4-flash";
GLMChatModel2["GLM_4_FLASHX"] = "glm-4-flashx";
GLMChatModel2["GLM_4V"] = "glm-4v";
GLMChatModel2["GLM_4V_PLUS"] = "glm-4v-plus";
GLMChatModel2["GLM_4_LONG"] = "glm-4-long";
GLMChatModel2["GLM_4_PLUS"] = "glm-4-plus";
return GLMChatModel2;
}(GLMChatModel || {});
var BaiduChatModel = /* @__PURE__ */ function(BaiduChatModel2) {
BaiduChatModel2["ERNIE_3_5"] = "completions";
BaiduChatModel2["ERNIE_3_5_PRE"] = "ernie-3.5-8k-preview";
BaiduChatModel2["ERNIE_3_5_128K"] = "ernie-3.5-128k";
BaiduChatModel2["ERNIE_4_0_LATEST"] = "ernie-4.0-8k-latest";
BaiduChatModel2["ERNIE_4_0_PREVIEW"] = "ernie-4.0-8k-preview";
BaiduChatModel2["ERNIE_4_0_8K"] = "completions_pro";
BaiduChatModel2["ERNIE_4_0_TURBO_LATEST"] = "ernie-4.0-turbo-8k-latest";
BaiduChatModel2["ERNIE_4_0_TURBO_PREVIEW"] = "ernie-4.0-turbo-8k-preview";
BaiduChatModel2["ERNIE_4_0_TURBO_8K"] = "ernie-4.0-turbo-8k";
BaiduChatModel2["ERNIE_4_0_TURBO_128K"] = "ernie-4.0-turbo-128k";
BaiduChatModel2["ERNIE_SPEED_8K"] = "ernie_speed";
BaiduChatModel2["ERNIE_SPEED_128K"] = "ernie-speed-128k";
BaiduChatModel2["ERNIE_SPEED_PRO_128K"] = "ernie-speed-pro-128k";
BaiduChatModel2["ERNIE_LITE_8K"] = "ernie-lite-8k";
BaiduChatModel2["ERNIE_LITE_PRO_128K"] = "ernie-lite-pro-128k";
BaiduChatModel2["ERNIE_TINY_8K"] = "ernie-tiny-8k";
BaiduChatModel2["ERNIE_CHAR_8K"] = "ernie-char-8k";
BaiduChatModel2["ERNIE_CHAR_FICTION_8K"] = "ernie-char-fiction-8k";
BaiduChatModel2["ERNIE_NOVEL_8K"] = "ernie-novel-8k";
return BaiduChatModel2;
}(BaiduChatModel || {});
var IFlyTekChatModel = /* @__PURE__ */ function(IFlyTekChatModel2) {
IFlyTekChatModel2["SPARK_LITE"] = "lite";
IFlyTekChatModel2["SPARK_PRO"] = "generalv3";
IFlyTekChatModel2["SPARK_PRO_128K"] = "pro-128k";
IFlyTekChatModel2["SPARK_MAX"] = "generalv3.5";
IFlyTekChatModel2["SPARK_MAX_32K"] = "max-32k";
IFlyTekChatModel2["SPARK_ULTRA"] = "4.0Ultra";
return IFlyTekChatModel2;
}(IFlyTekChatModel || {});
var MoonShotChatModel = /* @__PURE__ */ function(MoonShotChatModel2) {
MoonShotChatModel2["MOON_V1_8K"] = "moonshot-v1-8k";
MoonShotChatModel2["MOON_V1_32K"] = "moonshot-v1-32k";
MoonShotChatModel2["MOON_V1_128K"] = "moonshot-v1-128k";
return MoonShotChatModel2;
}(MoonShotChatModel || {});
var AliChatModel = /* @__PURE__ */ function(AliChatModel2) {
AliChatModel2["QWEN_MAX"] = "qwen-max";
AliChatModel2["QWEN_PLUS"] = "qwen-plus";
AliChatModel2["QWEN_TURBO"] = "qwen-turbo";
AliChatModel2["QWEN_LONG"] = "qwen-long";
AliChatModel2["QWEN_CODE"] = "qwen-coder-turbo";
AliChatModel2["QWEN_MATH"] = "qwen-math-plus";
AliChatModel2["QWEN_VL_MAX"] = "qwen-vl-max";
AliChatModel2["QWEN_VL_PLUS"] = "qwen-vl-plus";
return AliChatModel2;
}(AliChatModel || {});
var XAIChatModel = /* @__PURE__ */ function(XAIChatModel2) {
XAIChatModel2["GROK2"] = "grok-2";
XAIChatModel2["GROK2_VISION"] = "grok-2-vision";
XAIChatModel2["GROK3"] = "grok-3";
XAIChatModel2["GROK3_VISION"] = "grok-3-vision";
return XAIChatModel2;
}(XAIChatModel || {});
var ChatModel = _object_spread({}, OpenAIChatModel, AnthropicChatModel, DeepSeekChatModel, BaiduChatModel, GLMChatModel, IFlyTekChatModel, GoogleChatModel, MoonShotChatModel, AliChatModel, XAIChatModel);
var MidJourneyImagineModel = /* @__PURE__ */ function(MidJourneyImagineModel2) {
MidJourneyImagineModel2["MJ"] = "midjourney";
return MidJourneyImagineModel2;
}(MidJourneyImagineModel || {});
var OpenAIImagineModel = /* @__PURE__ */ function(OpenAIImagineModel2) {
OpenAIImagineModel2["DALL_E_2"] = "dall-e-2";
OpenAIImagineModel2["DALL_E_3"] = "dall-e-3";
return OpenAIImagineModel2;
}(OpenAIImagineModel || {});
var StabilityAIImagineModel = /* @__PURE__ */ function(StabilityAIImagineModel2) {
StabilityAIImagineModel2["SD_1_6"] = "stable-diffusion-v1-6";
StabilityAIImagineModel2["SD_XL_1024"] = "stable-diffusion-xl-1024-v1-0";
return StabilityAIImagineModel2;
}(StabilityAIImagineModel || {});
var IFlyTekImagineModel = /* @__PURE__ */ function(IFlyTekImagineModel2) {
IFlyTekImagineModel2["V2"] = "v2.1";
return IFlyTekImagineModel2;
}(IFlyTekImagineModel || {});
var ImagineModel = _object_spread({}, OpenAIImagineModel, MidJourneyImagineModel, StabilityAIImagineModel, IFlyTekImagineModel);
var ModelModel = _object_spread({}, ChatModel, ImagineModel, EmbedModel);
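// Sanity sketch (illustrative, never invoked): the enums are plain string maps, so
// _object_spread simply merges their keys and the combined maps behave like
// ordinary lookup objects:
function _example_model_lookup() {
    return ChatModel.GPT_4O === "gpt-4o" &&
        EmbedModel.LARGE === "text-embedding-3-large" &&
        ImagineModel.DALL_E_3 === "dall-e-3" &&
        ModelProvider.Anthropic === "anthropic";
}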
var MJTaskType = /* @__PURE__ */ function(MJTaskType4) {
MJTaskType4["IMAGINE"] = "IMAGINE";
MJTaskType4["UPSCALE"] = "UPSCALE";
MJTaskType4["VARIATION"] = "VARIATION";
MJTaskType4["REROLL"] = "REROLL";
MJTaskType4["DESCRIBE"] = "DESCRIBE";
MJTaskType4["BLEND"] = "BLEND";
return MJTaskType4;
}(MJTaskType || {});
var DETaskType = /* @__PURE__ */ function(DETaskType2) {
DETaskType2["GENERATION"] = "generations";
DETaskType2["EDIT"] = "edits";
DETaskType2["VARIATION"] = "variation";
return DETaskType2;
}(DETaskType || {});
var SDTaskType = /* @__PURE__ */ function(SDTaskType2) {
SDTaskType2["GENERATION"] = "generation";
return SDTaskType2;
}(SDTaskType || {});
var SPKTaskType = /* @__PURE__ */ function(SPKTaskType2) {
SPKTaskType2["GENERATION"] = "generation";
return SPKTaskType2;
}(SPKTaskType || {});
var ImgTaskType = _object_spread({}, MJTaskType, DETaskType, SDTaskType, SPKTaskType);
var ChatRoleEnum = /* @__PURE__ */ function(ChatRoleEnum2) {
ChatRoleEnum2["SYSTEM"] = "system";
ChatRoleEnum2["USER"] = "user";
ChatRoleEnum2["ASSISTANT"] = "assistant";
ChatRoleEnum2["TOOL"] = "tool";
ChatRoleEnum2["DEV"] = "developer";
return ChatRoleEnum2;
}(ChatRoleEnum || {});
var GPTChatRoleEnum = /* @__PURE__ */ function(GPTChatRoleEnum2) {
GPTChatRoleEnum2["SYSTEM"] = "system";
GPTChatRoleEnum2["USER"] = "user";
GPTChatRoleEnum2["ASSISTANT"] = "assistant";
GPTChatRoleEnum2["DEV"] = "developer";
GPTChatRoleEnum2["TOOL"] = "tool";
return GPTChatRoleEnum2;
}(GPTChatRoleEnum || {});
var AnthropicChatRoleEnum = /* @__PURE__ */ function(AnthropicChatRoleEnum2) {
AnthropicChatRoleEnum2["USER"] = "user";
AnthropicChatRoleEnum2["ASSISTANT"] = "assistant";
return AnthropicChatRoleEnum2;
}(AnthropicChatRoleEnum || {});
var DSChatRoleEnum = /* @__PURE__ */ function(DSChatRoleEnum2) {
DSChatRoleEnum2["SYSTEM"] = "system";
DSChatRoleEnum2["USER"] = "user";
DSChatRoleEnum2["ASSISTANT"] = "assistant";
DSChatRoleEnum2["TOOL"] = "tool";
return DSChatRoleEnum2;
}(DSChatRoleEnum || {});
var SPKChatRoleEnum = /* @__PURE__ */ function(SPKChatRoleEnum2) {
SPKChatRoleEnum2["USER"] = "user";
SPKChatRoleEnum2["ASSISTANT"] = "assistant";
SPKChatRoleEnum2["SYSTEM"] = "system";
SPKChatRoleEnum2["TOOL"] = "tool";
return SPKChatRoleEnum2;
}(SPKChatRoleEnum || {});
var GLMChatRoleEnum = /* @__PURE__ */ function(GLMChatRoleEnum2) {
GLMChatRoleEnum2["SYSTEM"] = "system";
GLMChatRoleEnum2["USER"] = "user";
GLMChatRoleEnum2["ASSISTANT"] = "assistant";
GLMChatRoleEnum2["TOOL"] = "tool";
return GLMChatRoleEnum2;
}(GLMChatRoleEnum || {});
var GEMChatRoleEnum = /* @__PURE__ */ function(GEMChatRoleEnum2) {
GEMChatRoleEnum2["USER"] = "user";
GEMChatRoleEnum2["MODEL"] = "model";
return GEMChatRoleEnum2;
}(GEMChatRoleEnum || {});
var BDUChatRoleEnum = /* @__PURE__ */ function(BDUChatRoleEnum2) {
BDUChatRoleEnum2["USER"] = "user";
BDUChatRoleEnum2["ASSISTANT"] = "assistant";
return BDUChatRoleEnum2;
}(BDUChatRoleEnum || {});
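// Illustration (hypothetical helper, not part of the bundle): the per-provider role
// enums above differ mainly in how they label the assistant turn; Gemini, for
// example, expects "model" where the unified ChatRoleEnum uses "assistant". A
// provider adapter could translate roles along these lines:
function _example_to_gemini_role(role) {
    // Assumption for this sketch: any non-assistant turn is sent as a user turn.
    return role === ChatRoleEnum.ASSISTANT ? GEMChatRoleEnum.MODEL : GEMChatRoleEnum.USER;
}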
// src/providers/OpenAI.ts
import { PassThrough, Readable } from "stream";
import EventSourceStream from "@server-sent-stream/node";
import { decodeStream } from "iconv-lite";
// src/util.ts
import { writeFileSync } from "fs";
import axios from "axios";
import { LocalStorage } from "node-localstorage";
import path from "path";
import isBase64 from "is-base64";
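// Disk-backed key/value store under ./cache, used below to persist values saved via
// setItem/getItem (notably the simulated image-generation task records).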
var localStorage = new LocalStorage("./cache", Infinity);
var util_default = {
get: /**
* Performs an HTTP GET request.
*
* @param url - The URL to make the request to.
* @param params - Optional request parameters.
* @param config - Optional Axios request configuration.
* @returns A Promise that resolves with the response data.
*/ function get(url, params, config) {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
return [
4,
axios.get(url, _object_spread({
params: params
}, config))
];
case 1:
return [
2,
_state.sent().data
];
}
});
})();
},
post: /**
* Performs an HTTP POST request.
*
* @param url - The URL to make the request to.
* @param body - The request body.
* @param config - Optional Axios request configuration.
* @returns A Promise that resolves with the response data.
*/ function post(url, body, config) {
return _async_to_generator(function() {
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
return [
4,
axios.post(url, body, config)
];
case 1:
return [
2,
_state.sent().data
];
}
});
})();
},
/**
* Parses JSON from a string and returns it as a generic type T.
*
* @param str - The JSON string to parse.
* @returns The parsed JSON as a generic type T.
*/ json: function json(str) {
try {
if (!str) return null;
return JSON.parse(str);
} catch (e) {
return null;
}
},
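/**
* Picks a random element from an array (used by the providers to rotate across multiple API keys).
*
* @param arr - The array to pick from.
* @returns A randomly chosen element of the array.
*/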
getRandomKey: function getRandomKey(arr) {
return arr[Math.floor(Math.random() * arr.length)];
},
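/**
* Generates a random numeric ID string with no leading zero.
*
* @param length - The desired number of digits (default: 16).
* @returns The generated ID string.
*/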
getRandomId: function getRandomId() {
var length = arguments.length > 0 && arguments[0] !== void 0 ? arguments[0] : 16;
var result = "";
while(result.length < length){
var rand = Math.floor(Math.random() * 10);
if (result.length === 0 && rand === 0) continue;
result += rand.toString();
}
return result;
},
/**
* Computes the greatest common divisor (GCD) of two numbers using the Euclidean algorithm.
*
* @param a - The first number.
* @param b - The second number.
* @returns The GCD of the two numbers.
*/ getGCD: function getGCD(a, b) {
if (b === 0) return a;
return this.getGCD(b, a % b);
},
/**
* Calculates and returns the aspect ratio of a width and height.
*
* @param width - The width dimension.
* @param height - The height dimension.
* @returns The aspect ratio in the format "width:height".
*/ getAspect: function getAspect(width, height) {
if (!width || !height) return "1:1";
var gcd = this.getGCD(width, height);
var aspectRatioWidth = width / gcd;
var aspectRatioHeight = height / gcd;
return "".concat(aspectRatioWidth, ":").concat(aspectRatioHeight);
},
/**
* Stores an item in local storage with the specified key.
*
* @param key - The key under which to store the item.
* @param value - The value to be stored.
*/ setItem: function setItem(key, value) {
localStorage.setItem(key, JSON.stringify(value));
},
/**
* Retrieves an item from local storage by its key and parses it as a generic type T.
*
* @param key - The key of the item to retrieve.
* @returns The parsed item as a generic type T.
*/ getItem: function getItem(key) {
return this.json(localStorage.getItem(key));
},
writeFile: /**
* This method is used to write given data to a file. The data can either be a base64 string or an HTTP/HTTPS URL.
* If the data is a URL, the method downloads it as a binary buffer and writes it to the file.
* If it's a base64 string, it writes it directly to the file.
* @param data - The data to be written to the file. Can be a base64 string or an HTTP/HTTPS image URL.
* @param filename - The name of the file where the data will be written. If not provided, a unique filename will be generated.
* @returns - The file path where the data was written.
*/ function writeFile(data) {
var filename = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : "";
return _async_to_generator(function() {
var filepath, res;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
filepath = path.join("./cache", filename);
if (!(data.startsWith("http://") || data.startsWith("https://"))) return [
3,
2
];
return [
4,
this.get(data, {}, {
responseType: "arraybuffer"
})
];
case 1:
res = _state.sent();
if (!_instanceof(res, Buffer)) throw new Error("Img is not a buffer");
writeFileSync(filepath, res);
return [
3,
3
];
case 2:
writeFileSync(filepath, Buffer.from(data, "base64"));
_state.label = 3;
case 3:
return [
2,
filepath
];
}
});
}).apply(this);
},
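/**
* Checks whether a string is base64-encoded, delegating to the is-base64 package.
*
* @param data - The string to test.
* @param allowMime - Whether a data-URI MIME prefix is allowed (default: true).
* @returns True if the string is valid base64.
*/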
isBase64: function isBase641(data) {
var allowMime = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : true;
return isBase64(data, {
allowMime: allowMime
});
}
};
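// Usage sketch (illustrative; the URL and cache key are placeholder assumptions, and
// nothing here is invoked by the bundle). The helpers above wrap axios for HTTP and
// node-localstorage for disk caching under ./cache:
async function _example_util_usage() {
    // GET https://example.com/api?q=hi and resolve with the response body
    var body = await util_default.get("https://example.com/api", { q: "hi" });
    // Persist it on disk so a later getItem("last_response") can read it back
    util_default.setItem("last_response", body);
    // 1920 x 1080 reduces by their GCD (120) to the aspect string "16:9"
    return util_default.getAspect(1920, 1080);
}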
// src/providers/OpenAI.ts
var STORAGE_KEY = "task_open_ai";
var API = "https://api.openai.com";
var VER = "v1";
var OpenAI = /*#__PURE__*/ function() {
"use strict";
function OpenAI(key) {
var api = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : API;
_class_call_check(this, OpenAI);
this.key = key;
this.api = api;
}
_create_class(OpenAI, [
{
key: "embedding",
value: /**
* Fetches embeddings for input text.
*
* @param input - An array of input strings.
* @param model - The model to use for embeddings (default: text-embedding-ada-002).
* @returns A promise resolving to the embedding response.
*/ function embedding(input) {
var model = arguments.length > 1 && arguments[1] !== void 0 /* ADA */ ? arguments[1] : "text-embedding-ada-002";
var _this = this;
return _async_to_generator(function() {
var key, res, data;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
key = Array.isArray(_this.key) ? util_default.getRandomKey(_this.key) : _this.key;
if (!key) throw new Error("OpenAI API key is not set in config");
return [
4,
util_default.post("".concat(_this.api, "/").concat(VER, "/embeddings"), {
model: model,
input: input
}, {
headers: {
Authorization: "Bearer ".concat(key)
},
responseType: "json"
})
];
case 1:
res = _state.sent();
data = {
embedding: res.data.map(function(v) {
return v.embedding;
}),
object: "embedding",
model: model,
promptTokens: res.usage.prompt_tokens || 0,
totalTokens: res.usage.total_tokens || 0
};
return [
2,
data
];
}
});
})();
}
},
{
key: "chat",
value: /**
* Sends messages to the GPT chat model.
*
* @param messages - An array of chat messages.
* @param model - The model to use for chat (default: gpt-4.1).
* @param stream - Whether to return a streamed response (default: false).
* @param top - Nucleus sampling probability, sent as top_p (optional).
* @param temperature - Sampling temperature (optional).
* @param maxLength - Maximum token length for response (optional).
* @param tools - Tools for model to use (optional).
* @param toolChoice - Controls which (if any) tool is called by the model: none, required, auto (optional).
* @returns A promise resolving to the chat response or a stream.
*/ function chat(messages) {
var model = arguments.length > 1 && arguments[1] !== void 0 /* GPT_4_1 */ ? arguments[1] : "gpt-4.1", stream = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : false, top = arguments.length > 3 ? arguments[3] : void 0, temperature = arguments.length > 4 ? arguments[4] : void 0, maxLength = arguments.length > 5 ? arguments[5] : void 0, tools = arguments.length > 6 ? arguments[6] : void 0, toolChoice = arguments.length > 7 ? arguments[7] : void 0;
var _this = this;
return _async_to_generator(function() {
var key, res, data, output, parser, _res_choices__message, _res_choices_, _res_choices__message1, _res_choices_1, _res_choices__message2, _res_choices_2, _res_usage, _res_usage1, _res_usage2;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
key = Array.isArray(_this.key) ? util_default.getRandomKey(_this.key) : _this.key;
if (!key) throw new Error("OpenAI API key is not set in config");
if (typeof temperature === "number") {
if (temperature < 0) temperature = 0;
if (temperature > 1) temperature = 1;
}
if (typeof top === "number") {
if (top < 0) top = 0;
if (top > 1) top = 1;
}
return [
4,
util_default.post("".concat(_this.api, "/").concat(VER, "/chat/completions"), {
model: model,
messages: _this.formatMessage(messages),
stream: stream,
temperature: temperature,
top_p: top,
max_completion_tokens: maxLength,
tools: tools,
tool_choice: toolChoice
}, {
headers: {
Authorization: "Bearer ".concat(key)
},
responseType: stream ? "stream" : "json"
})
];
case 1:
res = _state.sent();
data = {
content: "",
model: model,
object: "",
promptTokens: 0,
completionTokens: 0,
totalTokens: 0
};
if (_instanceof(res, Readable)) {
output = new PassThrough();
parser = new EventSourceStream();
parser.on("data", function(e) {
var obj = util_default.json(e.data);
if (obj) {
var _obj_choices__delta, _obj_choices_, _obj_choices__delta1, _obj_choices_1, _obj_choices__delta2, _obj_choices_2, _obj_usage, _obj_usage1, _obj_usage2;
data.content = ((_obj_choices_ = obj.choices[0]) === null || _obj_choices_ === void 0 ? void 0 : (_obj_choices__delta = _obj_choices_.delta) === null || _obj_choices__delta === void 0 ? void 0 : _obj_choices__delta.content) || "";
if ((_obj_choices_1 = obj.choices[0]) === null || _obj_choices_1 === void 0 ? void 0 : (_obj_choices__delta1 = _obj_choices_1.delta) === null || _obj_choices__delta1 === void 0 ? void 0 : _obj_choices__delta1.tool_calls) data.tools = (_obj_choices_2 = obj.choices[0]) === null || _obj_choices_2 === void 0 ? void 0 : (_obj_choices__delta2 = _obj_choices_2.delta) === null || _obj_choices__delta2 === void 0 ? void 0 : _obj_choices__delta2.tool_calls;
data.model = obj.model;
data.object = obj.object;
data.promptTokens = ((_obj_usage = obj.usage) === null || _obj_usage === void 0 ? void 0 : _obj_usage.prompt_tokens) || 0;
data.completionTokens = ((_obj_usage1 = obj.usage) === null || _obj_usage1 === void 0 ? void 0 : _obj_usage1.completion_tokens) || 0;
data.totalTokens = ((_obj_usage2 = obj.usage) === null || _obj_usage2 === void 0 ? void 0 : _obj_usage2.total_tokens) || 0;
output.write(JSON.stringify(data));
}
});
parser.on("error", function(e) {
return output.destroy(e);
});
parser.on("end", function() {
return output.end();
});
res.pipe(decodeStream("utf-8")).pipe(parser);
return [
2,
output
];
} else {
data.content = ((_res_choices_ = res.choices[0]) === null || _res_choices_ === void 0 ? void 0 : (_res_choices__message = _res_choices_.message) === null || _res_choices__message === void 0 ? void 0 : _res_choices__message.content) || "";
if ((_res_choices_1 = res.choices[0]) === null || _res_choices_1 === void 0 ? void 0 : (_res_choices__message1 = _res_choices_1.message) === null || _res_choices__message1 === void 0 ? void 0 : _res_choices__message1.tool_calls) data.tools = (_res_choices_2 = res.choices[0]) === null || _res_choices_2 === void 0 ? void 0 : (_res_choices__message2 = _res_choices_2.message) === null || _res_choices__message2 === void 0 ? void 0 : _res_choices__message2.tool_calls;
data.model = res.model;
data.object = res.object;
data.promptTokens = ((_res_usage = res.usage) === null || _res_usage === void 0 ? void 0 : _res_usage.prompt_tokens) || 0;
data.completionTokens = ((_res_usage1 = res.usage) === null || _res_usage1 === void 0 ? void 0 : _res_usage1.completion_tokens) || 0;
data.totalTokens = ((_res_usage2 = res.usage) === null || _res_usage2 === void 0 ? void 0 : _res_usage2.total_tokens) || 0;
return [
2,
data
];
}
return [
2
];
}
});
})();
}
},
{
key: "imagine",
value: /**
* Generates images based on a prompt.
*
* @param prompt - The prompt for image generation.
* @param width - Image width (default: 1024).
* @param height - Image height (default: 1024).
* @param n - Number of images to generate (default: 1).
* @param model - Model choice (default: dall-e-3).
* @returns A promise resolving to the image generation response.
*/ function imagine(prompt) {
var width = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : 1024, height = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : 1024, n = arguments.length > 3 && arguments[3] !== void 0 ? arguments[3] : 1, model = arguments.length > 4 && arguments[4] !== void 0 /* DALL_E_3 */ ? arguments[4] : "dall-e-3";
var _this = this;
return _async_to_generator(function() {
var key, res, id, imgs, _tmp, _tmp1, _i, i, _, time, task, tasks;
return _ts_generator(this, function(_state) {
switch(_state.label){
case 0:
key = Array.isArray(_this.key) ? util_default.getRandomKey(_this.key) : _this.key;
if (!key) throw new Error("OpenAI API key is not set in config");
return [
4,
util_default.post("".concat(_this.api, "/").concat(VER, "/images/", "generations" /* GENERATION */ ), {
model: model,
prompt: prompt,
n: n,
size: "".concat(width, "x").concat(height),
response_format: "b64_json"
}, {
headers: {
Authorization: "Bearer ".concat(key)
},
responseType: "json"
})
];
case 1:
res = _state.sent();
id = util_default.getRandomId();
imgs = [];
_tmp = [];
for(_tmp1 in res.data)_tmp.push(_tmp1);
_i = 0;
_state.label = 2;
case 2:
if (!(_i < _tmp.length)) return [
3,
5
];
i = _tmp[_i];
_ = imgs.push;
return [
4,
util_default.writeFile(res.data[i].b64_json, "".concat(id, "-").concat(i, ".png"))
];
case 3:
_.apply(imgs, [
_state.sent()
]);
_state.label = 4;
case 4:
_i++;
return [
3,
2
];
case 5:
time = Date.now();
task = {
id: id,
type: "generations" /* GENERATION */ ,
info: "success",
progress: 100,
imgs: imgs,
fail: "",
created: time,
model: model
};
tasks = util_default.getItem(STORAGE_KEY) || [];
tasks.push(task);
util_default.setItem(STORAGE_KEY, tasks);
return [
2,
{
taskId: task.id,
time: time
}
];
}
});
})();
}
},
{
/**
* Retrieves locally simulated image-generation task records.
*
* @param id - The task ID to filter by (optional).
* @returns An array of task records, filtered to the given ID when one is provided.
*/ key: "task",
value: function task(id) {
var tasks = util_default.getItem(STORAGE_KEY) || [];
if (id) return tasks.filter(function(v) {
return v.id === id;
});
else return tasks;
}
},
{
/**
* Formats chat messages according to the GPT model's message format.
*
* @param messages - An array of chat messages.
* @returns Formatted messages compatible with the GPT model.
*/ key: "formatMessage",
value: function formatMessage(messages) {
var prompt = [];
var _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
try {
for(var _iterator = messages[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
var _step_value = _step.value, role = _step_value.role, content = _step_value.content, img = _step_value.img, tool = _step_value.tool, audio = _step_value.audio;
switch(role){
case "user" /* USER */ :
if (img || audio) {
var contentArr = [];
if (content.trim()) contentArr.push({
type: "text",
text: content
});
if (img) contentArr.push({
type: "image_url",
image_url: {
url: img
}
});
if (audio) contentArr.push({
type: "input_audio",
input_audio: {
data: audio,
format: "wav"
}
});
prompt.push({
role: role,
content: contentArr
});
} else prompt.push({
role: role,
content: content
});
break;
case "tool" /* TOOL */ :
prompt.push({
role: role,
content: content,
tool_call_id: tool
});
break;
default:
prompt.push({
role: role,
content: content
});
break;
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally{
try {
if (!_iteratorNormalCompletion && _iterator.return != null) {
_iterator.return();
}
} finally{
if (_didIteratorError) {
throw _iteratorError;
}
}
}
return prompt;
}
}
]);
return OpenAI;
}();
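// Usage sketch (illustrative; the environment variable, model choice, and prompt are
// assumptions, and nothing here runs automatically). The provider above supports two
// chat modes: non-stream resolves with a plain response object, stream resolves with
// a PassThrough that emits JSON-stringified snapshots of that object:
async function _example_openai_chat() {
    var openai = new OpenAI(process.env.OPENAI_KEY);
    // Non-stream: resolves to { content, model, object, promptTokens, completionTokens, totalTokens }
    var reply = await openai.chat(
        [{ role: ChatRoleEnum.USER, content: "Hello!" }],
        OpenAIChatModel.GPT_4O_MINI
    );
    console.log(reply.content);
    // Stream: each emitted chunk is the JSON-stringified running data object
    var stream = await openai.chat(
        [{ role: ChatRoleEnum.USER, content: "Hello!" }],
        OpenAIChatModel.GPT_4O_MINI,
        true
    );
    stream.on("data", function(buf) {
        var chunk = util_default.json(buf.toString());
        if (chunk) process.stdout.write(chunk.content);
    });
}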
// src/providers/Anthropic.ts
import { PassThrough as PassThrough2, Readable as Readable2 } from "stream";
import EventSourceStream2 from "@server-sent-stream/node";
import { decodeStream as decodeStream2 } from "iconv-lite";
import { extname } from "path";
import { readFileSync } from "fs";
var API2 = "https://api.anthropic.com";
var VER2 = "v1";
var Anthropic = /*#__PURE__*/ function() {
"use strict";
function Anthropic(key) {
var api = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : API2;
_class_call_check(this, Anthropic);
this.key = key;
this.api = api;
}
_create_class(Anthropic, [
{
key: "chat",
value: /**
* Sends messages to the Claude chat model.
*
* @param messages - An array of chat messages.
* @param model - The model to use for chat (default: claude-3-5-sonnet).
* @param stream - Whether to use stream response (default: false).
* @param top - Top probability to sample (optional).
* @param temperature - Temperature for sampling (optional).
* @param maxLength