/*
 * modelmix — 🧬 ModelMix - Unified API for Diverse AI LLM.
 * (npm listing header: Version not captured; 1,436 lines (1,213 loc) • 60.7 kB, JavaScript)
 */
const axios = require('axios');
const fs = require('fs');
const { fromBuffer } = require('file-type');
const { inspect } = require('util');
const log = require('lemonlog')('ModelMix');
const Bottleneck = require('bottleneck');
const path = require('path');
const generateJsonSchema = require('./schema');
const { Client } = require("@modelcontextprotocol/sdk/client/index.js");
const { StdioClientTransport } = require("@modelcontextprotocol/sdk/client/stdio.js");
const { MCPToolsManager } = require('./mcp-tools');
/**
 * ModelMix — a unified front-end over multiple LLM providers.
 *
 * A ModelMix instance accumulates a conversation (`this.messages`), a fallback
 * chain of models (`this.models`, tried in attach order), tool definitions
 * (local tools and external MCP servers), and request options. `execute()`
 * runs the chain through a Bottleneck rate limiter, falling back to the next
 * model on failure and looping back into itself while tool calls are returned.
 */
class ModelMix {
    /**
     * @param {object} [options] - request payload defaults shared by every provider
     *                             (max_tokens, temperature, ...).
     * @param {object} [config]  - behavioral settings: system prompt, max_history,
     *                             debug flag, Bottleneck limiter configuration.
     */
    constructor({ options = {}, config = {} } = {}) {
        this.models = [];   // fallback chain: [{ key: modelId, provider: Mix* instance }]
        this.messages = []; // accumulated conversation messages
        this.tools = {};    // tool definitions grouped by source key (MCP server key or 'local')
        this.toolClient = {}; // tool name -> MCP client that serves it
        this.mcp = {};        // MCP server key -> connected Client
        this.mcpToolsManager = new MCPToolsManager();
        this.options = {
            max_tokens: 5000,
            temperature: 1, // 1 --> More creative, 0 --> More deterministic.
            ...options
        };
        // Standard Bottleneck configuration
        const defaultBottleneckConfig = {
            maxConcurrent: 8, // Maximum number of concurrent requests
            minTime: 500, // Minimum time between requests (in ms)
        };
        this.config = {
            system: 'You are an assistant.',
            max_history: 1, // Default max history
            debug: false,
            bottleneck: defaultBottleneckConfig,
            ...config
        }
        this.limiter = new Bottleneck(this.config.bottleneck);
    }
    // Registers template key/value replacements applied by _template() to the
    // system prompt and text content before a request is sent.
    replace(keyValues) {
        this.config.replace = { ...this.config.replace, ...keyValues };
        return this;
    }
    // Static factory — equivalent to `new ModelMix(...)`.
    static new({ options = {}, config = {} } = {}) {
        return new ModelMix({ options, config });
    }
    // Instance factory: a fresh ModelMix seeded with this instance's options/config.
    // NOTE(review): options/config objects are passed by reference, not cloned — confirm
    // whether later mutations are meant to be shared between instances.
    new() {
        return new ModelMix({ options: this.options, config: this.config });
    }
    // Pretty-prints an object for debug output (full depth, colors).
    static formatJSON(obj) {
        return inspect(obj, {
            depth: null,
            colors: true,
            maxArrayLength: null,
            breakLength: 80,
            compact: false
        });
    }
    // If the message parses as JSON, pretty-print it; otherwise return it unchanged.
    static formatMessage(message) {
        if (typeof message !== 'string') return message;
        try {
            return ModelMix.formatJSON(JSON.parse(message.trim()));
        } catch (e) {
            return message;
        }
    }
    /**
     * Appends a model to the fallback chain. Duplicate keys are ignored.
     * @param {string} key - provider-specific model identifier.
     * @param {MixCustom} provider - provider adapter instance.
     * @throws {Error} if messages have already been added (chain is then frozen).
     */
    attach(key, provider) {
        if (this.models.some(model => model.key === key)) {
            return this;
        }
        if (this.messages.length > 0) {
            throw new Error("Cannot add models after message generation has started.");
        }
        this.models.push({ key, provider });
        return this;
    }
    // --- OpenAI model shortcuts -------------------------------------------
    gpt41({ options = {}, config = {} } = {}) {
        return this.attach('gpt-4.1', new MixOpenAI({ options, config }));
    }
    gpt41mini({ options = {}, config = {} } = {}) {
        return this.attach('gpt-4.1-mini', new MixOpenAI({ options, config }));
    }
    gpt41nano({ options = {}, config = {} } = {}) {
        return this.attach('gpt-4.1-nano', new MixOpenAI({ options, config }));
    }
    gpt4o({ options = {}, config = {} } = {}) {
        return this.attach('gpt-4o', new MixOpenAI({ options, config }));
    }
    o4mini({ options = {}, config = {} } = {}) {
        return this.attach('o4-mini', new MixOpenAI({ options, config }));
    }
    o3({ options = {}, config = {} } = {}) {
        return this.attach('o3', new MixOpenAI({ options, config }));
    }
    gpt45({ options = {}, config = {} } = {}) {
        return this.attach('gpt-4.5-preview', new MixOpenAI({ options, config }));
    }
    gpt5({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5', new MixOpenAI({ options, config }));
    }
    gpt5mini({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5-mini', new MixOpenAI({ options, config }));
    }
    gpt5nano({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5-nano', new MixOpenAI({ options, config }));
    }
    gpt51({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5.1', new MixOpenAI({ options, config }));
    }
    gpt52({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5.2', new MixOpenAI({ options, config }));
    }
    gpt52chat({ options = {}, config = {} } = {}) {
        return this.attach('gpt-5.2-chat-latest', new MixOpenAI({ options, config }));
    }
    // Open-weight GPT-OSS, routable to Together, Cerebras, or Groq (default).
    gptOss({ options = {}, config = {}, mix = { together: false, cerebras: false, groq: true } } = {}) {
        if (mix.together) return this.attach('openai/gpt-oss-120b', new MixTogether({ options, config }));
        if (mix.cerebras) return this.attach('gpt-oss-120b', new MixCerebras({ options, config }));
        if (mix.groq) return this.attach('openai/gpt-oss-120b', new MixGroq({ options, config }));
        return this;
    }
    // --- Anthropic model shortcuts (plain and extended-thinking variants) --
    opus45({ options = {}, config = {} } = {}) {
        return this.attach('claude-opus-4-5-20251101', new MixAnthropic({ options, config }));
    }
    opus41({ options = {}, config = {} } = {}) {
        return this.attach('claude-opus-4-1-20250805', new MixAnthropic({ options, config }));
    }
    opus41think({ options = {}, config = {} } = {}) {
        options = { ...MixAnthropic.thinkingOptions, ...options };
        return this.attach('claude-opus-4-1-20250805', new MixAnthropic({ options, config }));
    }
    sonnet4({ options = {}, config = {} } = {}) {
        return this.attach('claude-sonnet-4-20250514', new MixAnthropic({ options, config }));
    }
    sonnet4think({ options = {}, config = {} } = {}) {
        options = { ...MixAnthropic.thinkingOptions, ...options };
        return this.attach('claude-sonnet-4-20250514', new MixAnthropic({ options, config }));
    }
    sonnet45({ options = {}, config = {} } = {}) {
        return this.attach('claude-sonnet-4-5-20250929', new MixAnthropic({ options, config }));
    }
    sonnet45think({ options = {}, config = {} } = {}) {
        options = { ...MixAnthropic.thinkingOptions, ...options };
        return this.attach('claude-sonnet-4-5-20250929', new MixAnthropic({ options, config }));
    }
    sonnet37({ options = {}, config = {} } = {}) {
        return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
    }
    sonnet37think({ options = {}, config = {} } = {}) {
        options = { ...MixAnthropic.thinkingOptions, ...options };
        return this.attach('claude-3-7-sonnet-20250219', new MixAnthropic({ options, config }));
    }
    haiku35({ options = {}, config = {} } = {}) {
        return this.attach('claude-3-5-haiku-20241022', new MixAnthropic({ options, config }));
    }
    haiku45({ options = {}, config = {} } = {}) {
        return this.attach('claude-haiku-4-5-20251001', new MixAnthropic({ options, config }));
    }
    haiku45think({ options = {}, config = {} } = {}) {
        options = { ...MixAnthropic.thinkingOptions, ...options };
        return this.attach('claude-haiku-4-5-20251001', new MixAnthropic({ options, config }));
    }
    // --- Google Gemini shortcuts ------------------------------------------
    gemini25flash({ options = {}, config = {} } = {}) {
        return this.attach('gemini-2.5-flash', new MixGoogle({ options, config }));
    }
    gemini3pro({ options = {}, config = {} } = {}) {
        return this.attach('gemini-3-pro-preview', new MixGoogle({ options, config }));
    }
    gemini3flash({ options = {}, config = {} } = {}) {
        return this.attach('gemini-3-flash-preview', new MixGoogle({ options, config }));
    }
    gemini25pro({ options = {}, config = {} } = {}) {
        return this.attach('gemini-2.5-pro', new MixGoogle({ options, config }));
    }
    // --- Perplexity shortcuts ---------------------------------------------
    sonarPro({ options = {}, config = {} } = {}) {
        return this.attach('sonar-pro', new MixPerplexity({ options, config }));
    }
    sonar({ options = {}, config = {} } = {}) {
        return this.attach('sonar', new MixPerplexity({ options, config }));
    }
    // --- xAI Grok shortcuts ------------------------------------------------
    grok3({ options = {}, config = {} } = {}) {
        return this.attach('grok-3', new MixGrok({ options, config }));
    }
    grok3mini({ options = {}, config = {} } = {}) {
        return this.attach('grok-3-mini', new MixGrok({ options, config }));
    }
    grok4({ options = {}, config = {} } = {}) {
        return this.attach('grok-4-0709', new MixGrok({ options, config }));
    }
    grok41think({ options = {}, config = {} } = {}) {
        return this.attach('grok-4-1-fast-reasoning', new MixGrok({ options, config }));
    }
    grok41({ options = {}, config = {} } = {}) {
        return this.attach('grok-4-1-fast-non-reasoning', new MixGrok({ options, config }));
    }
    // --- Open-weight models, routable across hosting providers via `mix` ---
    qwen3({ options = {}, config = {}, mix = { together: true, cerebras: false } } = {}) {
        if (mix.together) this.attach('Qwen/Qwen3-235B-A22B-fp8-tput', new MixTogether({ options, config }));
        if (mix.cerebras) this.attach('qwen-3-32b', new MixCerebras({ options, config }));
        return this;
    }
    scout({ options = {}, config = {}, mix = { groq: true, together: false, cerebras: false } } = {}) {
        if (mix.groq) this.attach('meta-llama/llama-4-scout-17b-16e-instruct', new MixGroq({ options, config }));
        if (mix.together) this.attach('meta-llama/Llama-4-Scout-17B-16E-Instruct', new MixTogether({ options, config }));
        if (mix.cerebras) this.attach('llama-4-scout-17b-16e-instruct', new MixCerebras({ options, config }));
        return this;
    }
    maverick({ options = {}, config = {}, mix = { groq: true, together: false, lambda: false } } = {}) {
        if (mix.groq) this.attach('meta-llama/llama-4-maverick-17b-128e-instruct', new MixGroq({ options, config }));
        if (mix.together) this.attach('meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8', new MixTogether({ options, config }));
        if (mix.lambda) this.attach('llama-4-maverick-17b-128e-instruct-fp8', new MixLambda({ options, config }));
        return this;
    }
    deepseekR1({ options = {}, config = {}, mix = { groq: true, together: false, cerebras: false } } = {}) {
        if (mix.groq) this.attach('deepseek-r1-distill-llama-70b', new MixGroq({ options, config }));
        if (mix.together) this.attach('deepseek-ai/DeepSeek-R1', new MixTogether({ options, config }));
        if (mix.cerebras) this.attach('deepseek-r1-distill-llama-70b', new MixCerebras({ options, config }));
        return this;
    }
    // NOTE(review): the `mix` parameter is accepted but never consulted here —
    // Lambda is always attached. Confirm whether other hosts were intended.
    hermes3({ options = {}, config = {}, mix = { lambda: true } } = {}) {
        this.attach('Hermes-3-Llama-3.1-405B-FP8', new MixLambda({ options, config }));
        return this;
    }
    kimiK2({ options = {}, config = {}, mix = { together: false, groq: true } } = {}) {
        if (mix.together) this.attach('moonshotai/Kimi-K2-Instruct-0905', new MixTogether({ options, config }));
        if (mix.groq) this.attach('moonshotai/kimi-k2-instruct-0905', new MixGroq({ options, config }));
        return this;
    }
    kimiK2think({ options = {}, config = {} } = {}) {
        return this.attach('moonshotai/Kimi-K2-Thinking', new MixTogether({ options, config }));
    }
    lmstudio({ options = {}, config = {} } = {}) {
        return this.attach('lmstudio', new MixLMStudio({ options, config }));
    }
    minimaxM2({ options = {}, config = {} } = {}) {
        return this.attach('MiniMax-M2', new MixMiniMax({ options, config }));
    }
    minimaxM2Stable({ options = {}, config = {} } = {}) {
        return this.attach('MiniMax-M2-Stable', new MixMiniMax({ options, config }));
    }
    // --- Conversation building ---------------------------------------------
    // Appends a text message (default role: user) to the conversation.
    addText(text, { role = "user" } = {}) {
        const content = [{
            type: "text",
            text
        }];
        this.messages.push({ role, content });
        return this;
    }
    // Reads a UTF-8 file and appends its contents as a text message.
    addTextFromFile(filePath, { role = "user" } = {}) {
        const content = this.readFile(filePath);
        this.addText(content, { role });
        return this;
    }
    // Replaces the system prompt.
    setSystem(text) {
        this.config.system = text;
        return this;
    }
    // Replaces the system prompt with the contents of a UTF-8 file.
    setSystemFromFile(filePath) {
        const content = this.readFile(filePath);
        this.setSystem(content);
        return this;
    }
    // Appends an image from an in-memory Buffer; converted to base64 later by
    // processImages().
    addImageFromBuffer(buffer, { role = "user" } = {}) {
        this.messages.push({
            role,
            content: [{
                type: "image",
                source: {
                    type: "buffer",
                    data: buffer
                }
            }]
        });
        return this;
    }
    /**
     * Appends an image referenced by file path (read lazily in processImages()).
     * @throws {Error} if the file does not exist at call time.
     */
    addImage(filePath, { role = "user" } = {}) {
        const absolutePath = path.resolve(filePath);
        if (!fs.existsSync(absolutePath)) {
            throw new Error(`Image file not found: ${filePath}`);
        }
        this.messages.push({
            role,
            content: [{
                type: "image",
                source: {
                    type: "file",
                    data: filePath
                }
            }]
        });
        return this;
    }
    /**
     * Appends an image from an http(s) URL or a base64 data URL.
     * @throws {Error} if a `data:` URL is not in `data:<mime>;base64,<data>` form.
     */
    addImageFromUrl(url, { role = "user" } = {}) {
        let source;
        if (url.startsWith('data:')) {
            // Parse data URL: data:image/jpeg;base64,/9j/4AAQ...
            const match = url.match(/^data:([^;]+);base64,(.+)$/);
            if (match) {
                source = {
                    type: "base64",
                    media_type: match[1],
                    data: match[2]
                };
            } else {
                throw new Error('Invalid data URL format');
            }
        } else {
            source = {
                type: "url",
                data: url
            };
        }
        this.messages.push({
            role,
            content: [{
                type: "image",
                source
            }]
        });
        return this;
    }
    /**
     * Normalizes every pending image (url/file/buffer source) in-place into a
     * base64 source with a detected MIME type. Images that fail to load or are
     * not a recognizable image format are logged and removed from the message.
     */
    async processImages() {
        for (let i = 0; i < this.messages.length; i++) {
            const message = this.messages[i];
            if (!Array.isArray(message.content)) continue;
            for (let j = 0; j < message.content.length; j++) {
                const content = message.content[j];
                if (content.type !== 'image' || content.source.type === 'base64') continue;
                try {
                    let buffer, mimeType;
                    switch (content.source.type) {
                        case 'url':
                            const response = await axios.get(content.source.data, { responseType: 'arraybuffer' });
                            buffer = Buffer.from(response.data);
                            mimeType = response.headers['content-type'];
                            break;
                        case 'file':
                            // encoding: null so readFile returns a raw Buffer
                            buffer = this.readFile(content.source.data, { encoding: null });
                            break;
                        case 'buffer':
                            buffer = content.source.data;
                            break;
                    }
                    // Detect mimeType if not provided
                    if (!mimeType) {
                        const fileType = await fromBuffer(buffer);
                        if (!fileType || !fileType.mime.startsWith('image/')) {
                            throw new Error(`Invalid image - unable to detect valid image format`);
                        }
                        mimeType = fileType.mime;
                    }
                    // Update the content with processed image
                    message.content[j] = {
                        type: "image",
                        source: {
                            type: "base64",
                            media_type: mimeType,
                            data: buffer.toString('base64')
                        }
                    };
                } catch (error) {
                    console.error(`Error processing image:`, error);
                    // Remove failed image from content
                    message.content.splice(j, 1);
                    j--; // stay on the same index after the removal
                }
            }
        }
    }
    // --- Result retrieval ---------------------------------------------------
    // Runs the request (non-streaming) and returns just the message text.
    async message() {
        let raw = await this.execute({ options: { stream: false } });
        return raw.message;
    }
    /**
     * Runs the request and parses the response as JSON.
     * @param {object} [schemaExample] - example object used to derive a JSON schema.
     * @param {object} [schemaDescription] - per-field descriptions for the schema.
     * @param {object} [opts] - addSchema/addExample/addNote append extra guidance
     *                          to the system prompt; `type` sets response_format.
     * @returns {Promise<any>} the parsed JSON (fenced code blocks are unwrapped first).
     * @throws {SyntaxError} if the model output is not valid JSON.
     */
    async json(schemaExample = null, schemaDescription = {}, { type = 'json_object', addExample = false, addSchema = true, addNote = false } = {}) {
        let options = {
            response_format: { type },
            stream: false,
        }
        // Apply template replacements to system before adding extra instructions
        let systemWithReplacements = this._template(this.config.system, this.config.replace);
        let config = {
            system: systemWithReplacements,
        }
        if (schemaExample) {
            config.schema = generateJsonSchema(schemaExample, schemaDescription);
            if (addSchema) {
                config.system += "\n\nOutput JSON Schema: \n```\n" + JSON.stringify(config.schema) + "\n```";
            }
            if (addExample) {
                config.system += "\n\nOutput JSON Example: \n```\n" + JSON.stringify(schemaExample) + "\n```";
            }
            if (addNote) {
                config.system += "\n\nOutput JSON Escape: double quotes, backslashes, and control characters inside JSON strings.\nEnsure the output contains no comments.";
            }
        }
        const { message } = await this.execute({ options, config });
        return JSON.parse(this._extractBlock(message));
    }
    // Returns the contents of the first ``` fenced block, or the whole response
    // when no fence is present.
    _extractBlock(response) {
        const block = response.match(/```(?:\w+)?\s*([\s\S]*?)```/);
        return block ? block[1].trim() : response;
    }
    // Runs the request and returns the fenced-block contents of the answer.
    async block({ addSystemExtra = true } = {}) {
        // Apply template replacements to system before adding extra instructions
        let systemWithReplacements = this._template(this.config.system, this.config.replace);
        let config = {
            system: systemWithReplacements,
        }
        if (addSystemExtra) {
            config.system += "\nReturn the result of the task between triple backtick block code tags ```";
        }
        const { message } = await this.execute({ options: { stream: false }, config });
        return this._extractBlock(message);
    }
    // Runs the request and returns the full provider result object.
    async raw() {
        return this.execute({ options: { stream: false } });
    }
    // Runs the request in streaming mode; `callback` receives incremental deltas.
    async stream(callback) {
        this.streamCallback = callback;
        return this.execute({ options: { stream: true } });
    }
    // Loads a file and registers its (already-templated) contents as a
    // replacement value for `key`. File errors are logged, not thrown.
    replaceKeyFromFile(key, filePath) {
        try {
            const content = this.readFile(filePath);
            this.replace({ [key]: this._template(content, this.config.replace) });
        } catch (error) {
            // Gracefully handle file read errors without throwing
            log.warn(`replaceKeyFromFile: ${error.message}`);
        }
        return this;
    }
    // Token-wise template substitution: the input is split on punctuation and
    // whitespace (delimiters preserved via the capture group), and any token
    // exactly equal to a replacement key is swapped for its value.
    _template(input, replace) {
        if (!replace) return input;
        for (const k in replace) {
            input = input.split(/([¿?¡!,"';:\(\)\.\s])/).map(x => x === k ? replace[k] : x).join("");
        }
        return input;
    }
    // Merges consecutive messages that share a role by concatenating their
    // content arrays (several providers reject back-to-back same-role turns).
    groupByRoles(messages) {
        return messages.reduce((acc, currentMessage, index) => {
            if (index === 0 || currentMessage.role !== messages[index - 1].role) {
                // Role changed (or first message): start a new group.
                acc.push(currentMessage);
            } else {
                acc[acc.length - 1].content = acc[acc.length - 1].content.concat(currentMessage.content);
            }
            return acc;
        }, []);
    }
    // Applies template replacements to the system prompt and every text content
    // item in the conversation.
    applyTemplate() {
        if (!this.config.replace) return;
        this.config.system = this._template(this.config.system, this.config.replace);
        this.messages = this.messages.map(message => {
            if (message.content instanceof Array) {
                message.content = message.content.map(content => {
                    if (content.type === 'text') {
                        content.text = this._template(content.text, this.config.replace);
                    }
                    return content;
                });
            }
            return message;
        });
    }
    /**
     * Prepares `this.messages` for sending: resolves images, applies templates,
     * trims history to `max_history` (while keeping tool-call sequences intact),
     * merges consecutive same-role messages, and mirrors the result into
     * `this.options.messages`.
     */
    async prepareMessages() {
        await this.processImages();
        this.applyTemplate();
        // Smart message slicing to preserve tool call sequences
        if (this.config.max_history > 0) {
            let sliceStart = Math.max(0, this.messages.length - this.config.max_history);
            // If we're slicing and there's a tool message at the start,
            // ensure we include the preceding assistant message with tool_calls
            while (sliceStart > 0 &&
                sliceStart < this.messages.length &&
                this.messages[sliceStart].role === 'tool') {
                sliceStart--;
                // Also need to include the assistant message with tool_calls
                if (sliceStart > 0 &&
                    this.messages[sliceStart].role === 'assistant' &&
                    this.messages[sliceStart].tool_calls) {
                    break;
                }
            }
            this.messages = this.messages.slice(sliceStart);
        }
        this.messages = this.groupByRoles(this.messages);
        this.options.messages = this.messages;
    }
    /**
     * Synchronously reads a file (UTF-8 by default; pass `{ encoding: null }`
     * for a raw Buffer), mapping common fs error codes to friendlier messages.
     * @throws {Error} on missing file, permission denial, or any other fs error.
     */
    readFile(filePath, { encoding = 'utf8' } = {}) {
        try {
            const absolutePath = path.resolve(filePath);
            return fs.readFileSync(absolutePath, { encoding });
        } catch (error) {
            if (error.code === 'ENOENT') {
                throw new Error(`File not found: ${filePath}`);
            } else if (error.code === 'EACCES') {
                throw new Error(`Permission denied: ${filePath}`);
            } else {
                throw new Error(`Error reading file ${filePath}: ${error.message}`);
            }
        }
    }
    /**
     * Core request loop, scheduled through the Bottleneck limiter.
     * Tries each attached model in order; on failure logs and falls through to
     * the next, rethrowing the last error when all fail. When a provider
     * returns tool calls, the calls are executed, their results appended to the
     * conversation, and execute() recurses to continue the exchange.
     * NOTE(review): the recursive `this.execute()` call passes no arguments, so
     * the original per-call `options`/`config` (e.g. stream) are dropped on the
     * follow-up round — confirm this is intended.
     * @returns {Promise<{message, think, toolCalls, response}>}
     */
    async execute({ config = {}, options = {} } = {}) {
        if (!this.models || this.models.length === 0) {
            throw new Error("No models specified. Use methods like .gpt5(), .sonnet4() first.");
        }
        return this.limiter.schedule(async () => {
            await this.prepareMessages();
            if (this.messages.length === 0) {
                throw new Error("No user messages have been added. Use addText(prompt), addTextFromFile(filePath), addImage(filePath), or addImageFromUrl(url) to add a prompt.");
            }
            let lastError = null;
            for (let i = 0; i < this.models.length; i++) {
                const currentModel = this.models[i];
                const currentModelKey = currentModel.key;
                const providerInstance = currentModel.provider;
                const optionsTools = providerInstance.getOptionsTools(this.tools);
                // Create clean copies for each provider to avoid contamination
                const currentOptions = {
                    ...this.options,
                    ...providerInstance.options,
                    ...optionsTools,
                    ...options,
                    model: currentModelKey
                };
                const currentConfig = {
                    ...this.config,
                    ...providerInstance.config,
                    ...config,
                };
                if (currentConfig.debug) {
                    const isPrimary = i === 0;
                    log.debug(`[${currentModelKey}] Attempt #${i + 1}` + (isPrimary ? ' (Primary)' : ' (Fallback)'));
                }
                try {
                    if (currentOptions.stream && this.streamCallback) {
                        providerInstance.streamCallback = this.streamCallback;
                    }
                    const result = await providerInstance.create({ options: currentOptions, config: currentConfig });
                    if (result.toolCalls && result.toolCalls.length > 0) {
                        if (result.message) {
                            if (result.signature) {
                                // Anthropic extended thinking: preserve the signed
                                // thinking block so the follow-up request validates.
                                this.messages.push({
                                    role: "assistant", content: [{
                                        type: "thinking",
                                        thinking: result.think,
                                        signature: result.signature
                                    }]
                                });
                            } else {
                                this.addText(result.message, { role: "assistant" });
                            }
                        }
                        this.messages.push({ role: "assistant", content: null, tool_calls: result.toolCalls });
                        const content = await this.processToolCalls(result.toolCalls);
                        this.messages.push({ role: 'tool', content });
                        return this.execute();
                    }
                    if (currentConfig.debug) {
                        console.log(`\nRequest successful: ${currentModelKey}`);
                        if (result.response) {
                            console.log('\nRAW RESPONSE:');
                            console.log(ModelMix.formatJSON(result.response));
                        }
                        if (result.message) {
                            console.log('\nMESSAGE:');
                            console.log(ModelMix.formatMessage(result.message));
                        }
                        if (result.think) {
                            console.log('\nTHINKING:');
                            console.log(result.think);
                        }
                        console.log('');
                    }
                    return result;
                } catch (error) {
                    lastError = error;
                    log.warn(`Model ${currentModelKey} failed (Attempt #${i + 1}/${this.models.length}).`);
                    if (error.message) log.warn(`Error: ${error.message}`);
                    if (error.statusCode) log.warn(`Status Code: ${error.statusCode}`);
                    if (error.details) log.warn(`Details:\n${ModelMix.formatJSON(error.details)}`);
                    if (i === this.models.length - 1) {
                        console.error(`All ${this.models.length} model(s) failed. Throwing last error from ${currentModelKey}.`);
                        throw lastError;
                    } else {
                        const nextModelKey = this.models[i + 1].key;
                        log.info(`-> Proceeding to next model: ${nextModelKey}`);
                    }
                }
            }
            // Defensive: only reachable if the loop exits without return/throw.
            log.error("Fallback logic completed without success or throwing the final error.");
            throw lastError || new Error("Failed to get response from any model, and no specific error was caught.");
        });
    }
    /**
     * Executes each tool call (local MCPToolsManager tools first, then external
     * MCP clients) and returns tool-result entries for the conversation.
     * Individual failures produce an `Error: ...` content entry instead of
     * aborting the batch.
     */
    async processToolCalls(toolCalls) {
        const result = []
        for (const toolCall of toolCalls) {
            // Handle different tool call formats more robustly
            let toolName, toolArgs, toolId;
            try {
                if (toolCall.function) {
                    // OpenAI / normalized format
                    toolName = toolCall.function.name;
                    toolArgs = typeof toolCall.function.arguments === 'string'
                        ? JSON.parse(toolCall.function.arguments)
                        : toolCall.function.arguments;
                    toolId = toolCall.id;
                } else if (toolCall.name) {
                    // Direct format (possible alternative shape)
                    toolName = toolCall.name;
                    toolArgs = toolCall.input || toolCall.arguments || {};
                    toolId = toolCall.id;
                } else {
                    log.error('Unknown tool call format:\n', toolCall);
                    continue;
                }
                // Validate that we have the required data
                if (!toolName) {
                    log.error('Tool call missing name:\n', toolCall);
                    continue;
                }
                // Check whether this is a registered local tool
                if (this.mcpToolsManager.hasTool(toolName)) {
                    const response = await this.mcpToolsManager.executeTool(toolName, toolArgs);
                    result.push({
                        name: toolName,
                        tool_call_id: toolId,
                        content: response.content.map(item => item.text).join("\n")
                    });
                } else {
                    // Use the external MCP client
                    const client = this.toolClient[toolName];
                    if (!client) {
                        throw new Error(`No client found for tool: ${toolName}`);
                    }
                    const response = await client.callTool({
                        name: toolName,
                        arguments: toolArgs
                    });
                    result.push({
                        name: toolName,
                        tool_call_id: toolId,
                        content: response.content.map(item => item.text).join("\n")
                    });
                }
            } catch (error) {
                console.error(`Error processing tool call ${toolName}:`, error);
                result.push({
                    name: toolName || 'unknown',
                    tool_call_id: toolId || 'unknown',
                    content: `Error: ${error.message}`
                });
            }
        }
        return result;
    }
    /**
     * Spawns and connects an MCP server over stdio via `npx -y <args...>`,
     * registering every tool it exposes. The first argument is used as the
     * server key. Bumps max_history to at least 3 so tool exchanges survive
     * history trimming. Provider API-key environment variables are withheld
     * from the child process.
     */
    async addMCP() {
        const key = arguments[0];
        if (this.mcp[key]) {
            log.info(`MCP ${key} already attached.`);
            return;
        }
        if (this.config.max_history < 3) {
            log.warn(`MCP ${key} requires at least 3 max_history. Setting to 3.`);
            this.config.max_history = 3;
        }
        const env = {}
        // NOTE: the loop variable shadows the outer `key` (the MCP server key).
        for (const key in process.env) {
            if (['OPENAI', 'ANTHR', 'GOOGLE', 'GROQ', 'TOGET', 'LAMBDA', 'PPLX', 'XAI', 'CEREBR'].some(prefix => key.startsWith(prefix))) continue;
            env[key] = process.env[key];
        }
        const transport = new StdioClientTransport({
            command: "npx",
            args: ["-y", ...arguments],
            env
        });
        // Create the MCP client
        this.mcp[key] = new Client({
            name: key,
            version: "1.0.0"
        });
        await this.mcp[key].connect(transport);
        const { tools } = await this.mcp[key].listTools();
        this.tools[key] = tools;
        for (const tool of tools) {
            this.toolClient[tool.name] = this.mcp[key];
        }
    }
    /**
     * Registers a local tool (definition + async callback) with the
     * MCPToolsManager and exposes it to providers under `tools.local`.
     */
    addTool(toolDefinition, callback) {
        if (this.config.max_history < 3) {
            log.warn(`MCP ${toolDefinition.name} requires at least 3 max_history. Setting to 3.`);
            this.config.max_history = 3;
        }
        this.mcpToolsManager.registerTool(toolDefinition, callback);
        // Add the tool to the tools registry so it is included in requests
        if (!this.tools.local) {
            this.tools.local = [];
        }
        this.tools.local.push({
            name: toolDefinition.name,
            description: toolDefinition.description,
            inputSchema: toolDefinition.inputSchema
        });
        return this;
    }
    // Bulk variant of addTool(): accepts [{ tool, callback }, ...].
    addTools(toolsWithCallbacks) {
        for (const { tool, callback } of toolsWithCallbacks) {
            this.addTool(tool, callback);
        }
        return this;
    }
    // Unregisters a local tool from the manager and the request registry.
    removeTool(toolName) {
        this.mcpToolsManager.removeTool(toolName);
        // Also remove from the tools system
        if (this.tools.local) {
            this.tools.local = this.tools.local.filter(tool => tool.name !== toolName);
        }
        return this;
    }
    // Lists local tools and external MCP tools (excluding name collisions with
    // local tools) for inspection.
    listTools() {
        const localTools = this.mcpToolsManager.getToolsForMCP();
        const mcpTools = Object.values(this.tools).flat();
        return {
            local: localTools,
            mcp: mcpTools.filter(tool => !localTools.find(local => local.name === tool.name))
        };
    }
}
/**
 * MixCustom — base HTTP adapter for OpenAI-compatible chat-completion APIs.
 * Subclasses override getDefaultConfig()/getDefaultHeaders() to point at a
 * vendor endpoint, and the extract*()/convertMessages() helpers to adapt
 * request/response shapes.
 */
class MixCustom {
    /**
     * @param {object} [config]  - endpoint settings (url, apiKey, ...).
     * @param {object} [options] - request payload defaults.
     * @param {object} [headers] - extra HTTP headers merged over the defaults.
     */
    constructor({ config = {}, options = {}, headers = {} } = {}) {
        this.config = this.getDefaultConfig(config);
        this.options = this.getDefaultOptions(options);
        this.headers = this.getDefaultHeaders(headers);
        this.streamCallback = null; // Set by ModelMix before a streaming create()
    }
    // Request payload defaults; subclasses may extend.
    getDefaultOptions(customOptions) {
        return {
            ...customOptions
        };
    }
    // Endpoint configuration defaults; subclasses supply url/apiKey.
    getDefaultConfig(customConfig) {
        return {
            url: '',
            apiKey: '',
            ...customConfig
        };
    }
    // JSON + bearer-auth headers built from this.config.apiKey.
    getDefaultHeaders(customHeaders) {
        return {
            'accept': 'application/json',
            'content-type': 'application/json',
            'authorization': `Bearer ${this.config.apiKey}`,
            ...customHeaders
        };
    }
    // Default message normalization follows the OpenAI wire format.
    convertMessages(messages, config) {
        return MixOpenAI.convertMessages(messages, config);
    }
    /**
     * Sends the request (streaming or not) and returns the normalized result
     * { message, think, toolCalls, response }.
     * @throws {Error} normalized by handleError() on any failure.
     */
    async create({ config = {}, options = {} } = {}) {
        try {
            options.messages = this.convertMessages(options.messages, config);
            if (config.debug) {
                console.log('\nREQUEST:');
                console.log('\nCONFIG:');
                const configToLog = { ...config };
                delete configToLog.debug;
                console.log(ModelMix.formatJSON(configToLog));
                console.log('\nOPTIONS:');
                console.log(ModelMix.formatJSON(options));
            }
            if (options.stream) {
                return this.processStream(await axios.post(this.config.url, options, {
                    headers: this.headers,
                    responseType: 'stream'
                }));
            } else {
                return this.processResponse(await axios.post(this.config.url, options, {
                    headers: this.headers
                }));
            }
        } catch (error) {
            throw this.handleError(error, { config, options });
        }
    }
    /**
     * Normalizes any failure into a proper Error instance carrying the fields
     * the caller inspects (statusCode, details, config, options).
     * FIX: previously returned a plain object, so `instanceof Error` failed and
     * rethrown failures lacked Error semantics; the original error is preserved
     * as `cause` and its stack is retained.
     */
    handleError(error, { config, options }) {
        let errorMessage = 'An error occurred in MixCustom';
        let statusCode = null;
        let errorDetails = null;
        if (error.isAxiosError) {
            statusCode = error.response ? error.response.status : null;
            errorMessage = `Request to ${this.config.url} failed with status code ${statusCode}`;
            errorDetails = error.response ? error.response.data : null;
        }
        const formattedError = new Error(errorMessage, { cause: error });
        formattedError.statusCode = statusCode;
        formattedError.details = errorDetails;
        formattedError.stack = error.stack; // keep the original failure's stack
        formattedError.config = config;
        formattedError.options = options;
        return formattedError;
    }
    /**
     * Consumes a line-delimited SSE response, accumulating the full message and
     * the raw chunks; resolves with { response, message, toolCalls, think }.
     * FIX: the message/raw accumulation previously happened only when a
     * streamCallback was registered (callback-less streams resolved empty), and
     * chunks without delta content appended the literal string "undefined".
     */
    processStream(response) {
        return new Promise((resolve, reject) => {
            let raw = [];
            let message = '';
            let buffer = '';
            response.data.on('data', chunk => {
                buffer += chunk.toString();
                let boundary;
                while ((boundary = buffer.indexOf('\n')) !== -1) {
                    const dataStr = buffer.slice(0, boundary).trim();
                    buffer = buffer.slice(boundary + 1);
                    const firstBraceIndex = dataStr.indexOf('{');
                    if (dataStr === '[DONE]' || firstBraceIndex === -1) continue;
                    const jsonStr = dataStr.slice(firstBraceIndex);
                    try {
                        const data = JSON.parse(jsonStr);
                        const delta = this.extractDelta(data) ?? ''; // guard undefined deltas
                        message += delta;
                        raw.push(data);
                        if (this.streamCallback) {
                            this.streamCallback({ response: data, message, delta });
                        }
                    } catch (error) {
                        console.error('Error parsing JSON:', error);
                    }
                }
            });
            response.data.on('end', () => resolve({
                response: raw,
                message: message.trim(),
                toolCalls: [],
                think: null
            }));
            response.data.on('error', reject);
        });
    }
    // Delta extractor for OpenAI-style stream chunks. Chunks with no content
    // (role headers, finish chunks) yield '' instead of undefined.
    extractDelta(data) {
        return data.choices?.[0]?.delta?.content ?? '';
    }
    // Returns the assistant message text, stripping a leading <think>...</think>
    // block when present.
    static extractMessage(data) {
        const message = data.choices[0].message?.content?.trim() || '';
        const endTagIndex = message.indexOf('</think>');
        if (message.startsWith('<think>') && endTagIndex !== -1) {
            return message.substring(endTagIndex + 8).trim();
        }
        return message;
    }
    // Returns the model's reasoning: a dedicated reasoning field when present,
    // otherwise the contents of a leading <think>...</think> block, else null.
    static extractThink(data) {
        if (data.choices[0].message?.reasoning_content) {
            return data.choices[0].message.reasoning_content;
        } else if (data.choices[0].message?.reasoning) {
            return data.choices[0].message.reasoning;
        }
        const message = data.choices[0].message?.content?.trim() || '';
        const endTagIndex = message.indexOf('</think>');
        if (message.startsWith('<think>') && endTagIndex !== -1) {
            return message.substring(7, endTagIndex).trim();
        }
        return null;
    }
    // Normalizes tool calls into { id, type: 'function', function: { name, arguments } }.
    static extractToolCalls(data) {
        return data.choices[0].message?.tool_calls?.map(call => ({
            id: call.id,
            type: 'function',
            function: {
                name: call.function.name,
                arguments: call.function.arguments
            }
        })) || []
    }
    // Maps a non-streaming HTTP response into the normalized result shape.
    processResponse(response) {
        return {
            message: MixCustom.extractMessage(response.data),
            think: MixCustom.extractThink(response.data),
            toolCalls: MixCustom.extractToolCalls(response.data),
            response: response.data
        }
    }
    // Default tool-definition formatting follows the OpenAI wire format.
    getOptionsTools(tools) {
        return MixOpenAI.getOptionsTools(tools);
    }
}
/**
 * MixOpenAI — provider adapter for the OpenAI Chat Completions API.
 * Also serves as the reference implementation of the OpenAI wire format that
 * MixCustom delegates to by default.
 */
class MixOpenAI extends MixCustom {
    /**
     * Resolves the OpenAI endpoint and API key.
     * FIX: an apiKey supplied via config now satisfies the check — previously
     * this threw whenever OPENAI_API_KEY was unset, even though the error
     * message says the key may be provided in config.
     */
    getDefaultConfig(customConfig = {}) {
        if (!customConfig.apiKey && !process.env.OPENAI_API_KEY) {
            throw new Error('OpenAI API key not found. Please provide it in config or set OPENAI_API_KEY environment variable.');
        }
        return super.getDefaultConfig({
            url: 'https://api.openai.com/v1/chat/completions',
            apiKey: process.env.OPENAI_API_KEY,
            ...customConfig
        });
    }
    /**
     * Adjusts the payload for model families with restricted parameters, then
     * delegates to MixCustom.create().
     */
    async create({ config = {}, options = {} } = {}) {
        // Remove max_tokens and temperature for o1/o3 models
        if (options.model?.startsWith('o')) {
            delete options.max_tokens;
            delete options.temperature;
        }
        // Use max_completion_tokens and remove temperature for GPT-5 models
        if (options.model?.includes('gpt-5')) {
            if (options.max_tokens) {
                options.max_completion_tokens = options.max_tokens;
                delete options.max_tokens;
            }
            delete options.temperature;
        }
        return super.create({ config, options });
    }
    /**
     * Converts internal messages into OpenAI wire format:
     * - prepends the system prompt as a system message,
     * - expands assistant tool_calls and tool-result messages,
     * - rewrites internal base64 image content into image_url parts.
     */
    static convertMessages(messages, config) {
        const content = config.system;
        messages = [{ role: 'system', content }, ...messages || []];
        const results = []
        for (const message of messages) {
            if (message.tool_calls) {
                results.push({ role: 'assistant', tool_calls: message.tool_calls })
                continue;
            }
            if (message.role === 'tool') {
                // One OpenAI tool message per tool result entry.
                for (const content of message.content) {
                    results.push({
                        role: 'tool',
                        tool_call_id: content.tool_call_id,
                        content: content.content
                    })
                }
                continue;
            }
            if (Array.isArray(message.content)) {
                message.content = message.content.filter(content => content !== null && content !== undefined).map(content => {
                    if (content && content.type === 'image') {
                        const { media_type, data } = content.source;
                        return {
                            type: 'image_url',
                            image_url: {
                                url: `data:${media_type};base64,${data}`
                            }
                        };
                    }
                    return content;
                });
            }
            results.push(message);
        }
        return results;
    }
    // Formats registered tool definitions as OpenAI `tools` entries.
    static getOptionsTools(tools) {
        const options = {};
        options.tools = [];
        for (const tool in tools) {
            for (const item of tools[tool]) {
                options.tools.push({
                    type: 'function',
                    function: {
                        name: item.name,
                        description: item.description,
                        parameters: item.inputSchema
                    }
                });
            }
        }
        // options.tool_choice = "auto";
        return options;
    }
}
/**
 * MixAnthropic — provider adapter for the Anthropic Messages API.
 * Handles Anthropic-specific headers, tool_use/tool_result message shapes, and
 * extended-thinking (thinking blocks + signatures).
 */
class MixAnthropic extends MixCustom {
    // Options preset enabling extended thinking (used by the *think shortcuts).
    static thinkingOptions = {
        thinking: {
            "type": "enabled",
            "budget_tokens": 1024
        },
        temperature: 1
    };
    /**
     * Resolves the Anthropic endpoint and API key.
     * FIX: an apiKey supplied via config now satisfies the check — previously
     * this threw whenever ANTHROPIC_API_KEY was unset, even though the error
     * message says the key may be provided in config.
     */
    getDefaultConfig(customConfig = {}) {
        if (!customConfig.apiKey && !process.env.ANTHROPIC_API_KEY) {
            throw new Error('Anthropic API key not found. Please provide it in config or set ANTHROPIC_API_KEY environment variable.');
        }
        return super.getDefaultConfig({
            url: 'https://api.anthropic.com/v1/messages',
            apiKey: process.env.ANTHROPIC_API_KEY,
            ...customConfig
        });
    }
    /**
     * Adapts the payload for Anthropic (no response_format; system prompt is a
     * top-level field) and delegates to MixCustom.create().
     */
    async create({ config = {}, options = {} } = {}) {
        delete options.response_format;
        options.system = config.system;
        try {
            return await super.create({ config, options });
        } catch (error) {
            // Log the error details for debugging
            if (error.response && error.response.data) {
                log.error('Anthropic API Error:\n', error.response.data);
            }
            throw error;
        }
    }
    convertMessages(messages, config) {
        return MixAnthropic.convertMessages(messages, config);
    }
    /**
     * Converts internal messages into Anthropic wire format:
     * - drops tool results with no preceding assistant tool_calls message,
     * - tool results become user messages with tool_result content,
     * - assistant tool_calls become tool_use content blocks.
     */
    static convertMessages(messages, config) {
        // Filter out orphaned tool results for Anthropic
        const filteredMessages = [];
        for (let i = 0; i < messages.length; i++) {
            if (messages[i].role === 'tool') {
                // Check if there's a preceding assistant message with tool_calls
                let foundToolCall = false;
                for (let j = i - 1; j >= 0; j--) {
                    if (messages[j].role === 'assistant' && messages[j].tool_calls) {
                        foundToolCall = true;
                        break;
                    }
                }
                if (!foundToolCall) {
                    // Skip orphaned tool results
                    continue;
                }
            }
            filteredMessages.push(messages[i]);
        }
        return filteredMessages.map(message => {
            if (message.role === 'tool') {
                return {
                    role: "user",
                    content: message.content.map(content => ({
                        type: "tool_result",
                        tool_use_id: content.tool_call_id,
                        content: content.content
                    }))
                }
            }
            // Handle messages with tool_calls (assistant messages that call tools)
            if (message.tool_calls) {
                const content = message.tool_calls.map(call => ({
                    type: 'tool_use',
                    id: call.id,
                    name: call.function.name,
                    input: JSON.parse(call.function.arguments)
                }));
                return { role: 'assistant', content };
            }
            // Handle content conversion for other messages
            if (message.content && Array.isArray(message.content)) {
                message.content = message.content.filter(content => content !== null && content !== undefined).map(content => {
                    if (content && content.type === 'function') {
                        return {
                            type: 'tool_use',
                            id: content.id,
                            name: content.function.name,
                            input: JSON.parse(content.function.arguments)
                        }
                    }
                    return content;
                });
            }
            return message;
        });
    }
    // Anthropic authenticates with x-api-key + anthropic-version headers.
    getDefaultHeaders(customHeaders) {
        return super.getDefaultHeaders({
            'x-api-key': this.config.apiKey,
            'anthropic-version': '2023-06-01',
            ...customHeaders
        });
    }
    // Delta extractor for Anthropic stream events (text deltas only).
    extractDelta(data) {
        if (data.delta && data.delta.text) return data.delta.text;
        return '';
    }
    // Maps tool_use content blocks into the normalized function-call shape.
    static extractToolCalls(data) {
        return data.content.map(item => {
            if (item.type === 'tool_use') {
                return {
                    id: item.id,
                    type: 'function',
                    function: {
                        name: item.name,
                        arguments: JSON.stringify(item.input)
                    }
                };
            }
            return null;
        }).filter(item => item !== null);
    }
    // With extended thinking the text block follows the thinking block
    // (content[1]); otherwise the text is content[0].
    static extractMessage(data) {
        if (data.content?.[1]?.text) {
            return data.content[1].text;
        }
        return data.content[0].text;
    }
    static extractThink(data) {
        return data.content[0]?.thinking || null;
    }
    // Signature of the thinking block, required to replay it on follow-ups.
    static extractSignature(data) {
        return data.content[0]?.signature || null;
    }
    processResponse(response) {
        return {
            message: MixAnthropic.extractMessage(response.data),
            think: MixAnthropic.extractThink(response.data),
            toolCalls: MixAnthropic.extractToolCalls(response.data),
            response: response.data,
            signature: MixAnthropic.extractSignature(response.data)
        }
    }
    getOptionsTools(tools) {
        return MixAnthropic.getOptionsTools(tools);
    }
    // Formats registered tool definitions as Anthropic `tools` entries
    // (input_schema instead of OpenAI's nested function.parameters).
    static getOptionsTools(tools) {
        const options = {};
        options.tools = [];
        for (const tool in tools) {
            for (const item of tools[tool]) {
                options.tools.push({
                    name: item.name,
                    description: item.description,
                    input_schema: item.inputSchema
                });
            }
        }
        return options;
    }
}
class MixMiniMax extends MixOpenAI {
    /**
     * Builds the provider config, requiring MINIMAX_API_KEY to be set.
     * Deliberately calls MixCustom's implementation directly (bypassing
     * MixOpenAI.getDefaultConfig) to avoid the OpenAI-specific defaults.
     * @param {object} customConfig - overrides merged on top of the defaults.
     * @returns {object} merged config from MixCustom.
     * @throws {Error} when MINIMAX_API_KEY is missing from the environment.
     */
    getDefaultConfig(customConfig) {
        if (!process.env.MINIMAX_API_KEY) {
            throw new Error('MiniMax API key not found. Please provide it in config or set MINIMAX_API_KEY environment variable.');
        }
        const merged = {
            url: 'https://api.minimax.io/v1/chat/completions',
            apiKey: process.env.MINIMAX_API_KEY,
            ...customConfig
        };
        return MixCustom.prototype.getDefaultConfig.call(this, merged);
    }

    // Streaming: MiniMax might send different formats during streaming, so
    // only return a fragment when the expected delta path is present.
    extractDelta(data) {
        const fragment = data.choices?.[0]?.delta?.content;
        return fragment ? fragment : '';
    }
}
class MixPerplexity extends MixCustom {
    /**
     * Builds the provider config, requiring PPLX_API_KEY to be set.
     * @param {object} customConfig - overrides merged on top of the defaults.
     * @returns {object} merged config from MixCustom.
     * @throws {Error} when PPLX_API_KEY is missing from the environment.
     */
    getDefaultConfig(customConfig) {
        if (!process.env.PPLX_API_KEY) {
            throw new Error('Perplexity API key not found. Please provide it in config or set PPLX_API_KEY environment variable.');
        }
        const defaults = {
            url: 'https://api.perplexity.ai/chat/completions',
            apiKey: process.env.PPLX_API_KEY,
            ...customConfig
        };
        return super.getDefaultConfig(defaults);
    }

    /**
     * Sends a request, translating a configured JSON schema into
     * Perplexity's `json_schema` response_format for structured output.
     * @param {{config?: object, options?: object}} args
     * @returns {Promise<object>} result of MixCustom.create.
     */
    async create({ config = {}, options = {} } = {}) {
        const { schema } = config;
        if (schema) {
            options.response_format = {
                type: 'json_schema',
                json_schema: { schema }
            };
        }
        return super.create({ config, options });
    }
}
class MixOllama extends MixCustom {
    // Defaults to a local Ollama server; no API key required.
    getDefaultConfig(customConfig) {
        return super.getDefaultConfig({
            url: 'http://localhost:11434/api/chat',
            ...customConfig
        });
    }

    // Ollama nests sampling parameters under an `options` key.
    getDefaultOptions(customOptions) {
        return {
            options: customOptions,
        };
    }

    // Streaming: pull the text fragment out of a chat stream chunk.
    extractDelta(data) {
        if (data.message && data.message.content) return data.message.content;
        return '';
    }

    extractMessage(data) {
        return data.message.content.trim();
    }

    convertMessages(messages, config) {
        return MixOllama.convertMessages(messages, config);
    }

    /**
     * Converts messages into Ollama's chat format, prepending the system
     * prompt as a regular system message and splitting multi-part content
     * into a text string plus an `images` array of base64 data.
     * FIX: `config.system` is a plain string (see ModelMix defaults), and the
     * original code called `.forEach` on it, throwing a TypeError; string
     * content now passes through unchanged.
     * @param {Array<object>} messages - chat messages (content may be a
     *   string or an array of {type:'text'|'image'} items).
     * @param {object} config - request config providing the system prompt.
     * @returns {Array<{role: string, content: string, images: Array<string>}>}
     */
    static convertMessages(messages, config) {
        const content = config.system;
        messages = [{ role: 'system', content }, ...messages || []];
        return messages.map(entry => {
            // Plain-string content (e.g. the system prompt) needs no splitting.
            if (typeof entry.content === 'string') {
                return { role: entry.role, content: entry.content, images: [] };
            }
            let text = '';
            const images = [];
            for (const item of entry.content || []) {
                if (item.type === 'text') {
                    text += item.text + ' ';
                } else if (item.type === 'image') {
                    images.push(item.source.data);
                }
            }
            return {
                role: entry.role,
                content: text.trim(),
                images: images
            };
        });
    }
}
class MixGrok extends MixOpenAI {
getDefaultConfig(customConfig) {
if (!process.env.XAI_API_KEY) {
throw new Error('Grok API key not found. Pl