@just-every/ensemble
LLM provider abstraction layer with unified streaming interface
844 lines • 42.7 kB
JavaScript
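/**
 * OpenAI Chat Completions provider for @just-every/ensemble (compiled CommonJS output).
 * Wraps OpenAI-compatible endpoints (openai, openrouter, xai, deepseek) behind the
 * package's streaming event interface, including simulated tool calling for models
 * without native tool support.
 *
 * Hedged usage sketch (shapes inferred from this file; the module path, model name,
 * and agent fields are illustrative assumptions, not documented API):
 *
 *   const { OpenAIChat } = require('./openai_chat.cjs');
 *   const provider = new OpenAIChat('openai'); // falls back to OPENAI_API_KEY when no key is passed
 *   const agent = { agent_id: 'demo', modelSettings: {}, abortSignal: undefined }; // minimal assumed shape
 *   const messages = [{ type: 'message', role: 'user', content: 'Hello!' }];
 *   for await (const event of provider.createResponseStream(messages, 'gpt-4o-mini', agent)) {
 *       if (event.type === 'message_delta') process.stdout.write(event.content);
 *   }
 */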
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAIChat = void 0;
exports.addImagesToInput = addImagesToInput;
const base_provider_js_1 = require("./base_provider.cjs");
const openai_1 = __importStar(require("openai"));
const uuid_1 = require("uuid");
const index_js_1 = require("../index.cjs");
const llm_logger_js_1 = require("../utils/llm_logger.cjs");
const pause_controller_js_1 = require("../utils/pause_controller.cjs");
const image_utils_js_1 = require("../utils/image_utils.cjs");
const delta_buffer_js_1 = require("../utils/delta_buffer.cjs");
const citation_tracker_js_1 = require("../utils/citation_tracker.cjs");
const event_controller_js_1 = require("../utils/event_controller.cjs");
const model_data_js_1 = require("../data/model_data.cjs");
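// SIMULATED_TOOL_CALL_REGEX captures the JSON array from a trailing "TOOL_CALLS: [...]" block
// (optionally wrapped in a ```json fence) emitted by models using simulated tool calling.
// TOOL_CALL_CLEANUP_REGEX matches the whole block so it can be stripped from user-visible text.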
const SIMULATED_TOOL_CALL_REGEX = /\n?\s*(?:```(?:json)?\s*)?\s*TOOL_CALLS:\s*(\[.*\])(?:\s*```)?/gs;
const TOOL_CALL_CLEANUP_REGEX = /\n?\s*(?:```(?:json)?\s*)?\s*TOOL_CALLS:\s*\[.*\](?:\s*```)?/gms;
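/**
 * Builds a system-prompt section that lists the available tools (name, description,
 * JSON Schema parameters, required parameters) and instructs the model to emit a
 * trailing "TOOL_CALLS: [...]" JSON array when it wants to call tools.
 */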
function formatToolsForPrompt(tools) {
if (!tools || tools.length === 0) {
return 'No tools are available for use.';
}
const toolDescriptions = tools
.map(tool => {
if (tool.type !== 'function' || !tool.function) {
return ` - Unknown tool type: ${tool.type}`;
}
const func = tool.function;
const parameters = func.parameters && typeof func.parameters === 'object' ? func.parameters : {};
const properties = 'properties' in parameters ? parameters.properties : {};
const requiredParams = 'required' in parameters && Array.isArray(parameters.required) ? parameters.required : [];
const paramsJson = JSON.stringify(properties, null, 2);
return ` - Name: ${func.name}\n Description: ${func.description || 'No description'}\n Parameters (JSON Schema): ${paramsJson}\n Required Parameters: ${requiredParams.join(', ') || 'None'}`;
})
.join('\n\n');
return `You have the following tools available:\n${toolDescriptions}\n\nTo use one or more tools, output the following JSON structure containing an ARRAY of tool calls on a new line *at the very end* of your response, and *only* if you intend to call tool(s). Ensure the arguments value in each call is a JSON *string*: \n\`\`\`json\nTOOL_CALLS: [ {"id": "call_001", "type": "function", "function": {"name": "function_name_1", "arguments": "{\\"arg1\\": \\"value1\\"}"}}, {"id": "call_002", "type": "function", "function": {"name": "function_name_2", "arguments": "{\\"argA\\": true, \\"argB\\": 123}"}} ]\n\`\`\`\nReplace \`function_name\` and arguments accordingly for each tool call you want to make. Put all desired calls in the array. IMPORTANT: Always include an 'id' field with a unique string for each call. Do not add any text after the TOOL_CALLS line. If you are not calling any tools, respond normally without the TOOL_CALLS structure.`;
}
const CLEANUP_PLACEHOLDER = '[Simulated Tool Calls Removed]';
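/**
 * Recursively resolves JSON Schema properties whose `enum` is a function: the function
 * is awaited and replaced with its array result, or the `enum` is dropped when
 * resolution fails or yields an empty array.
 */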
async function resolveAsyncEnums(params) {
if (!params || typeof params !== 'object') {
return params;
}
const resolved = { ...params };
if (resolved.properties) {
const resolvedProps = {};
for (const [key, value] of Object.entries(resolved.properties)) {
if (value && typeof value === 'object') {
const propCopy = { ...value };
if (typeof propCopy.enum === 'function') {
try {
const enumValue = await propCopy.enum();
if (Array.isArray(enumValue) && enumValue.length > 0) {
propCopy.enum = enumValue;
}
else {
delete propCopy.enum;
}
}
catch {
delete propCopy.enum;
}
}
resolvedProps[key] = await resolveAsyncEnums(propCopy);
}
else {
resolvedProps[key] = value;
}
}
resolved.properties = resolvedProps;
}
return resolved;
}
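/** Maps internal tool definitions to the OpenAI `tools` parameter shape, resolving async enums. */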
async function convertToOpenAITools(tools) {
return await Promise.all(tools.map(async (tool) => ({
type: 'function',
function: {
name: tool.definition.function.name,
description: tool.definition.function.description,
parameters: await resolveAsyncEnums(tool.definition.function.parameters),
},
})));
}
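/**
 * Appends each image to the input as a user message: the image is resized and, if tall,
 * split into multiple segments for OpenAI; on processing failure the raw image data is
 * attached instead.
 */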
async function addImagesToInput(input, images, source) {
for (const [image_id, imageData] of Object.entries(images)) {
try {
const processedImages = await (0, image_utils_js_1.resizeAndSplitForOpenAI)(imageData);
const messageContent = [];
if (processedImages.length === 1) {
messageContent.push({
type: 'text',
text: `This is [image #${image_id}] from the ${source}`,
});
}
else {
messageContent.push({
type: 'text',
text: `This is [image #${image_id}] from the ${source} (split into ${processedImages.length} parts, each up to 768px high)`,
});
}
for (const imageSegment of processedImages) {
messageContent.push({
type: 'image_url',
image_url: {
url: imageSegment,
},
});
}
input.push({
role: 'user',
content: messageContent,
});
}
catch (error) {
console.error(`Error processing image ${image_id}:`, error);
input.push({
role: 'user',
content: [
{
type: 'text',
text: `This is [image #${image_id}] from the ${source} (raw image)`,
},
{
type: 'image_url',
image_url: {
url: imageData,
},
},
],
});
}
}
return input;
}
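/**
 * Converts ensemble message history into Chat Completions messages. Function calls and
 * their outputs become native `tool_calls` / `tool` messages, except for Mistral-family
 * models (matched by name), where they are rendered as plain assistant/user text.
 * The 'developer' role is mapped to 'system' and unknown roles are coerced to 'user'.
 */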
async function mapMessagesToOpenAI(messages, model) {
let result = [];
const isMistralViaOpenRouter = model.includes('mistral') || model.includes('magistral');
for (const msg of messages) {
const message = { ...msg };
if (msg.type === 'function_call_output') {
if (isMistralViaOpenRouter) {
const userMessage = {
role: 'user',
content: `[Function Response]\nFunction: ${msg.name || 'unknown'}\nResult: ${msg.output || 'No output'}`,
};
result = await (0, image_utils_js_1.appendMessageWithImage)(model, result, userMessage, 'content', addImagesToInput);
}
else {
const toolMessage = {
role: 'tool',
tool_call_id: msg.call_id,
content: msg.output || '',
};
result = await (0, image_utils_js_1.appendMessageWithImage)(model, result, toolMessage, 'content', addImagesToInput);
}
}
else if (msg.type === 'function_call') {
const functionCallMsg = message;
if (isMistralViaOpenRouter) {
result.push({
role: 'assistant',
content: `[Function Call]\nFunction: ${functionCallMsg.name || 'unknown'}\nArguments: ${functionCallMsg.arguments || '{}'}`,
});
}
else {
result.push({
role: 'assistant',
tool_calls: [
{
id: functionCallMsg.call_id,
type: 'function',
function: {
name: functionCallMsg.name || '',
arguments: functionCallMsg.arguments || '',
},
},
],
});
}
}
else if (!msg.type || msg.type === 'message' || msg.type === 'thinking') {
if ('content' in message) {
delete message.type;
delete message.timestamp;
delete message.model;
delete message.pinned;
delete message.status;
delete message.thinking_id;
delete message.signature;
delete message.id;
if (message.role === 'developer')
message.role = 'system';
if (!['system', 'user', 'assistant'].includes(message.role)) {
message.role = 'user';
}
result = await (0, image_utils_js_1.appendMessageWithImage)(model, result, message, 'content', addImagesToInput);
}
}
}
return result.filter(Boolean);
}
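/**
 * Chat Completions provider for OpenAI-compatible endpoints (openai, openrouter, xai,
 * deepseek). The underlying client is created lazily from the constructor API key or
 * the provider-specific environment variable.
 */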
class OpenAIChat extends base_provider_js_1.BaseModelProvider {
_client;
provider;
baseURL;
commonParams = {};
apiKey;
defaultHeaders;
constructor(provider, apiKey, baseURL, defaultHeaders, commonParams) {
super(provider || 'openai');
this.provider = provider || 'openai';
this.apiKey = apiKey;
this.baseURL = baseURL;
this.commonParams = commonParams || {};
this.defaultHeaders = defaultHeaders || {
'User-Agent': 'magi',
};
}
getEnvVarName() {
switch (this.provider) {
case 'openrouter':
return 'OPENROUTER_API_KEY';
case 'xai':
return 'XAI_API_KEY';
case 'deepseek':
return 'DEEPSEEK_API_KEY';
case 'openai':
default:
return 'OPENAI_API_KEY';
}
}
get client() {
if (!this._client) {
const apiKey = this.apiKey || process.env[this.getEnvVarName()];
if (!apiKey) {
throw new Error(`Failed to initialize OpenAI client for ${this.provider}. API key is missing.`);
}
this._client = new openai_1.default({
apiKey: apiKey,
baseURL: this.baseURL,
defaultHeaders: this.defaultHeaders,
});
}
return this._client;
}
prepareParameters(requestParams) {
return requestParams;
}
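// Scans the aggregated response for the last "TOOL_CALLS: [...]" block, validates and
// normalizes each call (ids, stringified arguments), and returns either the events to
// yield (message_complete for any preceding text plus tool_start per call) or, if nothing
// usable was found, the content with TOOL_CALLS blocks replaced by a placeholder.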
_parseAndPrepareSimulatedToolCalls(aggregatedContent, messageId) {
const matches = Array.from(aggregatedContent.matchAll(SIMULATED_TOOL_CALL_REGEX));
let jsonArrayString = null;
let matchIndex = -1;
if (matches.length > 0) {
const lastMatch = matches[matches.length - 1];
if (lastMatch && lastMatch[1]) {
jsonArrayString = lastMatch[1];
matchIndex = lastMatch.index ?? -1;
}
}
else {
if (aggregatedContent.includes('TOOL_CALLS')) {
console.warn(`(${this.provider}) TOOL_CALLS found but regex didn't match globally. Content snippet:`, aggregatedContent.substring(Math.max(0, aggregatedContent.indexOf('TOOL_CALLS') - 20), Math.min(aggregatedContent.length, aggregatedContent.indexOf('TOOL_CALLS') + 300)));
}
}
if (jsonArrayString !== null && matchIndex !== -1) {
try {
let parsedToolCallArray;
try {
parsedToolCallArray = JSON.parse(jsonArrayString);
}
catch (initialParseError) {
console.error(`(${this.provider}) Failed initial parse. Error: ${initialParseError}. JSON String: ${jsonArrayString}`);
throw initialParseError;
}
if (!Array.isArray(parsedToolCallArray)) {
if (typeof parsedToolCallArray === 'object' && parsedToolCallArray !== null) {
parsedToolCallArray = [parsedToolCallArray];
}
else {
throw new Error('Parsed JSON is not an array or object.');
}
}
const validSimulatedCalls = [];
for (const callData of parsedToolCallArray) {
if (callData && typeof callData === 'object') {
const toolCall = {
id: callData.id || `sim_${(0, uuid_1.v4)()}`,
type: 'function',
function: {
name: '',
arguments: '{}',
},
};
const funcDetails = callData.function;
if (typeof funcDetails === 'object' && funcDetails !== null) {
if (typeof funcDetails.name === 'string') {
toolCall.function.name = funcDetails.name;
}
if (funcDetails.arguments !== undefined) {
if (typeof funcDetails.arguments === 'string') {
try {
JSON.parse(funcDetails.arguments);
toolCall.function.arguments = funcDetails.arguments;
}
catch {
console.warn(`(${this.provider}) Argument string is not valid JSON, wrapping in quotes:`, funcDetails.arguments);
toolCall.function.arguments = JSON.stringify(funcDetails.arguments);
}
}
else {
toolCall.function.arguments = JSON.stringify(funcDetails.arguments);
}
}
}
else if (typeof callData.name === 'string') {
toolCall.function.name = callData.name;
if (callData.arguments !== undefined) {
if (typeof callData.arguments === 'string') {
try {
JSON.parse(callData.arguments);
toolCall.function.arguments = callData.arguments;
}
catch {
console.warn(`(${this.provider}) Argument string is not valid JSON, wrapping in quotes:`, callData.arguments);
toolCall.function.arguments = JSON.stringify(callData.arguments);
}
}
else {
toolCall.function.arguments = JSON.stringify(callData.arguments);
}
}
}
if (toolCall.function.name && toolCall.function.name.length > 0) {
validSimulatedCalls.push(toolCall);
}
else {
console.warn(`(${this.provider}) Invalid tool call object, missing name:`, callData);
}
}
else {
console.warn(`(${this.provider}) Skipping invalid item in tool call array:`, callData);
}
}
if (validSimulatedCalls.length > 0) {
let textBeforeToolCall = aggregatedContent.substring(0, matchIndex).trim();
textBeforeToolCall = textBeforeToolCall.replaceAll(TOOL_CALL_CLEANUP_REGEX, CLEANUP_PLACEHOLDER);
const eventsToYield = [];
if (textBeforeToolCall) {
eventsToYield.push({
type: 'message_complete',
content: textBeforeToolCall,
message_id: messageId,
});
}
for (const validSimulatedCall of validSimulatedCalls) {
eventsToYield.push({
type: 'tool_start',
tool_call: validSimulatedCall,
});
}
return { handled: true, eventsToYield };
}
else {
console.warn(`(${this.provider}) Last TOOL_CALLS array found but contained no valid tool call objects after processing.`);
}
}
catch (parseError) {
console.error(`(${this.provider}) Found last TOOL_CALLS pattern, but failed during processing: ${parseError}. JSON String: ${jsonArrayString}`);
}
}
const cleanedContent = aggregatedContent.replaceAll(TOOL_CALL_CLEANUP_REGEX, CLEANUP_PLACEHOLDER);
return { handled: false, cleanedContent: cleanedContent };
}
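// Main streaming entry point: builds Chat Completions parameters from agent settings,
// chooses native vs. simulated tool calling based on model features, streams content,
// reasoning/thinking deltas, citations and tool-call fragments as events, records cost,
// and finalizes according to the finish reason.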
async *createResponseStream(messages, model, agent) {
const { getToolsFromAgent } = await Promise.resolve().then(() => __importStar(require("../utils/agent.cjs")));
const toolsPromise = agent ? getToolsFromAgent(agent) : Promise.resolve([]);
const tools = await toolsPromise;
const settings = agent?.modelSettings;
let requestId;
try {
const chatMessages = await mapMessagesToOpenAI(messages, model);
if (chatMessages.length === 0) {
chatMessages.push({
role: 'user',
content: 'Please begin.',
});
}
let requestParams = {
model,
messages: chatMessages,
stream: true,
};
if (settings?.temperature !== undefined)
requestParams.temperature = settings.temperature;
if (settings?.top_p !== undefined)
requestParams.top_p = settings.top_p;
if (settings?.max_tokens)
requestParams.max_tokens = settings.max_tokens;
if (settings?.tool_choice)
requestParams.tool_choice =
settings.tool_choice;
if (settings?.json_schema) {
requestParams.response_format = {
type: 'json_schema',
json_schema: settings.json_schema,
};
}
const modelEntry = (0, model_data_js_1.findModel)(model);
const shouldSimulateTools = modelEntry?.features?.simulate_tools === true;
const supportsNativeTools = modelEntry?.features?.tool_use === true && !shouldSimulateTools;
if (tools && tools.length > 0) {
if (supportsNativeTools) {
requestParams.tools = await convertToOpenAITools(tools);
}
else if (shouldSimulateTools) {
const openAITools = await convertToOpenAITools(tools);
const toolInfoForPrompt = formatToolsForPrompt(openAITools);
const messages = [...requestParams.messages];
messages.push({ role: 'system', content: toolInfoForPrompt });
requestParams.messages = messages;
requestParams.tools = undefined;
}
else {
console.warn(`(${this.provider}) Model '${model}' doesn't support tool calling. Tools will be ignored.`);
requestParams.tools = undefined;
}
}
const overrideParams = { ...this.commonParams };
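// A trailing -low/-medium/-high suffix on the model name selects reasoning effort;
// the suffix is stripped before the request and passed as a `reasoning` override.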
const REASONING_EFFORT_CONFIGS = ['low', 'medium', 'high'];
for (const effort of REASONING_EFFORT_CONFIGS) {
const suffix = `-${effort}`;
if (model.endsWith(suffix)) {
overrideParams.reasoning = {
effort: effort,
};
model = model.slice(0, -suffix.length);
requestParams.model = model;
break;
}
}
requestParams = {
...requestParams,
...overrideParams,
};
requestParams = this.prepareParameters(requestParams);
requestId = (0, llm_logger_js_1.log_llm_request)(agent.agent_id, this.provider, model, requestParams);
const { waitWhilePaused } = await Promise.resolve().then(() => __importStar(require("../utils/pause_controller.cjs")));
await waitWhilePaused(100, agent.abortSignal);
const stream = await this.client.chat.completions.create(requestParams);
let aggregatedContent = '';
let aggregatedThinking = '';
const messageId = (0, uuid_1.v4)();
let messageIndex = 0;
const partialToolCallsByIndex = new Map();
let finishReason = null;
let usage = undefined;
const citationTracker = (0, citation_tracker_js_1.createCitationTracker)();
const chunks = [];
try {
const deltaBuffers = new Map();
for await (const chunk of stream) {
chunks.push(chunk);
if ((0, pause_controller_js_1.isPaused)()) {
await waitWhilePaused(100, agent.abortSignal);
}
const choice = chunk.choices[0];
if (!choice?.delta)
continue;
const delta = choice.delta;
if (delta.content) {
aggregatedContent += delta.content;
for (const ev of (0, delta_buffer_js_1.bufferDelta)(deltaBuffers, messageId, delta.content, content => ({
type: 'message_delta',
content,
message_id: messageId,
order: messageIndex++,
}))) {
yield ev;
}
}
const extendedDelta = delta;
if (extendedDelta.reasoning) {
aggregatedContent += extendedDelta.reasoning;
for (const ev of (0, delta_buffer_js_1.bufferDelta)(deltaBuffers, messageId, extendedDelta.reasoning, content => ({
type: 'message_delta',
content,
message_id: messageId,
order: messageIndex++,
}))) {
yield ev;
}
}
if (Array.isArray(extendedDelta.annotations)) {
for (const ann of extendedDelta.annotations) {
if (ann.type === 'url_citation' && ann.url_citation?.url) {
const marker = (0, citation_tracker_js_1.formatCitation)(citationTracker, {
title: ann.url_citation.title || ann.url_citation.url,
url: ann.url_citation.url,
});
aggregatedContent += marker;
yield {
type: 'message_delta',
content: marker,
message_id: messageId,
order: messageIndex++,
};
}
}
}
const extendedChunk = chunk;
if (Array.isArray(extendedChunk.citations) && extendedChunk.citations.length > 0) {
for (const url of extendedChunk.citations) {
if (typeof url === 'string' && !citationTracker.citations.has(url)) {
const title = url.split('/').pop() || url;
const marker = (0, citation_tracker_js_1.formatCitation)(citationTracker, {
title,
url,
});
if (marker) {
aggregatedContent += marker;
yield {
type: 'message_delta',
content: marker,
message_id: messageId,
order: messageIndex++,
};
}
}
}
}
if ('reasoning_content' in delta) {
const thinking_content = delta.reasoning_content;
if (thinking_content) {
aggregatedThinking += thinking_content;
yield {
type: 'message_delta',
content: '',
message_id: messageId,
thinking_content,
order: messageIndex++,
};
}
}
if ('thinking_content' in delta) {
const thinking_content = delta.thinking_content;
if (thinking_content) {
aggregatedThinking += thinking_content;
yield {
type: 'message_delta',
content: '',
message_id: messageId,
thinking_content,
order: messageIndex++,
};
}
}
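// Native tool calls arrive as fragments keyed by index; accumulate id, name and
// argument text per index until the stream finishes.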
if (delta.tool_calls) {
for (const toolCallDelta of delta.tool_calls) {
const typedDelta = toolCallDelta;
const index = typedDelta.index;
if (typeof index !== 'number')
continue;
let partialCall = partialToolCallsByIndex.get(index);
if (!partialCall) {
partialCall = {
id: typedDelta.id || '',
type: 'function',
function: {
name: typedDelta.function?.name || '',
arguments: typedDelta.function?.arguments || '',
},
};
partialToolCallsByIndex.set(index, partialCall);
}
else {
if (typedDelta.id)
partialCall.id = typedDelta.id;
if (typedDelta.function?.name)
partialCall.function.name = typedDelta.function.name;
if (typedDelta.function?.arguments) {
const newArgs = typedDelta.function.arguments;
const accumulatedArgs = partialCall.function.arguments + newArgs;
try {
JSON.parse(accumulatedArgs);
partialCall.function.arguments = accumulatedArgs;
}
catch {
partialCall.function.arguments = accumulatedArgs;
}
}
}
}
}
if (choice.finish_reason)
finishReason = choice.finish_reason;
if (chunk.usage)
usage = chunk.usage;
}
if (citationTracker.citations.size > 0) {
const footnotes = (0, citation_tracker_js_1.generateFootnotes)(citationTracker);
aggregatedContent += footnotes;
yield {
type: 'message_delta',
content: footnotes,
message_id: messageId,
order: messageIndex++,
};
}
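// Prefer provider-reported usage for cost tracking; otherwise estimate token counts
// from the prompt text and the aggregated output.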
if (usage) {
const calculatedUsage = index_js_1.costTracker.addUsage({
model: model,
input_tokens: usage.prompt_tokens || 0,
output_tokens: usage.completion_tokens || 0,
cached_tokens: usage.prompt_tokens_details?.cached_tokens || 0,
metadata: {
total_tokens: usage.total_tokens || 0,
reasoning_tokens: usage.completion_tokens_details?.reasoning_tokens || 0,
},
});
if (!(0, event_controller_js_1.hasEventHandler)()) {
yield {
type: 'cost_update',
usage: {
...calculatedUsage,
total_tokens: usage.total_tokens || (usage.prompt_tokens || 0) + (usage.completion_tokens || 0),
},
};
}
}
else {
let inputText = '';
for (const msg of chatMessages) {
if (typeof msg.content === 'string') {
inputText += msg.content + '\n';
}
else if (Array.isArray(msg.content)) {
for (const part of msg.content) {
if ('text' in part && typeof part.text === 'string') {
inputText += part.text + '\n';
}
}
}
}
const { CostTracker } = await Promise.resolve().then(() => __importStar(require("../utils/cost_tracker.cjs")));
const estimatedInputTokens = CostTracker.estimateTokens(inputText);
const estimatedOutputTokens = CostTracker.estimateTokens(aggregatedContent);
const calculatedUsage = index_js_1.costTracker.addEstimatedUsage(model, inputText, aggregatedContent, {
provider: this.provider,
});
if (!(0, event_controller_js_1.hasEventHandler)()) {
yield {
type: 'cost_update',
usage: {
...calculatedUsage,
total_tokens: estimatedInputTokens + estimatedOutputTokens,
},
};
}
}
for (const ev of (0, delta_buffer_js_1.flushBufferedDeltas)(deltaBuffers, (id, content) => ({
type: 'message_delta',
content,
message_id: id,
order: messageIndex++,
}))) {
yield ev;
}
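// Finalize by finish_reason: 'stop' may still contain simulated TOOL_CALLS to parse,
// 'tool_calls' emits the accumulated native calls (repairing invalid argument JSON),
// 'length' and other reasons surface as error events, and a missing reason falls back
// to whatever content or partial calls were collected.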
if (finishReason === 'stop') {
const parseResult = this._parseAndPrepareSimulatedToolCalls(aggregatedContent, messageId);
if (parseResult.handled && parseResult.eventsToYield) {
for (const event of parseResult.eventsToYield) {
yield event;
}
}
else {
yield {
type: 'message_complete',
content: parseResult.cleanedContent ?? '',
message_id: messageId,
thinking_content: aggregatedThinking,
};
}
}
else if (finishReason === 'tool_calls') {
const completedToolCalls = Array.from(partialToolCallsByIndex.values()).filter(call => call.id && call.function.name);
if (completedToolCalls.length > 0) {
for (const completedToolCall of completedToolCalls) {
if (completedToolCall.function.arguments) {
try {
const parsed = JSON.parse(completedToolCall.function.arguments);
completedToolCall.function.arguments = JSON.stringify(parsed);
}
catch (error) {
console.warn(`(${this.provider}) Invalid JSON in tool arguments for ${completedToolCall.function.name}, attempting to fix: ${error}`);
const argStr = completedToolCall.function.arguments;
const matches = argStr.match(/\{(?:[^{}]|(?:\{[^{}]*\}))*\}/);
if (matches && matches[0]) {
try {
const parsed = JSON.parse(matches[0]);
completedToolCall.function.arguments = JSON.stringify(parsed);
}
catch {
completedToolCall.function.arguments = '{}';
console.error(`(${this.provider}) Could not parse arguments, using empty object`);
}
}
else {
completedToolCall.function.arguments = '{}';
}
}
}
yield {
type: 'tool_start',
tool_call: completedToolCall,
};
}
}
else {
(0, llm_logger_js_1.log_llm_error)(requestId, `Error (${this.provider}): Model indicated tool calls, but none were parsed correctly.`);
console.warn(`(${this.provider}) Finish reason 'tool_calls', but no complete native tool calls parsed.`);
yield {
type: 'error',
error: `Error (${this.provider}): Model indicated tool calls, but none were parsed correctly.`,
};
}
}
else if (finishReason === 'length') {
const cleanedPartialContent = aggregatedContent.replaceAll(TOOL_CALL_CLEANUP_REGEX, CLEANUP_PLACEHOLDER);
(0, llm_logger_js_1.log_llm_error)(requestId, `Error (${this.provider}): Response truncated (max_tokens). Partial: ${cleanedPartialContent.substring(0, 100)}...`);
yield {
type: 'error',
error: `Error (${this.provider}): Response truncated (max_tokens). Partial: ${cleanedPartialContent.substring(0, 100)}...`,
};
}
else if (finishReason) {
const cleanedReasonContent = aggregatedContent.replaceAll(TOOL_CALL_CLEANUP_REGEX, CLEANUP_PLACEHOLDER);
(0, llm_logger_js_1.log_llm_error)(requestId, `Error (${this.provider}): Response stopped due to: ${finishReason}. Content: ${cleanedReasonContent.substring(0, 100)}...`);
yield {
type: 'error',
error: `Error (${this.provider}): Response stopped due to: ${finishReason}. Content: ${cleanedReasonContent.substring(0, 100)}...`,
};
}
else {
if (aggregatedContent) {
console.warn(`(${this.provider}) Stream finished without finish_reason, yielding cleaned content.`);
const parseResult = this._parseAndPrepareSimulatedToolCalls(aggregatedContent, messageId);
if (parseResult.handled && parseResult.eventsToYield) {
for (const event of parseResult.eventsToYield) {
yield event;
}
}
else {
yield {
type: 'message_complete',
content: parseResult.cleanedContent ?? '',
message_id: messageId,
thinking_content: aggregatedThinking,
};
}
}
else if (partialToolCallsByIndex.size > 0) {
(0, llm_logger_js_1.log_llm_error)(requestId, `Error (${this.provider}): Stream ended unexpectedly during native tool call generation.`);
console.warn(`(${this.provider}) Stream finished without finish_reason during native tool call generation.`);
yield {
type: 'error',
error: `Error (${this.provider}): Stream ended unexpectedly during native tool call generation.`,
};
}
else {
(0, llm_logger_js_1.log_llm_error)(requestId, `Error (${this.provider}): Stream finished unexpectedly empty.`);
console.warn(`(${this.provider}) Stream finished empty without reason, content, or tool calls.`);
yield {
type: 'error',
error: `Error (${this.provider}): Stream finished unexpectedly empty.`,
};
}
}
}
catch (streamError) {
(0, llm_logger_js_1.log_llm_error)(requestId, streamError);
console.error(`(${this.provider}) Error processing chat completions stream:`, streamError);
yield {
type: 'error',
error: `Stream processing error (${this.provider} ${model}): ` +
(streamError instanceof openai_1.default.APIError || streamError instanceof openai_1.APIError
? `${streamError.status} ${streamError.name} ${streamError.message} ${JSON.stringify(streamError.error)}`
: streamError instanceof Error
? streamError.stack
: Object.getPrototypeOf(streamError) + ' ' + String(streamError)),
};
}
finally {
partialToolCallsByIndex.clear();
(0, llm_logger_js_1.log_llm_response)(requestId, chunks);
}
}
catch (error) {
(0, llm_logger_js_1.log_llm_error)(requestId, error);
console.error(`Error running ${this.provider} chat completions stream:`, error);
yield {
type: 'error',
error: `API Error (${this.provider} - ${model}): ` +
(error instanceof openai_1.default.APIError || error instanceof openai_1.APIError
? `${error.status} ${error.name} ${error.message}`
: error instanceof Error
? error.stack
: Object.getPrototypeOf(error) + ' ' + String(error)),
};
}
}
}
exports.OpenAIChat = OpenAIChat;
//# sourceMappingURL=openai_chat.js.map