jorel

A unified wrapper for working with LLMs from multiple providers, including streams, images, documents & automatic tool use.

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.convertLlmMessagesToAnthropicMessages = void 0; const utils_1 = require("../../media/utils"); const tools_1 = require("../../tools"); const validMediaTypes = ["image/jpeg", "image/png", "image/gif", "image/webp"]; const validateMediaType = (mediaType) => { if (!mediaType) throw new Error("Missing media type"); if (!validMediaTypes.includes(mediaType)) { throw new Error("Unsupported media type"); } return mediaType; }; /** Convert unified LLM messages to Anthropic messages */ const convertLlmMessagesToAnthropicMessages = async (messages) => { const systemMessages = messages.filter((m) => m.role === "system"); const chatMessages = messages.filter((m) => m.role !== "system"); const systemMessage = systemMessages.map((m) => m.content).join("\n"); const convertedChatMessages = []; for (const message of chatMessages) { if (message.role === "assistant") { // Note: Anthropic's thinking blocks cannot be passed back as input (they require a signature field) // We only send the main content back convertedChatMessages.push({ role: "assistant", content: message.content, }); } else if (message.role === "assistant_with_tools") { const content = []; // Note: Anthropic's thinking blocks cannot be passed back as input (they require a signature field) // We only send the main content and tool calls back if (message.content) { content.push({ type: "text", text: message.content, }); } if (message.toolCalls) { for (const toolCall of message.toolCalls) { content.push({ type: "tool_use", id: toolCall.request.id, name: toolCall.request.function.name, input: toolCall.request.function.arguments, }); } } convertedChatMessages.push({ role: "assistant", content, }); if (message.toolCalls) { for (const toolCall of message.toolCalls.filter((tc) => tc.executionState === "completed" || tc.executionState === "error" || tc.executionState === "cancelled")) { convertedChatMessages.push({ role: "user", content: [ { type: "tool_result", tool_use_id: toolCall.request.id, content: toolCall.executionState === "error" ? `Error: ${toolCall.error?.message || "Cancelled"}` : tools_1.LlmToolKit.serialize(toolCall.result || {}), }, ], }); } } } else if (message.role === "user") { const content = []; for (const _content of message.content) { if (_content.type === "text") { content.push({ type: "text", text: _content.text, }); } else if (_content.type === "imageData") { content.push({ type: "image", source: { data: (0, utils_1.getBase64PartFromDataUrl)(_content.data), media_type: validateMediaType(_content.mimeType), type: "base64", }, }); } else if (_content.type === "imageUrl") { throw new Error(`Image URLs are currently not supported by Anthropic`); } else { throw new Error(`Unsupported content type`); } } convertedChatMessages.push({ role: message.role, content, }); } } return { systemMessage, chatMessages: convertedChatMessages, }; }; exports.convertLlmMessagesToAnthropicMessages = convertLlmMessagesToAnthropicMessages;